1use crate::{task_inventory::TaskContexts, Event, *};
2use buffer_diff::{
3 assert_hunks, BufferDiffEvent, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
4};
5use fs::FakeFs;
6use futures::{future, StreamExt};
7use gpui::{App, SemanticVersion, UpdateGlobal};
8use http_client::Url;
9use language::{
10 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
11 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
12 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
13 OffsetRangeExt, Point, ToPoint,
14};
15use lsp::{
16 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
17 NumberOrString, TextDocumentEdit, WillRenameFiles,
18};
19use parking_lot::Mutex;
20use pretty_assertions::{assert_eq, assert_matches};
21use serde_json::json;
22#[cfg(not(windows))]
23use std::os;
24use std::{str::FromStr, sync::OnceLock};
25
26use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
27use task::{ResolvedTask, TaskContext};
28use unindent::Unindent as _;
29use util::{
30 assert_set_eq, path,
31 paths::PathMatcher,
32 separator,
33 test::{marked_text_offsets, TempTree},
34 uri, TryFutureExt as _,
35};
36
37#[gpui::test]
38async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
39 cx.executor().allow_parking();
40
41 let (tx, mut rx) = futures::channel::mpsc::unbounded();
42 let _thread = std::thread::spawn(move || {
43 #[cfg(not(target_os = "windows"))]
44 std::fs::metadata("/tmp").unwrap();
45 #[cfg(target_os = "windows")]
46 std::fs::metadata("C:/Windows").unwrap();
47 std::thread::sleep(Duration::from_millis(1000));
48 tx.unbounded_send(1).unwrap();
49 });
50 rx.next().await.unwrap();
51}
52
53#[gpui::test]
54async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
55 cx.executor().allow_parking();
56
57 let io_task = smol::unblock(move || {
58 println!("sleeping on thread {:?}", std::thread::current().id());
59 std::thread::sleep(Duration::from_millis(10));
60 1
61 });
62
63 let task = cx.foreground_executor().spawn(async move {
64 io_task.await;
65 });
66
67 task.await;
68}
69
70#[cfg(not(windows))]
71#[gpui::test]
72async fn test_symlinks(cx: &mut gpui::TestAppContext) {
73 init_test(cx);
74 cx.executor().allow_parking();
75
76 let dir = TempTree::new(json!({
77 "root": {
78 "apple": "",
79 "banana": {
80 "carrot": {
81 "date": "",
82 "endive": "",
83 }
84 },
85 "fennel": {
86 "grape": "",
87 }
88 }
89 }));
90
91 let root_link_path = dir.path().join("root_link");
92 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
93 os::unix::fs::symlink(
94 dir.path().join("root/fennel"),
95 dir.path().join("root/finnochio"),
96 )
97 .unwrap();
98
99 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
100
101 project.update(cx, |project, cx| {
102 let tree = project.worktrees(cx).next().unwrap().read(cx);
103 assert_eq!(tree.file_count(), 5);
104 assert_eq!(
105 tree.inode_for_path("fennel/grape"),
106 tree.inode_for_path("finnochio/grape")
107 );
108 });
109}
110
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Worktree layout: a root .editorconfig (tab settings for *.rs and a
    // tab_width for *.js), Zed settings in .zed/settings.json that the
    // editorconfig should override, and a nested b/.editorconfig that
    // overrides the root one for files under b/.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into the fake FS so the project can load it
    // deterministically under the test executor.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Helper: resolve the effective language settings for a worktree
        // path, going through language detection for that file.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by .editorconfig's glob "*.rs",
        // so it falls back to the .zed/settings.json tab_size of 8.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
200
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Worktree layout: root-level .zed settings/tasks, plus a nested b/.zed
    // whose settings and tasks should take precedence for files under b/.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against a plain worktree context (no active item).
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    // The source kind for tasks defined in the worktree's root .zed dir.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolve per-file: a/a.rs sees the root tab_size, while
            // b/b.rs sees the nested override.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files are discovered; the nested one sorts first.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as most-recently-used, then add a global (file-based)
    // task definition to the inventory.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                    settings::TaskKind::Script,
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // After scheduling, the recently-used root task is promoted to the front,
    // and the new global task appears last with its args and env resolved.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
406
407#[gpui::test]
408async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
409 init_test(cx);
410 TaskStore::init(None);
411
412 let fs = FakeFs::new(cx.executor());
413 fs.insert_tree(
414 path!("/dir"),
415 json!({
416 ".zed": {
417 "tasks.json": r#"[{
418 "label": "test worktree root",
419 "command": "echo $ZED_WORKTREE_ROOT"
420 }]"#,
421 },
422 "a": {
423 "a.rs": "fn a() {\n A\n}"
424 },
425 }),
426 )
427 .await;
428
429 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
430 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
431
432 cx.executor().run_until_parked();
433 let worktree_id = cx.update(|cx| {
434 project.update(cx, |project, cx| {
435 project.worktrees(cx).next().unwrap().read(cx).id()
436 })
437 });
438
439 let active_non_worktree_item_tasks = cx.update(|cx| {
440 get_all_tasks(
441 &project,
442 &TaskContexts {
443 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
444 active_worktree_context: None,
445 other_worktree_contexts: Vec::new(),
446 },
447 cx,
448 )
449 });
450 assert!(
451 active_non_worktree_item_tasks.is_empty(),
452 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
453 );
454
455 let active_worktree_tasks = cx.update(|cx| {
456 get_all_tasks(
457 &project,
458 &TaskContexts {
459 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
460 active_worktree_context: Some((worktree_id, {
461 let mut worktree_context = TaskContext::default();
462 worktree_context
463 .task_variables
464 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
465 worktree_context
466 })),
467 other_worktree_contexts: Vec::new(),
468 },
469 cx,
470 )
471 });
472 assert_eq!(
473 active_worktree_tasks
474 .into_iter()
475 .map(|(source_kind, task)| {
476 let resolved = task.resolved.unwrap();
477 (source_kind, resolved.command)
478 })
479 .collect::<Vec<_>>(),
480 vec![(
481 TaskSourceKind::Worktree {
482 id: worktree_id,
483 directory_in_worktree: PathBuf::from(separator!(".zed")),
484 id_base: if cfg!(windows) {
485 "local worktree tasks from directory \".zed\"".into()
486 } else {
487 "local worktree tasks from directory \".zed\"".into()
488 },
489 },
490 "echo /dir".to_string(),
491 )]
492 );
493}
494
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two fake language servers (Rust and JSON) over a worktree containing a
    // file for each language plus a TOML file with no server. The test walks
    // through open/edit/save/rename/restart/close and asserts exactly which
    // LSP notifications each server receives.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        // No language is assigned yet — Rust hasn't been registered.
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        // No server for TOML, so no completion triggers.
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed one diagnostic so we can verify it is cleared after the
    // cross-language rename below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers must receive a shutdown request before their
    // replacements come up.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two DidOpen notifications is unspecified, hence the set
    // comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
896
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A worktree whose target/ directory is gitignored. The test verifies
    // that the worktree only loads ignored paths once a language server
    // registers a didChangeWatchedFiles watcher covering them, and that FS
    // mutations are reported to the server only when they match its globs.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for asserting how many extra directory scans the watcher
    // registration triggers below.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            // Accumulate and sort by URI so assertions are order-stable.
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering the watchers alone produces no change events, but it does
    // cause the ignored target/ subtree on the watched path to be scanned.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
1096
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two single-file worktrees: LSP diagnostics published for each file must
    // land on the correct buffer even though each file is its own worktree.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Publish one diagnostic per file from the same (fake) server id:
    // an ERROR on `a` in a.rs and a WARNING on `b` in b.rs.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer carries exactly its own diagnostic, highlighted on the
    // variable name (columns 4..5).
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1198
1199#[gpui::test]
1200async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1201 init_test(cx);
1202
1203 let fs = FakeFs::new(cx.executor());
1204 fs.insert_tree(
1205 path!("/root"),
1206 json!({
1207 "dir": {
1208 ".git": {
1209 "HEAD": "ref: refs/heads/main",
1210 },
1211 ".gitignore": "b.rs",
1212 "a.rs": "let a = 1;",
1213 "b.rs": "let b = 2;",
1214 },
1215 "other.rs": "let b = c;"
1216 }),
1217 )
1218 .await;
1219
1220 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1221 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1222 let (worktree, _) = project
1223 .update(cx, |project, cx| {
1224 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1225 })
1226 .await
1227 .unwrap();
1228 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1229
1230 let (worktree, _) = project
1231 .update(cx, |project, cx| {
1232 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1233 })
1234 .await
1235 .unwrap();
1236 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1237
1238 let server_id = LanguageServerId(0);
1239 lsp_store.update(cx, |lsp_store, cx| {
1240 lsp_store
1241 .update_diagnostics(
1242 server_id,
1243 lsp::PublishDiagnosticsParams {
1244 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1245 version: None,
1246 diagnostics: vec![lsp::Diagnostic {
1247 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1248 severity: Some(lsp::DiagnosticSeverity::ERROR),
1249 message: "unused variable 'b'".to_string(),
1250 ..Default::default()
1251 }],
1252 },
1253 &[],
1254 cx,
1255 )
1256 .unwrap();
1257 lsp_store
1258 .update_diagnostics(
1259 server_id,
1260 lsp::PublishDiagnosticsParams {
1261 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1262 version: None,
1263 diagnostics: vec![lsp::Diagnostic {
1264 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1265 severity: Some(lsp::DiagnosticSeverity::ERROR),
1266 message: "unknown variable 'c'".to_string(),
1267 ..Default::default()
1268 }],
1269 },
1270 &[],
1271 cx,
1272 )
1273 .unwrap();
1274 });
1275
1276 let main_ignored_buffer = project
1277 .update(cx, |project, cx| {
1278 project.open_buffer((main_worktree_id, "b.rs"), cx)
1279 })
1280 .await
1281 .unwrap();
1282 main_ignored_buffer.update(cx, |buffer, _| {
1283 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1284 assert_eq!(
1285 chunks
1286 .iter()
1287 .map(|(s, d)| (s.as_str(), *d))
1288 .collect::<Vec<_>>(),
1289 &[
1290 ("let ", None),
1291 ("b", Some(DiagnosticSeverity::ERROR)),
1292 (" = 2;", None),
1293 ],
1294 "Gigitnored buffers should still get in-buffer diagnostics",
1295 );
1296 });
1297 let other_buffer = project
1298 .update(cx, |project, cx| {
1299 project.open_buffer((other_worktree_id, ""), cx)
1300 })
1301 .await
1302 .unwrap();
1303 other_buffer.update(cx, |buffer, _| {
1304 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1305 assert_eq!(
1306 chunks
1307 .iter()
1308 .map(|(s, d)| (s.as_str(), *d))
1309 .collect::<Vec<_>>(),
1310 &[
1311 ("let b = ", None),
1312 ("c", Some(DiagnosticSeverity::ERROR)),
1313 (";", None),
1314 ],
1315 "Buffers from hidden projects should still get in-buffer diagnostics"
1316 );
1317 });
1318
1319 project.update(cx, |project, cx| {
1320 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1321 assert_eq!(
1322 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1323 vec![(
1324 ProjectPath {
1325 worktree_id: main_worktree_id,
1326 path: Arc::from(Path::new("b.rs")),
1327 },
1328 server_id,
1329 DiagnosticSummary {
1330 error_count: 1,
1331 warning_count: 0,
1332 }
1333 )]
1334 );
1335 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1336 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1337 });
1338}
1339
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies the project event stream around a disk-based diagnostics pass:
    // start/finish progress events bracket the DiagnosticsUpdated events, the
    // diagnostics land in the buffer, and publishing identical empty
    // diagnostics twice produces only a single update event.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Subscribe before driving the server so no events are missed.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // A progress token matching the adapter's disk-based token maps to a
    // DiskBasedDiagnosticsStarted event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the diagnosed file afterwards shows the stored diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Second identical (empty) publish: no further event should be emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1475
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Verifies that restarting a language server while its disk-based
    // diagnostics task is still in progress does not leave the project stuck:
    // the replacement server's progress lifecycle drives the events, and once
    // it ends, no server is reported as still running diagnostics — even
    // though the old server never finished its own progress.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // The replacement server gets the next id (1).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1562
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics already published by a language server are
    // cleared — both from the buffer and from the project summary — when that
    // server is restarted.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Let the notification propagate, then confirm the diagnostic is present
    // in the buffer and counted in the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1643
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    // Verifies that receiving diagnostics with an unknown (stale/bogus) buffer
    // version does not poison the buffer's version tracking: after a server
    // restart, the replacement server is handed the buffer at version 0.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The new server opens the (unedited) buffer at version 0, not 10000.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
1683
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    // Verifies that cancelling language-server work for a buffer sends a
    // WorkDoneProgressCancel only for tokens whose progress was started as
    // cancellable; the non-cancellable token must not be cancelled.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    // Two in-flight work items: one non-cancellable, one cancellable.
    let mut fake_server = fake_servers.next().await.unwrap();
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token should receive a cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1748
1749#[gpui::test]
1750async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1751 init_test(cx);
1752
1753 let fs = FakeFs::new(cx.executor());
1754 fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
1755 .await;
1756
1757 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1758 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1759
1760 let mut fake_rust_servers = language_registry.register_fake_lsp(
1761 "Rust",
1762 FakeLspAdapter {
1763 name: "rust-lsp",
1764 ..Default::default()
1765 },
1766 );
1767 let mut fake_js_servers = language_registry.register_fake_lsp(
1768 "JavaScript",
1769 FakeLspAdapter {
1770 name: "js-lsp",
1771 ..Default::default()
1772 },
1773 );
1774 language_registry.add(rust_lang());
1775 language_registry.add(js_lang());
1776
1777 let _rs_buffer = project
1778 .update(cx, |project, cx| {
1779 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1780 })
1781 .await
1782 .unwrap();
1783 let _js_buffer = project
1784 .update(cx, |project, cx| {
1785 project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
1786 })
1787 .await
1788 .unwrap();
1789
1790 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1791 assert_eq!(
1792 fake_rust_server_1
1793 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1794 .await
1795 .text_document
1796 .uri
1797 .as_str(),
1798 uri!("file:///dir/a.rs")
1799 );
1800
1801 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1802 assert_eq!(
1803 fake_js_server
1804 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1805 .await
1806 .text_document
1807 .uri
1808 .as_str(),
1809 uri!("file:///dir/b.js")
1810 );
1811
1812 // Disable Rust language server, ensuring only that server gets stopped.
1813 cx.update(|cx| {
1814 SettingsStore::update_global(cx, |settings, cx| {
1815 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1816 settings.languages.insert(
1817 "Rust".into(),
1818 LanguageSettingsContent {
1819 enable_language_server: Some(false),
1820 ..Default::default()
1821 },
1822 );
1823 });
1824 })
1825 });
1826 fake_rust_server_1
1827 .receive_notification::<lsp::notification::Exit>()
1828 .await;
1829
1830 // Enable Rust and disable JavaScript language servers, ensuring that the
1831 // former gets started again and that the latter stops.
1832 cx.update(|cx| {
1833 SettingsStore::update_global(cx, |settings, cx| {
1834 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1835 settings.languages.insert(
1836 LanguageName::new("Rust"),
1837 LanguageSettingsContent {
1838 enable_language_server: Some(true),
1839 ..Default::default()
1840 },
1841 );
1842 settings.languages.insert(
1843 LanguageName::new("JavaScript"),
1844 LanguageSettingsContent {
1845 enable_language_server: Some(false),
1846 ..Default::default()
1847 },
1848 );
1849 });
1850 })
1851 });
1852 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1853 assert_eq!(
1854 fake_rust_server_2
1855 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1856 .await
1857 .text_document
1858 .uri
1859 .as_str(),
1860 uri!("file:///dir/a.rs")
1861 );
1862 fake_js_server
1863 .receive_notification::<lsp::notification::Exit>()
1864 .await;
1865}
1866
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics published against an *older* buffer version
    // are transformed through the edits made since that version: ranges move
    // with the text, overlapping diagnostics highlight correctly, and
    // diagnostic ranges grow/shrink with edits inside them.
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (two lines were inserted at the top, so rows 1 and 2 are now 3 and 4)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider WARNING sorts before the narrower ERROR it contains.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // In the overlap, the higher-severity ERROR wins the highlight.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Ranges reflect the indentation/signature edits above; 'BB' grew to
        // cover the inserted "xxx".
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2152
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies how zero-width diagnostic ranges are rendered: an empty range
    // is widened so the user can actually see it (see the comment above the
    // assertions for the exact rule).
    // NOTE(review): this test uses bare "/dir" paths rather than the `path!()`
    // macro used by sibling tests — presumably fine here, but worth confirming
    // on Windows.
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Insert two zero-width diagnostics: one mid-line (before `;`) and one at
    // the very end of a line.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                ..Default::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                ..Default::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2225
/// Diagnostics published by different language servers for the same file are
/// tracked separately and both counted in the project's diagnostic summary.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Two different servers each report one error over the same range.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // One error per server: the summary must not deduplicate across servers.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2282
/// Edits computed by a language server against an older document version must
/// be transformed through the buffer's subsequent local edits before being
/// applied, so that they land where the server intended.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    // Capture the document version the server saw when the file was opened;
    // the edits below will be expressed against this version.
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f1();
            }
            fn b() {
                // inside second function f2();
            }
            fn c() {
                f3();
            }
            "
            .unindent()
        );
    });

    // These coordinates refer to the *old* document version captured above,
    // not the buffer's current contents.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the transformed edits should combine the server's intent with
    // the local edits made in the meantime.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f10();
            }
            fn b() {
                // inside second function f200();
            }
            fn c() {
                f4000();
            }
            "
            .unindent()
        );
    });
}
2437
/// A language server may express a small logical change as a very large diff.
/// `edits_from_lsp` should minimize such a diff down to the few real edits
/// before they are applied to the buffer.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                        fn f() {
                            b();
                            c();
                        }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four server edits above collapse to just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2548
/// Language servers can send edits that are unordered, inverted (start after
/// end), or out of the document's bounds. `edits_from_lsp` must normalize and
/// clip them into a valid, minimal set of buffer edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start position is after the end position.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) lies beyond the end of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                        fn f() {
                            b();
                            c();
                        }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // After normalization, only two well-formed edits remain.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2655
2656fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2657 buffer: &Buffer,
2658 range: Range<T>,
2659) -> Vec<(String, Option<DiagnosticSeverity>)> {
2660 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2661 for chunk in buffer.snapshot().chunks(range, true) {
2662 if chunks.last().map_or(false, |prev_chunk| {
2663 prev_chunk.1 == chunk.diagnostic_severity
2664 }) {
2665 chunks.last_mut().unwrap().0.push_str(chunk.text);
2666 } else {
2667 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2668 }
2669 }
2670 chunks
2671}
2672
/// Go-to-definition: the project queries the language server, opens the
/// target file in a new *invisible* worktree, and drops that worktree once
/// the last reference to the definition result is released.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // The server resolves the definition to `a.rs`, a file outside the
    // project's only visible worktree (which is rooted at `b.rs`).
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is alive, an invisible worktree for `a.rs`
        // exists alongside the visible one for `b.rs`.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition released the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's root path paired with its visibility.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2770
/// When completion items arrive without an explicit edit range, the
/// replacement range is inferred from the text around the completion position.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Completing after an identifier: the inferred range should cover the
    // partial word ("fqn") preceding the cursor.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Completing inside a string literal with the cursor before the closing
    // quote: the inferred range covers "cmp" but not the quote itself.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2864
/// Carriage returns in a server-provided completion `insert_text` (both bare
/// `\r` and `\r\n`) are normalized to `\n` in the resulting completion text.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert_text mixes `\r` and `\r\n` line endings.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2927
/// Code actions that carry a command but no edits are applied by resolving the
/// action, executing its command on the server, and capturing the edits the
/// server then sends via a `workspace/applyEdit` request into a single
/// undoable project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The whole applied action is undoable as one transaction.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3068
3069#[gpui::test(iterations = 10)]
3070async fn test_save_file(cx: &mut gpui::TestAppContext) {
3071 init_test(cx);
3072
3073 let fs = FakeFs::new(cx.executor());
3074 fs.insert_tree(
3075 path!("/dir"),
3076 json!({
3077 "file1": "the old contents",
3078 }),
3079 )
3080 .await;
3081
3082 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3083 let buffer = project
3084 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3085 .await
3086 .unwrap();
3087 buffer.update(cx, |buffer, cx| {
3088 assert_eq!(buffer.text(), "the old contents");
3089 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3090 });
3091
3092 project
3093 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3094 .await
3095 .unwrap();
3096
3097 let new_text = fs
3098 .load(Path::new(path!("/dir/file1")))
3099 .await
3100 .unwrap()
3101 .replace("\r\n", "\n");
3102 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3103}
3104
3105#[gpui::test(iterations = 30)]
3106async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3107 init_test(cx);
3108
3109 let fs = FakeFs::new(cx.executor().clone());
3110 fs.insert_tree(
3111 path!("/dir"),
3112 json!({
3113 "file1": "the original contents",
3114 }),
3115 )
3116 .await;
3117
3118 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3119 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3120 let buffer = project
3121 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3122 .await
3123 .unwrap();
3124
3125 // Simulate buffer diffs being slow, so that they don't complete before
3126 // the next file change occurs.
3127 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3128
3129 // Change the buffer's file on disk, and then wait for the file change
3130 // to be detected by the worktree, so that the buffer starts reloading.
3131 fs.save(
3132 path!("/dir/file1").as_ref(),
3133 &"the first contents".into(),
3134 Default::default(),
3135 )
3136 .await
3137 .unwrap();
3138 worktree.next_event(cx).await;
3139
3140 // Change the buffer's file again. Depending on the random seed, the
3141 // previous file change may still be in progress.
3142 fs.save(
3143 path!("/dir/file1").as_ref(),
3144 &"the second contents".into(),
3145 Default::default(),
3146 )
3147 .await
3148 .unwrap();
3149 worktree.next_event(cx).await;
3150
3151 cx.executor().run_until_parked();
3152 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3153 buffer.read_with(cx, |buffer, _| {
3154 assert_eq!(buffer.text(), on_disk_text);
3155 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3156 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3157 });
3158}
3159
3160#[gpui::test(iterations = 30)]
3161async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3162 init_test(cx);
3163
3164 let fs = FakeFs::new(cx.executor().clone());
3165 fs.insert_tree(
3166 path!("/dir"),
3167 json!({
3168 "file1": "the original contents",
3169 }),
3170 )
3171 .await;
3172
3173 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3174 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3175 let buffer = project
3176 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3177 .await
3178 .unwrap();
3179
3180 // Simulate buffer diffs being slow, so that they don't complete before
3181 // the next file change occurs.
3182 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3183
3184 // Change the buffer's file on disk, and then wait for the file change
3185 // to be detected by the worktree, so that the buffer starts reloading.
3186 fs.save(
3187 path!("/dir/file1").as_ref(),
3188 &"the first contents".into(),
3189 Default::default(),
3190 )
3191 .await
3192 .unwrap();
3193 worktree.next_event(cx).await;
3194
3195 cx.executor()
3196 .spawn(cx.executor().simulate_random_delay())
3197 .await;
3198
3199 // Perform a noop edit, causing the buffer's version to increase.
3200 buffer.update(cx, |buffer, cx| {
3201 buffer.edit([(0..0, " ")], None, cx);
3202 buffer.undo(cx);
3203 });
3204
3205 cx.executor().run_until_parked();
3206 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3207 buffer.read_with(cx, |buffer, _| {
3208 let buffer_text = buffer.text();
3209 if buffer_text == on_disk_text {
3210 assert!(
3211 !buffer.is_dirty() && !buffer.has_conflict(),
3212 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3213 );
3214 }
3215 // If the file change occurred while the buffer was processing the first
3216 // change, the buffer will be in a conflicting state.
3217 else {
3218 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3219 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3220 }
3221 });
3222}
3223
3224#[gpui::test]
3225async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3226 init_test(cx);
3227
3228 let fs = FakeFs::new(cx.executor());
3229 fs.insert_tree(
3230 path!("/dir"),
3231 json!({
3232 "file1": "the old contents",
3233 }),
3234 )
3235 .await;
3236
3237 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3238 let buffer = project
3239 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3240 .await
3241 .unwrap();
3242 buffer.update(cx, |buffer, cx| {
3243 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3244 });
3245
3246 project
3247 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3248 .await
3249 .unwrap();
3250
3251 let new_text = fs
3252 .load(Path::new(path!("/dir/file1")))
3253 .await
3254 .unwrap()
3255 .replace("\r\n", "\n");
3256 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3257}
3258
/// `save_buffer_as` writes an untitled buffer to a new path, reassigns its
/// file, clears the dirty flag, re-detects the language from the new file
/// extension, and registers the buffer so reopening the same path yields the
/// same buffer entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts out as Plain Text.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    // After the save, the buffer is clean and the `.rs` extension causes the
    // language to be re-detected as Rust.
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the newly-created path returns the very same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3310
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Verifies that on-disk renames/moves/deletions (via the real filesystem)
    // preserve worktree entry identity, update the paths and disk states of
    // open buffers, and that a remote replica worktree converges to the same
    // entry list after replaying the streamed update messages.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp-tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: resolve a relative path to its worktree entry id (panics if absent).
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    // Record the entry ids before any fs mutations so identity can be checked
    // after the renames below.
    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update message the local worktree emits so it can be
    // replayed into the remote replica later in the test.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree now reflects the new on-disk layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Renamed and moved entries keep their original ids.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Buffers for surviving files report their new paths; the deleted
        // file's buffer keeps its last-known path.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        // Disk state tracks the fs: only the removed file is Deleted.
        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3476
3477#[gpui::test(iterations = 10)]
3478async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3479 init_test(cx);
3480
3481 let fs = FakeFs::new(cx.executor());
3482 fs.insert_tree(
3483 path!("/dir"),
3484 json!({
3485 "a": {
3486 "file1": "",
3487 }
3488 }),
3489 )
3490 .await;
3491
3492 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3493 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3494 let tree_id = tree.update(cx, |tree, _| tree.id());
3495
3496 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3497 project.update(cx, |project, cx| {
3498 let tree = project.worktrees(cx).next().unwrap();
3499 tree.read(cx)
3500 .entry_for_path(path)
3501 .unwrap_or_else(|| panic!("no entry for path {}", path))
3502 .id
3503 })
3504 };
3505
3506 let dir_id = id_for_path("a", cx);
3507 let file_id = id_for_path("a/file1", cx);
3508 let buffer = project
3509 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3510 .await
3511 .unwrap();
3512 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3513
3514 project
3515 .update(cx, |project, cx| {
3516 project.rename_entry(dir_id, Path::new("b"), cx)
3517 })
3518 .unwrap()
3519 .await
3520 .to_included()
3521 .unwrap();
3522 cx.executor().run_until_parked();
3523
3524 assert_eq!(id_for_path("b", cx), dir_id);
3525 assert_eq!(id_for_path("b/file1", cx), file_id);
3526 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3527}
3528
3529#[gpui::test]
3530async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3531 init_test(cx);
3532
3533 let fs = FakeFs::new(cx.executor());
3534 fs.insert_tree(
3535 "/dir",
3536 json!({
3537 "a.txt": "a-contents",
3538 "b.txt": "b-contents",
3539 }),
3540 )
3541 .await;
3542
3543 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3544
3545 // Spawn multiple tasks to open paths, repeating some paths.
3546 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3547 (
3548 p.open_local_buffer("/dir/a.txt", cx),
3549 p.open_local_buffer("/dir/b.txt", cx),
3550 p.open_local_buffer("/dir/a.txt", cx),
3551 )
3552 });
3553
3554 let buffer_a_1 = buffer_a_1.await.unwrap();
3555 let buffer_a_2 = buffer_a_2.await.unwrap();
3556 let buffer_b = buffer_b.await.unwrap();
3557 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3558 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3559
3560 // There is only one buffer per path.
3561 let buffer_a_id = buffer_a_1.entity_id();
3562 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3563
3564 // Open the same path again while it is still open.
3565 drop(buffer_a_1);
3566 let buffer_a_3 = project
3567 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3568 .await
3569 .unwrap();
3570
3571 // There's still only one buffer per path.
3572 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3573}
3574
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    // Verifies the dirty/clean lifecycle of a buffer and the exact event
    // sequences emitted: editing dirties, saving cleans, restoring the saved
    // text cleans, and deleting the file on disk dirties (unless the buffer
    // was already dirty, in which case only FileHandleChanged is emitted).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Collects every non-Operation event emitted by buffer1.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version and mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only the first of the two edits flips the dirty bit, so only
        // one DirtyChanged appears between the two Edited events.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer first, then clear the recorded events so only the
    // deletion's events are asserted below.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3725
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how a buffer reacts to its file changing on disk: a clean
    // buffer is reloaded via a diff (preserving anchors), while a dirty
    // buffer keeps its contents and is flagged as conflicted.
    init_test(cx);

    // `ˇ` markers identify offsets whose anchors are tracked across the reload.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the diffed edits rather than being reset.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
3808
3809#[gpui::test]
3810async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3811 init_test(cx);
3812
3813 let fs = FakeFs::new(cx.executor());
3814 fs.insert_tree(
3815 path!("/dir"),
3816 json!({
3817 "file1": "a\nb\nc\n",
3818 "file2": "one\r\ntwo\r\nthree\r\n",
3819 }),
3820 )
3821 .await;
3822
3823 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3824 let buffer1 = project
3825 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3826 .await
3827 .unwrap();
3828 let buffer2 = project
3829 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
3830 .await
3831 .unwrap();
3832
3833 buffer1.update(cx, |buffer, _| {
3834 assert_eq!(buffer.text(), "a\nb\nc\n");
3835 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3836 });
3837 buffer2.update(cx, |buffer, _| {
3838 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3839 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3840 });
3841
3842 // Change a file's line endings on disk from unix to windows. The buffer's
3843 // state updates correctly.
3844 fs.save(
3845 path!("/dir/file1").as_ref(),
3846 &"aaa\nb\nc\n".into(),
3847 LineEnding::Windows,
3848 )
3849 .await
3850 .unwrap();
3851 cx.executor().run_until_parked();
3852 buffer1.update(cx, |buffer, _| {
3853 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3854 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3855 });
3856
3857 // Save a file with windows line endings. The file is written correctly.
3858 buffer2.update(cx, |buffer, cx| {
3859 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3860 });
3861 project
3862 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3863 .await
3864 .unwrap();
3865 assert_eq!(
3866 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
3867 "one\r\ntwo\r\nthree\r\nfour\r\n",
3868 );
3869}
3870
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP diagnostics connected via `related_information` are
    // grouped: each primary diagnostic plus its supporting hints share a
    // `group_id`, and `diagnostic_group` returns each group's entries in
    // buffer order.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload containing two primary diagnostics
    // ("error 1" and "error 2") plus hint diagnostics that cross-reference
    // their primaries through `related_information`.
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, in buffer order. "error 2" and its two hints share
    // group 0; "error 1" and its hint share group 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: the "error 2" primary and both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1: the "error 1" primary and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
4113
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the workspace file-operation protocol around renaming an
    // entry: the project sends `workspace/willRenameFiles`, applies the
    // WorkspaceEdit the server returns, and then sends
    // `workspace/didRenameFiles` — all gated on the server having registered
    // matching file-operation filters.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters the server registers: all `.rs` files and all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer so the fake language server starts.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; it won't resolve until the server answers
    // willRenameFiles below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the server will return from willRenameFiles; the project is
    // expected to apply it before performing the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set once the willRenameFiles handler runs, proving it was called with
    // the expected old/new URIs.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .handle_request::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles with
    // the same URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4242
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises symbol renaming against a fake LSP server: prepare_rename
    // returns the renameable range, and perform_rename applies the server's
    // multi-file WorkspaceEdit to both affected buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the server reports the
    // renameable range 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server responds with edits spanning both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction covers both buffers, each with the rename
    // applied.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4382
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Verifies project-wide text search: matches are found in on-disk files,
    // and subsequent searches reflect unsaved edits in open buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // "TWO" appears in two.rs (the declaration) and three.rs (the reference);
    // ranges are byte offsets within each file.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so it now contains "TWO" twice.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The search picks up the unsaved buffer contents for four.rs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40]),
            (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
4457
4458#[gpui::test]
4459async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4460 init_test(cx);
4461
4462 let search_query = "file";
4463
4464 let fs = FakeFs::new(cx.executor());
4465 fs.insert_tree(
4466 path!("/dir"),
4467 json!({
4468 "one.rs": r#"// Rust file one"#,
4469 "one.ts": r#"// TypeScript file one"#,
4470 "two.rs": r#"// Rust file two"#,
4471 "two.ts": r#"// TypeScript file two"#,
4472 }),
4473 )
4474 .await;
4475 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4476
4477 assert!(
4478 search(
4479 &project,
4480 SearchQuery::text(
4481 search_query,
4482 false,
4483 true,
4484 false,
4485 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4486 Default::default(),
4487 None
4488 )
4489 .unwrap(),
4490 cx
4491 )
4492 .await
4493 .unwrap()
4494 .is_empty(),
4495 "If no inclusions match, no files should be returned"
4496 );
4497
4498 assert_eq!(
4499 search(
4500 &project,
4501 SearchQuery::text(
4502 search_query,
4503 false,
4504 true,
4505 false,
4506 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4507 Default::default(),
4508 None
4509 )
4510 .unwrap(),
4511 cx
4512 )
4513 .await
4514 .unwrap(),
4515 HashMap::from_iter([
4516 (separator!("dir/one.rs").to_string(), vec![8..12]),
4517 (separator!("dir/two.rs").to_string(), vec![8..12]),
4518 ]),
4519 "Rust only search should give only Rust files"
4520 );
4521
4522 assert_eq!(
4523 search(
4524 &project,
4525 SearchQuery::text(
4526 search_query,
4527 false,
4528 true,
4529 false,
4530
4531 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4532
4533 Default::default(),
4534 None,
4535 ).unwrap(),
4536 cx
4537 )
4538 .await
4539 .unwrap(),
4540 HashMap::from_iter([
4541 (separator!("dir/one.ts").to_string(), vec![14..18]),
4542 (separator!("dir/two.ts").to_string(), vec![14..18]),
4543 ]),
4544 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4545 );
4546
4547 assert_eq!(
4548 search(
4549 &project,
4550 SearchQuery::text(
4551 search_query,
4552 false,
4553 true,
4554 false,
4555
4556 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4557
4558 Default::default(),
4559 None,
4560 ).unwrap(),
4561 cx
4562 )
4563 .await
4564 .unwrap(),
4565 HashMap::from_iter([
4566 (separator!("dir/two.ts").to_string(), vec![14..18]),
4567 (separator!("dir/one.rs").to_string(), vec![8..12]),
4568 (separator!("dir/one.ts").to_string(), vec![14..18]),
4569 (separator!("dir/two.rs").to_string(), vec![8..12]),
4570 ]),
4571 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4572 );
4573}
4574
4575#[gpui::test]
4576async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4577 init_test(cx);
4578
4579 let search_query = "file";
4580
4581 let fs = FakeFs::new(cx.executor());
4582 fs.insert_tree(
4583 path!("/dir"),
4584 json!({
4585 "one.rs": r#"// Rust file one"#,
4586 "one.ts": r#"// TypeScript file one"#,
4587 "two.rs": r#"// Rust file two"#,
4588 "two.ts": r#"// TypeScript file two"#,
4589 }),
4590 )
4591 .await;
4592 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4593
4594 assert_eq!(
4595 search(
4596 &project,
4597 SearchQuery::text(
4598 search_query,
4599 false,
4600 true,
4601 false,
4602 Default::default(),
4603 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4604 None,
4605 )
4606 .unwrap(),
4607 cx
4608 )
4609 .await
4610 .unwrap(),
4611 HashMap::from_iter([
4612 (separator!("dir/one.rs").to_string(), vec![8..12]),
4613 (separator!("dir/one.ts").to_string(), vec![14..18]),
4614 (separator!("dir/two.rs").to_string(), vec![8..12]),
4615 (separator!("dir/two.ts").to_string(), vec![14..18]),
4616 ]),
4617 "If no exclusions match, all files should be returned"
4618 );
4619
4620 assert_eq!(
4621 search(
4622 &project,
4623 SearchQuery::text(
4624 search_query,
4625 false,
4626 true,
4627 false,
4628 Default::default(),
4629 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4630 None,
4631 )
4632 .unwrap(),
4633 cx
4634 )
4635 .await
4636 .unwrap(),
4637 HashMap::from_iter([
4638 (separator!("dir/one.ts").to_string(), vec![14..18]),
4639 (separator!("dir/two.ts").to_string(), vec![14..18]),
4640 ]),
4641 "Rust exclusion search should give only TypeScript files"
4642 );
4643
4644 assert_eq!(
4645 search(
4646 &project,
4647 SearchQuery::text(
4648 search_query,
4649 false,
4650 true,
4651 false,
4652 Default::default(),
4653 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4654 None,
4655 ).unwrap(),
4656 cx
4657 )
4658 .await
4659 .unwrap(),
4660 HashMap::from_iter([
4661 (separator!("dir/one.rs").to_string(), vec![8..12]),
4662 (separator!("dir/two.rs").to_string(), vec![8..12]),
4663 ]),
4664 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4665 );
4666
4667 assert!(
4668 search(
4669 &project,
4670 SearchQuery::text(
4671 search_query,
4672 false,
4673 true,
4674 false,
4675 Default::default(),
4676
4677 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4678 None,
4679
4680 ).unwrap(),
4681 cx
4682 )
4683 .await
4684 .unwrap().is_empty(),
4685 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4686 );
4687}
4688
4689#[gpui::test]
4690async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4691 init_test(cx);
4692
4693 let search_query = "file";
4694
4695 let fs = FakeFs::new(cx.executor());
4696 fs.insert_tree(
4697 path!("/dir"),
4698 json!({
4699 "one.rs": r#"// Rust file one"#,
4700 "one.ts": r#"// TypeScript file one"#,
4701 "two.rs": r#"// Rust file two"#,
4702 "two.ts": r#"// TypeScript file two"#,
4703 }),
4704 )
4705 .await;
4706 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4707
4708 assert!(
4709 search(
4710 &project,
4711 SearchQuery::text(
4712 search_query,
4713 false,
4714 true,
4715 false,
4716 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4717 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4718 None,
4719 )
4720 .unwrap(),
4721 cx
4722 )
4723 .await
4724 .unwrap()
4725 .is_empty(),
4726 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4727 );
4728
4729 assert!(
4730 search(
4731 &project,
4732 SearchQuery::text(
4733 search_query,
4734 false,
4735 true,
4736 false,
4737 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4738 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4739 None,
4740 ).unwrap(),
4741 cx
4742 )
4743 .await
4744 .unwrap()
4745 .is_empty(),
4746 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4747 );
4748
4749 assert!(
4750 search(
4751 &project,
4752 SearchQuery::text(
4753 search_query,
4754 false,
4755 true,
4756 false,
4757 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4758 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4759 None,
4760 )
4761 .unwrap(),
4762 cx
4763 )
4764 .await
4765 .unwrap()
4766 .is_empty(),
4767 "Non-matching inclusions and exclusions should not change that."
4768 );
4769
4770 assert_eq!(
4771 search(
4772 &project,
4773 SearchQuery::text(
4774 search_query,
4775 false,
4776 true,
4777 false,
4778 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4779 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4780 None,
4781 )
4782 .unwrap(),
4783 cx
4784 )
4785 .await
4786 .unwrap(),
4787 HashMap::from_iter([
4788 (separator!("dir/one.ts").to_string(), vec![14..18]),
4789 (separator!("dir/two.ts").to_string(), vec![14..18]),
4790 ]),
4791 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4792 );
4793}
4794
4795#[gpui::test]
4796async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4797 init_test(cx);
4798
4799 let fs = FakeFs::new(cx.executor());
4800 fs.insert_tree(
4801 path!("/worktree-a"),
4802 json!({
4803 "haystack.rs": r#"// NEEDLE"#,
4804 "haystack.ts": r#"// NEEDLE"#,
4805 }),
4806 )
4807 .await;
4808 fs.insert_tree(
4809 path!("/worktree-b"),
4810 json!({
4811 "haystack.rs": r#"// NEEDLE"#,
4812 "haystack.ts": r#"// NEEDLE"#,
4813 }),
4814 )
4815 .await;
4816
4817 let project = Project::test(
4818 fs.clone(),
4819 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
4820 cx,
4821 )
4822 .await;
4823
4824 assert_eq!(
4825 search(
4826 &project,
4827 SearchQuery::text(
4828 "NEEDLE",
4829 false,
4830 true,
4831 false,
4832 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4833 Default::default(),
4834 None,
4835 )
4836 .unwrap(),
4837 cx
4838 )
4839 .await
4840 .unwrap(),
4841 HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
4842 "should only return results from included worktree"
4843 );
4844 assert_eq!(
4845 search(
4846 &project,
4847 SearchQuery::text(
4848 "NEEDLE",
4849 false,
4850 true,
4851 false,
4852 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4853 Default::default(),
4854 None,
4855 )
4856 .unwrap(),
4857 cx
4858 )
4859 .await
4860 .unwrap(),
4861 HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
4862 "should only return results from included worktree"
4863 );
4864
4865 assert_eq!(
4866 search(
4867 &project,
4868 SearchQuery::text(
4869 "NEEDLE",
4870 false,
4871 true,
4872 false,
4873 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4874 Default::default(),
4875 None,
4876 )
4877 .unwrap(),
4878 cx
4879 )
4880 .await
4881 .unwrap(),
4882 HashMap::from_iter([
4883 (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
4884 (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
4885 ]),
4886 "should return results from both worktrees"
4887 );
4888}
4889
4890#[gpui::test]
4891async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
4892 init_test(cx);
4893
4894 let fs = FakeFs::new(cx.background_executor.clone());
4895 fs.insert_tree(
4896 path!("/dir"),
4897 json!({
4898 ".git": {},
4899 ".gitignore": "**/target\n/node_modules\n",
4900 "target": {
4901 "index.txt": "index_key:index_value"
4902 },
4903 "node_modules": {
4904 "eslint": {
4905 "index.ts": "const eslint_key = 'eslint value'",
4906 "package.json": r#"{ "some_key": "some value" }"#,
4907 },
4908 "prettier": {
4909 "index.ts": "const prettier_key = 'prettier value'",
4910 "package.json": r#"{ "other_key": "other value" }"#,
4911 },
4912 },
4913 "package.json": r#"{ "main_key": "main value" }"#,
4914 }),
4915 )
4916 .await;
4917 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4918
4919 let query = "key";
4920 assert_eq!(
4921 search(
4922 &project,
4923 SearchQuery::text(
4924 query,
4925 false,
4926 false,
4927 false,
4928 Default::default(),
4929 Default::default(),
4930 None,
4931 )
4932 .unwrap(),
4933 cx
4934 )
4935 .await
4936 .unwrap(),
4937 HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
4938 "Only one non-ignored file should have the query"
4939 );
4940
4941 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4942 assert_eq!(
4943 search(
4944 &project,
4945 SearchQuery::text(
4946 query,
4947 false,
4948 false,
4949 true,
4950 Default::default(),
4951 Default::default(),
4952 None,
4953 )
4954 .unwrap(),
4955 cx
4956 )
4957 .await
4958 .unwrap(),
4959 HashMap::from_iter([
4960 (separator!("dir/package.json").to_string(), vec![8..11]),
4961 (separator!("dir/target/index.txt").to_string(), vec![6..9]),
4962 (
4963 separator!("dir/node_modules/prettier/package.json").to_string(),
4964 vec![9..12]
4965 ),
4966 (
4967 separator!("dir/node_modules/prettier/index.ts").to_string(),
4968 vec![15..18]
4969 ),
4970 (
4971 separator!("dir/node_modules/eslint/index.ts").to_string(),
4972 vec![13..16]
4973 ),
4974 (
4975 separator!("dir/node_modules/eslint/package.json").to_string(),
4976 vec![8..11]
4977 ),
4978 ]),
4979 "Unrestricted search with ignored directories should find every file with the query"
4980 );
4981
4982 let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
4983 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
4984 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4985 assert_eq!(
4986 search(
4987 &project,
4988 SearchQuery::text(
4989 query,
4990 false,
4991 false,
4992 true,
4993 files_to_include,
4994 files_to_exclude,
4995 None,
4996 )
4997 .unwrap(),
4998 cx
4999 )
5000 .await
5001 .unwrap(),
5002 HashMap::from_iter([(
5003 separator!("dir/node_modules/prettier/package.json").to_string(),
5004 vec![9..12]
5005 )]),
5006 "With search including ignored prettier directory and excluding TS files, only one file should be found"
5007 );
5008}
5009
5010#[gpui::test]
5011async fn test_create_entry(cx: &mut gpui::TestAppContext) {
5012 init_test(cx);
5013
5014 let fs = FakeFs::new(cx.executor().clone());
5015 fs.insert_tree(
5016 "/one/two",
5017 json!({
5018 "three": {
5019 "a.txt": "",
5020 "four": {}
5021 },
5022 "c.rs": ""
5023 }),
5024 )
5025 .await;
5026
5027 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
5028 project
5029 .update(cx, |project, cx| {
5030 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5031 project.create_entry((id, "b.."), true, cx)
5032 })
5033 .await
5034 .unwrap()
5035 .to_included()
5036 .unwrap();
5037
5038 // Can't create paths outside the project
5039 let result = project
5040 .update(cx, |project, cx| {
5041 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5042 project.create_entry((id, "../../boop"), true, cx)
5043 })
5044 .await;
5045 assert!(result.is_err());
5046
5047 // Can't create paths with '..'
5048 let result = project
5049 .update(cx, |project, cx| {
5050 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5051 project.create_entry((id, "four/../beep"), true, cx)
5052 })
5053 .await;
5054 assert!(result.is_err());
5055
5056 assert_eq!(
5057 fs.paths(true),
5058 vec![
5059 PathBuf::from(path!("/")),
5060 PathBuf::from(path!("/one")),
5061 PathBuf::from(path!("/one/two")),
5062 PathBuf::from(path!("/one/two/c.rs")),
5063 PathBuf::from(path!("/one/two/three")),
5064 PathBuf::from(path!("/one/two/three/a.txt")),
5065 PathBuf::from(path!("/one/two/three/b..")),
5066 PathBuf::from(path!("/one/two/three/four")),
5067 ]
5068 );
5069
5070 // And we cannot open buffers with '..'
5071 let result = project
5072 .update(cx, |project, cx| {
5073 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5074 project.open_buffer((id, "../c.rs"), cx)
5075 })
5076 .await;
5077 assert!(result.is_err())
5078}
5079
// Verifies that a hover request fans out to every running language server
// that advertises hover support, that empty responses are dropped, and that
// servers without the capability are never queried.
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four fake servers registered for the same "tsx" language. The first three
    // advertise hover support; the last one does not, so the project must never
    // send it a hover request.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    // No hover capability: this server must not receive hover requests.
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all registered servers for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler on each started server, keyed by server name:
    // TypeScriptServer and TailwindServer answer with text, ESLintServer
    // answers None, and NoHoverCapabilitiesServer panics if ever asked.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Wait until every hover-capable server has actually received the request
    // before collecting the merged result.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned hover content contribute to the
    // merged result; ESLintServer's `None` response is dropped.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5232
5233#[gpui::test]
5234async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5235 init_test(cx);
5236
5237 let fs = FakeFs::new(cx.executor());
5238 fs.insert_tree(
5239 path!("/dir"),
5240 json!({
5241 "a.ts": "a",
5242 }),
5243 )
5244 .await;
5245
5246 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5247
5248 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5249 language_registry.add(typescript_lang());
5250 let mut fake_language_servers = language_registry.register_fake_lsp(
5251 "TypeScript",
5252 FakeLspAdapter {
5253 capabilities: lsp::ServerCapabilities {
5254 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5255 ..lsp::ServerCapabilities::default()
5256 },
5257 ..FakeLspAdapter::default()
5258 },
5259 );
5260
5261 let (buffer, _handle) = project
5262 .update(cx, |p, cx| {
5263 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5264 })
5265 .await
5266 .unwrap();
5267 cx.executor().run_until_parked();
5268
5269 let fake_server = fake_language_servers
5270 .next()
5271 .await
5272 .expect("failed to get the language server");
5273
5274 let mut request_handled =
5275 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
5276 Ok(Some(lsp::Hover {
5277 contents: lsp::HoverContents::Array(vec![
5278 lsp::MarkedString::String("".to_string()),
5279 lsp::MarkedString::String(" ".to_string()),
5280 lsp::MarkedString::String("\n\n\n".to_string()),
5281 ]),
5282 range: None,
5283 }))
5284 });
5285
5286 let hover_task = project.update(cx, |project, cx| {
5287 project.hover(&buffer, Point::new(0, 0), cx)
5288 });
5289 let () = request_handled
5290 .next()
5291 .await
5292 .expect("All hover requests should have been triggered");
5293 assert_eq!(
5294 Vec::<String>::new(),
5295 hover_task
5296 .await
5297 .into_iter()
5298 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5299 .sorted()
5300 .collect::<Vec<_>>(),
5301 "Empty hover parts should be ignored"
5302 );
5303}
5304
5305#[gpui::test]
5306async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
5307 init_test(cx);
5308
5309 let fs = FakeFs::new(cx.executor());
5310 fs.insert_tree(
5311 path!("/dir"),
5312 json!({
5313 "a.ts": "a",
5314 }),
5315 )
5316 .await;
5317
5318 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5319
5320 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5321 language_registry.add(typescript_lang());
5322 let mut fake_language_servers = language_registry.register_fake_lsp(
5323 "TypeScript",
5324 FakeLspAdapter {
5325 capabilities: lsp::ServerCapabilities {
5326 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5327 ..lsp::ServerCapabilities::default()
5328 },
5329 ..FakeLspAdapter::default()
5330 },
5331 );
5332
5333 let (buffer, _handle) = project
5334 .update(cx, |p, cx| {
5335 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5336 })
5337 .await
5338 .unwrap();
5339 cx.executor().run_until_parked();
5340
5341 let fake_server = fake_language_servers
5342 .next()
5343 .await
5344 .expect("failed to get the language server");
5345
5346 let mut request_handled = fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5347 move |_, _| async move {
5348 Ok(Some(vec![
5349 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5350 title: "organize imports".to_string(),
5351 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
5352 ..lsp::CodeAction::default()
5353 }),
5354 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5355 title: "fix code".to_string(),
5356 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
5357 ..lsp::CodeAction::default()
5358 }),
5359 ]))
5360 },
5361 );
5362
5363 let code_actions_task = project.update(cx, |project, cx| {
5364 project.code_actions(
5365 &buffer,
5366 0..buffer.read(cx).len(),
5367 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
5368 cx,
5369 )
5370 });
5371
5372 let () = request_handled
5373 .next()
5374 .await
5375 .expect("The code action request should have been triggered");
5376
5377 let code_actions = code_actions_task.await.unwrap();
5378 assert_eq!(code_actions.len(), 1);
5379 assert_eq!(
5380 code_actions[0].lsp_action.action_kind(),
5381 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
5382 );
5383}
5384
// Verifies that a code-action request fans out to every running language
// server that advertises code-action support, that empty responses are
// dropped, and that servers without the capability are never queried.
#[gpui::test]
async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four fake servers registered for the same "tsx" language. The first three
    // advertise code-action support; the last one does not, so the project must
    // never send it a code-action request.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoActionsCapabilitiesServer",
    ];

    let mut language_server_rxs = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    // No code-action capability: must not receive these requests.
                    code_action_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all registered servers for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a code-action handler on each started server, keyed by server
    // name: TypeScriptServer and TailwindServer answer with one action each,
    // ESLintServer answers None, and NoActionsCapabilitiesServer panics if
    // ever asked.
    let mut servers_with_actions_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();

        assert!(
            !servers_with_actions_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.0.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_actions_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
                                    lsp::CodeAction {
                                        title: format!("{name} code action"),
                                        ..lsp::CodeAction::default()
                                    },
                                )]))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                servers_with_actions_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoActionsCapabilitiesServer" => {
                let _never_handled = new_server
                    .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for code actions server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
    });

    // Wait until every action-capable server has actually received the request
    // before collecting the merged result.
    let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
        |mut code_actions_request| async move {
            code_actions_request
                .next()
                .await
                .expect("All code actions requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned actions contribute to the merged
    // result; ESLintServer's `None` response is dropped.
    assert_eq!(
        vec!["TailwindServer code action", "TypeScriptServer code action"],
        code_actions_task
            .await
            .unwrap()
            .into_iter()
            .map(|code_action| code_action.lsp_action.title().to_owned())
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive code actions responses from all related servers with hover capabilities"
    );
}
5543
5544#[gpui::test]
5545async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5546 init_test(cx);
5547
5548 let fs = FakeFs::new(cx.executor());
5549 fs.insert_tree(
5550 "/dir",
5551 json!({
5552 "a.rs": "let a = 1;",
5553 "b.rs": "let b = 2;",
5554 "c.rs": "let c = 2;",
5555 }),
5556 )
5557 .await;
5558
5559 let project = Project::test(
5560 fs,
5561 [
5562 "/dir/a.rs".as_ref(),
5563 "/dir/b.rs".as_ref(),
5564 "/dir/c.rs".as_ref(),
5565 ],
5566 cx,
5567 )
5568 .await;
5569
5570 // check the initial state and get the worktrees
5571 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5572 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5573 assert_eq!(worktrees.len(), 3);
5574
5575 let worktree_a = worktrees[0].read(cx);
5576 let worktree_b = worktrees[1].read(cx);
5577 let worktree_c = worktrees[2].read(cx);
5578
5579 // check they start in the right order
5580 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5581 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5582 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5583
5584 (
5585 worktrees[0].clone(),
5586 worktrees[1].clone(),
5587 worktrees[2].clone(),
5588 )
5589 });
5590
5591 // move first worktree to after the second
5592 // [a, b, c] -> [b, a, c]
5593 project
5594 .update(cx, |project, cx| {
5595 let first = worktree_a.read(cx);
5596 let second = worktree_b.read(cx);
5597 project.move_worktree(first.id(), second.id(), cx)
5598 })
5599 .expect("moving first after second");
5600
5601 // check the state after moving
5602 project.update(cx, |project, cx| {
5603 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5604 assert_eq!(worktrees.len(), 3);
5605
5606 let first = worktrees[0].read(cx);
5607 let second = worktrees[1].read(cx);
5608 let third = worktrees[2].read(cx);
5609
5610 // check they are now in the right order
5611 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5612 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5613 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5614 });
5615
5616 // move the second worktree to before the first
5617 // [b, a, c] -> [a, b, c]
5618 project
5619 .update(cx, |project, cx| {
5620 let second = worktree_a.read(cx);
5621 let first = worktree_b.read(cx);
5622 project.move_worktree(first.id(), second.id(), cx)
5623 })
5624 .expect("moving second before first");
5625
5626 // check the state after moving
5627 project.update(cx, |project, cx| {
5628 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5629 assert_eq!(worktrees.len(), 3);
5630
5631 let first = worktrees[0].read(cx);
5632 let second = worktrees[1].read(cx);
5633 let third = worktrees[2].read(cx);
5634
5635 // check they are now in the right order
5636 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5637 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5638 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5639 });
5640
5641 // move the second worktree to after the third
5642 // [a, b, c] -> [a, c, b]
5643 project
5644 .update(cx, |project, cx| {
5645 let second = worktree_b.read(cx);
5646 let third = worktree_c.read(cx);
5647 project.move_worktree(second.id(), third.id(), cx)
5648 })
5649 .expect("moving second after third");
5650
5651 // check the state after moving
5652 project.update(cx, |project, cx| {
5653 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5654 assert_eq!(worktrees.len(), 3);
5655
5656 let first = worktrees[0].read(cx);
5657 let second = worktrees[1].read(cx);
5658 let third = worktrees[2].read(cx);
5659
5660 // check they are now in the right order
5661 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5662 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5663 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5664 });
5665
5666 // move the third worktree to before the second
5667 // [a, c, b] -> [a, b, c]
5668 project
5669 .update(cx, |project, cx| {
5670 let third = worktree_c.read(cx);
5671 let second = worktree_b.read(cx);
5672 project.move_worktree(third.id(), second.id(), cx)
5673 })
5674 .expect("moving third before second");
5675
5676 // check the state after moving
5677 project.update(cx, |project, cx| {
5678 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5679 assert_eq!(worktrees.len(), 3);
5680
5681 let first = worktrees[0].read(cx);
5682 let second = worktrees[1].read(cx);
5683 let third = worktrees[2].read(cx);
5684
5685 // check they are now in the right order
5686 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5687 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5688 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5689 });
5690
5691 // move the first worktree to after the third
5692 // [a, b, c] -> [b, c, a]
5693 project
5694 .update(cx, |project, cx| {
5695 let first = worktree_a.read(cx);
5696 let third = worktree_c.read(cx);
5697 project.move_worktree(first.id(), third.id(), cx)
5698 })
5699 .expect("moving first after third");
5700
5701 // check the state after moving
5702 project.update(cx, |project, cx| {
5703 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5704 assert_eq!(worktrees.len(), 3);
5705
5706 let first = worktrees[0].read(cx);
5707 let second = worktrees[1].read(cx);
5708 let third = worktrees[2].read(cx);
5709
5710 // check they are now in the right order
5711 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5712 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5713 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5714 });
5715
5716 // move the third worktree to before the first
5717 // [b, c, a] -> [a, b, c]
5718 project
5719 .update(cx, |project, cx| {
5720 let third = worktree_a.read(cx);
5721 let first = worktree_b.read(cx);
5722 project.move_worktree(third.id(), first.id(), cx)
5723 })
5724 .expect("moving third before first");
5725
5726 // check the state after moving
5727 project.update(cx, |project, cx| {
5728 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5729 assert_eq!(worktrees.len(), 3);
5730
5731 let first = worktrees[0].read(cx);
5732 let second = worktrees[1].read(cx);
5733 let third = worktrees[2].read(cx);
5734
5735 // check they are now in the right order
5736 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5737 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5738 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5739 });
5740}
5741
// Verifies that a buffer's unstaged diff (working copy vs. git index) is
// computed when the diff is opened, and recomputed when the index changes.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index ("staged") state differs from the on-disk file contents below:
    // the file adds a comment line and changes the println! argument.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Initial diff against the index: one added line and one modified line.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Update the index so it matches the file except for the println! line.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    // The diff is recomputed against the new index text: only the println!
    // line remains, now reported as an addition.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
5839
// Verifies a buffer's uncommitted diff (working copy vs. HEAD): how each
// hunk's secondary (staged/unstaged) status reacts to HEAD and index changes,
// and that a file deleted from the working copy diffs as a single deletion.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three distinct states: HEAD, index, and the file on disk.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // "deletion.rs" exists in HEAD and the index but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text picks up the buffer's language for highlighting.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // vs HEAD: the comment line is an addition that is still unstaged (the
    // index lacks it), while the println! change is already staged.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    // Buffer now has a single unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted from the working copy.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The entire file shows as one deletion hunk, not yet staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file by removing it from the index.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk no longer carries a secondary (unstaged) hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6017
// Exercises staging/unstaging of individual hunks: the optimistic "pending"
// secondary statuses shown while an index write is in flight, the events
// emitted around the write, rollback when the index write fails, and two
// staging operations issued back to back.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // The working copy deletes "zero" and modifies "two" and "four".
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and the index start out identical, so every hunk is unstaged.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged (removal pending)
    // before the asynchronous index write has completed.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk; it also shows the optimistic pending state.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The rollback also announces the (full) changed range.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6361
// Stages, then unstages, 100 hunks via one call per hunk in rapid succession,
// checking that each hunk immediately shows the appropriate pending status and
// that all of them settle correctly once the index writes complete.
#[allow(clippy::format_collect)]
#[gpui::test]
async fn test_staging_lots_of_hunks_fast(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every fifth line of the 500-line committed file is replaced with
    // "diff N" in the working copy, yielding 100 single-line hunks.
    let different_lines = (0..500)
        .step_by(5)
        .map(|i| format!("diff {}\n", i))
        .collect::<Vec<String>>();
    let committed_contents = (0..500).map(|i| format!("{}\n", i)).collect::<String>();
    let file_contents = (0..500)
        .map(|i| {
            if i % 5 == 0 {
                different_lines[i / 5].clone()
            } else {
                format!("{}\n", i)
            }
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and the index start out identical.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // One expected modification hunk per changed line; the status field is
    // rewritten in place as the test moves through each phase.
    let mut expected_hunks: Vec<(Range<u32>, String, String, DiffHunkStatus)> = (0..500)
        .step_by(5)
        .map(|i| {
            (
                i as u32..i as u32 + 1,
                format!("{}\n", i),
                different_lines[i / 5].clone(),
                DiffHunkStatus::modified(HasSecondaryHunk),
            )
        })
        .collect();

    // The hunks are initially unstaged
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkRemovalPending);
    }

    // Stage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        }

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(NoSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkAdditionPending);
    }

    // Unstage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(false, &[hunk], &snapshot, true, cx);
        }

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks, again
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(HasSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });
}
6508
// Verifies that opening a single file (rather than a directory) as the
// project's worktree still produces a working uncommitted diff against the
// repository that contains the file.
#[gpui::test]
async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello from HEAD");
        }
    "#
    .unindent();
    let file_contents = r#"
        fn main() {
            println!("hello from the working copy");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and the index agree, so the single hunk will be unstaged.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents.clone())],
    );

    // Note: the project root is the file itself, not its directory.
    let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.hunks(&snapshot, cx),
            &snapshot,
            &uncommitted_diff.base_text_string().unwrap(),
            &[(
                1..2,
                "    println!(\"hello from HEAD\");\n",
                "    println!(\"hello from the working copy\");\n",
                DiffHunkStatus {
                    kind: DiffHunkStatusKind::Modified,
                    secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
                },
            )],
        );
    });
}
6581
6582async fn search(
6583 project: &Entity<Project>,
6584 query: SearchQuery,
6585 cx: &mut gpui::TestAppContext,
6586) -> Result<HashMap<String, Vec<Range<usize>>>> {
6587 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
6588 let mut results = HashMap::default();
6589 while let Ok(search_result) = search_rx.recv().await {
6590 match search_result {
6591 SearchResult::Buffer { buffer, ranges } => {
6592 results.entry(buffer).or_insert(ranges);
6593 }
6594 SearchResult::LimitReached => {}
6595 }
6596 }
6597 Ok(results
6598 .into_iter()
6599 .map(|(buffer, ranges)| {
6600 buffer.update(cx, |buffer, cx| {
6601 let path = buffer
6602 .file()
6603 .unwrap()
6604 .full_path(cx)
6605 .to_string_lossy()
6606 .to_string();
6607 let ranges = ranges
6608 .into_iter()
6609 .map(|range| range.to_offset(buffer))
6610 .collect::<Vec<_>>();
6611 (path, ranges)
6612 })
6613 })
6614 .collect())
6615}
6616
6617pub fn init_test(cx: &mut gpui::TestAppContext) {
6618 if std::env::var("RUST_LOG").is_ok() {
6619 env_logger::try_init().ok();
6620 }
6621
6622 cx.update(|cx| {
6623 let settings_store = SettingsStore::test(cx);
6624 cx.set_global(settings_store);
6625 release_channel::init(SemanticVersion::default(), cx);
6626 language::init(cx);
6627 Project::init_settings(cx);
6628 });
6629}
6630
6631fn json_lang() -> Arc<Language> {
6632 Arc::new(Language::new(
6633 LanguageConfig {
6634 name: "JSON".into(),
6635 matcher: LanguageMatcher {
6636 path_suffixes: vec!["json".to_string()],
6637 ..Default::default()
6638 },
6639 ..Default::default()
6640 },
6641 None,
6642 ))
6643}
6644
6645fn js_lang() -> Arc<Language> {
6646 Arc::new(Language::new(
6647 LanguageConfig {
6648 name: "JavaScript".into(),
6649 matcher: LanguageMatcher {
6650 path_suffixes: vec!["js".to_string()],
6651 ..Default::default()
6652 },
6653 ..Default::default()
6654 },
6655 None,
6656 ))
6657}
6658
6659fn rust_lang() -> Arc<Language> {
6660 Arc::new(Language::new(
6661 LanguageConfig {
6662 name: "Rust".into(),
6663 matcher: LanguageMatcher {
6664 path_suffixes: vec!["rs".to_string()],
6665 ..Default::default()
6666 },
6667 ..Default::default()
6668 },
6669 Some(tree_sitter_rust::LANGUAGE.into()),
6670 ))
6671}
6672
6673fn typescript_lang() -> Arc<Language> {
6674 Arc::new(Language::new(
6675 LanguageConfig {
6676 name: "TypeScript".into(),
6677 matcher: LanguageMatcher {
6678 path_suffixes: vec!["ts".to_string()],
6679 ..Default::default()
6680 },
6681 ..Default::default()
6682 },
6683 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
6684 ))
6685}
6686
6687fn tsx_lang() -> Arc<Language> {
6688 Arc::new(Language::new(
6689 LanguageConfig {
6690 name: "tsx".into(),
6691 matcher: LanguageMatcher {
6692 path_suffixes: vec!["tsx".to_string()],
6693 ..Default::default()
6694 },
6695 ..Default::default()
6696 },
6697 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
6698 ))
6699}
6700
6701fn get_all_tasks(
6702 project: &Entity<Project>,
6703 task_contexts: &TaskContexts,
6704 cx: &mut App,
6705) -> Vec<(TaskSourceKind, ResolvedTask)> {
6706 let (mut old, new) = project.update(cx, |project, cx| {
6707 project
6708 .task_store
6709 .read(cx)
6710 .task_inventory()
6711 .unwrap()
6712 .read(cx)
6713 .used_and_current_resolved_tasks(task_contexts, cx)
6714 });
6715 old.extend(new);
6716 old
6717}