1use crate::{Event, *};
2use buffer_diff::{assert_hunks, DiffHunkSecondaryStatus, DiffHunkStatus};
3use fs::FakeFs;
4use futures::{future, StreamExt};
5use gpui::{App, SemanticVersion, UpdateGlobal};
6use http_client::Url;
7use language::{
8 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
9 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
10 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
11 OffsetRangeExt, Point, ToPoint,
12};
13use lsp::{
14 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
15 NumberOrString, TextDocumentEdit, WillRenameFiles,
16};
17use parking_lot::Mutex;
18use pretty_assertions::{assert_eq, assert_matches};
19use serde_json::json;
20#[cfg(not(windows))]
21use std::os;
22use std::{str::FromStr, sync::OnceLock};
23
24use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
25use task::{ResolvedTask, TaskContext};
26use unindent::Unindent as _;
27use util::{
28 assert_set_eq, path, paths::PathMatcher, separator, test::TempTree, uri, TryFutureExt as _,
29};
30
31#[gpui::test]
32async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
33 cx.executor().allow_parking();
34
35 let (tx, mut rx) = futures::channel::mpsc::unbounded();
36 let _thread = std::thread::spawn(move || {
37 #[cfg(not(target_os = "windows"))]
38 std::fs::metadata("/tmp").unwrap();
39 #[cfg(target_os = "windows")]
40 std::fs::metadata("C:/Windows").unwrap();
41 std::thread::sleep(Duration::from_millis(1000));
42 tx.unbounded_send(1).unwrap();
43 });
44 rx.next().await.unwrap();
45}
46
47#[gpui::test]
48async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
49 cx.executor().allow_parking();
50
51 let io_task = smol::unblock(move || {
52 println!("sleeping on thread {:?}", std::thread::current().id());
53 std::thread::sleep(Duration::from_millis(10));
54 1
55 });
56
57 let task = cx.foreground_executor().spawn(async move {
58 io_task.await;
59 });
60
61 task.await;
62}
63
64#[cfg(not(windows))]
65#[gpui::test]
66async fn test_symlinks(cx: &mut gpui::TestAppContext) {
67 init_test(cx);
68 cx.executor().allow_parking();
69
70 let dir = TempTree::new(json!({
71 "root": {
72 "apple": "",
73 "banana": {
74 "carrot": {
75 "date": "",
76 "endive": "",
77 }
78 },
79 "fennel": {
80 "grape": "",
81 }
82 }
83 }));
84
85 let root_link_path = dir.path().join("root_link");
86 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
87 os::unix::fs::symlink(
88 dir.path().join("root/fennel"),
89 dir.path().join("root/finnochio"),
90 )
91 .unwrap();
92
93 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
94
95 project.update(cx, |project, cx| {
96 let tree = project.worktrees(cx).next().unwrap().read(cx);
97 assert_eq!(tree.file_count(), 5);
98 assert_eq!(
99 tree.inode_for_path("fennel/grape"),
100 tree.inode_for_path("finnochio/grape")
101 );
102 });
103}
104
// Verifies that .editorconfig files are honored, that they override
// .zed/settings.json, and that nested .editorconfig files override
// ancestors for matching globs.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n    A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n    B\n}",
        },
        "c.js": "def c\n  C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into a FakeFs so the project loads it
    // through the test filesystem.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let the worktree finish scanning and settings finish loading.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            // Block on the async language lookup; tests run on a
            // deterministic executor so this cannot deadlock.
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by .editorconfig's glob "*.rs",
        // so it falls back to the .zed/settings.json tab size.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
194
// Verifies worktree-local .zed/settings.json and .zed/tasks.json handling:
// per-directory settings resolution, task discovery ordering, and the
// re-ordering of tasks after one is scheduled plus global tasks are added.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n    A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n    B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    // Wait for local settings/tasks files to be discovered and parsed.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // Source kind for tasks declared in the worktree root's ".zed" directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings are resolved per-file: a/a.rs sees the root
            // settings, b/b.rs sees the nested override.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files are found; the more deeply nested one sorts
    // first before any task has been scheduled.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as scheduled and register a global tasks file in
    // the inventory.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task now sorts first, followed by the nested
    // worktree task, then the newly-added global task (including its env).
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
396
// End-to-end check of the language-server lifecycle: server startup on first
// matching buffer, didOpen/didChange/didSave/didClose routing per language,
// buffer re-association when files are renamed across languages, and server
// restart behavior. The notification ordering asserted here mirrors the LSP
// text-synchronization protocol, so statement order is significant throughout.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake Rust and JSON servers with distinct completion trigger
    // characters, so we can tell which server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // The Rust server hears only about test2.rs, not Cargo.toml.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // A same-language rename is modeled as close(old path) + open(new path)
    // on the same server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared when the buffer
    // changes language below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two didOpen notifications is unspecified, hence the
    // set comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
798
// Verifies workspace/didChangeWatchedFiles support: registering watchers
// causes ignored directories matching a watch pattern to be loaded, and only
// FS mutations matching the registered globs are forwarded to the server.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for counting the directory reads triggered by watcher registration.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Collect incoming file-change notifications, sorted by URI so
    // assertions below are order-independent.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registration alone produces no change events, but it does cause
    // the ignored "target/y" subtree to be scanned.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
998
// Verifies that diagnostics published for two single-file worktrees are
// routed to the correct buffer, each with its own severity.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open each file as its own single-file worktree.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Publish one diagnostic per file from the same (fake) server:
    // an ERROR for a.rs and a WARNING for b.rs.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer carries only its own diagnostic, highlighting just the
    // variable name within the chunked text.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1100
1101#[gpui::test]
1102async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1103 init_test(cx);
1104
1105 let fs = FakeFs::new(cx.executor());
1106 fs.insert_tree(
1107 path!("/root"),
1108 json!({
1109 "dir": {
1110 ".git": {
1111 "HEAD": "ref: refs/heads/main",
1112 },
1113 ".gitignore": "b.rs",
1114 "a.rs": "let a = 1;",
1115 "b.rs": "let b = 2;",
1116 },
1117 "other.rs": "let b = c;"
1118 }),
1119 )
1120 .await;
1121
1122 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1123 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1124 let (worktree, _) = project
1125 .update(cx, |project, cx| {
1126 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1127 })
1128 .await
1129 .unwrap();
1130 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1131
1132 let (worktree, _) = project
1133 .update(cx, |project, cx| {
1134 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1135 })
1136 .await
1137 .unwrap();
1138 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1139
1140 let server_id = LanguageServerId(0);
1141 lsp_store.update(cx, |lsp_store, cx| {
1142 lsp_store
1143 .update_diagnostics(
1144 server_id,
1145 lsp::PublishDiagnosticsParams {
1146 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1147 version: None,
1148 diagnostics: vec![lsp::Diagnostic {
1149 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1150 severity: Some(lsp::DiagnosticSeverity::ERROR),
1151 message: "unused variable 'b'".to_string(),
1152 ..Default::default()
1153 }],
1154 },
1155 &[],
1156 cx,
1157 )
1158 .unwrap();
1159 lsp_store
1160 .update_diagnostics(
1161 server_id,
1162 lsp::PublishDiagnosticsParams {
1163 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1164 version: None,
1165 diagnostics: vec![lsp::Diagnostic {
1166 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1167 severity: Some(lsp::DiagnosticSeverity::ERROR),
1168 message: "unknown variable 'c'".to_string(),
1169 ..Default::default()
1170 }],
1171 },
1172 &[],
1173 cx,
1174 )
1175 .unwrap();
1176 });
1177
1178 let main_ignored_buffer = project
1179 .update(cx, |project, cx| {
1180 project.open_buffer((main_worktree_id, "b.rs"), cx)
1181 })
1182 .await
1183 .unwrap();
1184 main_ignored_buffer.update(cx, |buffer, _| {
1185 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1186 assert_eq!(
1187 chunks
1188 .iter()
1189 .map(|(s, d)| (s.as_str(), *d))
1190 .collect::<Vec<_>>(),
1191 &[
1192 ("let ", None),
1193 ("b", Some(DiagnosticSeverity::ERROR)),
1194 (" = 2;", None),
1195 ],
1196 "Gigitnored buffers should still get in-buffer diagnostics",
1197 );
1198 });
1199 let other_buffer = project
1200 .update(cx, |project, cx| {
1201 project.open_buffer((other_worktree_id, ""), cx)
1202 })
1203 .await
1204 .unwrap();
1205 other_buffer.update(cx, |buffer, _| {
1206 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1207 assert_eq!(
1208 chunks
1209 .iter()
1210 .map(|(s, d)| (s.as_str(), *d))
1211 .collect::<Vec<_>>(),
1212 &[
1213 ("let b = ", None),
1214 ("c", Some(DiagnosticSeverity::ERROR)),
1215 (";", None),
1216 ],
1217 "Buffers from hidden projects should still get in-buffer diagnostics"
1218 );
1219 });
1220
1221 project.update(cx, |project, cx| {
1222 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1223 assert_eq!(
1224 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1225 vec![(
1226 ProjectPath {
1227 worktree_id: main_worktree_id,
1228 path: Arc::from(Path::new("b.rs")),
1229 },
1230 server_id,
1231 DiagnosticSummary {
1232 error_count: 1,
1233 warning_count: 0,
1234 }
1235 )]
1236 );
1237 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1238 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1239 });
1240}
1241
// Verifies the project-level event stream emitted while a language server
// reports disk-based diagnostics: the server-added event, the started/finished
// progress events keyed by the adapter's progress token, and the per-path
// diagnostics-updated events. Also checks that republishing identical empty
// diagnostics does not emit a second update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The adapter's progress token is what marks this server's progress
    // reports as "disk-based diagnostics" work below.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Progress with the registered token should surface as a
    // DiskBasedDiagnosticsStarted event (after the inlay-hint refresh
    // triggered by server startup).
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // The published diagnostic should be readable from a newly opened buffer.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Second identical empty publish: no further event should be pending.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1377
// Verifies that restarting a language server while its disk-based diagnostics
// progress is still open does not leave the project stuck in a
// "diagnostics running" state: the replacement server (id 1) drives a fresh
// started/finished cycle, and the old server's never-ended progress token is
// discarded.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The replacement server gets the next id (1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server should be reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1465
// Verifies that diagnostics already published by a language server are cleared
// (both in the buffer and in the project-level summary) when that server is
// restarted.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Let the notification propagate, then confirm the diagnostic landed in
    // the buffer and in the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1546
// Verifies that a diagnostics publish referencing an unknown (far-future)
// buffer version does not poison version tracking: after restarting the
// server, the buffer is re-opened with the server at version 0.
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });
    // The restarted server re-opens the buffer starting from version 0,
    // unaffected by the bogus version reported earlier.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
1586
// Verifies that cancelling language-server work for a buffer sends a
// WorkDoneProgressCancel only for progress tokens the server marked as
// cancellable — the non-cancellable "another-token" work is left alone.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // One non-cancellable work item...
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // ...and one cancellable work item under the disk-based-diagnostics token.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token should produce a cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1651
// Verifies that toggling the per-language `enable_language_server` setting
// stops and restarts the corresponding server — and only that server: Rust
// and JavaScript servers are toggled independently without affecting each
// other.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server receives Exit; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance re-opens the still-open buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1769
// Verifies that diagnostics published against an older document version are
// transformed through subsequent buffer edits: ranges are translated to the
// current text, overlapping diagnostics are highlighted correctly, and
// publishes referencing an in-between version are reconciled with the edits
// made since ("out-of-order diagnostics").
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = lsp_store.update(cx, |lsp_store, cx| {
        lsp_store.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Ranges published at rows 1 and 2 now appear at rows 3 and 4,
        // shifted by the two newlines inserted above.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider warning sorts before the nested error; the error's
        // severity wins for the overlapping "A" span in the chunk output.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Ranges reflect the latest edits: row 0 → row 2 (shifted and widened
        // by the "(x: usize)" edit), row 1 → row 3 (widened by "xxx").
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2056
2057#[gpui::test]
2058async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2059 init_test(cx);
2060
2061 let text = concat!(
2062 "let one = ;\n", //
2063 "let two = \n",
2064 "let three = 3;\n",
2065 );
2066
2067 let fs = FakeFs::new(cx.executor());
2068 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2069
2070 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2071 let buffer = project
2072 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2073 .await
2074 .unwrap();
2075
2076 project.update(cx, |project, cx| {
2077 project.lsp_store.update(cx, |lsp_store, cx| {
2078 lsp_store
2079 .update_diagnostic_entries(
2080 LanguageServerId(0),
2081 PathBuf::from("/dir/a.rs"),
2082 None,
2083 vec![
2084 DiagnosticEntry {
2085 range: Unclipped(PointUtf16::new(0, 10))
2086 ..Unclipped(PointUtf16::new(0, 10)),
2087 diagnostic: Diagnostic {
2088 severity: DiagnosticSeverity::ERROR,
2089 message: "syntax error 1".to_string(),
2090 ..Default::default()
2091 },
2092 },
2093 DiagnosticEntry {
2094 range: Unclipped(PointUtf16::new(1, 10))
2095 ..Unclipped(PointUtf16::new(1, 10)),
2096 diagnostic: Diagnostic {
2097 severity: DiagnosticSeverity::ERROR,
2098 message: "syntax error 2".to_string(),
2099 ..Default::default()
2100 },
2101 },
2102 ],
2103 cx,
2104 )
2105 .unwrap();
2106 })
2107 });
2108
2109 // An empty range is extended forward to include the following character.
2110 // At the end of a line, an empty range is extended backward to include
2111 // the preceding character.
2112 buffer.update(cx, |buffer, _| {
2113 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2114 assert_eq!(
2115 chunks
2116 .iter()
2117 .map(|(s, d)| (s.as_str(), *d))
2118 .collect::<Vec<_>>(),
2119 &[
2120 ("let one = ", None),
2121 (";", Some(DiagnosticSeverity::ERROR)),
2122 ("\nlet two =", None),
2123 (" ", Some(DiagnosticSeverity::ERROR)),
2124 ("\nlet three = 3;\n", None)
2125 ]
2126 );
2127 });
2128}
2129
2130#[gpui::test]
2131async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2132 init_test(cx);
2133
2134 let fs = FakeFs::new(cx.executor());
2135 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2136 .await;
2137
2138 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2139 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2140
2141 lsp_store.update(cx, |lsp_store, cx| {
2142 lsp_store
2143 .update_diagnostic_entries(
2144 LanguageServerId(0),
2145 Path::new("/dir/a.rs").to_owned(),
2146 None,
2147 vec![DiagnosticEntry {
2148 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2149 diagnostic: Diagnostic {
2150 severity: DiagnosticSeverity::ERROR,
2151 is_primary: true,
2152 message: "syntax error a1".to_string(),
2153 ..Default::default()
2154 },
2155 }],
2156 cx,
2157 )
2158 .unwrap();
2159 lsp_store
2160 .update_diagnostic_entries(
2161 LanguageServerId(1),
2162 Path::new("/dir/a.rs").to_owned(),
2163 None,
2164 vec![DiagnosticEntry {
2165 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2166 diagnostic: Diagnostic {
2167 severity: DiagnosticSeverity::ERROR,
2168 is_primary: true,
2169 message: "syntax error b1".to_string(),
2170 ..Default::default()
2171 },
2172 }],
2173 cx,
2174 )
2175 .unwrap();
2176
2177 assert_eq!(
2178 lsp_store.diagnostic_summary(false, cx),
2179 DiagnosticSummary {
2180 error_count: 2,
2181 warning_count: 0,
2182 }
2183 );
2184 });
2185}
2186
// Verifies that LSP edits computed against an *older* document version are
// transformed correctly onto the buffer's current contents: `edits_from_lsp`
// receives the stale version number the server saw, and the resulting edit
// ranges must be shifted past the user's intervening edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server received on open; the server's
    // edits below are sent as if computed against this soon-to-be-stale version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The edit positions below are expressed in the coordinates of the
    // *original* text; passing `lsp_document_version` tells the store to
    // remap them onto the edited buffer.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the remapped edits must yield the server's intended changes
    // merged with the user's own edits (comments preserved, bodies updated).
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2341
// Verifies that `edits_from_lsp` minimizes a sprawling set of LSP edits into
// a small equivalent diff: a server may rewrite most of a file to express a
// tiny change, and the resulting buffer edits should only touch what actually
// differs.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The whole-file rewrite above must collapse to exactly two minimal
        // edits: the merged import, and deletion of the now-redundant line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2452
// Verifies that `edits_from_lsp` is robust to malformed server output:
// edits arriving out of order, with inverted (end-before-start) ranges, or
// pointing past the end of the document must still normalize to the same
// minimal, valid set of buffer edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end precedes start.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range extending past the end of the document (line 99).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the invalid input, the normalized result matches the same
        // minimal diff as the well-formed case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2559
2560fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2561 buffer: &Buffer,
2562 range: Range<T>,
2563) -> Vec<(String, Option<DiagnosticSeverity>)> {
2564 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2565 for chunk in buffer.snapshot().chunks(range, true) {
2566 if chunks.last().map_or(false, |prev_chunk| {
2567 prev_chunk.1 == chunk.diagnostic_severity
2568 }) {
2569 chunks.last_mut().unwrap().0.push_str(chunk.text);
2570 } else {
2571 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2572 }
2573 }
2574 chunks
2575}
2576
// Verifies go-to-definition across files: the target file, which lives
// outside the project's single-file worktree, is opened via a temporary
// invisible worktree that is released once the definition handle is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Note: only `b.rs` is part of the project; `a.rs` is not.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is alive, `a.rs` is held by an extra,
        // non-visible worktree alongside the original visible one.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the temporary worktree for `a.rs`.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: absolute path and visibility of each of the project's worktrees.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2675
// Verifies completion handling when the server omits an explicit text edit:
// the replacement range (`old_range`) must be inferred from the text around
// the cursor — the word being typed, or the contents of a string literal.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing a word. The request is issued first, then the fake
    // server's response handler is installed and driven.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers the partial word "fqn" (3 chars) at the end.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal, cursor before the closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers "cmp" inside the quotes, not the whole string.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2769
// Verifies that carriage returns in a server-provided completion insert text
// are normalized: both lone `\r` and `\r\n` become `\n` in the buffer edit.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server responds with insert text containing a bare `\r` and a `\r\n`.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both carriage-return forms must have been normalized to `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2832
// Verifies the full command-based code-action flow: the server returns an
// action with no edits, resolution attaches only a command, executing that
// command makes the server send a `workspace/applyEdit` request back to the
// client, and the resulting edits end up in the returned project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        // Actions must be resolved before application.
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // Undo must roll back the applied workspace edit as one step.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2969
// Verifies that saving a buffer writes its full contents to disk. The buffer
// is grown to ~160 KB so the save exercises multi-chunk writes rather than a
// single small one.
#[gpui::test(iterations = 10)]
async fn test_save_file(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "the old contents",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "the old contents");
        buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
    });

    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();

    // Normalize line endings before comparing, since the file may be written
    // with CRLF endings on some platforms.
    let new_text = fs
        .load(Path::new(path!("/dir/file1")))
        .await
        .unwrap()
        .replace("\r\n", "\n");
    assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
}
3005
3006#[gpui::test(iterations = 30)]
3007async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3008 init_test(cx);
3009
3010 let fs = FakeFs::new(cx.executor().clone());
3011 fs.insert_tree(
3012 path!("/dir"),
3013 json!({
3014 "file1": "the original contents",
3015 }),
3016 )
3017 .await;
3018
3019 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3020 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3021 let buffer = project
3022 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3023 .await
3024 .unwrap();
3025
3026 // Simulate buffer diffs being slow, so that they don't complete before
3027 // the next file change occurs.
3028 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3029
3030 // Change the buffer's file on disk, and then wait for the file change
3031 // to be detected by the worktree, so that the buffer starts reloading.
3032 fs.save(
3033 path!("/dir/file1").as_ref(),
3034 &"the first contents".into(),
3035 Default::default(),
3036 )
3037 .await
3038 .unwrap();
3039 worktree.next_event(cx).await;
3040
3041 // Change the buffer's file again. Depending on the random seed, the
3042 // previous file change may still be in progress.
3043 fs.save(
3044 path!("/dir/file1").as_ref(),
3045 &"the second contents".into(),
3046 Default::default(),
3047 )
3048 .await
3049 .unwrap();
3050 worktree.next_event(cx).await;
3051
3052 cx.executor().run_until_parked();
3053 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3054 buffer.read_with(cx, |buffer, _| {
3055 assert_eq!(buffer.text(), on_disk_text);
3056 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3057 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3058 });
3059}
3060
3061#[gpui::test(iterations = 30)]
3062async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3063 init_test(cx);
3064
3065 let fs = FakeFs::new(cx.executor().clone());
3066 fs.insert_tree(
3067 path!("/dir"),
3068 json!({
3069 "file1": "the original contents",
3070 }),
3071 )
3072 .await;
3073
3074 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3075 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3076 let buffer = project
3077 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3078 .await
3079 .unwrap();
3080
3081 // Simulate buffer diffs being slow, so that they don't complete before
3082 // the next file change occurs.
3083 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3084
3085 // Change the buffer's file on disk, and then wait for the file change
3086 // to be detected by the worktree, so that the buffer starts reloading.
3087 fs.save(
3088 path!("/dir/file1").as_ref(),
3089 &"the first contents".into(),
3090 Default::default(),
3091 )
3092 .await
3093 .unwrap();
3094 worktree.next_event(cx).await;
3095
3096 cx.executor()
3097 .spawn(cx.executor().simulate_random_delay())
3098 .await;
3099
3100 // Perform a noop edit, causing the buffer's version to increase.
3101 buffer.update(cx, |buffer, cx| {
3102 buffer.edit([(0..0, " ")], None, cx);
3103 buffer.undo(cx);
3104 });
3105
3106 cx.executor().run_until_parked();
3107 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3108 buffer.read_with(cx, |buffer, _| {
3109 let buffer_text = buffer.text();
3110 if buffer_text == on_disk_text {
3111 assert!(
3112 !buffer.is_dirty() && !buffer.has_conflict(),
3113 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3114 );
3115 }
3116 // If the file change occurred while the buffer was processing the first
3117 // change, the buffer will be in a conflicting state.
3118 else {
3119 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3120 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3121 }
3122 });
3123}
3124
// Same as `test_save_file`, but the project is rooted at the file itself
// (a single-file worktree) rather than at its parent directory.
#[gpui::test]
async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "the old contents",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
    });

    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();

    // Normalize line endings before comparing, since the file may be written
    // with CRLF endings on some platforms.
    let new_text = fs
        .load(Path::new(path!("/dir/file1")))
        .await
        .unwrap()
        .replace("\r\n", "\n");
    assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
}
3159
// Verifies "save as" for an untitled buffer: the contents are written to the
// chosen path, the buffer becomes clean, its language is re-detected from the
// new extension (Plain Text -> Rust), and re-opening the path yields the same
// buffer entity rather than a duplicate.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts out as Plain Text and dirty once edited.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // Language is re-assigned based on the `.rs` extension.
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the saved path must return the very same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3211
// Exercises worktree rescanning against the real filesystem: after files and
// directories are renamed/deleted, entry ids must be preserved across renames,
// open buffers must track their files' new paths, and a remote replica of the
// worktree must converge to the same state after replaying the update stream.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real-fs test: blocking on actual file events requires parking.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits, to replay on the remote later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids survive renames — including moves into a renamed directory.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers must point at their files' new paths; the deleted
        // file's buffer keeps its old path but reports `DiskState::Deleted`.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3377
// Verifies that renaming a directory preserves the worktree entry ids of both the
// directory and the files inside it, and that an open buffer for one of those files
// remains clean (not dirty) across the rename.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A fake filesystem with a single empty file inside a directory.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: resolve a path to its worktree entry id, panicking if the entry is missing.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the containing directory "a" -> "b" and wait for the rename to settle.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids are stable across the rename, and the buffer is still clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3429
// Verifies that opening the same path multiple times — concurrently, or again after
// dropping one handle while another is still alive — always yields the same buffer
// entity (one buffer per path).
#[gpui::test]
async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.txt": "a-contents",
            "b.txt": "b-contents",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Spawn multiple tasks to open paths, repeating some paths.
    let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
        (
            p.open_local_buffer("/dir/a.txt", cx),
            p.open_local_buffer("/dir/b.txt", cx),
            p.open_local_buffer("/dir/a.txt", cx),
        )
    });

    let buffer_a_1 = buffer_a_1.await.unwrap();
    let buffer_a_2 = buffer_a_2.await.unwrap();
    let buffer_b = buffer_b.await.unwrap();
    assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
    assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");

    // There is only one buffer per path.
    let buffer_a_id = buffer_a_1.entity_id();
    assert_eq!(buffer_a_2.entity_id(), buffer_a_id);

    // Open the same path again while it is still open (one handle dropped, one alive).
    drop(buffer_a_1);
    let buffer_a_3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
        .await
        .unwrap();

    // There's still only one buffer per path.
    assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
}
3475
// Verifies the buffer dirty-state lifecycle and the exact sequence of buffer events
// emitted along the way: editing marks a buffer dirty, saving clears it, restoring
// the saved content clears it, and deleting the underlying file marks it dirty
// (without a DirtyChanged event if it was already dirty).
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Collects every non-Operation event emitted by buffer1, for assertions below.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer first, then delete the file out from under it.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3626
// Verifies how an open buffer reacts to its file changing on disk: a clean buffer is
// reloaded (with anchors remapped through the diff), while a dirty buffer keeps its
// edits and is flagged as having a conflict instead.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // One anchor at column 1 of each of the first three rows; used below to check
    // that anchors survive the on-disk reload.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3707
// Verifies line-ending handling: buffers normalize text to "\n" internally while
// tracking the file's line-ending style, pick up a style change when the file is
// rewritten on disk, and write the tracked style back out on save.
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();

    // Buffer text is always "\n"-delimited; the original style is kept separately.
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        path!("/dir/file1").as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
3769
// Verifies that LSP diagnostics with `related_information` are grouped: a primary
// diagnostic and its related hints share a `group_id`, hints are marked
// `is_primary: false`, and `diagnostic_group` returns all entries of one group.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Two diagnostic groups, interleaved the way rust-analyzer reports them:
    // a warning ("error 1") with one related hint, and an error ("error 2")
    // with two related hints. Each hint also appears as its own diagnostic
    // pointing back at the primary ("original diagnostic").
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics in the buffer, ordered by position; the hints carry the
    // group_id of their primary diagnostic.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 is "error 2" and its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 is "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
4012
4013#[gpui::test]
4014async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
4015 init_test(cx);
4016
4017 let fs = FakeFs::new(cx.executor());
4018 fs.insert_tree(
4019 path!("/dir"),
4020 json!({
4021 "one.rs": "const ONE: usize = 1;",
4022 "two": {
4023 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4024 }
4025
4026 }),
4027 )
4028 .await;
4029 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4030
4031 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4032 language_registry.add(rust_lang());
4033 let watched_paths = lsp::FileOperationRegistrationOptions {
4034 filters: vec![
4035 FileOperationFilter {
4036 scheme: Some("file".to_owned()),
4037 pattern: lsp::FileOperationPattern {
4038 glob: "**/*.rs".to_owned(),
4039 matches: Some(lsp::FileOperationPatternKind::File),
4040 options: None,
4041 },
4042 },
4043 FileOperationFilter {
4044 scheme: Some("file".to_owned()),
4045 pattern: lsp::FileOperationPattern {
4046 glob: "**/**".to_owned(),
4047 matches: Some(lsp::FileOperationPatternKind::Folder),
4048 options: None,
4049 },
4050 },
4051 ],
4052 };
4053 let mut fake_servers = language_registry.register_fake_lsp(
4054 "Rust",
4055 FakeLspAdapter {
4056 capabilities: lsp::ServerCapabilities {
4057 workspace: Some(lsp::WorkspaceServerCapabilities {
4058 workspace_folders: None,
4059 file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
4060 did_rename: Some(watched_paths.clone()),
4061 will_rename: Some(watched_paths),
4062 ..Default::default()
4063 }),
4064 }),
4065 ..Default::default()
4066 },
4067 ..Default::default()
4068 },
4069 );
4070
4071 let _ = project
4072 .update(cx, |project, cx| {
4073 project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
4074 })
4075 .await
4076 .unwrap();
4077
4078 let fake_server = fake_servers.next().await.unwrap();
4079 let response = project.update(cx, |project, cx| {
4080 let worktree = project.worktrees(cx).next().unwrap();
4081 let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
4082 project.rename_entry(entry.id, "three.rs".as_ref(), cx)
4083 });
4084 let expected_edit = lsp::WorkspaceEdit {
4085 changes: None,
4086 document_changes: Some(DocumentChanges::Edits({
4087 vec![TextDocumentEdit {
4088 edits: vec![lsp::Edit::Plain(lsp::TextEdit {
4089 range: lsp::Range {
4090 start: lsp::Position {
4091 line: 0,
4092 character: 1,
4093 },
4094 end: lsp::Position {
4095 line: 0,
4096 character: 3,
4097 },
4098 },
4099 new_text: "This is not a drill".to_owned(),
4100 })],
4101 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
4102 uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
4103 version: Some(1337),
4104 },
4105 }]
4106 })),
4107 change_annotations: None,
4108 };
4109 let resolved_workspace_edit = Arc::new(OnceLock::new());
4110 fake_server
4111 .handle_request::<WillRenameFiles, _, _>({
4112 let resolved_workspace_edit = resolved_workspace_edit.clone();
4113 let expected_edit = expected_edit.clone();
4114 move |params, _| {
4115 let resolved_workspace_edit = resolved_workspace_edit.clone();
4116 let expected_edit = expected_edit.clone();
4117 async move {
4118 assert_eq!(params.files.len(), 1);
4119 assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
4120 assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
4121 resolved_workspace_edit.set(expected_edit.clone()).unwrap();
4122 Ok(Some(expected_edit))
4123 }
4124 }
4125 })
4126 .next()
4127 .await
4128 .unwrap();
4129 let _ = response.await.unwrap();
4130 fake_server
4131 .handle_notification::<DidRenameFiles, _>(|params, _| {
4132 assert_eq!(params.files.len(), 1);
4133 assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
4134 assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
4135 })
4136 .next()
4137 .await
4138 .unwrap();
4139 assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
4140}
4141
// Verifies the two-step symbol rename flow: `prepare_rename` resolves the range of
// the symbol under the cursor, and `perform_rename` applies the server's workspace
// edit across all affected buffers.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server advertises rename support with prepare-rename capability.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Step 1: prepare_rename at offset 7 (inside "ONE") resolves the symbol range.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Step 2: perform the rename; the server's workspace edit touches both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction covers both edited buffers.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4281
// Verifies project-wide text search: results reflect on-disk file contents, and
// searching again after editing an open (unsaved) buffer picks up the in-memory
// buffer contents rather than the stale file.
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Initial search hits "TWO" on disk in two.rs and three.rs only.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so it now contains "TWO" twice.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The unsaved buffer contents are searched, so four.rs now matches too.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40]),
            (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
4356
// Verifies the inclusion-filter behavior of project search: only files matching at
// least one inclusion glob are searched, and globs that match nothing are harmless
// as long as another inclusion matches.
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Inclusion glob that matches no files -> no results at all.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    // Single matching inclusion glob restricts results to that file type.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    // A non-matching glob alongside a matching one does not affect the results.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,

                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),

                Default::default(),
                None,
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    // Multiple matching globs are unioned.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,

                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),

                Default::default(),
                None,
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.ts").to_string(), vec![14..18]),
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
4473
4474#[gpui::test]
4475async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4476 init_test(cx);
4477
4478 let search_query = "file";
4479
4480 let fs = FakeFs::new(cx.executor());
4481 fs.insert_tree(
4482 path!("/dir"),
4483 json!({
4484 "one.rs": r#"// Rust file one"#,
4485 "one.ts": r#"// TypeScript file one"#,
4486 "two.rs": r#"// Rust file two"#,
4487 "two.ts": r#"// TypeScript file two"#,
4488 }),
4489 )
4490 .await;
4491 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4492
4493 assert_eq!(
4494 search(
4495 &project,
4496 SearchQuery::text(
4497 search_query,
4498 false,
4499 true,
4500 false,
4501 Default::default(),
4502 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4503 None,
4504 )
4505 .unwrap(),
4506 cx
4507 )
4508 .await
4509 .unwrap(),
4510 HashMap::from_iter([
4511 (separator!("dir/one.rs").to_string(), vec![8..12]),
4512 (separator!("dir/one.ts").to_string(), vec![14..18]),
4513 (separator!("dir/two.rs").to_string(), vec![8..12]),
4514 (separator!("dir/two.ts").to_string(), vec![14..18]),
4515 ]),
4516 "If no exclusions match, all files should be returned"
4517 );
4518
4519 assert_eq!(
4520 search(
4521 &project,
4522 SearchQuery::text(
4523 search_query,
4524 false,
4525 true,
4526 false,
4527 Default::default(),
4528 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4529 None,
4530 )
4531 .unwrap(),
4532 cx
4533 )
4534 .await
4535 .unwrap(),
4536 HashMap::from_iter([
4537 (separator!("dir/one.ts").to_string(), vec![14..18]),
4538 (separator!("dir/two.ts").to_string(), vec![14..18]),
4539 ]),
4540 "Rust exclusion search should give only TypeScript files"
4541 );
4542
4543 assert_eq!(
4544 search(
4545 &project,
4546 SearchQuery::text(
4547 search_query,
4548 false,
4549 true,
4550 false,
4551 Default::default(),
4552 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4553 None,
4554 ).unwrap(),
4555 cx
4556 )
4557 .await
4558 .unwrap(),
4559 HashMap::from_iter([
4560 (separator!("dir/one.rs").to_string(), vec![8..12]),
4561 (separator!("dir/two.rs").to_string(), vec![8..12]),
4562 ]),
4563 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4564 );
4565
4566 assert!(
4567 search(
4568 &project,
4569 SearchQuery::text(
4570 search_query,
4571 false,
4572 true,
4573 false,
4574 Default::default(),
4575
4576 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4577 None,
4578
4579 ).unwrap(),
4580 cx
4581 )
4582 .await
4583 .unwrap().is_empty(),
4584 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4585 );
4586}
4587
4588#[gpui::test]
4589async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4590 init_test(cx);
4591
4592 let search_query = "file";
4593
4594 let fs = FakeFs::new(cx.executor());
4595 fs.insert_tree(
4596 path!("/dir"),
4597 json!({
4598 "one.rs": r#"// Rust file one"#,
4599 "one.ts": r#"// TypeScript file one"#,
4600 "two.rs": r#"// Rust file two"#,
4601 "two.ts": r#"// TypeScript file two"#,
4602 }),
4603 )
4604 .await;
4605 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4606
4607 assert!(
4608 search(
4609 &project,
4610 SearchQuery::text(
4611 search_query,
4612 false,
4613 true,
4614 false,
4615 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4616 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4617 None,
4618 )
4619 .unwrap(),
4620 cx
4621 )
4622 .await
4623 .unwrap()
4624 .is_empty(),
4625 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4626 );
4627
4628 assert!(
4629 search(
4630 &project,
4631 SearchQuery::text(
4632 search_query,
4633 false,
4634 true,
4635 false,
4636 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4637 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4638 None,
4639 ).unwrap(),
4640 cx
4641 )
4642 .await
4643 .unwrap()
4644 .is_empty(),
4645 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4646 );
4647
4648 assert!(
4649 search(
4650 &project,
4651 SearchQuery::text(
4652 search_query,
4653 false,
4654 true,
4655 false,
4656 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4657 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4658 None,
4659 )
4660 .unwrap(),
4661 cx
4662 )
4663 .await
4664 .unwrap()
4665 .is_empty(),
4666 "Non-matching inclusions and exclusions should not change that."
4667 );
4668
4669 assert_eq!(
4670 search(
4671 &project,
4672 SearchQuery::text(
4673 search_query,
4674 false,
4675 true,
4676 false,
4677 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4678 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4679 None,
4680 )
4681 .unwrap(),
4682 cx
4683 )
4684 .await
4685 .unwrap(),
4686 HashMap::from_iter([
4687 (separator!("dir/one.ts").to_string(), vec![14..18]),
4688 (separator!("dir/two.ts").to_string(), vec![14..18]),
4689 ]),
4690 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4691 );
4692}
4693
4694#[gpui::test]
4695async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4696 init_test(cx);
4697
4698 let fs = FakeFs::new(cx.executor());
4699 fs.insert_tree(
4700 path!("/worktree-a"),
4701 json!({
4702 "haystack.rs": r#"// NEEDLE"#,
4703 "haystack.ts": r#"// NEEDLE"#,
4704 }),
4705 )
4706 .await;
4707 fs.insert_tree(
4708 path!("/worktree-b"),
4709 json!({
4710 "haystack.rs": r#"// NEEDLE"#,
4711 "haystack.ts": r#"// NEEDLE"#,
4712 }),
4713 )
4714 .await;
4715
4716 let project = Project::test(
4717 fs.clone(),
4718 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
4719 cx,
4720 )
4721 .await;
4722
4723 assert_eq!(
4724 search(
4725 &project,
4726 SearchQuery::text(
4727 "NEEDLE",
4728 false,
4729 true,
4730 false,
4731 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4732 Default::default(),
4733 None,
4734 )
4735 .unwrap(),
4736 cx
4737 )
4738 .await
4739 .unwrap(),
4740 HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
4741 "should only return results from included worktree"
4742 );
4743 assert_eq!(
4744 search(
4745 &project,
4746 SearchQuery::text(
4747 "NEEDLE",
4748 false,
4749 true,
4750 false,
4751 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4752 Default::default(),
4753 None,
4754 )
4755 .unwrap(),
4756 cx
4757 )
4758 .await
4759 .unwrap(),
4760 HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
4761 "should only return results from included worktree"
4762 );
4763
4764 assert_eq!(
4765 search(
4766 &project,
4767 SearchQuery::text(
4768 "NEEDLE",
4769 false,
4770 true,
4771 false,
4772 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4773 Default::default(),
4774 None,
4775 )
4776 .unwrap(),
4777 cx
4778 )
4779 .await
4780 .unwrap(),
4781 HashMap::from_iter([
4782 (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
4783 (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
4784 ]),
4785 "should return results from both worktrees"
4786 );
4787}
4788
// Verifies that project search skips gitignored entries by default, can opt
// into them, and that include/exclude matchers still apply to ignored paths.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // "target" and "node_modules" are gitignored; every file's contents
    // contain the word "key" somewhere, so ignore handling alone decides
    // which files are searched.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search: the fourth flag is `false` here and `true` in the next
    // search, and only that search returns gitignored files — so this flag
    // controls whether ignored entries are included.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Fresh project over the same tree; this time opt into gitignored files.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/package.json").to_string(), vec![8..11]),
            (separator!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                separator!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                separator!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                separator!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                separator!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include/exclude matchers are applied on top of the ignored-file opt-in:
    // include only the (ignored) prettier directory, then exclude its .ts file.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            separator!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4908
4909#[gpui::test]
4910async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4911 init_test(cx);
4912
4913 let fs = FakeFs::new(cx.executor().clone());
4914 fs.insert_tree(
4915 "/one/two",
4916 json!({
4917 "three": {
4918 "a.txt": "",
4919 "four": {}
4920 },
4921 "c.rs": ""
4922 }),
4923 )
4924 .await;
4925
4926 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
4927 project
4928 .update(cx, |project, cx| {
4929 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4930 project.create_entry((id, "b.."), true, cx)
4931 })
4932 .await
4933 .unwrap()
4934 .to_included()
4935 .unwrap();
4936
4937 // Can't create paths outside the project
4938 let result = project
4939 .update(cx, |project, cx| {
4940 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4941 project.create_entry((id, "../../boop"), true, cx)
4942 })
4943 .await;
4944 assert!(result.is_err());
4945
4946 // Can't create paths with '..'
4947 let result = project
4948 .update(cx, |project, cx| {
4949 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4950 project.create_entry((id, "four/../beep"), true, cx)
4951 })
4952 .await;
4953 assert!(result.is_err());
4954
4955 assert_eq!(
4956 fs.paths(true),
4957 vec![
4958 PathBuf::from("/"),
4959 PathBuf::from("/one"),
4960 PathBuf::from("/one/two"),
4961 PathBuf::from("/one/two/c.rs"),
4962 PathBuf::from("/one/two/three"),
4963 PathBuf::from("/one/two/three/a.txt"),
4964 PathBuf::from("/one/two/three/b.."),
4965 PathBuf::from("/one/two/three/four"),
4966 ]
4967 );
4968
4969 // And we cannot open buffers with '..'
4970 let result = project
4971 .update(cx, |project, cx| {
4972 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4973 project.open_buffer((id, "../c.rs"), cx)
4974 })
4975 .await;
4976 assert!(result.is_err())
4977}
4978
// Verifies that a hover request fans out to every language server registered
// for the buffer's language that advertises hover capability, and that
// servers without that capability are never asked.
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Four fake servers for the same language; the first three advertise
    // hover support, the last one does not.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    // No hover capability: this server must never receive a
                    // hover request (enforced by the panicking handler below).
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all four servers.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler on each started server, keyed by server name so
    // duplicate initializations are caught.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two return real hover contents tagged with their name.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            // ESLint is asked (it has the capability) but answers with None.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // This handler must never run; its stream is deliberately not
            // tracked in the map awaited below.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue the hover, then wait until every capable server has been asked.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned contents contribute results; the
    // ESLint `None` response is dropped.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5131
5132#[gpui::test]
5133async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5134 init_test(cx);
5135
5136 let fs = FakeFs::new(cx.executor());
5137 fs.insert_tree(
5138 path!("/dir"),
5139 json!({
5140 "a.ts": "a",
5141 }),
5142 )
5143 .await;
5144
5145 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5146
5147 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5148 language_registry.add(typescript_lang());
5149 let mut fake_language_servers = language_registry.register_fake_lsp(
5150 "TypeScript",
5151 FakeLspAdapter {
5152 capabilities: lsp::ServerCapabilities {
5153 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5154 ..lsp::ServerCapabilities::default()
5155 },
5156 ..FakeLspAdapter::default()
5157 },
5158 );
5159
5160 let (buffer, _handle) = project
5161 .update(cx, |p, cx| {
5162 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5163 })
5164 .await
5165 .unwrap();
5166 cx.executor().run_until_parked();
5167
5168 let fake_server = fake_language_servers
5169 .next()
5170 .await
5171 .expect("failed to get the language server");
5172
5173 let mut request_handled =
5174 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
5175 Ok(Some(lsp::Hover {
5176 contents: lsp::HoverContents::Array(vec![
5177 lsp::MarkedString::String("".to_string()),
5178 lsp::MarkedString::String(" ".to_string()),
5179 lsp::MarkedString::String("\n\n\n".to_string()),
5180 ]),
5181 range: None,
5182 }))
5183 });
5184
5185 let hover_task = project.update(cx, |project, cx| {
5186 project.hover(&buffer, Point::new(0, 0), cx)
5187 });
5188 let () = request_handled
5189 .next()
5190 .await
5191 .expect("All hover requests should have been triggered");
5192 assert_eq!(
5193 Vec::<String>::new(),
5194 hover_task
5195 .await
5196 .into_iter()
5197 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5198 .sorted()
5199 .collect::<Vec<_>>(),
5200 "Empty hover parts should be ignored"
5201 );
5202}
5203
5204#[gpui::test]
5205async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
5206 init_test(cx);
5207
5208 let fs = FakeFs::new(cx.executor());
5209 fs.insert_tree(
5210 path!("/dir"),
5211 json!({
5212 "a.ts": "a",
5213 }),
5214 )
5215 .await;
5216
5217 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5218
5219 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5220 language_registry.add(typescript_lang());
5221 let mut fake_language_servers = language_registry.register_fake_lsp(
5222 "TypeScript",
5223 FakeLspAdapter {
5224 capabilities: lsp::ServerCapabilities {
5225 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5226 ..lsp::ServerCapabilities::default()
5227 },
5228 ..FakeLspAdapter::default()
5229 },
5230 );
5231
5232 let (buffer, _handle) = project
5233 .update(cx, |p, cx| {
5234 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5235 })
5236 .await
5237 .unwrap();
5238 cx.executor().run_until_parked();
5239
5240 let fake_server = fake_language_servers
5241 .next()
5242 .await
5243 .expect("failed to get the language server");
5244
5245 let mut request_handled = fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5246 move |_, _| async move {
5247 Ok(Some(vec![
5248 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5249 title: "organize imports".to_string(),
5250 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
5251 ..lsp::CodeAction::default()
5252 }),
5253 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5254 title: "fix code".to_string(),
5255 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
5256 ..lsp::CodeAction::default()
5257 }),
5258 ]))
5259 },
5260 );
5261
5262 let code_actions_task = project.update(cx, |project, cx| {
5263 project.code_actions(
5264 &buffer,
5265 0..buffer.read(cx).len(),
5266 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
5267 cx,
5268 )
5269 });
5270
5271 let () = request_handled
5272 .next()
5273 .await
5274 .expect("The code action request should have been triggered");
5275
5276 let code_actions = code_actions_task.await.unwrap();
5277 assert_eq!(code_actions.len(), 1);
5278 assert_eq!(
5279 code_actions[0].lsp_action.kind,
5280 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
5281 );
5282}
5283
5284#[gpui::test]
5285async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5286 init_test(cx);
5287
5288 let fs = FakeFs::new(cx.executor());
5289 fs.insert_tree(
5290 path!("/dir"),
5291 json!({
5292 "a.tsx": "a",
5293 }),
5294 )
5295 .await;
5296
5297 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5298
5299 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5300 language_registry.add(tsx_lang());
5301 let language_server_names = [
5302 "TypeScriptServer",
5303 "TailwindServer",
5304 "ESLintServer",
5305 "NoActionsCapabilitiesServer",
5306 ];
5307
5308 let mut language_server_rxs = [
5309 language_registry.register_fake_lsp(
5310 "tsx",
5311 FakeLspAdapter {
5312 name: language_server_names[0],
5313 capabilities: lsp::ServerCapabilities {
5314 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5315 ..lsp::ServerCapabilities::default()
5316 },
5317 ..FakeLspAdapter::default()
5318 },
5319 ),
5320 language_registry.register_fake_lsp(
5321 "tsx",
5322 FakeLspAdapter {
5323 name: language_server_names[1],
5324 capabilities: lsp::ServerCapabilities {
5325 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5326 ..lsp::ServerCapabilities::default()
5327 },
5328 ..FakeLspAdapter::default()
5329 },
5330 ),
5331 language_registry.register_fake_lsp(
5332 "tsx",
5333 FakeLspAdapter {
5334 name: language_server_names[2],
5335 capabilities: lsp::ServerCapabilities {
5336 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5337 ..lsp::ServerCapabilities::default()
5338 },
5339 ..FakeLspAdapter::default()
5340 },
5341 ),
5342 language_registry.register_fake_lsp(
5343 "tsx",
5344 FakeLspAdapter {
5345 name: language_server_names[3],
5346 capabilities: lsp::ServerCapabilities {
5347 code_action_provider: None,
5348 ..lsp::ServerCapabilities::default()
5349 },
5350 ..FakeLspAdapter::default()
5351 },
5352 ),
5353 ];
5354
5355 let (buffer, _handle) = project
5356 .update(cx, |p, cx| {
5357 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5358 })
5359 .await
5360 .unwrap();
5361 cx.executor().run_until_parked();
5362
5363 let mut servers_with_actions_requests = HashMap::default();
5364 for i in 0..language_server_names.len() {
5365 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5366 panic!(
5367 "Failed to get language server #{i} with name {}",
5368 &language_server_names[i]
5369 )
5370 });
5371 let new_server_name = new_server.server.name();
5372
5373 assert!(
5374 !servers_with_actions_requests.contains_key(&new_server_name),
5375 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5376 );
5377 match new_server_name.0.as_ref() {
5378 "TailwindServer" | "TypeScriptServer" => {
5379 servers_with_actions_requests.insert(
5380 new_server_name.clone(),
5381 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5382 move |_, _| {
5383 let name = new_server_name.clone();
5384 async move {
5385 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5386 lsp::CodeAction {
5387 title: format!("{name} code action"),
5388 ..lsp::CodeAction::default()
5389 },
5390 )]))
5391 }
5392 },
5393 ),
5394 );
5395 }
5396 "ESLintServer" => {
5397 servers_with_actions_requests.insert(
5398 new_server_name,
5399 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5400 |_, _| async move { Ok(None) },
5401 ),
5402 );
5403 }
5404 "NoActionsCapabilitiesServer" => {
5405 let _never_handled = new_server
5406 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5407 panic!(
5408 "Should not call for code actions server with no corresponding capabilities"
5409 )
5410 });
5411 }
5412 unexpected => panic!("Unexpected server name: {unexpected}"),
5413 }
5414 }
5415
5416 let code_actions_task = project.update(cx, |project, cx| {
5417 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5418 });
5419
5420 // cx.run_until_parked();
5421 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5422 |mut code_actions_request| async move {
5423 code_actions_request
5424 .next()
5425 .await
5426 .expect("All code actions requests should have been triggered")
5427 },
5428 ))
5429 .await;
5430 assert_eq!(
5431 vec!["TailwindServer code action", "TypeScriptServer code action"],
5432 code_actions_task
5433 .await
5434 .unwrap()
5435 .into_iter()
5436 .map(|code_action| code_action.lsp_action.title)
5437 .sorted()
5438 .collect::<Vec<_>>(),
5439 "Should receive code actions responses from all related servers with hover capabilities"
5440 );
5441}
5442
5443#[gpui::test]
5444async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5445 init_test(cx);
5446
5447 let fs = FakeFs::new(cx.executor());
5448 fs.insert_tree(
5449 "/dir",
5450 json!({
5451 "a.rs": "let a = 1;",
5452 "b.rs": "let b = 2;",
5453 "c.rs": "let c = 2;",
5454 }),
5455 )
5456 .await;
5457
5458 let project = Project::test(
5459 fs,
5460 [
5461 "/dir/a.rs".as_ref(),
5462 "/dir/b.rs".as_ref(),
5463 "/dir/c.rs".as_ref(),
5464 ],
5465 cx,
5466 )
5467 .await;
5468
5469 // check the initial state and get the worktrees
5470 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5471 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5472 assert_eq!(worktrees.len(), 3);
5473
5474 let worktree_a = worktrees[0].read(cx);
5475 let worktree_b = worktrees[1].read(cx);
5476 let worktree_c = worktrees[2].read(cx);
5477
5478 // check they start in the right order
5479 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5480 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5481 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5482
5483 (
5484 worktrees[0].clone(),
5485 worktrees[1].clone(),
5486 worktrees[2].clone(),
5487 )
5488 });
5489
5490 // move first worktree to after the second
5491 // [a, b, c] -> [b, a, c]
5492 project
5493 .update(cx, |project, cx| {
5494 let first = worktree_a.read(cx);
5495 let second = worktree_b.read(cx);
5496 project.move_worktree(first.id(), second.id(), cx)
5497 })
5498 .expect("moving first after second");
5499
5500 // check the state after moving
5501 project.update(cx, |project, cx| {
5502 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5503 assert_eq!(worktrees.len(), 3);
5504
5505 let first = worktrees[0].read(cx);
5506 let second = worktrees[1].read(cx);
5507 let third = worktrees[2].read(cx);
5508
5509 // check they are now in the right order
5510 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5511 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5512 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5513 });
5514
5515 // move the second worktree to before the first
5516 // [b, a, c] -> [a, b, c]
5517 project
5518 .update(cx, |project, cx| {
5519 let second = worktree_a.read(cx);
5520 let first = worktree_b.read(cx);
5521 project.move_worktree(first.id(), second.id(), cx)
5522 })
5523 .expect("moving second before first");
5524
5525 // check the state after moving
5526 project.update(cx, |project, cx| {
5527 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5528 assert_eq!(worktrees.len(), 3);
5529
5530 let first = worktrees[0].read(cx);
5531 let second = worktrees[1].read(cx);
5532 let third = worktrees[2].read(cx);
5533
5534 // check they are now in the right order
5535 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5536 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5537 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5538 });
5539
5540 // move the second worktree to after the third
5541 // [a, b, c] -> [a, c, b]
5542 project
5543 .update(cx, |project, cx| {
5544 let second = worktree_b.read(cx);
5545 let third = worktree_c.read(cx);
5546 project.move_worktree(second.id(), third.id(), cx)
5547 })
5548 .expect("moving second after third");
5549
5550 // check the state after moving
5551 project.update(cx, |project, cx| {
5552 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5553 assert_eq!(worktrees.len(), 3);
5554
5555 let first = worktrees[0].read(cx);
5556 let second = worktrees[1].read(cx);
5557 let third = worktrees[2].read(cx);
5558
5559 // check they are now in the right order
5560 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5561 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5562 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5563 });
5564
5565 // move the third worktree to before the second
5566 // [a, c, b] -> [a, b, c]
5567 project
5568 .update(cx, |project, cx| {
5569 let third = worktree_c.read(cx);
5570 let second = worktree_b.read(cx);
5571 project.move_worktree(third.id(), second.id(), cx)
5572 })
5573 .expect("moving third before second");
5574
5575 // check the state after moving
5576 project.update(cx, |project, cx| {
5577 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5578 assert_eq!(worktrees.len(), 3);
5579
5580 let first = worktrees[0].read(cx);
5581 let second = worktrees[1].read(cx);
5582 let third = worktrees[2].read(cx);
5583
5584 // check they are now in the right order
5585 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5586 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5587 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5588 });
5589
5590 // move the first worktree to after the third
5591 // [a, b, c] -> [b, c, a]
5592 project
5593 .update(cx, |project, cx| {
5594 let first = worktree_a.read(cx);
5595 let third = worktree_c.read(cx);
5596 project.move_worktree(first.id(), third.id(), cx)
5597 })
5598 .expect("moving first after third");
5599
5600 // check the state after moving
5601 project.update(cx, |project, cx| {
5602 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5603 assert_eq!(worktrees.len(), 3);
5604
5605 let first = worktrees[0].read(cx);
5606 let second = worktrees[1].read(cx);
5607 let third = worktrees[2].read(cx);
5608
5609 // check they are now in the right order
5610 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5611 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5612 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5613 });
5614
5615 // move the third worktree to before the first
5616 // [b, c, a] -> [a, b, c]
5617 project
5618 .update(cx, |project, cx| {
5619 let third = worktree_a.read(cx);
5620 let first = worktree_b.read(cx);
5621 project.move_worktree(third.id(), first.id(), cx)
5622 })
5623 .expect("moving third before first");
5624
5625 // check the state after moving
5626 project.update(cx, |project, cx| {
5627 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5628 assert_eq!(worktrees.len(), 3);
5629
5630 let first = worktrees[0].read(cx);
5631 let second = worktrees[1].read(cx);
5632 let third = worktrees[2].read(cx);
5633
5634 // check they are now in the right order
5635 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5636 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5637 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5638 });
5639}
5640
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index (staged) version: no leading comment, prints "hello world".
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy version: adds a comment line and changes the printed string,
    // so we expect one added hunk and one modified hunk against the index.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // Seed the fake repo's index so the unstaged diff has a base text.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Let the diff recalculate before asserting.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                // Row 0: the new comment line does not exist in the index.
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added()),
                // Row 2: the println! argument changed relative to the index.
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified(),
                ),
            ],
        );
    });

    // Now stage a version that already contains the comment but drops the
    // println! line entirely; the remaining diff should be a single addition.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    // The diff should pick up the index change after events settle.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().unwrap().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added(),
            )],
        );
    });
}
5738
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD version: no comment, prints "hello world".
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index version: the string change is staged, but the comment line is not.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working copy: comment line plus the changed string.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // Seed both the index and HEAD so the uncommitted diff can compute
    // primary (vs HEAD) and secondary (vs index) hunk statuses.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The diff's base text should have been assigned the registered language.
    uncommitted_diff.read_with(cx, |diff, _| {
        assert_eq!(
            diff.base_text().and_then(|base| base.language().cloned()),
            Some(language)
        )
    });

    cx.run_until_parked();
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &uncommitted_diff.base_text_string().unwrap(),
            &[
                // The comment line is new vs HEAD and also absent from the
                // index, hence the secondary-hunk marker.
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::Added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                // The string change vs HEAD is fully staged: plain modified.
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified(),
                ),
            ],
        );
    });

    // Move HEAD forward: it now contains the comment but no println! line,
    // so the only remaining difference vs HEAD is the added println!.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents)],
    );

    cx.run_until_parked();
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &uncommitted_diff.base_text().unwrap().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added(),
            )],
        );
    });
}
5861
#[gpui::test]
async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD version of the file.
    let committed_contents = r#"
        fn main() {
            println!("hello from HEAD");
        }
    "#
    .unindent();
    // Working-copy version with a one-line modification.
    let file_contents = r#"
        fn main() {
            println!("hello from the working copy");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents)],
    );

    // Open the project rooted at the single FILE (not its directory): the
    // repository lives above the worktree root, which is the case under test.
    let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Even for a single-file worktree, the diff vs HEAD should be found.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &uncommitted_diff.base_text_string().unwrap(),
            &[(
                1..2,
                "    println!(\"hello from HEAD\");\n",
                "    println!(\"hello from the working copy\");\n",
                DiffHunkStatus::modified(),
            )],
        );
    });
}
5927
5928async fn search(
5929 project: &Entity<Project>,
5930 query: SearchQuery,
5931 cx: &mut gpui::TestAppContext,
5932) -> Result<HashMap<String, Vec<Range<usize>>>> {
5933 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
5934 let mut results = HashMap::default();
5935 while let Ok(search_result) = search_rx.recv().await {
5936 match search_result {
5937 SearchResult::Buffer { buffer, ranges } => {
5938 results.entry(buffer).or_insert(ranges);
5939 }
5940 SearchResult::LimitReached => {}
5941 }
5942 }
5943 Ok(results
5944 .into_iter()
5945 .map(|(buffer, ranges)| {
5946 buffer.update(cx, |buffer, cx| {
5947 let path = buffer
5948 .file()
5949 .unwrap()
5950 .full_path(cx)
5951 .to_string_lossy()
5952 .to_string();
5953 let ranges = ranges
5954 .into_iter()
5955 .map(|range| range.to_offset(buffer))
5956 .collect::<Vec<_>>();
5957 (path, ranges)
5958 })
5959 })
5960 .collect())
5961}
5962
5963pub fn init_test(cx: &mut gpui::TestAppContext) {
5964 if std::env::var("RUST_LOG").is_ok() {
5965 env_logger::try_init().ok();
5966 }
5967
5968 cx.update(|cx| {
5969 let settings_store = SettingsStore::test(cx);
5970 cx.set_global(settings_store);
5971 release_channel::init(SemanticVersion::default(), cx);
5972 language::init(cx);
5973 Project::init_settings(cx);
5974 });
5975}
5976
5977fn json_lang() -> Arc<Language> {
5978 Arc::new(Language::new(
5979 LanguageConfig {
5980 name: "JSON".into(),
5981 matcher: LanguageMatcher {
5982 path_suffixes: vec!["json".to_string()],
5983 ..Default::default()
5984 },
5985 ..Default::default()
5986 },
5987 None,
5988 ))
5989}
5990
5991fn js_lang() -> Arc<Language> {
5992 Arc::new(Language::new(
5993 LanguageConfig {
5994 name: "JavaScript".into(),
5995 matcher: LanguageMatcher {
5996 path_suffixes: vec!["js".to_string()],
5997 ..Default::default()
5998 },
5999 ..Default::default()
6000 },
6001 None,
6002 ))
6003}
6004
6005fn rust_lang() -> Arc<Language> {
6006 Arc::new(Language::new(
6007 LanguageConfig {
6008 name: "Rust".into(),
6009 matcher: LanguageMatcher {
6010 path_suffixes: vec!["rs".to_string()],
6011 ..Default::default()
6012 },
6013 ..Default::default()
6014 },
6015 Some(tree_sitter_rust::LANGUAGE.into()),
6016 ))
6017}
6018
6019fn typescript_lang() -> Arc<Language> {
6020 Arc::new(Language::new(
6021 LanguageConfig {
6022 name: "TypeScript".into(),
6023 matcher: LanguageMatcher {
6024 path_suffixes: vec!["ts".to_string()],
6025 ..Default::default()
6026 },
6027 ..Default::default()
6028 },
6029 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
6030 ))
6031}
6032
6033fn tsx_lang() -> Arc<Language> {
6034 Arc::new(Language::new(
6035 LanguageConfig {
6036 name: "tsx".into(),
6037 matcher: LanguageMatcher {
6038 path_suffixes: vec!["tsx".to_string()],
6039 ..Default::default()
6040 },
6041 ..Default::default()
6042 },
6043 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
6044 ))
6045}
6046
6047fn get_all_tasks(
6048 project: &Entity<Project>,
6049 worktree_id: Option<WorktreeId>,
6050 task_context: &TaskContext,
6051 cx: &mut App,
6052) -> Vec<(TaskSourceKind, ResolvedTask)> {
6053 let (mut old, new) = project.update(cx, |project, cx| {
6054 project
6055 .task_store
6056 .read(cx)
6057 .task_inventory()
6058 .unwrap()
6059 .read(cx)
6060 .used_and_current_resolved_tasks(worktree_id, None, task_context, cx)
6061 });
6062 old.extend(new);
6063 old
6064}