1use crate::{Event, *};
2use buffer_diff::{assert_hunks, DiffHunkSecondaryStatus, DiffHunkStatus};
3use fs::FakeFs;
4use futures::{future, StreamExt};
5use gpui::{App, SemanticVersion, UpdateGlobal};
6use http_client::Url;
7use language::{
8 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
9 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
10 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
11 OffsetRangeExt, Point, ToPoint,
12};
13use lsp::{
14 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
15 NumberOrString, TextDocumentEdit, WillRenameFiles,
16};
17use parking_lot::Mutex;
18use pretty_assertions::{assert_eq, assert_matches};
19use serde_json::json;
20#[cfg(not(windows))]
21use std::os;
22use std::{str::FromStr, sync::OnceLock};
23
24use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
25use task::{ResolvedTask, TaskContext};
26use unindent::Unindent as _;
27use util::{
28 assert_set_eq, path, paths::PathMatcher, separator, test::TempTree, uri, TryFutureExt as _,
29};
30
31#[gpui::test]
32async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
33 cx.executor().allow_parking();
34
35 let (tx, mut rx) = futures::channel::mpsc::unbounded();
36 let _thread = std::thread::spawn(move || {
37 #[cfg(not(target_os = "windows"))]
38 std::fs::metadata("/tmp").unwrap();
39 #[cfg(target_os = "windows")]
40 std::fs::metadata("C:/Windows").unwrap();
41 std::thread::sleep(Duration::from_millis(1000));
42 tx.unbounded_send(1).unwrap();
43 });
44 rx.next().await.unwrap();
45}
46
47#[gpui::test]
48async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
49 cx.executor().allow_parking();
50
51 let io_task = smol::unblock(move || {
52 println!("sleeping on thread {:?}", std::thread::current().id());
53 std::thread::sleep(Duration::from_millis(10));
54 1
55 });
56
57 let task = cx.foreground_executor().spawn(async move {
58 io_task.await;
59 });
60
61 task.await;
62}
63
64#[cfg(not(windows))]
65#[gpui::test]
66async fn test_symlinks(cx: &mut gpui::TestAppContext) {
67 init_test(cx);
68 cx.executor().allow_parking();
69
70 let dir = TempTree::new(json!({
71 "root": {
72 "apple": "",
73 "banana": {
74 "carrot": {
75 "date": "",
76 "endive": "",
77 }
78 },
79 "fennel": {
80 "grape": "",
81 }
82 }
83 }));
84
85 let root_link_path = dir.path().join("root_link");
86 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
87 os::unix::fs::symlink(
88 dir.path().join("root/fennel"),
89 dir.path().join("root/finnochio"),
90 )
91 .unwrap();
92
93 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
94
95 project.update(cx, |project, cx| {
96 let tree = project.worktrees(cx).next().unwrap().read(cx);
97 assert_eq!(tree.file_count(), 5);
98 assert_eq!(
99 tree.inode_for_path("fennel/grape"),
100 tree.inode_for_path("finnochio/grape")
101 );
102 });
103}
104
105#[gpui::test]
106async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
107 init_test(cx);
108
109 let dir = TempTree::new(json!({
110 ".editorconfig": r#"
111 root = true
112 [*.rs]
113 indent_style = tab
114 indent_size = 3
115 end_of_line = lf
116 insert_final_newline = true
117 trim_trailing_whitespace = true
118 [*.js]
119 tab_width = 10
120 "#,
121 ".zed": {
122 "settings.json": r#"{
123 "tab_size": 8,
124 "hard_tabs": false,
125 "ensure_final_newline_on_save": false,
126 "remove_trailing_whitespace_on_save": false,
127 "soft_wrap": "editor_width"
128 }"#,
129 },
130 "a.rs": "fn a() {\n A\n}",
131 "b": {
132 ".editorconfig": r#"
133 [*.rs]
134 indent_size = 2
135 "#,
136 "b.rs": "fn b() {\n B\n}",
137 },
138 "c.js": "def c\n C\nend",
139 "README.json": "tabs are better\n",
140 }));
141
142 let path = dir.path();
143 let fs = FakeFs::new(cx.executor());
144 fs.insert_tree_from_real_fs(path, path).await;
145 let project = Project::test(fs, [path], cx).await;
146
147 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
148 language_registry.add(js_lang());
149 language_registry.add(json_lang());
150 language_registry.add(rust_lang());
151
152 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
153
154 cx.executor().run_until_parked();
155
156 cx.update(|cx| {
157 let tree = worktree.read(cx);
158 let settings_for = |path: &str| {
159 let file_entry = tree.entry_for_path(path).unwrap().clone();
160 let file = File::for_entry(file_entry, worktree.clone());
161 let file_language = project
162 .read(cx)
163 .languages()
164 .language_for_file_path(file.path.as_ref());
165 let file_language = cx
166 .background_executor()
167 .block(file_language)
168 .expect("Failed to get file language");
169 let file = file as _;
170 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
171 };
172
173 let settings_a = settings_for("a.rs");
174 let settings_b = settings_for("b/b.rs");
175 let settings_c = settings_for("c.js");
176 let settings_readme = settings_for("README.json");
177
178 // .editorconfig overrides .zed/settings
179 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
180 assert_eq!(settings_a.hard_tabs, true);
181 assert_eq!(settings_a.ensure_final_newline_on_save, true);
182 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
183
184 // .editorconfig in b/ overrides .editorconfig in root
185 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
186
187 // "indent_size" is not set, so "tab_width" is used
188 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
189
190 // README.md should not be affected by .editorconfig's globe "*.rs"
191 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
192 });
193}
194
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Verifies that per-directory `.zed/settings.json` and `.zed/tasks.json`
    // files are picked up inside a worktree, that a nested `.zed` directory
    // overrides the root one, and that task ordering reflects both specificity
    // and recent use once global tasks are added.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // Identifies tasks coming from the worktree-root `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolve per-file: `a/a.rs` sees the root `.zed`
            // settings, `b/b.rs` sees the nested override.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree tasks are listed; the nested-directory task sorts first.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as scheduled and add a global (file-based) task.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The just-scheduled task is promoted to the front; the newly added
    // global task appears last, with its env carried through resolution.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
396
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end check of language-server lifecycle management: servers start
    // lazily when a matching buffer opens, buffers are routed only to servers
    // for their language, renames move buffers between servers, and restarts
    // reopen the correct documents. The notification order asserted below is
    // part of the contract, so statements must not be reordered.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register one fake server per language so we can observe the traffic
    // each one receives.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed the buffer with a diagnostic so we can verify it is cleared when
    // the buffer's language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
798
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // Verifies `workspace/didChangeWatchedFiles` support: a server's watch
    // registration forces ignored directories it watches to be loaded, and
    // subsequent FS mutations are forwarded only when they match the
    // registered glob patterns.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: a single file, a glob over `src`, and a
    // recursive glob inside the gitignored `target/y` directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            // Sort by URI so assertions below are order-independent.
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // No spurious events are delivered while nothing has changed yet.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
998
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that when a project is opened on individual files (two
    // single-file worktrees), published LSP diagnostics are routed to the
    // correct buffer for each file.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open the project directly on the two files rather than the directory.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Publish one diagnostic per file: an error in a.rs, a warning in b.rs.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer shows only its own diagnostic, over the variable name.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1100
1101#[gpui::test]
1102async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1103 init_test(cx);
1104
1105 let fs = FakeFs::new(cx.executor());
1106 fs.insert_tree(
1107 path!("/root"),
1108 json!({
1109 "dir": {
1110 ".git": {
1111 "HEAD": "ref: refs/heads/main",
1112 },
1113 ".gitignore": "b.rs",
1114 "a.rs": "let a = 1;",
1115 "b.rs": "let b = 2;",
1116 },
1117 "other.rs": "let b = c;"
1118 }),
1119 )
1120 .await;
1121
1122 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1123 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1124 let (worktree, _) = project
1125 .update(cx, |project, cx| {
1126 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1127 })
1128 .await
1129 .unwrap();
1130 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1131
1132 let (worktree, _) = project
1133 .update(cx, |project, cx| {
1134 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1135 })
1136 .await
1137 .unwrap();
1138 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1139
1140 let server_id = LanguageServerId(0);
1141 lsp_store.update(cx, |lsp_store, cx| {
1142 lsp_store
1143 .update_diagnostics(
1144 server_id,
1145 lsp::PublishDiagnosticsParams {
1146 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1147 version: None,
1148 diagnostics: vec![lsp::Diagnostic {
1149 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1150 severity: Some(lsp::DiagnosticSeverity::ERROR),
1151 message: "unused variable 'b'".to_string(),
1152 ..Default::default()
1153 }],
1154 },
1155 &[],
1156 cx,
1157 )
1158 .unwrap();
1159 lsp_store
1160 .update_diagnostics(
1161 server_id,
1162 lsp::PublishDiagnosticsParams {
1163 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1164 version: None,
1165 diagnostics: vec![lsp::Diagnostic {
1166 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1167 severity: Some(lsp::DiagnosticSeverity::ERROR),
1168 message: "unknown variable 'c'".to_string(),
1169 ..Default::default()
1170 }],
1171 },
1172 &[],
1173 cx,
1174 )
1175 .unwrap();
1176 });
1177
1178 let main_ignored_buffer = project
1179 .update(cx, |project, cx| {
1180 project.open_buffer((main_worktree_id, "b.rs"), cx)
1181 })
1182 .await
1183 .unwrap();
1184 main_ignored_buffer.update(cx, |buffer, _| {
1185 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1186 assert_eq!(
1187 chunks
1188 .iter()
1189 .map(|(s, d)| (s.as_str(), *d))
1190 .collect::<Vec<_>>(),
1191 &[
1192 ("let ", None),
1193 ("b", Some(DiagnosticSeverity::ERROR)),
1194 (" = 2;", None),
1195 ],
1196 "Gigitnored buffers should still get in-buffer diagnostics",
1197 );
1198 });
1199 let other_buffer = project
1200 .update(cx, |project, cx| {
1201 project.open_buffer((other_worktree_id, ""), cx)
1202 })
1203 .await
1204 .unwrap();
1205 other_buffer.update(cx, |buffer, _| {
1206 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1207 assert_eq!(
1208 chunks
1209 .iter()
1210 .map(|(s, d)| (s.as_str(), *d))
1211 .collect::<Vec<_>>(),
1212 &[
1213 ("let b = ", None),
1214 ("c", Some(DiagnosticSeverity::ERROR)),
1215 (";", None),
1216 ],
1217 "Buffers from hidden projects should still get in-buffer diagnostics"
1218 );
1219 });
1220
1221 project.update(cx, |project, cx| {
1222 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1223 assert_eq!(
1224 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1225 vec![(
1226 ProjectPath {
1227 worktree_id: main_worktree_id,
1228 path: Arc::from(Path::new("b.rs")),
1229 },
1230 server_id,
1231 DiagnosticSummary {
1232 error_count: 1,
1233 warning_count: 0,
1234 }
1235 )]
1236 );
1237 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1238 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1239 });
1240}
1241
// Verifies that a language server's disk-based-diagnostics progress token
// drives the DiskBasedDiagnosticsStarted/Finished project events, that
// publishing diagnostics emits DiagnosticsUpdated, and that re-publishing an
// identical (empty) diagnostic set does not emit a redundant event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // Register a fake server whose $/progress notifications with this token are
    // interpreted as disk-based diagnostics activity.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress on the designated token signals the start of a
    // disk-based diagnostics pass.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics mid-pass emits a per-path DiagnosticsUpdated event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending progress on the token signals the end of the pass.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is attached to the buffer with its LSP range
    // converted to buffer points.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second identical empty publish is a no-op: no further events.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1377
// Verifies that restarting a language server while its disk-based diagnostics
// pass is still in progress does not leave the project stuck in a
// "diagnostics running" state: the replacement server's progress lifecycle
// fully supersedes the old server's unfinished one.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The replacement server gets a fresh id (1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1464
// Verifies that diagnostics already published by a language server are cleared
// (both in the buffer and in the project-wide summary) when that server is
// restarted.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic shows up in the buffer and in the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1545
// Verifies that a diagnostics publish carrying a bogus (unknown) document
// version does not corrupt version tracking: after a restart, the buffer is
// re-opened with the server at version 0 as required by the LSP protocol.
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The new server instance should receive didOpen with version 0, not 10000.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
1585
// Verifies that cancelling language-server work for a buffer sends a
// window/workDoneProgress/cancel notification only for progress tokens the
// server marked as cancellable.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // One non-cancellable progress ("another-token") and one cancellable
    // progress (progress_token) are both in flight.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token should receive a cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1650
// Verifies that toggling the per-language `enable_language_server` setting
// stops/starts only the affected server: disabling Rust stops the Rust server
// without touching the JavaScript one, and re-enabling Rust while disabling
// JavaScript starts a fresh Rust server and stops the JavaScript one.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening a buffer of each language starts the corresponding server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The stopped server receives the LSP `exit` notification.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance re-opens the still-open buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1768
// Verifies diagnostic position transformation: diagnostics published against an
// older document version are translated through the edits made since that
// version, overlapping diagnostics highlight correctly, and out-of-order
// (unsorted) diagnostics are handled.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The two leading newlines inserted above shift each diagnostic's row
        // down by 2 relative to the version it was reported against.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Overlapping entries: the wider warning sorts before the narrower
        // error that starts at the same position.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Within the overlap, the higher-severity (error) highlight wins.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Entries come back sorted by position, with ranges adjusted for the
        // edits applied since the version they were reported against.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2054
2055#[gpui::test]
2056async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2057 init_test(cx);
2058
2059 let text = concat!(
2060 "let one = ;\n", //
2061 "let two = \n",
2062 "let three = 3;\n",
2063 );
2064
2065 let fs = FakeFs::new(cx.executor());
2066 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2067
2068 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2069 let buffer = project
2070 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2071 .await
2072 .unwrap();
2073
2074 project.update(cx, |project, cx| {
2075 project.lsp_store.update(cx, |lsp_store, cx| {
2076 lsp_store
2077 .update_diagnostic_entries(
2078 LanguageServerId(0),
2079 PathBuf::from("/dir/a.rs"),
2080 None,
2081 vec![
2082 DiagnosticEntry {
2083 range: Unclipped(PointUtf16::new(0, 10))
2084 ..Unclipped(PointUtf16::new(0, 10)),
2085 diagnostic: Diagnostic {
2086 severity: DiagnosticSeverity::ERROR,
2087 message: "syntax error 1".to_string(),
2088 ..Default::default()
2089 },
2090 },
2091 DiagnosticEntry {
2092 range: Unclipped(PointUtf16::new(1, 10))
2093 ..Unclipped(PointUtf16::new(1, 10)),
2094 diagnostic: Diagnostic {
2095 severity: DiagnosticSeverity::ERROR,
2096 message: "syntax error 2".to_string(),
2097 ..Default::default()
2098 },
2099 },
2100 ],
2101 cx,
2102 )
2103 .unwrap();
2104 })
2105 });
2106
2107 // An empty range is extended forward to include the following character.
2108 // At the end of a line, an empty range is extended backward to include
2109 // the preceding character.
2110 buffer.update(cx, |buffer, _| {
2111 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2112 assert_eq!(
2113 chunks
2114 .iter()
2115 .map(|(s, d)| (s.as_str(), *d))
2116 .collect::<Vec<_>>(),
2117 &[
2118 ("let one = ", None),
2119 (";", Some(DiagnosticSeverity::ERROR)),
2120 ("\nlet two =", None),
2121 (" ", Some(DiagnosticSeverity::ERROR)),
2122 ("\nlet three = 3;\n", None)
2123 ]
2124 );
2125 });
2126}
2127
2128#[gpui::test]
2129async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2130 init_test(cx);
2131
2132 let fs = FakeFs::new(cx.executor());
2133 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2134 .await;
2135
2136 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2137 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2138
2139 lsp_store.update(cx, |lsp_store, cx| {
2140 lsp_store
2141 .update_diagnostic_entries(
2142 LanguageServerId(0),
2143 Path::new("/dir/a.rs").to_owned(),
2144 None,
2145 vec![DiagnosticEntry {
2146 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2147 diagnostic: Diagnostic {
2148 severity: DiagnosticSeverity::ERROR,
2149 is_primary: true,
2150 message: "syntax error a1".to_string(),
2151 ..Default::default()
2152 },
2153 }],
2154 cx,
2155 )
2156 .unwrap();
2157 lsp_store
2158 .update_diagnostic_entries(
2159 LanguageServerId(1),
2160 Path::new("/dir/a.rs").to_owned(),
2161 None,
2162 vec![DiagnosticEntry {
2163 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2164 diagnostic: Diagnostic {
2165 severity: DiagnosticSeverity::ERROR,
2166 is_primary: true,
2167 message: "syntax error b1".to_string(),
2168 ..Default::default()
2169 },
2170 }],
2171 cx,
2172 )
2173 .unwrap();
2174
2175 assert_eq!(
2176 lsp_store.diagnostic_summary(false, cx),
2177 DiagnosticSummary {
2178 error_count: 2,
2179 warning_count: 0,
2180 }
2181 );
2182 });
2183}
2184
// Verifies that LSP edits computed against an OLDER document version are
// remapped so they apply correctly after the buffer has since been edited
// locally. The server's edit positions refer to the originally-opened text;
// `edits_from_lsp` is given that stale version number and must translate
// the ranges onto the current buffer contents.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the version number the server saw when the document was opened;
    // the edits below will be sent as if computed against this version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f1();
            }
            fn b() {
                // inside second function f2();
            }
            fn c() {
                f3();
            }
            "
            .unindent()
        );
    });

    // All ranges below are expressed in coordinates of the ORIGINAL text;
    // passing `lsp_document_version` tells the store to remap them.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the remapped edits must merge the server's changes with the
    // local edits made above, preserving both.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f10();
            }
            fn b() {
                // inside second function f200();
            }
            fn c() {
                f4000();
            }
            "
            .unindent()
        );
    });
}
2339
// Verifies that when a language server expresses a small change as a very
// large diff (replace + reinsert + delete spanning the whole file),
// `edits_from_lsp` minimizes it down to the actual differences — the
// assertion below expects only two compact edits to survive.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchor ranges to points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The whole-file shuffle above collapses to just these two edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2450
// Verifies that `edits_from_lsp` tolerates malformed server edits: ranges
// delivered out of order, with inverted start/end, or pointing past the end
// of the document. The result must still be the same two normalized edits
// as in the well-formed case.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0,4) precedes start (0,8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range extends to line 99, far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve anchor ranges to points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the normalized result is identical to
        // the well-formed adjacent-lines case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2557
2558fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2559 buffer: &Buffer,
2560 range: Range<T>,
2561) -> Vec<(String, Option<DiagnosticSeverity>)> {
2562 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2563 for chunk in buffer.snapshot().chunks(range, true) {
2564 if chunks.last().map_or(false, |prev_chunk| {
2565 prev_chunk.1 == chunk.diagnostic_severity
2566 }) {
2567 chunks.last_mut().unwrap().0.push_str(chunk.text);
2568 } else {
2569 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2570 }
2571 }
2572 chunks
2573}
2574
// Go-to-definition into a file outside the opened worktree: the target file
// is added as an invisible worktree (visible == false in the listing below),
// which is released again once the definition result is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server answers GotoDefinition with a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was added as an invisible worktree to host the target buffer.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        // Dropping the definition releases the last reference to the target
        // buffer, so its invisible worktree should go away.
        drop(definition);
    });
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: list each worktree's absolute path and visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2672
// Completion items that carry no explicit text edit range: the resulting
// replacement range must be derived from the text around the cursor, as
// pinned by the `old_range` assertions below.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing at the end of an identifier ("fqn").
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request before installing the handler; the
    // handler's `.next().await` below waits for it to be served.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    // Label differs from insert_text; no text edit is provided.
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The replaced range covers the 3-character word "fqn" before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal, cursor before the closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The replaced range covers "cmp" (the word before the cursor), not the quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2766
// A completion whose insert text contains bare "\r" and "\r\n" line endings
// must have them normalized to "\n", as pinned by the final assertion.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Start the request first; the handler's `.next().await` waits for it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Mixed line endings: a lone "\r" and a Windows "\r\n".
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both ending styles are normalized to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2829
// Exercises the command-based code-action path end to end: the action has no
// edits even after resolution, so applying it executes the action's command,
// and the edits arrive via a server-initiated `workspace/applyEdit` request.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // Advertise resolve support so the client sends
                // codeAction/resolve before applying.
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    // `data` marks this action as needing resolution.
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            // Insert "X" at the start of the file.
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a single unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2966
2967#[gpui::test(iterations = 10)]
2968async fn test_save_file(cx: &mut gpui::TestAppContext) {
2969 init_test(cx);
2970
2971 let fs = FakeFs::new(cx.executor());
2972 fs.insert_tree(
2973 path!("/dir"),
2974 json!({
2975 "file1": "the old contents",
2976 }),
2977 )
2978 .await;
2979
2980 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2981 let buffer = project
2982 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
2983 .await
2984 .unwrap();
2985 buffer.update(cx, |buffer, cx| {
2986 assert_eq!(buffer.text(), "the old contents");
2987 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2988 });
2989
2990 project
2991 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2992 .await
2993 .unwrap();
2994
2995 let new_text = fs
2996 .load(Path::new(path!("/dir/file1")))
2997 .await
2998 .unwrap()
2999 .replace("\r\n", "\n");
3000 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3001}
3002
3003#[gpui::test(iterations = 30)]
3004async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3005 init_test(cx);
3006
3007 let fs = FakeFs::new(cx.executor().clone());
3008 fs.insert_tree(
3009 path!("/dir"),
3010 json!({
3011 "file1": "the original contents",
3012 }),
3013 )
3014 .await;
3015
3016 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3017 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3018 let buffer = project
3019 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3020 .await
3021 .unwrap();
3022
3023 // Simulate buffer diffs being slow, so that they don't complete before
3024 // the next file change occurs.
3025 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3026
3027 // Change the buffer's file on disk, and then wait for the file change
3028 // to be detected by the worktree, so that the buffer starts reloading.
3029 fs.save(
3030 path!("/dir/file1").as_ref(),
3031 &"the first contents".into(),
3032 Default::default(),
3033 )
3034 .await
3035 .unwrap();
3036 worktree.next_event(cx).await;
3037
3038 // Change the buffer's file again. Depending on the random seed, the
3039 // previous file change may still be in progress.
3040 fs.save(
3041 path!("/dir/file1").as_ref(),
3042 &"the second contents".into(),
3043 Default::default(),
3044 )
3045 .await
3046 .unwrap();
3047 worktree.next_event(cx).await;
3048
3049 cx.executor().run_until_parked();
3050 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3051 buffer.read_with(cx, |buffer, _| {
3052 assert_eq!(buffer.text(), on_disk_text);
3053 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3054 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3055 });
3056}
3057
3058#[gpui::test(iterations = 30)]
3059async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3060 init_test(cx);
3061
3062 let fs = FakeFs::new(cx.executor().clone());
3063 fs.insert_tree(
3064 path!("/dir"),
3065 json!({
3066 "file1": "the original contents",
3067 }),
3068 )
3069 .await;
3070
3071 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3072 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3073 let buffer = project
3074 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3075 .await
3076 .unwrap();
3077
3078 // Simulate buffer diffs being slow, so that they don't complete before
3079 // the next file change occurs.
3080 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3081
3082 // Change the buffer's file on disk, and then wait for the file change
3083 // to be detected by the worktree, so that the buffer starts reloading.
3084 fs.save(
3085 path!("/dir/file1").as_ref(),
3086 &"the first contents".into(),
3087 Default::default(),
3088 )
3089 .await
3090 .unwrap();
3091 worktree.next_event(cx).await;
3092
3093 cx.executor()
3094 .spawn(cx.executor().simulate_random_delay())
3095 .await;
3096
3097 // Perform a noop edit, causing the buffer's version to increase.
3098 buffer.update(cx, |buffer, cx| {
3099 buffer.edit([(0..0, " ")], None, cx);
3100 buffer.undo(cx);
3101 });
3102
3103 cx.executor().run_until_parked();
3104 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3105 buffer.read_with(cx, |buffer, _| {
3106 let buffer_text = buffer.text();
3107 if buffer_text == on_disk_text {
3108 assert!(
3109 !buffer.is_dirty() && !buffer.has_conflict(),
3110 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3111 );
3112 }
3113 // If the file change occurred while the buffer was processing the first
3114 // change, the buffer will be in a conflicting state.
3115 else {
3116 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3117 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3118 }
3119 });
3120}
3121
3122#[gpui::test]
3123async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3124 init_test(cx);
3125
3126 let fs = FakeFs::new(cx.executor());
3127 fs.insert_tree(
3128 path!("/dir"),
3129 json!({
3130 "file1": "the old contents",
3131 }),
3132 )
3133 .await;
3134
3135 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3136 let buffer = project
3137 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3138 .await
3139 .unwrap();
3140 buffer.update(cx, |buffer, cx| {
3141 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3142 });
3143
3144 project
3145 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3146 .await
3147 .unwrap();
3148
3149 let new_text = fs
3150 .load(Path::new(path!("/dir/file1")))
3151 .await
3152 .unwrap()
3153 .replace("\r\n", "\n");
3154 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3155}
3156
3157#[gpui::test]
3158async fn test_save_as(cx: &mut gpui::TestAppContext) {
3159 init_test(cx);
3160
3161 let fs = FakeFs::new(cx.executor());
3162 fs.insert_tree("/dir", json!({})).await;
3163
3164 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3165
3166 let languages = project.update(cx, |project, _| project.languages().clone());
3167 languages.add(rust_lang());
3168
3169 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3170 buffer.update(cx, |buffer, cx| {
3171 buffer.edit([(0..0, "abc")], None, cx);
3172 assert!(buffer.is_dirty());
3173 assert!(!buffer.has_conflict());
3174 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3175 });
3176 project
3177 .update(cx, |project, cx| {
3178 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3179 let path = ProjectPath {
3180 worktree_id,
3181 path: Arc::from(Path::new("file1.rs")),
3182 };
3183 project.save_buffer_as(buffer.clone(), path, cx)
3184 })
3185 .await
3186 .unwrap();
3187 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3188
3189 cx.executor().run_until_parked();
3190 buffer.update(cx, |buffer, cx| {
3191 assert_eq!(
3192 buffer.file().unwrap().full_path(cx),
3193 Path::new("dir/file1.rs")
3194 );
3195 assert!(!buffer.is_dirty());
3196 assert!(!buffer.has_conflict());
3197 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3198 });
3199
3200 let opened_buffer = project
3201 .update(cx, |project, cx| {
3202 project.open_local_buffer("/dir/file1.rs", cx)
3203 })
3204 .await
3205 .unwrap();
3206 assert_eq!(opened_buffer, buffer);
3207}
3208
// Uses a real file system (TempTree + RealFs): renames and deletes files on
// disk, then checks that (1) worktree entry ids are stable across renames,
// (2) open buffers follow their files, and (3) a remote worktree replica fed
// the recorded update stream converges to the same paths.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a path, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so they can be replayed
    // into the remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids are stable across renames, including renames of ancestors.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers track their files to the new paths; the deleted
        // file5's buffer keeps its last known path.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3374
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    // Renaming a directory must preserve the worktree entry ids of the
    // directory and the files beneath it, and must not dirty buffers that
    // were opened under the old path.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Looks up the worktree entry id for `path`, panicking if the entry is
    // missing.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory "a" -> "b" and wait for the worktree to settle.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids are stable across the rename, and the buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3426
3427#[gpui::test]
3428async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3429 init_test(cx);
3430
3431 let fs = FakeFs::new(cx.executor());
3432 fs.insert_tree(
3433 "/dir",
3434 json!({
3435 "a.txt": "a-contents",
3436 "b.txt": "b-contents",
3437 }),
3438 )
3439 .await;
3440
3441 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3442
3443 // Spawn multiple tasks to open paths, repeating some paths.
3444 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3445 (
3446 p.open_local_buffer("/dir/a.txt", cx),
3447 p.open_local_buffer("/dir/b.txt", cx),
3448 p.open_local_buffer("/dir/a.txt", cx),
3449 )
3450 });
3451
3452 let buffer_a_1 = buffer_a_1.await.unwrap();
3453 let buffer_a_2 = buffer_a_2.await.unwrap();
3454 let buffer_b = buffer_b.await.unwrap();
3455 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3456 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3457
3458 // There is only one buffer per path.
3459 let buffer_a_id = buffer_a_1.entity_id();
3460 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3461
3462 // Open the same path again while it is still open.
3463 drop(buffer_a_1);
3464 let buffer_a_3 = project
3465 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3466 .await
3467 .unwrap();
3468
3469 // There's still only one buffer per path.
3470 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3471}
3472
3473#[gpui::test]
3474async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3475 init_test(cx);
3476
3477 let fs = FakeFs::new(cx.executor());
3478 fs.insert_tree(
3479 path!("/dir"),
3480 json!({
3481 "file1": "abc",
3482 "file2": "def",
3483 "file3": "ghi",
3484 }),
3485 )
3486 .await;
3487
3488 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3489
3490 let buffer1 = project
3491 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3492 .await
3493 .unwrap();
3494 let events = Arc::new(Mutex::new(Vec::new()));
3495
3496 // initially, the buffer isn't dirty.
3497 buffer1.update(cx, |buffer, cx| {
3498 cx.subscribe(&buffer1, {
3499 let events = events.clone();
3500 move |_, _, event, _| match event {
3501 BufferEvent::Operation { .. } => {}
3502 _ => events.lock().push(event.clone()),
3503 }
3504 })
3505 .detach();
3506
3507 assert!(!buffer.is_dirty());
3508 assert!(events.lock().is_empty());
3509
3510 buffer.edit([(1..2, "")], None, cx);
3511 });
3512
3513 // after the first edit, the buffer is dirty, and emits a dirtied event.
3514 buffer1.update(cx, |buffer, cx| {
3515 assert!(buffer.text() == "ac");
3516 assert!(buffer.is_dirty());
3517 assert_eq!(
3518 *events.lock(),
3519 &[
3520 language::BufferEvent::Edited,
3521 language::BufferEvent::DirtyChanged
3522 ]
3523 );
3524 events.lock().clear();
3525 buffer.did_save(
3526 buffer.version(),
3527 buffer.file().unwrap().disk_state().mtime(),
3528 cx,
3529 );
3530 });
3531
3532 // after saving, the buffer is not dirty, and emits a saved event.
3533 buffer1.update(cx, |buffer, cx| {
3534 assert!(!buffer.is_dirty());
3535 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
3536 events.lock().clear();
3537
3538 buffer.edit([(1..1, "B")], None, cx);
3539 buffer.edit([(2..2, "D")], None, cx);
3540 });
3541
3542 // after editing again, the buffer is dirty, and emits another dirty event.
3543 buffer1.update(cx, |buffer, cx| {
3544 assert!(buffer.text() == "aBDc");
3545 assert!(buffer.is_dirty());
3546 assert_eq!(
3547 *events.lock(),
3548 &[
3549 language::BufferEvent::Edited,
3550 language::BufferEvent::DirtyChanged,
3551 language::BufferEvent::Edited,
3552 ],
3553 );
3554 events.lock().clear();
3555
3556 // After restoring the buffer to its previously-saved state,
3557 // the buffer is not considered dirty anymore.
3558 buffer.edit([(1..3, "")], None, cx);
3559 assert!(buffer.text() == "ac");
3560 assert!(!buffer.is_dirty());
3561 });
3562
3563 assert_eq!(
3564 *events.lock(),
3565 &[
3566 language::BufferEvent::Edited,
3567 language::BufferEvent::DirtyChanged
3568 ]
3569 );
3570
3571 // When a file is deleted, the buffer is considered dirty.
3572 let events = Arc::new(Mutex::new(Vec::new()));
3573 let buffer2 = project
3574 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
3575 .await
3576 .unwrap();
3577 buffer2.update(cx, |_, cx| {
3578 cx.subscribe(&buffer2, {
3579 let events = events.clone();
3580 move |_, _, event, _| events.lock().push(event.clone())
3581 })
3582 .detach();
3583 });
3584
3585 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
3586 .await
3587 .unwrap();
3588 cx.executor().run_until_parked();
3589 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3590 assert_eq!(
3591 *events.lock(),
3592 &[
3593 language::BufferEvent::DirtyChanged,
3594 language::BufferEvent::FileHandleChanged
3595 ]
3596 );
3597
3598 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3599 let events = Arc::new(Mutex::new(Vec::new()));
3600 let buffer3 = project
3601 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
3602 .await
3603 .unwrap();
3604 buffer3.update(cx, |_, cx| {
3605 cx.subscribe(&buffer3, {
3606 let events = events.clone();
3607 move |_, _, event, _| events.lock().push(event.clone())
3608 })
3609 .detach();
3610 });
3611
3612 buffer3.update(cx, |buffer, cx| {
3613 buffer.edit([(0..0, "x")], None, cx);
3614 });
3615 events.lock().clear();
3616 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
3617 .await
3618 .unwrap();
3619 cx.executor().run_until_parked();
3620 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
3621 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3622}
3623
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Covers both reactions to on-disk changes: a clean buffer is reloaded
    // (with anchors carried through the diff), while a dirty buffer keeps its
    // contents and is flagged as conflicted.
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Create anchors at column 1 of each of the first three rows, so we can
    // check where they land after the reload.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the text they were attached to: the surviving
        // lines shifted down past the inserted ones, and the anchor on the
        // deleted line "c" landed at the end of the preceding line.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3704
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    // Line endings are normalized to "\n" in the in-memory buffer text, while
    // the detected `LineEnding` is tracked separately, follows changes on
    // disk, and is reapplied when the buffer is saved.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();

    // CRLF content is normalized to "\n" in memory, but the original ending
    // style is remembered per buffer.
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        path!("/dir/file1").as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
3766
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Publishes LSP diagnostics in which HINT-severity entries are linked to
    // their primary WARNING/ERROR via `related_information`, then verifies
    // the resulting grouping: group 1 is "error 1" plus its hint, group 0 is
    // "error 2" plus its two hints.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    // Five diagnostics: a WARNING ("error 1") with one related hint, and an
    // ERROR ("error 2") with two related hints. Each hint carries an
    // "original diagnostic" back-reference to its primary's range.
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Ingest the diagnostics as if they were published by language server 0.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries, ordered by position. `group_id` ties each hint to its
    // primary; `is_primary` marks the WARNING/ERROR entry of each group.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: the "error 2" primary and its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1: the "error 1" primary and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
4009
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Renaming a worktree entry must send `workspace/willRenameFiles` to a
    // server that registered matching file-operation filters, and follow up
    // with a `workspace/didRenameFiles` notification once the rename is done.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server registers interest in `*.rs` files and in all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    // Fake Rust language server advertising both willRename and didRename
    // support for the filters above.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename one.rs -> three.rs.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The workspace edit the fake server will return from willRenameFiles.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    // Answer willRenameFiles: check the announced old/new URIs and return the
    // prepared edit, recording it so we can assert the handler actually ran.
    fake_server
        .handle_request::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server receives didRenameFiles with the
    // same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    // The willRenameFiles handler ran and produced the expected edit.
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4138
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // End-to-end symbol rename: `prepare_rename` resolves the renameable
    // range from the server, then `perform_rename` applies the server's
    // multi-file workspace edit and returns the touched buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare the rename at offset 7 (inside "ONE"); the fake server reports
    // the renameable range 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename to "THREE"; the server returns edits for both
    // one.rs (the definition) and two.rs (its two usages).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The returned transaction holds both edited buffers with the rename
    // applied: one.rs (already open) and two.rs (opened by the rename).
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4278
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Project-wide text search matches files on disk, and reflects unsaved
    // edits in open buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive search for "TWO" over the on-disk contents.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so it now references two::TWO
    // twice.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The same search now also reports the unsaved buffer contents of
    // four.rs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40]),
            (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
4353
4354#[gpui::test]
4355async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4356 init_test(cx);
4357
4358 let search_query = "file";
4359
4360 let fs = FakeFs::new(cx.executor());
4361 fs.insert_tree(
4362 path!("/dir"),
4363 json!({
4364 "one.rs": r#"// Rust file one"#,
4365 "one.ts": r#"// TypeScript file one"#,
4366 "two.rs": r#"// Rust file two"#,
4367 "two.ts": r#"// TypeScript file two"#,
4368 }),
4369 )
4370 .await;
4371 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4372
4373 assert!(
4374 search(
4375 &project,
4376 SearchQuery::text(
4377 search_query,
4378 false,
4379 true,
4380 false,
4381 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4382 Default::default(),
4383 None
4384 )
4385 .unwrap(),
4386 cx
4387 )
4388 .await
4389 .unwrap()
4390 .is_empty(),
4391 "If no inclusions match, no files should be returned"
4392 );
4393
4394 assert_eq!(
4395 search(
4396 &project,
4397 SearchQuery::text(
4398 search_query,
4399 false,
4400 true,
4401 false,
4402 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4403 Default::default(),
4404 None
4405 )
4406 .unwrap(),
4407 cx
4408 )
4409 .await
4410 .unwrap(),
4411 HashMap::from_iter([
4412 (separator!("dir/one.rs").to_string(), vec![8..12]),
4413 (separator!("dir/two.rs").to_string(), vec![8..12]),
4414 ]),
4415 "Rust only search should give only Rust files"
4416 );
4417
4418 assert_eq!(
4419 search(
4420 &project,
4421 SearchQuery::text(
4422 search_query,
4423 false,
4424 true,
4425 false,
4426
4427 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4428
4429 Default::default(),
4430 None,
4431 ).unwrap(),
4432 cx
4433 )
4434 .await
4435 .unwrap(),
4436 HashMap::from_iter([
4437 (separator!("dir/one.ts").to_string(), vec![14..18]),
4438 (separator!("dir/two.ts").to_string(), vec![14..18]),
4439 ]),
4440 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4441 );
4442
4443 assert_eq!(
4444 search(
4445 &project,
4446 SearchQuery::text(
4447 search_query,
4448 false,
4449 true,
4450 false,
4451
4452 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4453
4454 Default::default(),
4455 None,
4456 ).unwrap(),
4457 cx
4458 )
4459 .await
4460 .unwrap(),
4461 HashMap::from_iter([
4462 (separator!("dir/two.ts").to_string(), vec![14..18]),
4463 (separator!("dir/one.rs").to_string(), vec![8..12]),
4464 (separator!("dir/one.ts").to_string(), vec![14..18]),
4465 (separator!("dir/two.rs").to_string(), vec![8..12]),
4466 ]),
4467 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4468 );
4469}
4470
4471#[gpui::test]
4472async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4473 init_test(cx);
4474
4475 let search_query = "file";
4476
4477 let fs = FakeFs::new(cx.executor());
4478 fs.insert_tree(
4479 path!("/dir"),
4480 json!({
4481 "one.rs": r#"// Rust file one"#,
4482 "one.ts": r#"// TypeScript file one"#,
4483 "two.rs": r#"// Rust file two"#,
4484 "two.ts": r#"// TypeScript file two"#,
4485 }),
4486 )
4487 .await;
4488 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4489
4490 assert_eq!(
4491 search(
4492 &project,
4493 SearchQuery::text(
4494 search_query,
4495 false,
4496 true,
4497 false,
4498 Default::default(),
4499 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4500 None,
4501 )
4502 .unwrap(),
4503 cx
4504 )
4505 .await
4506 .unwrap(),
4507 HashMap::from_iter([
4508 (separator!("dir/one.rs").to_string(), vec![8..12]),
4509 (separator!("dir/one.ts").to_string(), vec![14..18]),
4510 (separator!("dir/two.rs").to_string(), vec![8..12]),
4511 (separator!("dir/two.ts").to_string(), vec![14..18]),
4512 ]),
4513 "If no exclusions match, all files should be returned"
4514 );
4515
4516 assert_eq!(
4517 search(
4518 &project,
4519 SearchQuery::text(
4520 search_query,
4521 false,
4522 true,
4523 false,
4524 Default::default(),
4525 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4526 None,
4527 )
4528 .unwrap(),
4529 cx
4530 )
4531 .await
4532 .unwrap(),
4533 HashMap::from_iter([
4534 (separator!("dir/one.ts").to_string(), vec![14..18]),
4535 (separator!("dir/two.ts").to_string(), vec![14..18]),
4536 ]),
4537 "Rust exclusion search should give only TypeScript files"
4538 );
4539
4540 assert_eq!(
4541 search(
4542 &project,
4543 SearchQuery::text(
4544 search_query,
4545 false,
4546 true,
4547 false,
4548 Default::default(),
4549 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4550 None,
4551 ).unwrap(),
4552 cx
4553 )
4554 .await
4555 .unwrap(),
4556 HashMap::from_iter([
4557 (separator!("dir/one.rs").to_string(), vec![8..12]),
4558 (separator!("dir/two.rs").to_string(), vec![8..12]),
4559 ]),
4560 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4561 );
4562
4563 assert!(
4564 search(
4565 &project,
4566 SearchQuery::text(
4567 search_query,
4568 false,
4569 true,
4570 false,
4571 Default::default(),
4572
4573 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4574 None,
4575
4576 ).unwrap(),
4577 cx
4578 )
4579 .await
4580 .unwrap().is_empty(),
4581 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4582 );
4583}
4584
4585#[gpui::test]
4586async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4587 init_test(cx);
4588
4589 let search_query = "file";
4590
4591 let fs = FakeFs::new(cx.executor());
4592 fs.insert_tree(
4593 path!("/dir"),
4594 json!({
4595 "one.rs": r#"// Rust file one"#,
4596 "one.ts": r#"// TypeScript file one"#,
4597 "two.rs": r#"// Rust file two"#,
4598 "two.ts": r#"// TypeScript file two"#,
4599 }),
4600 )
4601 .await;
4602 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4603
4604 assert!(
4605 search(
4606 &project,
4607 SearchQuery::text(
4608 search_query,
4609 false,
4610 true,
4611 false,
4612 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4613 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4614 None,
4615 )
4616 .unwrap(),
4617 cx
4618 )
4619 .await
4620 .unwrap()
4621 .is_empty(),
4622 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4623 );
4624
4625 assert!(
4626 search(
4627 &project,
4628 SearchQuery::text(
4629 search_query,
4630 false,
4631 true,
4632 false,
4633 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4634 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4635 None,
4636 ).unwrap(),
4637 cx
4638 )
4639 .await
4640 .unwrap()
4641 .is_empty(),
4642 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4643 );
4644
4645 assert!(
4646 search(
4647 &project,
4648 SearchQuery::text(
4649 search_query,
4650 false,
4651 true,
4652 false,
4653 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4654 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4655 None,
4656 )
4657 .unwrap(),
4658 cx
4659 )
4660 .await
4661 .unwrap()
4662 .is_empty(),
4663 "Non-matching inclusions and exclusions should not change that."
4664 );
4665
4666 assert_eq!(
4667 search(
4668 &project,
4669 SearchQuery::text(
4670 search_query,
4671 false,
4672 true,
4673 false,
4674 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4675 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4676 None,
4677 )
4678 .unwrap(),
4679 cx
4680 )
4681 .await
4682 .unwrap(),
4683 HashMap::from_iter([
4684 (separator!("dir/one.ts").to_string(), vec![14..18]),
4685 (separator!("dir/two.ts").to_string(), vec![14..18]),
4686 ]),
4687 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4688 );
4689}
4690
4691#[gpui::test]
4692async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4693 init_test(cx);
4694
4695 let fs = FakeFs::new(cx.executor());
4696 fs.insert_tree(
4697 path!("/worktree-a"),
4698 json!({
4699 "haystack.rs": r#"// NEEDLE"#,
4700 "haystack.ts": r#"// NEEDLE"#,
4701 }),
4702 )
4703 .await;
4704 fs.insert_tree(
4705 path!("/worktree-b"),
4706 json!({
4707 "haystack.rs": r#"// NEEDLE"#,
4708 "haystack.ts": r#"// NEEDLE"#,
4709 }),
4710 )
4711 .await;
4712
4713 let project = Project::test(
4714 fs.clone(),
4715 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
4716 cx,
4717 )
4718 .await;
4719
4720 assert_eq!(
4721 search(
4722 &project,
4723 SearchQuery::text(
4724 "NEEDLE",
4725 false,
4726 true,
4727 false,
4728 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4729 Default::default(),
4730 None,
4731 )
4732 .unwrap(),
4733 cx
4734 )
4735 .await
4736 .unwrap(),
4737 HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
4738 "should only return results from included worktree"
4739 );
4740 assert_eq!(
4741 search(
4742 &project,
4743 SearchQuery::text(
4744 "NEEDLE",
4745 false,
4746 true,
4747 false,
4748 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4749 Default::default(),
4750 None,
4751 )
4752 .unwrap(),
4753 cx
4754 )
4755 .await
4756 .unwrap(),
4757 HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
4758 "should only return results from included worktree"
4759 );
4760
4761 assert_eq!(
4762 search(
4763 &project,
4764 SearchQuery::text(
4765 "NEEDLE",
4766 false,
4767 true,
4768 false,
4769 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4770 Default::default(),
4771 None,
4772 )
4773 .unwrap(),
4774 cx
4775 )
4776 .await
4777 .unwrap(),
4778 HashMap::from_iter([
4779 (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
4780 (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
4781 ]),
4782 "should return results from both worktrees"
4783 );
4784}
4785
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Fixture: `target` and `node_modules` are gitignored; only the root
    // `package.json` is non-ignored content that contains the query.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search: gitignored directories are skipped entirely.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false, // do not search ignored files
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): each subsequent search builds a fresh project over the same
    // FakeFs — presumably to start from a clean worktree scan; confirm whether
    // reusing the first project would behave the same.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Same query with ignored files included: every match is reported.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true, // include ignored files
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/package.json").to_string(), vec![8..11]),
            (separator!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                separator!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                separator!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                separator!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                separator!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusion and exclusion globs still apply when ignored files are searched.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true, // include ignored files
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            separator!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4905
4906#[gpui::test]
4907async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4908 init_test(cx);
4909
4910 let fs = FakeFs::new(cx.executor().clone());
4911 fs.insert_tree(
4912 "/one/two",
4913 json!({
4914 "three": {
4915 "a.txt": "",
4916 "four": {}
4917 },
4918 "c.rs": ""
4919 }),
4920 )
4921 .await;
4922
4923 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
4924 project
4925 .update(cx, |project, cx| {
4926 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4927 project.create_entry((id, "b.."), true, cx)
4928 })
4929 .await
4930 .unwrap()
4931 .to_included()
4932 .unwrap();
4933
4934 // Can't create paths outside the project
4935 let result = project
4936 .update(cx, |project, cx| {
4937 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4938 project.create_entry((id, "../../boop"), true, cx)
4939 })
4940 .await;
4941 assert!(result.is_err());
4942
4943 // Can't create paths with '..'
4944 let result = project
4945 .update(cx, |project, cx| {
4946 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4947 project.create_entry((id, "four/../beep"), true, cx)
4948 })
4949 .await;
4950 assert!(result.is_err());
4951
4952 assert_eq!(
4953 fs.paths(true),
4954 vec![
4955 PathBuf::from("/"),
4956 PathBuf::from("/one"),
4957 PathBuf::from("/one/two"),
4958 PathBuf::from("/one/two/c.rs"),
4959 PathBuf::from("/one/two/three"),
4960 PathBuf::from("/one/two/three/a.txt"),
4961 PathBuf::from("/one/two/three/b.."),
4962 PathBuf::from("/one/two/three/four"),
4963 ]
4964 );
4965
4966 // And we cannot open buffers with '..'
4967 let result = project
4968 .update(cx, |project, cx| {
4969 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4970 project.open_buffer((id, "../c.rs"), cx)
4971 })
4972 .await;
4973 assert!(result.is_err())
4974}
4975
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register four fake servers for the same language: two that answer
    // hovers with content, one that answers with `None`, and one that does
    // not advertise hover capabilities at all.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    // No hover support advertised at all.
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all four servers.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Wire up a hover handler per server, keyed by server name, so we can
    // later verify that each capable server actually received a request.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two respond with a real hover payload naming themselves.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            // Capable of hover, but returns no content.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // Declares no hover capability: the project must never query it.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Kick off a single hover, then wait until every capable server was asked.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned content contribute hover blocks.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5128
5129#[gpui::test]
5130async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5131 init_test(cx);
5132
5133 let fs = FakeFs::new(cx.executor());
5134 fs.insert_tree(
5135 path!("/dir"),
5136 json!({
5137 "a.ts": "a",
5138 }),
5139 )
5140 .await;
5141
5142 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5143
5144 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5145 language_registry.add(typescript_lang());
5146 let mut fake_language_servers = language_registry.register_fake_lsp(
5147 "TypeScript",
5148 FakeLspAdapter {
5149 capabilities: lsp::ServerCapabilities {
5150 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5151 ..lsp::ServerCapabilities::default()
5152 },
5153 ..FakeLspAdapter::default()
5154 },
5155 );
5156
5157 let (buffer, _handle) = project
5158 .update(cx, |p, cx| {
5159 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5160 })
5161 .await
5162 .unwrap();
5163 cx.executor().run_until_parked();
5164
5165 let fake_server = fake_language_servers
5166 .next()
5167 .await
5168 .expect("failed to get the language server");
5169
5170 let mut request_handled =
5171 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
5172 Ok(Some(lsp::Hover {
5173 contents: lsp::HoverContents::Array(vec![
5174 lsp::MarkedString::String("".to_string()),
5175 lsp::MarkedString::String(" ".to_string()),
5176 lsp::MarkedString::String("\n\n\n".to_string()),
5177 ]),
5178 range: None,
5179 }))
5180 });
5181
5182 let hover_task = project.update(cx, |project, cx| {
5183 project.hover(&buffer, Point::new(0, 0), cx)
5184 });
5185 let () = request_handled
5186 .next()
5187 .await
5188 .expect("All hover requests should have been triggered");
5189 assert_eq!(
5190 Vec::<String>::new(),
5191 hover_task
5192 .await
5193 .into_iter()
5194 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5195 .sorted()
5196 .collect::<Vec<_>>(),
5197 "Empty hover parts should be ignored"
5198 );
5199}
5200
5201#[gpui::test]
5202async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
5203 init_test(cx);
5204
5205 let fs = FakeFs::new(cx.executor());
5206 fs.insert_tree(
5207 path!("/dir"),
5208 json!({
5209 "a.ts": "a",
5210 }),
5211 )
5212 .await;
5213
5214 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5215
5216 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5217 language_registry.add(typescript_lang());
5218 let mut fake_language_servers = language_registry.register_fake_lsp(
5219 "TypeScript",
5220 FakeLspAdapter {
5221 capabilities: lsp::ServerCapabilities {
5222 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5223 ..lsp::ServerCapabilities::default()
5224 },
5225 ..FakeLspAdapter::default()
5226 },
5227 );
5228
5229 let (buffer, _handle) = project
5230 .update(cx, |p, cx| {
5231 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5232 })
5233 .await
5234 .unwrap();
5235 cx.executor().run_until_parked();
5236
5237 let fake_server = fake_language_servers
5238 .next()
5239 .await
5240 .expect("failed to get the language server");
5241
5242 let mut request_handled = fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5243 move |_, _| async move {
5244 Ok(Some(vec![
5245 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5246 title: "organize imports".to_string(),
5247 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
5248 ..lsp::CodeAction::default()
5249 }),
5250 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5251 title: "fix code".to_string(),
5252 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
5253 ..lsp::CodeAction::default()
5254 }),
5255 ]))
5256 },
5257 );
5258
5259 let code_actions_task = project.update(cx, |project, cx| {
5260 project.code_actions(
5261 &buffer,
5262 0..buffer.read(cx).len(),
5263 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
5264 cx,
5265 )
5266 });
5267
5268 let () = request_handled
5269 .next()
5270 .await
5271 .expect("The code action request should have been triggered");
5272
5273 let code_actions = code_actions_task.await.unwrap();
5274 assert_eq!(code_actions.len(), 1);
5275 assert_eq!(
5276 code_actions[0].lsp_action.kind,
5277 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
5278 );
5279}
5280
5281#[gpui::test]
5282async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5283 init_test(cx);
5284
5285 let fs = FakeFs::new(cx.executor());
5286 fs.insert_tree(
5287 path!("/dir"),
5288 json!({
5289 "a.tsx": "a",
5290 }),
5291 )
5292 .await;
5293
5294 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5295
5296 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5297 language_registry.add(tsx_lang());
5298 let language_server_names = [
5299 "TypeScriptServer",
5300 "TailwindServer",
5301 "ESLintServer",
5302 "NoActionsCapabilitiesServer",
5303 ];
5304
5305 let mut language_server_rxs = [
5306 language_registry.register_fake_lsp(
5307 "tsx",
5308 FakeLspAdapter {
5309 name: language_server_names[0],
5310 capabilities: lsp::ServerCapabilities {
5311 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5312 ..lsp::ServerCapabilities::default()
5313 },
5314 ..FakeLspAdapter::default()
5315 },
5316 ),
5317 language_registry.register_fake_lsp(
5318 "tsx",
5319 FakeLspAdapter {
5320 name: language_server_names[1],
5321 capabilities: lsp::ServerCapabilities {
5322 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5323 ..lsp::ServerCapabilities::default()
5324 },
5325 ..FakeLspAdapter::default()
5326 },
5327 ),
5328 language_registry.register_fake_lsp(
5329 "tsx",
5330 FakeLspAdapter {
5331 name: language_server_names[2],
5332 capabilities: lsp::ServerCapabilities {
5333 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5334 ..lsp::ServerCapabilities::default()
5335 },
5336 ..FakeLspAdapter::default()
5337 },
5338 ),
5339 language_registry.register_fake_lsp(
5340 "tsx",
5341 FakeLspAdapter {
5342 name: language_server_names[3],
5343 capabilities: lsp::ServerCapabilities {
5344 code_action_provider: None,
5345 ..lsp::ServerCapabilities::default()
5346 },
5347 ..FakeLspAdapter::default()
5348 },
5349 ),
5350 ];
5351
5352 let (buffer, _handle) = project
5353 .update(cx, |p, cx| {
5354 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5355 })
5356 .await
5357 .unwrap();
5358 cx.executor().run_until_parked();
5359
5360 let mut servers_with_actions_requests = HashMap::default();
5361 for i in 0..language_server_names.len() {
5362 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5363 panic!(
5364 "Failed to get language server #{i} with name {}",
5365 &language_server_names[i]
5366 )
5367 });
5368 let new_server_name = new_server.server.name();
5369
5370 assert!(
5371 !servers_with_actions_requests.contains_key(&new_server_name),
5372 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5373 );
5374 match new_server_name.0.as_ref() {
5375 "TailwindServer" | "TypeScriptServer" => {
5376 servers_with_actions_requests.insert(
5377 new_server_name.clone(),
5378 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5379 move |_, _| {
5380 let name = new_server_name.clone();
5381 async move {
5382 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5383 lsp::CodeAction {
5384 title: format!("{name} code action"),
5385 ..lsp::CodeAction::default()
5386 },
5387 )]))
5388 }
5389 },
5390 ),
5391 );
5392 }
5393 "ESLintServer" => {
5394 servers_with_actions_requests.insert(
5395 new_server_name,
5396 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5397 |_, _| async move { Ok(None) },
5398 ),
5399 );
5400 }
5401 "NoActionsCapabilitiesServer" => {
5402 let _never_handled = new_server
5403 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5404 panic!(
5405 "Should not call for code actions server with no corresponding capabilities"
5406 )
5407 });
5408 }
5409 unexpected => panic!("Unexpected server name: {unexpected}"),
5410 }
5411 }
5412
5413 let code_actions_task = project.update(cx, |project, cx| {
5414 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5415 });
5416
5417 // cx.run_until_parked();
5418 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5419 |mut code_actions_request| async move {
5420 code_actions_request
5421 .next()
5422 .await
5423 .expect("All code actions requests should have been triggered")
5424 },
5425 ))
5426 .await;
5427 assert_eq!(
5428 vec!["TailwindServer code action", "TypeScriptServer code action"],
5429 code_actions_task
5430 .await
5431 .unwrap()
5432 .into_iter()
5433 .map(|code_action| code_action.lsp_action.title)
5434 .sorted()
5435 .collect::<Vec<_>>(),
5436 "Should receive code actions responses from all related servers with hover capabilities"
5437 );
5438}
5439
5440#[gpui::test]
5441async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5442 init_test(cx);
5443
5444 let fs = FakeFs::new(cx.executor());
5445 fs.insert_tree(
5446 "/dir",
5447 json!({
5448 "a.rs": "let a = 1;",
5449 "b.rs": "let b = 2;",
5450 "c.rs": "let c = 2;",
5451 }),
5452 )
5453 .await;
5454
5455 let project = Project::test(
5456 fs,
5457 [
5458 "/dir/a.rs".as_ref(),
5459 "/dir/b.rs".as_ref(),
5460 "/dir/c.rs".as_ref(),
5461 ],
5462 cx,
5463 )
5464 .await;
5465
5466 // check the initial state and get the worktrees
5467 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5468 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5469 assert_eq!(worktrees.len(), 3);
5470
5471 let worktree_a = worktrees[0].read(cx);
5472 let worktree_b = worktrees[1].read(cx);
5473 let worktree_c = worktrees[2].read(cx);
5474
5475 // check they start in the right order
5476 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5477 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5478 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5479
5480 (
5481 worktrees[0].clone(),
5482 worktrees[1].clone(),
5483 worktrees[2].clone(),
5484 )
5485 });
5486
5487 // move first worktree to after the second
5488 // [a, b, c] -> [b, a, c]
5489 project
5490 .update(cx, |project, cx| {
5491 let first = worktree_a.read(cx);
5492 let second = worktree_b.read(cx);
5493 project.move_worktree(first.id(), second.id(), cx)
5494 })
5495 .expect("moving first after second");
5496
5497 // check the state after moving
5498 project.update(cx, |project, cx| {
5499 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5500 assert_eq!(worktrees.len(), 3);
5501
5502 let first = worktrees[0].read(cx);
5503 let second = worktrees[1].read(cx);
5504 let third = worktrees[2].read(cx);
5505
5506 // check they are now in the right order
5507 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5508 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5509 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5510 });
5511
5512 // move the second worktree to before the first
5513 // [b, a, c] -> [a, b, c]
5514 project
5515 .update(cx, |project, cx| {
5516 let second = worktree_a.read(cx);
5517 let first = worktree_b.read(cx);
5518 project.move_worktree(first.id(), second.id(), cx)
5519 })
5520 .expect("moving second before first");
5521
5522 // check the state after moving
5523 project.update(cx, |project, cx| {
5524 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5525 assert_eq!(worktrees.len(), 3);
5526
5527 let first = worktrees[0].read(cx);
5528 let second = worktrees[1].read(cx);
5529 let third = worktrees[2].read(cx);
5530
5531 // check they are now in the right order
5532 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5533 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5534 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5535 });
5536
5537 // move the second worktree to after the third
5538 // [a, b, c] -> [a, c, b]
5539 project
5540 .update(cx, |project, cx| {
5541 let second = worktree_b.read(cx);
5542 let third = worktree_c.read(cx);
5543 project.move_worktree(second.id(), third.id(), cx)
5544 })
5545 .expect("moving second after third");
5546
5547 // check the state after moving
5548 project.update(cx, |project, cx| {
5549 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5550 assert_eq!(worktrees.len(), 3);
5551
5552 let first = worktrees[0].read(cx);
5553 let second = worktrees[1].read(cx);
5554 let third = worktrees[2].read(cx);
5555
5556 // check they are now in the right order
5557 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5558 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5559 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5560 });
5561
5562 // move the third worktree to before the second
5563 // [a, c, b] -> [a, b, c]
5564 project
5565 .update(cx, |project, cx| {
5566 let third = worktree_c.read(cx);
5567 let second = worktree_b.read(cx);
5568 project.move_worktree(third.id(), second.id(), cx)
5569 })
5570 .expect("moving third before second");
5571
5572 // check the state after moving
5573 project.update(cx, |project, cx| {
5574 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5575 assert_eq!(worktrees.len(), 3);
5576
5577 let first = worktrees[0].read(cx);
5578 let second = worktrees[1].read(cx);
5579 let third = worktrees[2].read(cx);
5580
5581 // check they are now in the right order
5582 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5583 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5584 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5585 });
5586
5587 // move the first worktree to after the third
5588 // [a, b, c] -> [b, c, a]
5589 project
5590 .update(cx, |project, cx| {
5591 let first = worktree_a.read(cx);
5592 let third = worktree_c.read(cx);
5593 project.move_worktree(first.id(), third.id(), cx)
5594 })
5595 .expect("moving first after third");
5596
5597 // check the state after moving
5598 project.update(cx, |project, cx| {
5599 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5600 assert_eq!(worktrees.len(), 3);
5601
5602 let first = worktrees[0].read(cx);
5603 let second = worktrees[1].read(cx);
5604 let third = worktrees[2].read(cx);
5605
5606 // check they are now in the right order
5607 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5608 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5609 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5610 });
5611
5612 // move the third worktree to before the first
5613 // [b, c, a] -> [a, b, c]
5614 project
5615 .update(cx, |project, cx| {
5616 let third = worktree_a.read(cx);
5617 let first = worktree_b.read(cx);
5618 project.move_worktree(third.id(), first.id(), cx)
5619 })
5620 .expect("moving third before first");
5621
5622 // check the state after moving
5623 project.update(cx, |project, cx| {
5624 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5625 assert_eq!(worktrees.len(), 3);
5626
5627 let first = worktrees[0].read(cx);
5628 let second = worktrees[1].read(cx);
5629 let third = worktrees[2].read(cx);
5630
5631 // check they are now in the right order
5632 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5633 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5634 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5635 });
5636}
5637
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that a buffer's unstaged diff (git index vs. working copy)
    // reports the expected hunks, and that the diff recomputes when the
    // index contents change underneath it.
    init_test(cx);

    // Contents recorded in the git index for src/main.rs.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Contents of the file on disk (the working copy).
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Let the diff recalculation settle, then check the initial hunks:
    // an added comment line plus a modified println line.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified(),
                ),
            ],
        );
    });

    // Stage a new version of the file. The diff's base text should update,
    // leaving a single added line as the only remaining hunk.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().unwrap().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added(),
            )],
        );
    });
}
5735
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that a buffer's uncommitted diff (HEAD vs. working copy)
    // reports the expected hunks — including the secondary (staged) status —
    // and that the diff recomputes when HEAD changes.
    init_test(cx);

    // Contents at HEAD for src/main.rs.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Contents in the git index (the println change is already staged).
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Contents on disk (adds an unstaged comment line on top of the staged change).
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The diff's base text (the HEAD snapshot) should pick up the Rust
    // language that was registered above.
    uncommitted_diff.read_with(cx, |diff, _| {
        assert_eq!(
            diff.base_text().and_then(|base| base.language().cloned()),
            Some(language)
        )
    });

    // After settling: the added comment line is not yet staged
    // (HasSecondaryHunk), while the println modification is fully staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &uncommitted_diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::Added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified(),
                ),
            ],
        );
    });

    // Move HEAD forward; only the println line now differs from HEAD,
    // so the diff collapses to a single added hunk.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents)],
    );

    cx.run_until_parked();
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &uncommitted_diff.base_text().unwrap().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added(),
            )],
        );
    });
}
5858
#[gpui::test]
async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
    // Verifies that uncommitted diffs work when the project's worktree root
    // is a single file (rather than the repository's containing directory).
    init_test(cx);

    // Contents at HEAD for src/main.rs.
    let committed_contents = r#"
        fn main() {
            println!("hello from HEAD");
        }
    "#
    .unindent();
    // Contents on disk (one modified line relative to HEAD).
    let file_contents = r#"
        fn main() {
            println!("hello from the working copy");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents)],
    );

    // Note: the project is opened on the file itself, not on "/dir".
    let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // After settling, the single modified line should show as one hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &uncommitted_diff.base_text_string().unwrap(),
            &[(
                1..2,
                "    println!(\"hello from HEAD\");\n",
                "    println!(\"hello from the working copy\");\n",
                DiffHunkStatus::modified(),
            )],
        );
    });
}
5924
5925async fn search(
5926 project: &Entity<Project>,
5927 query: SearchQuery,
5928 cx: &mut gpui::TestAppContext,
5929) -> Result<HashMap<String, Vec<Range<usize>>>> {
5930 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
5931 let mut results = HashMap::default();
5932 while let Ok(search_result) = search_rx.recv().await {
5933 match search_result {
5934 SearchResult::Buffer { buffer, ranges } => {
5935 results.entry(buffer).or_insert(ranges);
5936 }
5937 SearchResult::LimitReached => {}
5938 }
5939 }
5940 Ok(results
5941 .into_iter()
5942 .map(|(buffer, ranges)| {
5943 buffer.update(cx, |buffer, cx| {
5944 let path = buffer
5945 .file()
5946 .unwrap()
5947 .full_path(cx)
5948 .to_string_lossy()
5949 .to_string();
5950 let ranges = ranges
5951 .into_iter()
5952 .map(|range| range.to_offset(buffer))
5953 .collect::<Vec<_>>();
5954 (path, ranges)
5955 })
5956 })
5957 .collect())
5958}
5959
5960pub fn init_test(cx: &mut gpui::TestAppContext) {
5961 if std::env::var("RUST_LOG").is_ok() {
5962 env_logger::try_init().ok();
5963 }
5964
5965 cx.update(|cx| {
5966 let settings_store = SettingsStore::test(cx);
5967 cx.set_global(settings_store);
5968 release_channel::init(SemanticVersion::default(), cx);
5969 language::init(cx);
5970 Project::init_settings(cx);
5971 });
5972}
5973
5974fn json_lang() -> Arc<Language> {
5975 Arc::new(Language::new(
5976 LanguageConfig {
5977 name: "JSON".into(),
5978 matcher: LanguageMatcher {
5979 path_suffixes: vec!["json".to_string()],
5980 ..Default::default()
5981 },
5982 ..Default::default()
5983 },
5984 None,
5985 ))
5986}
5987
5988fn js_lang() -> Arc<Language> {
5989 Arc::new(Language::new(
5990 LanguageConfig {
5991 name: "JavaScript".into(),
5992 matcher: LanguageMatcher {
5993 path_suffixes: vec!["js".to_string()],
5994 ..Default::default()
5995 },
5996 ..Default::default()
5997 },
5998 None,
5999 ))
6000}
6001
6002fn rust_lang() -> Arc<Language> {
6003 Arc::new(Language::new(
6004 LanguageConfig {
6005 name: "Rust".into(),
6006 matcher: LanguageMatcher {
6007 path_suffixes: vec!["rs".to_string()],
6008 ..Default::default()
6009 },
6010 ..Default::default()
6011 },
6012 Some(tree_sitter_rust::LANGUAGE.into()),
6013 ))
6014}
6015
6016fn typescript_lang() -> Arc<Language> {
6017 Arc::new(Language::new(
6018 LanguageConfig {
6019 name: "TypeScript".into(),
6020 matcher: LanguageMatcher {
6021 path_suffixes: vec!["ts".to_string()],
6022 ..Default::default()
6023 },
6024 ..Default::default()
6025 },
6026 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
6027 ))
6028}
6029
6030fn tsx_lang() -> Arc<Language> {
6031 Arc::new(Language::new(
6032 LanguageConfig {
6033 name: "tsx".into(),
6034 matcher: LanguageMatcher {
6035 path_suffixes: vec!["tsx".to_string()],
6036 ..Default::default()
6037 },
6038 ..Default::default()
6039 },
6040 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
6041 ))
6042}
6043
6044fn get_all_tasks(
6045 project: &Entity<Project>,
6046 worktree_id: Option<WorktreeId>,
6047 task_context: &TaskContext,
6048 cx: &mut App,
6049) -> Vec<(TaskSourceKind, ResolvedTask)> {
6050 let (mut old, new) = project.update(cx, |project, cx| {
6051 project
6052 .task_store
6053 .read(cx)
6054 .task_inventory()
6055 .unwrap()
6056 .read(cx)
6057 .used_and_current_resolved_tasks(worktree_id, None, task_context, cx)
6058 });
6059 old.extend(new);
6060 old
6061}