1use crate::{Event, *};
2use buffer_diff::{assert_hunks, DiffHunkSecondaryStatus, DiffHunkStatus};
3use fs::FakeFs;
4use futures::{future, StreamExt};
5use gpui::{App, SemanticVersion, UpdateGlobal};
6use http_client::Url;
7use language::{
8 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
9 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
10 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
11 OffsetRangeExt, Point, ToPoint,
12};
13use lsp::{
14 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
15 NumberOrString, TextDocumentEdit, WillRenameFiles,
16};
17use parking_lot::Mutex;
18use pretty_assertions::{assert_eq, assert_matches};
19use serde_json::json;
20#[cfg(not(windows))]
21use std::os;
22use std::{str::FromStr, sync::OnceLock};
23
24use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
25use task::{ResolvedTask, TaskContext};
26use unindent::Unindent as _;
27use util::{
28 assert_set_eq, path,
29 paths::PathMatcher,
30 separator,
31 test::{marked_text_offsets, TempTree},
32 uri, TryFutureExt as _,
33};
34
35#[gpui::test]
36async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
37 cx.executor().allow_parking();
38
39 let (tx, mut rx) = futures::channel::mpsc::unbounded();
40 let _thread = std::thread::spawn(move || {
41 #[cfg(not(target_os = "windows"))]
42 std::fs::metadata("/tmp").unwrap();
43 #[cfg(target_os = "windows")]
44 std::fs::metadata("C:/Windows").unwrap();
45 std::thread::sleep(Duration::from_millis(1000));
46 tx.unbounded_send(1).unwrap();
47 });
48 rx.next().await.unwrap();
49}
50
51#[gpui::test]
52async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
53 cx.executor().allow_parking();
54
55 let io_task = smol::unblock(move || {
56 println!("sleeping on thread {:?}", std::thread::current().id());
57 std::thread::sleep(Duration::from_millis(10));
58 1
59 });
60
61 let task = cx.foreground_executor().spawn(async move {
62 io_task.await;
63 });
64
65 task.await;
66}
67
68#[cfg(not(windows))]
69#[gpui::test]
70async fn test_symlinks(cx: &mut gpui::TestAppContext) {
71 init_test(cx);
72 cx.executor().allow_parking();
73
74 let dir = TempTree::new(json!({
75 "root": {
76 "apple": "",
77 "banana": {
78 "carrot": {
79 "date": "",
80 "endive": "",
81 }
82 },
83 "fennel": {
84 "grape": "",
85 }
86 }
87 }));
88
89 let root_link_path = dir.path().join("root_link");
90 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
91 os::unix::fs::symlink(
92 dir.path().join("root/fennel"),
93 dir.path().join("root/finnochio"),
94 )
95 .unwrap();
96
97 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
98
99 project.update(cx, |project, cx| {
100 let tree = project.worktrees(cx).next().unwrap().read(cx);
101 assert_eq!(tree.file_count(), 5);
102 assert_eq!(
103 tree.inode_for_path("fennel/grape"),
104 tree.inode_for_path("finnochio/grape")
105 );
106 });
107}
108
// Verifies .editorconfig support: .editorconfig values override
// .zed/settings.json, a nested .editorconfig overrides its parent, and
// section globs only apply to matching file types.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n    A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n    B\n}",
        },
        "c.js": "def c\n  C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the temp tree into a FakeFs so the project can open it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for a file in the worktree.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            // Block on language detection; fine in a test context.
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by the .editorconfig glob "*.rs",
        // so the .zed/settings.json tab_size of 8 applies.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
198
// Verifies that per-directory `.zed` folders contribute both settings and
// tasks: nested settings override outer ones, worktree task definitions are
// discovered per directory, and global file-based tasks combine with them
// after a task has been scheduled.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n    A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n    B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // Task source corresponding to the worktree-root `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            // `a/` inherits the root settings; `b/` has its own override.
            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task definitions are discovered, from the nested and the
    // root `.zed` directory respectively.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root `.zed` task as recently scheduled and register a global,
    // file-based task definition in the inventory.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // After scheduling, the root `.zed` task is listed first, and the newly
    // registered global task (with its env) appears last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
400
// End-to-end test of language server lifecycle management: servers start
// lazily when a matching buffer opens, buffers are configured from server
// capabilities, edit/save/rename/close notifications are routed only to the
// servers for the buffer's language, and restarting servers reopens the
// relevant documents.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server advertising completion triggers and save support.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server with a different completion trigger set.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, hence no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // The Rust server hears only about the Rust buffer's edit, not the TOML one.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // The old document is closed and the renamed one reopened on the Rust server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed one diagnostic so we can observe it being cleared when the
    // buffer's language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
802
// Verifies `workspace/didChangeWatchedFiles` support: ignored directories are
// only loaded once a language server registers a watcher for them, and FS
// mutations are forwarded to the server only when they match its glob
// patterns.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: an exact path, a glob over `src`, and a glob
    // reaching into the gitignored `target/y` directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Accumulate every change notification, sorted by URI for stable asserts.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering the watchers alone produces no change events, but loading
    // the newly watched ignored directory required extra read_dir calls.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
1002
// Verifies that when a project contains two single-file worktrees,
// diagnostics published for each file land on the correct buffer with the
// correct severity and highlighted span.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open each file as its own (single-file) worktree.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Publish one diagnostic per file, with different severities, both
    // covering the variable name at columns 4..5.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer shows only its own diagnostic, on the expected span.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1104
1105#[gpui::test]
1106async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1107 init_test(cx);
1108
1109 let fs = FakeFs::new(cx.executor());
1110 fs.insert_tree(
1111 path!("/root"),
1112 json!({
1113 "dir": {
1114 ".git": {
1115 "HEAD": "ref: refs/heads/main",
1116 },
1117 ".gitignore": "b.rs",
1118 "a.rs": "let a = 1;",
1119 "b.rs": "let b = 2;",
1120 },
1121 "other.rs": "let b = c;"
1122 }),
1123 )
1124 .await;
1125
1126 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1127 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1128 let (worktree, _) = project
1129 .update(cx, |project, cx| {
1130 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1131 })
1132 .await
1133 .unwrap();
1134 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1135
1136 let (worktree, _) = project
1137 .update(cx, |project, cx| {
1138 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1139 })
1140 .await
1141 .unwrap();
1142 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1143
1144 let server_id = LanguageServerId(0);
1145 lsp_store.update(cx, |lsp_store, cx| {
1146 lsp_store
1147 .update_diagnostics(
1148 server_id,
1149 lsp::PublishDiagnosticsParams {
1150 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1151 version: None,
1152 diagnostics: vec![lsp::Diagnostic {
1153 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1154 severity: Some(lsp::DiagnosticSeverity::ERROR),
1155 message: "unused variable 'b'".to_string(),
1156 ..Default::default()
1157 }],
1158 },
1159 &[],
1160 cx,
1161 )
1162 .unwrap();
1163 lsp_store
1164 .update_diagnostics(
1165 server_id,
1166 lsp::PublishDiagnosticsParams {
1167 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1168 version: None,
1169 diagnostics: vec![lsp::Diagnostic {
1170 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1171 severity: Some(lsp::DiagnosticSeverity::ERROR),
1172 message: "unknown variable 'c'".to_string(),
1173 ..Default::default()
1174 }],
1175 },
1176 &[],
1177 cx,
1178 )
1179 .unwrap();
1180 });
1181
1182 let main_ignored_buffer = project
1183 .update(cx, |project, cx| {
1184 project.open_buffer((main_worktree_id, "b.rs"), cx)
1185 })
1186 .await
1187 .unwrap();
1188 main_ignored_buffer.update(cx, |buffer, _| {
1189 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1190 assert_eq!(
1191 chunks
1192 .iter()
1193 .map(|(s, d)| (s.as_str(), *d))
1194 .collect::<Vec<_>>(),
1195 &[
1196 ("let ", None),
1197 ("b", Some(DiagnosticSeverity::ERROR)),
1198 (" = 2;", None),
1199 ],
1200 "Gigitnored buffers should still get in-buffer diagnostics",
1201 );
1202 });
1203 let other_buffer = project
1204 .update(cx, |project, cx| {
1205 project.open_buffer((other_worktree_id, ""), cx)
1206 })
1207 .await
1208 .unwrap();
1209 other_buffer.update(cx, |buffer, _| {
1210 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1211 assert_eq!(
1212 chunks
1213 .iter()
1214 .map(|(s, d)| (s.as_str(), *d))
1215 .collect::<Vec<_>>(),
1216 &[
1217 ("let b = ", None),
1218 ("c", Some(DiagnosticSeverity::ERROR)),
1219 (";", None),
1220 ],
1221 "Buffers from hidden projects should still get in-buffer diagnostics"
1222 );
1223 });
1224
1225 project.update(cx, |project, cx| {
1226 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1227 assert_eq!(
1228 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1229 vec![(
1230 ProjectPath {
1231 worktree_id: main_worktree_id,
1232 path: Arc::from(Path::new("b.rs")),
1233 },
1234 server_id,
1235 DiagnosticSummary {
1236 error_count: 1,
1237 warning_count: 0,
1238 }
1239 )]
1240 );
1241 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1242 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1243 });
1244}
1245
// Verifies the event stream around disk-based diagnostics: starting progress
// with the configured token emits `DiskBasedDiagnosticsStarted`, publishing
// diagnostics emits `DiagnosticsUpdated`, ending progress emits
// `DiskBasedDiagnosticsFinished`, and republishing identical empty diagnostics
// produces no redundant event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // The fake server is configured so that `progress_token` marks
    // disk-based-diagnostics work.
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Progress begun with the disk-based token (suffixed) signals the start
    // of disk-based diagnostics.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic for a.rs fires a DiagnosticsUpdated event for
    // that path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // The published diagnostic is visible when the buffer is opened.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Second identical (empty) publish: no further event should be emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1381
// Verifies that restarting a language server while its disk-based diagnostics
// are still in progress does not leave the project stuck in the "diagnostics
// running" state: the new server's progress lifecycle fully supersedes the
// old (never-finished) one.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // Note: the replacement server is assigned a new id (1).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server should be reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1468
// Verifies that diagnostics published by a language server are cleared (both
// in the buffer and in the project summary) when that server is restarted.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic shows up in the buffer and in the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1549
1550#[gpui::test]
1551async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1552 init_test(cx);
1553
1554 let fs = FakeFs::new(cx.executor());
1555 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1556
1557 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1558 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1559
1560 language_registry.add(rust_lang());
1561 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1562
1563 let (buffer, _handle) = project
1564 .update(cx, |project, cx| {
1565 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1566 })
1567 .await
1568 .unwrap();
1569
1570 // Before restarting the server, report diagnostics with an unknown buffer version.
1571 let fake_server = fake_servers.next().await.unwrap();
1572 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1573 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1574 version: Some(10000),
1575 diagnostics: Vec::new(),
1576 });
1577 cx.executor().run_until_parked();
1578 project.update(cx, |project, cx| {
1579 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1580 });
1581
1582 let mut fake_server = fake_servers.next().await.unwrap();
1583 let notification = fake_server
1584 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1585 .await
1586 .text_document;
1587 assert_eq!(notification.version, 0);
1588}
1589
// Verifies that cancelling language-server work for a buffer sends a
// WorkDoneProgressCancel notification only for progress that the server
// declared cancellable — the non-cancellable token is left alone.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    // Two concurrent progress streams: one non-cancellable, one cancellable.
    let mut fake_server = fake_servers.next().await.unwrap();
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable progress token should be cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1654
// Verifies that toggling the per-language `enable_language_server` setting
// stops and starts the corresponding servers independently: disabling Rust
// stops only the Rust server, and re-enabling it while disabling JavaScript
// restarts the former and stops the latter.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server receives an Exit; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance re-opens the buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // ...while the JS server is shut down.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1772
// Verifies that diagnostics published against an older buffer version are
// translated through subsequent edits: their ranges move with the text, they
// can overlap (highlighting uses the most severe applicable diagnostic), and
// out-of-order publishes against a newer version are handled correctly.
// Group ids increase monotonically across publishes.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (The two inserted newlines shift all ranges by two rows.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Within the overlap, the ERROR takes precedence over the WARNING.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2058
2059#[gpui::test]
2060async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2061 init_test(cx);
2062
2063 let text = concat!(
2064 "let one = ;\n", //
2065 "let two = \n",
2066 "let three = 3;\n",
2067 );
2068
2069 let fs = FakeFs::new(cx.executor());
2070 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2071
2072 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2073 let buffer = project
2074 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2075 .await
2076 .unwrap();
2077
2078 project.update(cx, |project, cx| {
2079 project.lsp_store.update(cx, |lsp_store, cx| {
2080 lsp_store
2081 .update_diagnostic_entries(
2082 LanguageServerId(0),
2083 PathBuf::from("/dir/a.rs"),
2084 None,
2085 vec![
2086 DiagnosticEntry {
2087 range: Unclipped(PointUtf16::new(0, 10))
2088 ..Unclipped(PointUtf16::new(0, 10)),
2089 diagnostic: Diagnostic {
2090 severity: DiagnosticSeverity::ERROR,
2091 message: "syntax error 1".to_string(),
2092 ..Default::default()
2093 },
2094 },
2095 DiagnosticEntry {
2096 range: Unclipped(PointUtf16::new(1, 10))
2097 ..Unclipped(PointUtf16::new(1, 10)),
2098 diagnostic: Diagnostic {
2099 severity: DiagnosticSeverity::ERROR,
2100 message: "syntax error 2".to_string(),
2101 ..Default::default()
2102 },
2103 },
2104 ],
2105 cx,
2106 )
2107 .unwrap();
2108 })
2109 });
2110
2111 // An empty range is extended forward to include the following character.
2112 // At the end of a line, an empty range is extended backward to include
2113 // the preceding character.
2114 buffer.update(cx, |buffer, _| {
2115 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2116 assert_eq!(
2117 chunks
2118 .iter()
2119 .map(|(s, d)| (s.as_str(), *d))
2120 .collect::<Vec<_>>(),
2121 &[
2122 ("let one = ", None),
2123 (";", Some(DiagnosticSeverity::ERROR)),
2124 ("\nlet two =", None),
2125 (" ", Some(DiagnosticSeverity::ERROR)),
2126 ("\nlet three = 3;\n", None)
2127 ]
2128 );
2129 });
2130}
2131
2132#[gpui::test]
2133async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2134 init_test(cx);
2135
2136 let fs = FakeFs::new(cx.executor());
2137 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2138 .await;
2139
2140 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2141 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2142
2143 lsp_store.update(cx, |lsp_store, cx| {
2144 lsp_store
2145 .update_diagnostic_entries(
2146 LanguageServerId(0),
2147 Path::new("/dir/a.rs").to_owned(),
2148 None,
2149 vec![DiagnosticEntry {
2150 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2151 diagnostic: Diagnostic {
2152 severity: DiagnosticSeverity::ERROR,
2153 is_primary: true,
2154 message: "syntax error a1".to_string(),
2155 ..Default::default()
2156 },
2157 }],
2158 cx,
2159 )
2160 .unwrap();
2161 lsp_store
2162 .update_diagnostic_entries(
2163 LanguageServerId(1),
2164 Path::new("/dir/a.rs").to_owned(),
2165 None,
2166 vec![DiagnosticEntry {
2167 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2168 diagnostic: Diagnostic {
2169 severity: DiagnosticSeverity::ERROR,
2170 is_primary: true,
2171 message: "syntax error b1".to_string(),
2172 ..Default::default()
2173 },
2174 }],
2175 cx,
2176 )
2177 .unwrap();
2178
2179 assert_eq!(
2180 lsp_store.diagnostic_summary(false, cx),
2181 DiagnosticSummary {
2182 error_count: 2,
2183 warning_count: 0,
2184 }
2185 );
2186 });
2187}
2188
// Verifies that edits a language server computed against an *older* version
// of the document are transformed through the user's intervening edits
// before being applied, so they still land in the intended locations.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw on open; the edits below
    // will be tagged with this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The ranges in these edits are expressed in coordinates of the *old*
    // document version captured above, not the buffer's current contents.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the transformed edits must preserve both the server's changes
    // and the user's concurrent comments.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2343
// Simulates rust-analyzer's merge-imports code action, which expresses a
// small change as a very large diff (replace a span, reinsert most of the
// file, then delete the rest). `edits_from_lsp` must collapse this down to
// a minimal set of buffer edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchor ranges to points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four LSP edits collapse to just two buffer edits: the import
        // rewrite and the removal of the now-duplicated second import line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2454
// Malformed edits from a language server — out of order, with inverted
// ranges, or pointing past the end of the document — must be normalized
// into valid, ordered buffer edits rather than rejected or misapplied.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position far past the last line of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchor ranges to points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // After normalization the edits are sorted, clipped to the document,
        // and collapsed to the same minimal pair as in the well-formed case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2561
2562fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2563 buffer: &Buffer,
2564 range: Range<T>,
2565) -> Vec<(String, Option<DiagnosticSeverity>)> {
2566 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2567 for chunk in buffer.snapshot().chunks(range, true) {
2568 if chunks.last().map_or(false, |prev_chunk| {
2569 prev_chunk.1 == chunk.diagnostic_severity
2570 }) {
2571 chunks.last_mut().unwrap().0.push_str(chunk.text);
2572 } else {
2573 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2574 }
2575 }
2576 chunks
2577}
2578
// Go-to-definition: the response location is resolved to a buffer hosted in
// a new, non-visible worktree, and that worktree is released once the last
// reference to the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project's (visible) worktree.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server answers with a location in `a.rs`, a file that lies
    // outside the project's worktree.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // `a.rs` was added as a new, invisible worktree to host the
        // definition's target buffer.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: each worktree's absolute path paired with its visibility.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2676
// When completion items carry no explicit edit range, the replaced range
// (`old_range`) must be inferred from the text around the completion
// position.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing at the end of the partial word "fqn". The item has
    // an `insert_text` but no edit range.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers the 3-character word "fqn" before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal, just before the closing
    // quote. The item has neither `insert_text` nor an edit range.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers the 3-character segment "cmp" before the
    // cursor, excluding the closing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2770
2771#[gpui::test]
2772async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2773 init_test(cx);
2774
2775 let fs = FakeFs::new(cx.executor());
2776 fs.insert_tree(
2777 path!("/dir"),
2778 json!({
2779 "a.ts": "",
2780 }),
2781 )
2782 .await;
2783
2784 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2785
2786 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2787 language_registry.add(typescript_lang());
2788 let mut fake_language_servers = language_registry.register_fake_lsp(
2789 "TypeScript",
2790 FakeLspAdapter {
2791 capabilities: lsp::ServerCapabilities {
2792 completion_provider: Some(lsp::CompletionOptions {
2793 trigger_characters: Some(vec![":".to_string()]),
2794 ..Default::default()
2795 }),
2796 ..Default::default()
2797 },
2798 ..Default::default()
2799 },
2800 );
2801
2802 let (buffer, _handle) = project
2803 .update(cx, |p, cx| {
2804 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
2805 })
2806 .await
2807 .unwrap();
2808
2809 let fake_server = fake_language_servers.next().await.unwrap();
2810
2811 let text = "let a = b.fqn";
2812 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2813 let completions = project.update(cx, |project, cx| {
2814 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
2815 });
2816
2817 fake_server
2818 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2819 Ok(Some(lsp::CompletionResponse::Array(vec![
2820 lsp::CompletionItem {
2821 label: "fullyQualifiedName?".into(),
2822 insert_text: Some("fully\rQualified\r\nName".into()),
2823 ..Default::default()
2824 },
2825 ])))
2826 })
2827 .next()
2828 .await;
2829 let completions = completions.await.unwrap();
2830 assert_eq!(completions.len(), 1);
2831 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2832}
2833
// A code action may resolve to a *command* rather than a workspace edit. In
// that case the command is executed on the server, which applies its edits
// by sending a `workspace/applyEdit` request back to the client; those edits
// must end up in the project transaction returned by `apply_code_action`.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        // The server supports `codeAction/resolve`.
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request: insert "X" at the start of
                    // the document via `workspace/applyEdit`.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a single unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2970
2971#[gpui::test(iterations = 10)]
2972async fn test_save_file(cx: &mut gpui::TestAppContext) {
2973 init_test(cx);
2974
2975 let fs = FakeFs::new(cx.executor());
2976 fs.insert_tree(
2977 path!("/dir"),
2978 json!({
2979 "file1": "the old contents",
2980 }),
2981 )
2982 .await;
2983
2984 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2985 let buffer = project
2986 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
2987 .await
2988 .unwrap();
2989 buffer.update(cx, |buffer, cx| {
2990 assert_eq!(buffer.text(), "the old contents");
2991 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2992 });
2993
2994 project
2995 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2996 .await
2997 .unwrap();
2998
2999 let new_text = fs
3000 .load(Path::new(path!("/dir/file1")))
3001 .await
3002 .unwrap()
3003 .replace("\r\n", "\n");
3004 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3005}
3006
3007#[gpui::test(iterations = 30)]
3008async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3009 init_test(cx);
3010
3011 let fs = FakeFs::new(cx.executor().clone());
3012 fs.insert_tree(
3013 path!("/dir"),
3014 json!({
3015 "file1": "the original contents",
3016 }),
3017 )
3018 .await;
3019
3020 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3021 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3022 let buffer = project
3023 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3024 .await
3025 .unwrap();
3026
3027 // Simulate buffer diffs being slow, so that they don't complete before
3028 // the next file change occurs.
3029 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3030
3031 // Change the buffer's file on disk, and then wait for the file change
3032 // to be detected by the worktree, so that the buffer starts reloading.
3033 fs.save(
3034 path!("/dir/file1").as_ref(),
3035 &"the first contents".into(),
3036 Default::default(),
3037 )
3038 .await
3039 .unwrap();
3040 worktree.next_event(cx).await;
3041
3042 // Change the buffer's file again. Depending on the random seed, the
3043 // previous file change may still be in progress.
3044 fs.save(
3045 path!("/dir/file1").as_ref(),
3046 &"the second contents".into(),
3047 Default::default(),
3048 )
3049 .await
3050 .unwrap();
3051 worktree.next_event(cx).await;
3052
3053 cx.executor().run_until_parked();
3054 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3055 buffer.read_with(cx, |buffer, _| {
3056 assert_eq!(buffer.text(), on_disk_text);
3057 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3058 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3059 });
3060}
3061
3062#[gpui::test(iterations = 30)]
3063async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3064 init_test(cx);
3065
3066 let fs = FakeFs::new(cx.executor().clone());
3067 fs.insert_tree(
3068 path!("/dir"),
3069 json!({
3070 "file1": "the original contents",
3071 }),
3072 )
3073 .await;
3074
3075 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3076 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3077 let buffer = project
3078 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3079 .await
3080 .unwrap();
3081
3082 // Simulate buffer diffs being slow, so that they don't complete before
3083 // the next file change occurs.
3084 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3085
3086 // Change the buffer's file on disk, and then wait for the file change
3087 // to be detected by the worktree, so that the buffer starts reloading.
3088 fs.save(
3089 path!("/dir/file1").as_ref(),
3090 &"the first contents".into(),
3091 Default::default(),
3092 )
3093 .await
3094 .unwrap();
3095 worktree.next_event(cx).await;
3096
3097 cx.executor()
3098 .spawn(cx.executor().simulate_random_delay())
3099 .await;
3100
3101 // Perform a noop edit, causing the buffer's version to increase.
3102 buffer.update(cx, |buffer, cx| {
3103 buffer.edit([(0..0, " ")], None, cx);
3104 buffer.undo(cx);
3105 });
3106
3107 cx.executor().run_until_parked();
3108 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3109 buffer.read_with(cx, |buffer, _| {
3110 let buffer_text = buffer.text();
3111 if buffer_text == on_disk_text {
3112 assert!(
3113 !buffer.is_dirty() && !buffer.has_conflict(),
3114 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3115 );
3116 }
3117 // If the file change occurred while the buffer was processing the first
3118 // change, the buffer will be in a conflicting state.
3119 else {
3120 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3121 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3122 }
3123 });
3124}
3125
3126#[gpui::test]
3127async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3128 init_test(cx);
3129
3130 let fs = FakeFs::new(cx.executor());
3131 fs.insert_tree(
3132 path!("/dir"),
3133 json!({
3134 "file1": "the old contents",
3135 }),
3136 )
3137 .await;
3138
3139 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3140 let buffer = project
3141 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3142 .await
3143 .unwrap();
3144 buffer.update(cx, |buffer, cx| {
3145 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3146 });
3147
3148 project
3149 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3150 .await
3151 .unwrap();
3152
3153 let new_text = fs
3154 .load(Path::new(path!("/dir/file1")))
3155 .await
3156 .unwrap()
3157 .replace("\r\n", "\n");
3158 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3159}
3160
// Saving an untitled buffer under a new path must associate the buffer with
// the file, clear its dirty state, re-detect its language from the new
// extension, and register the buffer so that re-opening the same path
// yields the same entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // A new, unsaved buffer starts out dirty with the Plain Text language.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    // After saving, the buffer has picked up its new path, become clean,
    // and switched to the Rust language based on the `.rs` extension.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the just-saved path returns the very same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3212
// Renames and deletions on a real file system must preserve worktree entry
// ids, retarget open buffers to the moved paths, and replicate through the
// update stream to a remote copy of the worktree.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real-FS watching requires parking the executor.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    // Record entry ids before any renames so we can verify they survive.
    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree emits so they can be replayed
    // into the remote copy later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects all of the renames and the deletion.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids are stable across renames (including the directory rename
    // that moved file3 and file4).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers now point at the renamed paths; the deleted file's
        // buffer keeps its old path but reports a Deleted disk state.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3378
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    // Verifies that renaming a directory preserves the worktree entry ids of
    // both the directory and the files inside it, and that an open buffer
    // whose file moved with the rename keeps its identity and stays clean.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up the worktree entry id for a path, panicking if the
    // entry does not exist.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    // Capture ids before the rename so we can assert they are stable.
    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the parent directory "a" -> "b" through the project API.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids survive the rename, and the buffer is still clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3430
3431#[gpui::test]
3432async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3433 init_test(cx);
3434
3435 let fs = FakeFs::new(cx.executor());
3436 fs.insert_tree(
3437 "/dir",
3438 json!({
3439 "a.txt": "a-contents",
3440 "b.txt": "b-contents",
3441 }),
3442 )
3443 .await;
3444
3445 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3446
3447 // Spawn multiple tasks to open paths, repeating some paths.
3448 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3449 (
3450 p.open_local_buffer("/dir/a.txt", cx),
3451 p.open_local_buffer("/dir/b.txt", cx),
3452 p.open_local_buffer("/dir/a.txt", cx),
3453 )
3454 });
3455
3456 let buffer_a_1 = buffer_a_1.await.unwrap();
3457 let buffer_a_2 = buffer_a_2.await.unwrap();
3458 let buffer_b = buffer_b.await.unwrap();
3459 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3460 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3461
3462 // There is only one buffer per path.
3463 let buffer_a_id = buffer_a_1.entity_id();
3464 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3465
3466 // Open the same path again while it is still open.
3467 drop(buffer_a_1);
3468 let buffer_a_3 = project
3469 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3470 .await
3471 .unwrap();
3472
3473 // There's still only one buffer per path.
3474 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3475}
3476
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    // Exercises the buffer dirty-state machine and the exact sequence of
    // `BufferEvent`s emitted along the way:
    //   - edits mark the buffer dirty and emit Edited + DirtyChanged,
    //   - saving clears the dirty flag and emits Saved,
    //   - editing back to the saved text clears the dirty flag again,
    //   - deleting the file on disk dirties a clean buffer, but a buffer
    //     that was already dirty only gets a FileHandleChanged event.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Collects every non-Operation event emitted by buffer1.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by telling the buffer its current version is the
        // one on disk.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only the first of the two consecutive edits flips the dirty
        // flag, so only one DirtyChanged is emitted between the Edited events.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer first, then delete the file out from under it.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3627
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Covers how a buffer reacts to its file changing on disk:
    //   - a clean buffer silently reloads, applying the on-disk diff as
    //     edits so that existing anchors map to sensible new positions;
    //   - a dirty buffer keeps its in-memory text and is flagged as
    //     having a conflict instead.
    init_test(cx);

    // The ˇ markers record offsets in the initial text so we can create
    // anchors there and later check where they land after the reload.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors created before the reload now resolve to the marked
        // positions in the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
3710
3711#[gpui::test]
3712async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3713 init_test(cx);
3714
3715 let fs = FakeFs::new(cx.executor());
3716 fs.insert_tree(
3717 path!("/dir"),
3718 json!({
3719 "file1": "a\nb\nc\n",
3720 "file2": "one\r\ntwo\r\nthree\r\n",
3721 }),
3722 )
3723 .await;
3724
3725 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3726 let buffer1 = project
3727 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3728 .await
3729 .unwrap();
3730 let buffer2 = project
3731 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
3732 .await
3733 .unwrap();
3734
3735 buffer1.update(cx, |buffer, _| {
3736 assert_eq!(buffer.text(), "a\nb\nc\n");
3737 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3738 });
3739 buffer2.update(cx, |buffer, _| {
3740 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3741 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3742 });
3743
3744 // Change a file's line endings on disk from unix to windows. The buffer's
3745 // state updates correctly.
3746 fs.save(
3747 path!("/dir/file1").as_ref(),
3748 &"aaa\nb\nc\n".into(),
3749 LineEnding::Windows,
3750 )
3751 .await
3752 .unwrap();
3753 cx.executor().run_until_parked();
3754 buffer1.update(cx, |buffer, _| {
3755 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3756 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3757 });
3758
3759 // Save a file with windows line endings. The file is written correctly.
3760 buffer2.update(cx, |buffer, cx| {
3761 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3762 });
3763 project
3764 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3765 .await
3766 .unwrap();
3767 assert_eq!(
3768 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
3769 "one\r\ntwo\r\nthree\r\nfour\r\n",
3770 );
3771}
3772
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Publishes a batch of LSP diagnostics in which hint-severity entries
    // reference their primary diagnostic via `related_information`, and
    // asserts that the buffer groups them: each group shares a `group_id`,
    // exactly one entry per group is primary, and `diagnostic_group`
    // returns a group's entries in buffer order.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Five diagnostics forming two groups:
    //   - "error 1" (warning) with one hint pointing back at it;
    //   - "error 2" (error) with two hints, each cross-referencing the
    //     primary via "original diagnostic" related information.
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics in buffer order: "error 2" and its hints share
    // group 0, "error 1" and its hint share group 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 in isolation: the two hints, then the primary error.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 in isolation: the warning primary, then its hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
4015
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the file-operation protocol around renaming a worktree entry:
    // a language server that registers `willRename`/`didRename` filters must
    // receive a `workspace/willRenameFiles` request (whose returned workspace
    // edit is resolved) followed by a `workspace/didRenameFiles` notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // File-operation filters the fake server registers: all *.rs files and
    // all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer so the fake language server starts up.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Trigger the rename: one.rs -> three.rs.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the server returns from willRenameFiles; its content is
    // arbitrary — we only check that it is received and resolved.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set by the willRenameFiles handler once it has replied, so we can
    // assert afterwards that the request actually arrived.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .handle_request::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must be notified via
    // didRenameFiles with the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4144
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises symbol rename through the LSP: `prepare_rename` resolves the
    // symbol range at the cursor, then `perform_rename` applies the server's
    // WorkspaceEdit across multiple buffers and returns the edited buffers
    // as a transaction.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // prepare_rename at offset 7 (inside "ONE"); the fake server reports the
    // renameable range 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // perform_rename: the server answers with edits to both one.rs (the
    // definition) and two.rs (the two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction contains both edited buffers with the rename applied.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4284
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Project-wide text search: results come from files on disk, and after a
    // buffer is edited (without saving) the search reflects the unsaved
    // in-memory contents rather than the on-disk text.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // "TWO" appears in two.rs (the definition) and three.rs (a reference).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so that it now contains two
    // occurrences of "TWO".
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The unsaved edits are picked up by the search.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40]),
            (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
4359
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Tests the `files_to_include` PathMatcher argument of SearchQuery:
    // non-matching inclusion globs yield no results, matching globs restrict
    // results to the matched files, and extra globs that match nothing are
    // harmless.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,

                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),

                Default::default(),
                None,
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,

                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),

                Default::default(),
                None,
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.ts").to_string(), vec![14..18]),
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
4476
4477#[gpui::test]
4478async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4479 init_test(cx);
4480
4481 let search_query = "file";
4482
4483 let fs = FakeFs::new(cx.executor());
4484 fs.insert_tree(
4485 path!("/dir"),
4486 json!({
4487 "one.rs": r#"// Rust file one"#,
4488 "one.ts": r#"// TypeScript file one"#,
4489 "two.rs": r#"// Rust file two"#,
4490 "two.ts": r#"// TypeScript file two"#,
4491 }),
4492 )
4493 .await;
4494 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4495
4496 assert_eq!(
4497 search(
4498 &project,
4499 SearchQuery::text(
4500 search_query,
4501 false,
4502 true,
4503 false,
4504 Default::default(),
4505 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4506 None,
4507 )
4508 .unwrap(),
4509 cx
4510 )
4511 .await
4512 .unwrap(),
4513 HashMap::from_iter([
4514 (separator!("dir/one.rs").to_string(), vec![8..12]),
4515 (separator!("dir/one.ts").to_string(), vec![14..18]),
4516 (separator!("dir/two.rs").to_string(), vec![8..12]),
4517 (separator!("dir/two.ts").to_string(), vec![14..18]),
4518 ]),
4519 "If no exclusions match, all files should be returned"
4520 );
4521
4522 assert_eq!(
4523 search(
4524 &project,
4525 SearchQuery::text(
4526 search_query,
4527 false,
4528 true,
4529 false,
4530 Default::default(),
4531 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4532 None,
4533 )
4534 .unwrap(),
4535 cx
4536 )
4537 .await
4538 .unwrap(),
4539 HashMap::from_iter([
4540 (separator!("dir/one.ts").to_string(), vec![14..18]),
4541 (separator!("dir/two.ts").to_string(), vec![14..18]),
4542 ]),
4543 "Rust exclusion search should give only TypeScript files"
4544 );
4545
4546 assert_eq!(
4547 search(
4548 &project,
4549 SearchQuery::text(
4550 search_query,
4551 false,
4552 true,
4553 false,
4554 Default::default(),
4555 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4556 None,
4557 ).unwrap(),
4558 cx
4559 )
4560 .await
4561 .unwrap(),
4562 HashMap::from_iter([
4563 (separator!("dir/one.rs").to_string(), vec![8..12]),
4564 (separator!("dir/two.rs").to_string(), vec![8..12]),
4565 ]),
4566 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4567 );
4568
4569 assert!(
4570 search(
4571 &project,
4572 SearchQuery::text(
4573 search_query,
4574 false,
4575 true,
4576 false,
4577 Default::default(),
4578
4579 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4580 None,
4581
4582 ).unwrap(),
4583 cx
4584 )
4585 .await
4586 .unwrap().is_empty(),
4587 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4588 );
4589}
4590
4591#[gpui::test]
4592async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4593 init_test(cx);
4594
4595 let search_query = "file";
4596
4597 let fs = FakeFs::new(cx.executor());
4598 fs.insert_tree(
4599 path!("/dir"),
4600 json!({
4601 "one.rs": r#"// Rust file one"#,
4602 "one.ts": r#"// TypeScript file one"#,
4603 "two.rs": r#"// Rust file two"#,
4604 "two.ts": r#"// TypeScript file two"#,
4605 }),
4606 )
4607 .await;
4608 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4609
4610 assert!(
4611 search(
4612 &project,
4613 SearchQuery::text(
4614 search_query,
4615 false,
4616 true,
4617 false,
4618 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4619 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4620 None,
4621 )
4622 .unwrap(),
4623 cx
4624 )
4625 .await
4626 .unwrap()
4627 .is_empty(),
4628 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4629 );
4630
4631 assert!(
4632 search(
4633 &project,
4634 SearchQuery::text(
4635 search_query,
4636 false,
4637 true,
4638 false,
4639 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4640 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4641 None,
4642 ).unwrap(),
4643 cx
4644 )
4645 .await
4646 .unwrap()
4647 .is_empty(),
4648 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4649 );
4650
4651 assert!(
4652 search(
4653 &project,
4654 SearchQuery::text(
4655 search_query,
4656 false,
4657 true,
4658 false,
4659 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4660 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4661 None,
4662 )
4663 .unwrap(),
4664 cx
4665 )
4666 .await
4667 .unwrap()
4668 .is_empty(),
4669 "Non-matching inclusions and exclusions should not change that."
4670 );
4671
4672 assert_eq!(
4673 search(
4674 &project,
4675 SearchQuery::text(
4676 search_query,
4677 false,
4678 true,
4679 false,
4680 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4681 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4682 None,
4683 )
4684 .unwrap(),
4685 cx
4686 )
4687 .await
4688 .unwrap(),
4689 HashMap::from_iter([
4690 (separator!("dir/one.ts").to_string(), vec![14..18]),
4691 (separator!("dir/two.ts").to_string(), vec![14..18]),
4692 ]),
4693 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4694 );
4695}
4696
4697#[gpui::test]
4698async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4699 init_test(cx);
4700
4701 let fs = FakeFs::new(cx.executor());
4702 fs.insert_tree(
4703 path!("/worktree-a"),
4704 json!({
4705 "haystack.rs": r#"// NEEDLE"#,
4706 "haystack.ts": r#"// NEEDLE"#,
4707 }),
4708 )
4709 .await;
4710 fs.insert_tree(
4711 path!("/worktree-b"),
4712 json!({
4713 "haystack.rs": r#"// NEEDLE"#,
4714 "haystack.ts": r#"// NEEDLE"#,
4715 }),
4716 )
4717 .await;
4718
4719 let project = Project::test(
4720 fs.clone(),
4721 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
4722 cx,
4723 )
4724 .await;
4725
4726 assert_eq!(
4727 search(
4728 &project,
4729 SearchQuery::text(
4730 "NEEDLE",
4731 false,
4732 true,
4733 false,
4734 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4735 Default::default(),
4736 None,
4737 )
4738 .unwrap(),
4739 cx
4740 )
4741 .await
4742 .unwrap(),
4743 HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
4744 "should only return results from included worktree"
4745 );
4746 assert_eq!(
4747 search(
4748 &project,
4749 SearchQuery::text(
4750 "NEEDLE",
4751 false,
4752 true,
4753 false,
4754 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4755 Default::default(),
4756 None,
4757 )
4758 .unwrap(),
4759 cx
4760 )
4761 .await
4762 .unwrap(),
4763 HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
4764 "should only return results from included worktree"
4765 );
4766
4767 assert_eq!(
4768 search(
4769 &project,
4770 SearchQuery::text(
4771 "NEEDLE",
4772 false,
4773 true,
4774 false,
4775 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4776 Default::default(),
4777 None,
4778 )
4779 .unwrap(),
4780 cx
4781 )
4782 .await
4783 .unwrap(),
4784 HashMap::from_iter([
4785 (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
4786 (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
4787 ]),
4788 "should return results from both worktrees"
4789 );
4790}
4791
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A repo whose .gitignore hides `target/` (anywhere) and the top-level
    // `node_modules/`; the query string "key" appears in every file.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Fourth flag is false here and true in the next search — it appears to
    // be "include ignored files" (TODO confirm against SearchQuery::text).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): a fresh project is built for each search below —
    // presumably to reset worktree scan state between runs; confirm.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/package.json").to_string(), vec![8..11]),
            (separator!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                separator!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                separator!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                separator!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                separator!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusions/exclusions should compose with the ignored-files flag:
    // include only the (ignored) prettier dir, then exclude its .ts file.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            separator!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4911
4912#[gpui::test]
4913async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4914 init_test(cx);
4915
4916 let fs = FakeFs::new(cx.executor().clone());
4917 fs.insert_tree(
4918 "/one/two",
4919 json!({
4920 "three": {
4921 "a.txt": "",
4922 "four": {}
4923 },
4924 "c.rs": ""
4925 }),
4926 )
4927 .await;
4928
4929 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
4930 project
4931 .update(cx, |project, cx| {
4932 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4933 project.create_entry((id, "b.."), true, cx)
4934 })
4935 .await
4936 .unwrap()
4937 .to_included()
4938 .unwrap();
4939
4940 // Can't create paths outside the project
4941 let result = project
4942 .update(cx, |project, cx| {
4943 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4944 project.create_entry((id, "../../boop"), true, cx)
4945 })
4946 .await;
4947 assert!(result.is_err());
4948
4949 // Can't create paths with '..'
4950 let result = project
4951 .update(cx, |project, cx| {
4952 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4953 project.create_entry((id, "four/../beep"), true, cx)
4954 })
4955 .await;
4956 assert!(result.is_err());
4957
4958 assert_eq!(
4959 fs.paths(true),
4960 vec![
4961 PathBuf::from(path!("/")),
4962 PathBuf::from(path!("/one")),
4963 PathBuf::from(path!("/one/two")),
4964 PathBuf::from(path!("/one/two/c.rs")),
4965 PathBuf::from(path!("/one/two/three")),
4966 PathBuf::from(path!("/one/two/three/a.txt")),
4967 PathBuf::from(path!("/one/two/three/b..")),
4968 PathBuf::from(path!("/one/two/three/four")),
4969 ]
4970 );
4971
4972 // And we cannot open buffers with '..'
4973 let result = project
4974 .update(cx, |project, cx| {
4975 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4976 project.open_buffer((id, "../c.rs"), cx)
4977 })
4978 .await;
4979 assert!(result.is_err())
4980}
4981
4982#[gpui::test]
4983async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
4984 init_test(cx);
4985
4986 let fs = FakeFs::new(cx.executor());
4987 fs.insert_tree(
4988 path!("/dir"),
4989 json!({
4990 "a.tsx": "a",
4991 }),
4992 )
4993 .await;
4994
4995 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4996
4997 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4998 language_registry.add(tsx_lang());
4999 let language_server_names = [
5000 "TypeScriptServer",
5001 "TailwindServer",
5002 "ESLintServer",
5003 "NoHoverCapabilitiesServer",
5004 ];
5005 let mut language_servers = [
5006 language_registry.register_fake_lsp(
5007 "tsx",
5008 FakeLspAdapter {
5009 name: language_server_names[0],
5010 capabilities: lsp::ServerCapabilities {
5011 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5012 ..lsp::ServerCapabilities::default()
5013 },
5014 ..FakeLspAdapter::default()
5015 },
5016 ),
5017 language_registry.register_fake_lsp(
5018 "tsx",
5019 FakeLspAdapter {
5020 name: language_server_names[1],
5021 capabilities: lsp::ServerCapabilities {
5022 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5023 ..lsp::ServerCapabilities::default()
5024 },
5025 ..FakeLspAdapter::default()
5026 },
5027 ),
5028 language_registry.register_fake_lsp(
5029 "tsx",
5030 FakeLspAdapter {
5031 name: language_server_names[2],
5032 capabilities: lsp::ServerCapabilities {
5033 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5034 ..lsp::ServerCapabilities::default()
5035 },
5036 ..FakeLspAdapter::default()
5037 },
5038 ),
5039 language_registry.register_fake_lsp(
5040 "tsx",
5041 FakeLspAdapter {
5042 name: language_server_names[3],
5043 capabilities: lsp::ServerCapabilities {
5044 hover_provider: None,
5045 ..lsp::ServerCapabilities::default()
5046 },
5047 ..FakeLspAdapter::default()
5048 },
5049 ),
5050 ];
5051
5052 let (buffer, _handle) = project
5053 .update(cx, |p, cx| {
5054 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5055 })
5056 .await
5057 .unwrap();
5058 cx.executor().run_until_parked();
5059
5060 let mut servers_with_hover_requests = HashMap::default();
5061 for i in 0..language_server_names.len() {
5062 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
5063 panic!(
5064 "Failed to get language server #{i} with name {}",
5065 &language_server_names[i]
5066 )
5067 });
5068 let new_server_name = new_server.server.name();
5069 assert!(
5070 !servers_with_hover_requests.contains_key(&new_server_name),
5071 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5072 );
5073 match new_server_name.as_ref() {
5074 "TailwindServer" | "TypeScriptServer" => {
5075 servers_with_hover_requests.insert(
5076 new_server_name.clone(),
5077 new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
5078 let name = new_server_name.clone();
5079 async move {
5080 Ok(Some(lsp::Hover {
5081 contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
5082 format!("{name} hover"),
5083 )),
5084 range: None,
5085 }))
5086 }
5087 }),
5088 );
5089 }
5090 "ESLintServer" => {
5091 servers_with_hover_requests.insert(
5092 new_server_name,
5093 new_server.handle_request::<lsp::request::HoverRequest, _, _>(
5094 |_, _| async move { Ok(None) },
5095 ),
5096 );
5097 }
5098 "NoHoverCapabilitiesServer" => {
5099 let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
5100 |_, _| async move {
5101 panic!(
5102 "Should not call for hovers server with no corresponding capabilities"
5103 )
5104 },
5105 );
5106 }
5107 unexpected => panic!("Unexpected server name: {unexpected}"),
5108 }
5109 }
5110
5111 let hover_task = project.update(cx, |project, cx| {
5112 project.hover(&buffer, Point::new(0, 0), cx)
5113 });
5114 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
5115 |mut hover_request| async move {
5116 hover_request
5117 .next()
5118 .await
5119 .expect("All hover requests should have been triggered")
5120 },
5121 ))
5122 .await;
5123 assert_eq!(
5124 vec!["TailwindServer hover", "TypeScriptServer hover"],
5125 hover_task
5126 .await
5127 .into_iter()
5128 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5129 .sorted()
5130 .collect::<Vec<_>>(),
5131 "Should receive hover responses from all related servers with hover capabilities"
5132 );
5133}
5134
5135#[gpui::test]
5136async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5137 init_test(cx);
5138
5139 let fs = FakeFs::new(cx.executor());
5140 fs.insert_tree(
5141 path!("/dir"),
5142 json!({
5143 "a.ts": "a",
5144 }),
5145 )
5146 .await;
5147
5148 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5149
5150 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5151 language_registry.add(typescript_lang());
5152 let mut fake_language_servers = language_registry.register_fake_lsp(
5153 "TypeScript",
5154 FakeLspAdapter {
5155 capabilities: lsp::ServerCapabilities {
5156 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5157 ..lsp::ServerCapabilities::default()
5158 },
5159 ..FakeLspAdapter::default()
5160 },
5161 );
5162
5163 let (buffer, _handle) = project
5164 .update(cx, |p, cx| {
5165 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5166 })
5167 .await
5168 .unwrap();
5169 cx.executor().run_until_parked();
5170
5171 let fake_server = fake_language_servers
5172 .next()
5173 .await
5174 .expect("failed to get the language server");
5175
5176 let mut request_handled =
5177 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
5178 Ok(Some(lsp::Hover {
5179 contents: lsp::HoverContents::Array(vec![
5180 lsp::MarkedString::String("".to_string()),
5181 lsp::MarkedString::String(" ".to_string()),
5182 lsp::MarkedString::String("\n\n\n".to_string()),
5183 ]),
5184 range: None,
5185 }))
5186 });
5187
5188 let hover_task = project.update(cx, |project, cx| {
5189 project.hover(&buffer, Point::new(0, 0), cx)
5190 });
5191 let () = request_handled
5192 .next()
5193 .await
5194 .expect("All hover requests should have been triggered");
5195 assert_eq!(
5196 Vec::<String>::new(),
5197 hover_task
5198 .await
5199 .into_iter()
5200 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5201 .sorted()
5202 .collect::<Vec<_>>(),
5203 "Empty hover parts should be ignored"
5204 );
5205}
5206
5207#[gpui::test]
5208async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
5209 init_test(cx);
5210
5211 let fs = FakeFs::new(cx.executor());
5212 fs.insert_tree(
5213 path!("/dir"),
5214 json!({
5215 "a.ts": "a",
5216 }),
5217 )
5218 .await;
5219
5220 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5221
5222 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5223 language_registry.add(typescript_lang());
5224 let mut fake_language_servers = language_registry.register_fake_lsp(
5225 "TypeScript",
5226 FakeLspAdapter {
5227 capabilities: lsp::ServerCapabilities {
5228 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5229 ..lsp::ServerCapabilities::default()
5230 },
5231 ..FakeLspAdapter::default()
5232 },
5233 );
5234
5235 let (buffer, _handle) = project
5236 .update(cx, |p, cx| {
5237 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5238 })
5239 .await
5240 .unwrap();
5241 cx.executor().run_until_parked();
5242
5243 let fake_server = fake_language_servers
5244 .next()
5245 .await
5246 .expect("failed to get the language server");
5247
5248 let mut request_handled = fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5249 move |_, _| async move {
5250 Ok(Some(vec![
5251 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5252 title: "organize imports".to_string(),
5253 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
5254 ..lsp::CodeAction::default()
5255 }),
5256 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5257 title: "fix code".to_string(),
5258 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
5259 ..lsp::CodeAction::default()
5260 }),
5261 ]))
5262 },
5263 );
5264
5265 let code_actions_task = project.update(cx, |project, cx| {
5266 project.code_actions(
5267 &buffer,
5268 0..buffer.read(cx).len(),
5269 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
5270 cx,
5271 )
5272 });
5273
5274 let () = request_handled
5275 .next()
5276 .await
5277 .expect("The code action request should have been triggered");
5278
5279 let code_actions = code_actions_task.await.unwrap();
5280 assert_eq!(code_actions.len(), 1);
5281 assert_eq!(
5282 code_actions[0].lsp_action.kind,
5283 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
5284 );
5285}
5286
5287#[gpui::test]
5288async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5289 init_test(cx);
5290
5291 let fs = FakeFs::new(cx.executor());
5292 fs.insert_tree(
5293 path!("/dir"),
5294 json!({
5295 "a.tsx": "a",
5296 }),
5297 )
5298 .await;
5299
5300 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5301
5302 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5303 language_registry.add(tsx_lang());
5304 let language_server_names = [
5305 "TypeScriptServer",
5306 "TailwindServer",
5307 "ESLintServer",
5308 "NoActionsCapabilitiesServer",
5309 ];
5310
5311 let mut language_server_rxs = [
5312 language_registry.register_fake_lsp(
5313 "tsx",
5314 FakeLspAdapter {
5315 name: language_server_names[0],
5316 capabilities: lsp::ServerCapabilities {
5317 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5318 ..lsp::ServerCapabilities::default()
5319 },
5320 ..FakeLspAdapter::default()
5321 },
5322 ),
5323 language_registry.register_fake_lsp(
5324 "tsx",
5325 FakeLspAdapter {
5326 name: language_server_names[1],
5327 capabilities: lsp::ServerCapabilities {
5328 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5329 ..lsp::ServerCapabilities::default()
5330 },
5331 ..FakeLspAdapter::default()
5332 },
5333 ),
5334 language_registry.register_fake_lsp(
5335 "tsx",
5336 FakeLspAdapter {
5337 name: language_server_names[2],
5338 capabilities: lsp::ServerCapabilities {
5339 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5340 ..lsp::ServerCapabilities::default()
5341 },
5342 ..FakeLspAdapter::default()
5343 },
5344 ),
5345 language_registry.register_fake_lsp(
5346 "tsx",
5347 FakeLspAdapter {
5348 name: language_server_names[3],
5349 capabilities: lsp::ServerCapabilities {
5350 code_action_provider: None,
5351 ..lsp::ServerCapabilities::default()
5352 },
5353 ..FakeLspAdapter::default()
5354 },
5355 ),
5356 ];
5357
5358 let (buffer, _handle) = project
5359 .update(cx, |p, cx| {
5360 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5361 })
5362 .await
5363 .unwrap();
5364 cx.executor().run_until_parked();
5365
5366 let mut servers_with_actions_requests = HashMap::default();
5367 for i in 0..language_server_names.len() {
5368 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5369 panic!(
5370 "Failed to get language server #{i} with name {}",
5371 &language_server_names[i]
5372 )
5373 });
5374 let new_server_name = new_server.server.name();
5375
5376 assert!(
5377 !servers_with_actions_requests.contains_key(&new_server_name),
5378 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5379 );
5380 match new_server_name.0.as_ref() {
5381 "TailwindServer" | "TypeScriptServer" => {
5382 servers_with_actions_requests.insert(
5383 new_server_name.clone(),
5384 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5385 move |_, _| {
5386 let name = new_server_name.clone();
5387 async move {
5388 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5389 lsp::CodeAction {
5390 title: format!("{name} code action"),
5391 ..lsp::CodeAction::default()
5392 },
5393 )]))
5394 }
5395 },
5396 ),
5397 );
5398 }
5399 "ESLintServer" => {
5400 servers_with_actions_requests.insert(
5401 new_server_name,
5402 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5403 |_, _| async move { Ok(None) },
5404 ),
5405 );
5406 }
5407 "NoActionsCapabilitiesServer" => {
5408 let _never_handled = new_server
5409 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5410 panic!(
5411 "Should not call for code actions server with no corresponding capabilities"
5412 )
5413 });
5414 }
5415 unexpected => panic!("Unexpected server name: {unexpected}"),
5416 }
5417 }
5418
5419 let code_actions_task = project.update(cx, |project, cx| {
5420 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5421 });
5422
5423 // cx.run_until_parked();
5424 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5425 |mut code_actions_request| async move {
5426 code_actions_request
5427 .next()
5428 .await
5429 .expect("All code actions requests should have been triggered")
5430 },
5431 ))
5432 .await;
5433 assert_eq!(
5434 vec!["TailwindServer code action", "TypeScriptServer code action"],
5435 code_actions_task
5436 .await
5437 .unwrap()
5438 .into_iter()
5439 .map(|code_action| code_action.lsp_action.title)
5440 .sorted()
5441 .collect::<Vec<_>>(),
5442 "Should receive code actions responses from all related servers with hover capabilities"
5443 );
5444}
5445
5446#[gpui::test]
5447async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5448 init_test(cx);
5449
5450 let fs = FakeFs::new(cx.executor());
5451 fs.insert_tree(
5452 "/dir",
5453 json!({
5454 "a.rs": "let a = 1;",
5455 "b.rs": "let b = 2;",
5456 "c.rs": "let c = 2;",
5457 }),
5458 )
5459 .await;
5460
5461 let project = Project::test(
5462 fs,
5463 [
5464 "/dir/a.rs".as_ref(),
5465 "/dir/b.rs".as_ref(),
5466 "/dir/c.rs".as_ref(),
5467 ],
5468 cx,
5469 )
5470 .await;
5471
5472 // check the initial state and get the worktrees
5473 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5474 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5475 assert_eq!(worktrees.len(), 3);
5476
5477 let worktree_a = worktrees[0].read(cx);
5478 let worktree_b = worktrees[1].read(cx);
5479 let worktree_c = worktrees[2].read(cx);
5480
5481 // check they start in the right order
5482 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5483 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5484 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5485
5486 (
5487 worktrees[0].clone(),
5488 worktrees[1].clone(),
5489 worktrees[2].clone(),
5490 )
5491 });
5492
5493 // move first worktree to after the second
5494 // [a, b, c] -> [b, a, c]
5495 project
5496 .update(cx, |project, cx| {
5497 let first = worktree_a.read(cx);
5498 let second = worktree_b.read(cx);
5499 project.move_worktree(first.id(), second.id(), cx)
5500 })
5501 .expect("moving first after second");
5502
5503 // check the state after moving
5504 project.update(cx, |project, cx| {
5505 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5506 assert_eq!(worktrees.len(), 3);
5507
5508 let first = worktrees[0].read(cx);
5509 let second = worktrees[1].read(cx);
5510 let third = worktrees[2].read(cx);
5511
5512 // check they are now in the right order
5513 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5514 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5515 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5516 });
5517
5518 // move the second worktree to before the first
5519 // [b, a, c] -> [a, b, c]
5520 project
5521 .update(cx, |project, cx| {
5522 let second = worktree_a.read(cx);
5523 let first = worktree_b.read(cx);
5524 project.move_worktree(first.id(), second.id(), cx)
5525 })
5526 .expect("moving second before first");
5527
5528 // check the state after moving
5529 project.update(cx, |project, cx| {
5530 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5531 assert_eq!(worktrees.len(), 3);
5532
5533 let first = worktrees[0].read(cx);
5534 let second = worktrees[1].read(cx);
5535 let third = worktrees[2].read(cx);
5536
5537 // check they are now in the right order
5538 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5539 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5540 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5541 });
5542
5543 // move the second worktree to after the third
5544 // [a, b, c] -> [a, c, b]
5545 project
5546 .update(cx, |project, cx| {
5547 let second = worktree_b.read(cx);
5548 let third = worktree_c.read(cx);
5549 project.move_worktree(second.id(), third.id(), cx)
5550 })
5551 .expect("moving second after third");
5552
5553 // check the state after moving
5554 project.update(cx, |project, cx| {
5555 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5556 assert_eq!(worktrees.len(), 3);
5557
5558 let first = worktrees[0].read(cx);
5559 let second = worktrees[1].read(cx);
5560 let third = worktrees[2].read(cx);
5561
5562 // check they are now in the right order
5563 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5564 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5565 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5566 });
5567
5568 // move the third worktree to before the second
5569 // [a, c, b] -> [a, b, c]
5570 project
5571 .update(cx, |project, cx| {
5572 let third = worktree_c.read(cx);
5573 let second = worktree_b.read(cx);
5574 project.move_worktree(third.id(), second.id(), cx)
5575 })
5576 .expect("moving third before second");
5577
5578 // check the state after moving
5579 project.update(cx, |project, cx| {
5580 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5581 assert_eq!(worktrees.len(), 3);
5582
5583 let first = worktrees[0].read(cx);
5584 let second = worktrees[1].read(cx);
5585 let third = worktrees[2].read(cx);
5586
5587 // check they are now in the right order
5588 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5589 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5590 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5591 });
5592
5593 // move the first worktree to after the third
5594 // [a, b, c] -> [b, c, a]
5595 project
5596 .update(cx, |project, cx| {
5597 let first = worktree_a.read(cx);
5598 let third = worktree_c.read(cx);
5599 project.move_worktree(first.id(), third.id(), cx)
5600 })
5601 .expect("moving first after third");
5602
5603 // check the state after moving
5604 project.update(cx, |project, cx| {
5605 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5606 assert_eq!(worktrees.len(), 3);
5607
5608 let first = worktrees[0].read(cx);
5609 let second = worktrees[1].read(cx);
5610 let third = worktrees[2].read(cx);
5611
5612 // check they are now in the right order
5613 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5614 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5615 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5616 });
5617
5618 // move the third worktree to before the first
5619 // [b, c, a] -> [a, b, c]
5620 project
5621 .update(cx, |project, cx| {
5622 let third = worktree_a.read(cx);
5623 let first = worktree_b.read(cx);
5624 project.move_worktree(third.id(), first.id(), cx)
5625 })
5626 .expect("moving third before first");
5627
5628 // check the state after moving
5629 project.update(cx, |project, cx| {
5630 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5631 assert_eq!(worktrees.len(), 3);
5632
5633 let first = worktrees[0].read(cx);
5634 let second = worktrees[1].read(cx);
5635 let third = worktrees[2].read(cx);
5636
5637 // check they are now in the right order
5638 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5639 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5640 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5641 });
5642}
5643
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Contents of the file as staged in the git index.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Contents of the working copy: one added comment line and one modified
    // println relative to the index.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // Seed the fake repository's index with the staged contents.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    // An unstaged diff compares the working copy against the index.
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Let the background diff recalculation settle before asserting.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        // Expect one added hunk (the comment) and one modified hunk
        // (the changed println).
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Re-stage the file so that the comment is in the index but the println
    // is not; only the println should remain as an (added) hunk.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    // The diff should recompute against the updated index contents.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().unwrap().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
5741
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of the same file: HEAD, the index, and the working copy.
    // The println change is staged; the comment exists only in the working copy.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    // Register Rust so the buffer — and the diff's base text — get a language.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    // An uncommitted diff compares the working copy against HEAD.
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The diff's base text should inherit the buffer's language.
    uncommitted_diff.read_with(cx, |diff, _| {
        assert_eq!(
            diff.base_text().and_then(|base| base.language().cloned()),
            Some(language)
        )
    });

    cx.run_until_parked();
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        // The comment is not in the index, so its hunk carries a secondary
        // (unstaged) hunk; the println change is already staged, so it has
        // no secondary status.
        assert_hunks(
            uncommitted_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &uncommitted_diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Move HEAD forward so the comment is committed but the println is not;
    // only the println should remain as an added hunk.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents)],
    );

    cx.run_until_parked();
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &uncommitted_diff.base_text().unwrap().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
5864
#[gpui::test]
async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD version vs. the working copy: one modified line.
    let committed_contents = r#"
        fn main() {
            println!("hello from HEAD");
        }
    "#
    .unindent();
    let file_contents = r#"
        fn main() {
            println!("hello from the working copy");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents)],
    );

    // Open the project on a single file (not a directory), exercising diff
    // support for single-file worktrees.
    let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Let the diff recalculation settle, then expect a single modified hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &uncommitted_diff.base_text_string().unwrap(),
            &[(
                1..2,
                "    println!(\"hello from HEAD\");\n",
                "    println!(\"hello from the working copy\");\n",
                DiffHunkStatus::modified_none(),
            )],
        );
    });
}
5930
5931async fn search(
5932 project: &Entity<Project>,
5933 query: SearchQuery,
5934 cx: &mut gpui::TestAppContext,
5935) -> Result<HashMap<String, Vec<Range<usize>>>> {
5936 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
5937 let mut results = HashMap::default();
5938 while let Ok(search_result) = search_rx.recv().await {
5939 match search_result {
5940 SearchResult::Buffer { buffer, ranges } => {
5941 results.entry(buffer).or_insert(ranges);
5942 }
5943 SearchResult::LimitReached => {}
5944 }
5945 }
5946 Ok(results
5947 .into_iter()
5948 .map(|(buffer, ranges)| {
5949 buffer.update(cx, |buffer, cx| {
5950 let path = buffer
5951 .file()
5952 .unwrap()
5953 .full_path(cx)
5954 .to_string_lossy()
5955 .to_string();
5956 let ranges = ranges
5957 .into_iter()
5958 .map(|range| range.to_offset(buffer))
5959 .collect::<Vec<_>>();
5960 (path, ranges)
5961 })
5962 })
5963 .collect())
5964}
5965
5966pub fn init_test(cx: &mut gpui::TestAppContext) {
5967 if std::env::var("RUST_LOG").is_ok() {
5968 env_logger::try_init().ok();
5969 }
5970
5971 cx.update(|cx| {
5972 let settings_store = SettingsStore::test(cx);
5973 cx.set_global(settings_store);
5974 release_channel::init(SemanticVersion::default(), cx);
5975 language::init(cx);
5976 Project::init_settings(cx);
5977 });
5978}
5979
5980fn json_lang() -> Arc<Language> {
5981 Arc::new(Language::new(
5982 LanguageConfig {
5983 name: "JSON".into(),
5984 matcher: LanguageMatcher {
5985 path_suffixes: vec!["json".to_string()],
5986 ..Default::default()
5987 },
5988 ..Default::default()
5989 },
5990 None,
5991 ))
5992}
5993
5994fn js_lang() -> Arc<Language> {
5995 Arc::new(Language::new(
5996 LanguageConfig {
5997 name: "JavaScript".into(),
5998 matcher: LanguageMatcher {
5999 path_suffixes: vec!["js".to_string()],
6000 ..Default::default()
6001 },
6002 ..Default::default()
6003 },
6004 None,
6005 ))
6006}
6007
6008fn rust_lang() -> Arc<Language> {
6009 Arc::new(Language::new(
6010 LanguageConfig {
6011 name: "Rust".into(),
6012 matcher: LanguageMatcher {
6013 path_suffixes: vec!["rs".to_string()],
6014 ..Default::default()
6015 },
6016 ..Default::default()
6017 },
6018 Some(tree_sitter_rust::LANGUAGE.into()),
6019 ))
6020}
6021
6022fn typescript_lang() -> Arc<Language> {
6023 Arc::new(Language::new(
6024 LanguageConfig {
6025 name: "TypeScript".into(),
6026 matcher: LanguageMatcher {
6027 path_suffixes: vec!["ts".to_string()],
6028 ..Default::default()
6029 },
6030 ..Default::default()
6031 },
6032 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
6033 ))
6034}
6035
6036fn tsx_lang() -> Arc<Language> {
6037 Arc::new(Language::new(
6038 LanguageConfig {
6039 name: "tsx".into(),
6040 matcher: LanguageMatcher {
6041 path_suffixes: vec!["tsx".to_string()],
6042 ..Default::default()
6043 },
6044 ..Default::default()
6045 },
6046 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
6047 ))
6048}
6049
6050fn get_all_tasks(
6051 project: &Entity<Project>,
6052 worktree_id: Option<WorktreeId>,
6053 task_context: &TaskContext,
6054 cx: &mut App,
6055) -> Vec<(TaskSourceKind, ResolvedTask)> {
6056 let (mut old, new) = project.update(cx, |project, cx| {
6057 project
6058 .task_store
6059 .read(cx)
6060 .task_inventory()
6061 .unwrap()
6062 .read(cx)
6063 .used_and_current_resolved_tasks(worktree_id, None, task_context, cx)
6064 });
6065 old.extend(new);
6066 old
6067}