1use crate::{Event, *};
2use ::git::diff::assert_hunks;
3use fs::FakeFs;
4use futures::{future, StreamExt};
5use gpui::{AppContext, SemanticVersion, UpdateGlobal};
6use http_client::Url;
7use language::{
8 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
9 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
10 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
11 OffsetRangeExt, Point, ToPoint,
12};
13use lsp::{
14 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
15 NumberOrString, TextDocumentEdit, WillRenameFiles,
16};
17use parking_lot::Mutex;
18use pretty_assertions::{assert_eq, assert_matches};
19use serde_json::json;
20#[cfg(not(windows))]
21use std::os;
22use std::{str::FromStr, sync::OnceLock};
23
24use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
25use task::{ResolvedTask, TaskContext};
26use unindent::Unindent as _;
27use util::{
28 assert_set_eq,
29 paths::{replace_path_separator, PathMatcher},
30 test::temp_tree,
31 TryFutureExt as _,
32};
33
34#[gpui::test]
35async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
36 cx.executor().allow_parking();
37
38 let (tx, mut rx) = futures::channel::mpsc::unbounded();
39 let _thread = std::thread::spawn(move || {
40 std::fs::metadata("/tmp").unwrap();
41 std::thread::sleep(Duration::from_millis(1000));
42 tx.unbounded_send(1).unwrap();
43 });
44 rx.next().await.unwrap();
45}
46
47#[gpui::test]
48async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
49 cx.executor().allow_parking();
50
51 let io_task = smol::unblock(move || {
52 println!("sleeping on thread {:?}", std::thread::current().id());
53 std::thread::sleep(Duration::from_millis(10));
54 1
55 });
56
57 let task = cx.foreground_executor().spawn(async move {
58 io_task.await;
59 });
60
61 task.await;
62}
63
64#[cfg(not(windows))]
65#[gpui::test]
66async fn test_symlinks(cx: &mut gpui::TestAppContext) {
67 init_test(cx);
68 cx.executor().allow_parking();
69
70 let dir = temp_tree(json!({
71 "root": {
72 "apple": "",
73 "banana": {
74 "carrot": {
75 "date": "",
76 "endive": "",
77 }
78 },
79 "fennel": {
80 "grape": "",
81 }
82 }
83 }));
84
85 let root_link_path = dir.path().join("root_link");
86 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
87 os::unix::fs::symlink(
88 dir.path().join("root/fennel"),
89 dir.path().join("root/finnochio"),
90 )
91 .unwrap();
92
93 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
94
95 project.update(cx, |project, cx| {
96 let tree = project.worktrees(cx).next().unwrap().read(cx);
97 assert_eq!(tree.file_count(), 5);
98 assert_eq!(
99 tree.inode_for_path("fennel/grape"),
100 tree.inode_for_path("finnochio/grape")
101 );
102 });
103}
104
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Fixture layout: a root .editorconfig (tabs, size 3 for *.rs; tab_width 10
    // for *.js), Zed project settings in .zed/settings.json that disagree with
    // it, and a nested b/.editorconfig that overrides indent_size for *.rs.
    let dir = temp_tree(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into a FakeFs so the project watches it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let the worktree scan and the settings observers settle.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by .editorconfig's glob "*.rs",
        // so it falls back to the .zed/settings.json tab_size of 8.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
194
// Verifies that .zed/settings.json and .zed/tasks.json are discovered at both
// the worktree root and in subdirectories, that the nested settings win for
// files under them, and that scheduling a task re-sorts task lists so the most
// recently used task comes first (with file-based global tasks appended last).
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    // Let the local settings/tasks observers pick up the .zed directories.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // Source kind for tasks declared in the worktree-root ".zed" directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolution: a/a.rs sees the root settings (tab_size 8),
            // b/b.rs sees the nested b/.zed override (tab_size 2).
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Before anything is scheduled, tasks from the nested directory sort ahead
    // of the root ones.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root ".zed" task as recently used, and register a file-based
    // (global) task with args and an env var.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task now sorts first; the global file-based task
    // appears last with its resolved args and env.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
388
// End-to-end lifecycle test for language-server management: servers start
// lazily when a matching buffer opens, buffers are configured from server
// capabilities, edits/saves/renames are routed only to matching servers,
// renames across extensions move a buffer between servers (resetting its
// document version and clearing stale diagnostics), restarts re-open all
// relevant documents, and closing a buffer notifies only its own server.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Two fake servers with distinct completion triggers and save support,
    // so capability-based buffer configuration can be distinguished below.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed the renamed buffer with a diagnostic so we can observe it being
    // cleared when the buffer's language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
786
// Verifies workspace/didChangeWatchedFiles handling: gitignored directories are
// not scanned until a language server registers a watcher whose glob reaches
// into them, and subsequent FS mutations produce FileEvents only for paths
// matching the registered watch patterns.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // On Windows absolute paths need a drive prefix; elsewhere use the path as-is.
    fn add_root_for_windows(path: &str) -> String {
        if cfg!(windows) {
            format!("C:{}", path)
        } else {
            path.to_string()
        }
    }

    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        add_root_for_windows("/the-root"),
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [add_root_for_windows("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(add_root_for_windows("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for asserting how many extra directory reads the watcher
    // registration triggers below.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(add_root_for_windows(
                                    "/the-root/Cargo.toml",
                                )),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(add_root_for_windows(
                                    "/the-root/src/*.{rs,c}",
                                )),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(add_root_for_windows(
                                    "/the-root/target/y/**/*.rs",
                                )),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Accumulate change notifications, sorted by URI for stable assertions.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering the watchers alone produces no change events, but scanning
    // into target/ for the new glob costs exactly 4 extra read_dir calls.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(
        add_root_for_windows("/the-root/src/c.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        add_root_for_windows("/the-root/src/d.txt").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.remove_file(
        add_root_for_windows("/the-root/src/b.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        add_root_for_windows("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        add_root_for_windows("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(add_root_for_windows("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(add_root_for_windows("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(add_root_for_windows("/the-root/target/y/out/y2.rs"))
                    .unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
1004
1005#[gpui::test]
1006async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1007 init_test(cx);
1008
1009 let fs = FakeFs::new(cx.executor());
1010 fs.insert_tree(
1011 "/dir",
1012 json!({
1013 "a.rs": "let a = 1;",
1014 "b.rs": "let b = 2;"
1015 }),
1016 )
1017 .await;
1018
1019 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
1020 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1021
1022 let buffer_a = project
1023 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1024 .await
1025 .unwrap();
1026 let buffer_b = project
1027 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1028 .await
1029 .unwrap();
1030
1031 lsp_store.update(cx, |lsp_store, cx| {
1032 lsp_store
1033 .update_diagnostics(
1034 LanguageServerId(0),
1035 lsp::PublishDiagnosticsParams {
1036 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1037 version: None,
1038 diagnostics: vec![lsp::Diagnostic {
1039 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1040 severity: Some(lsp::DiagnosticSeverity::ERROR),
1041 message: "error 1".to_string(),
1042 ..Default::default()
1043 }],
1044 },
1045 &[],
1046 cx,
1047 )
1048 .unwrap();
1049 lsp_store
1050 .update_diagnostics(
1051 LanguageServerId(0),
1052 lsp::PublishDiagnosticsParams {
1053 uri: Url::from_file_path("/dir/b.rs").unwrap(),
1054 version: None,
1055 diagnostics: vec![lsp::Diagnostic {
1056 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1057 severity: Some(DiagnosticSeverity::WARNING),
1058 message: "error 2".to_string(),
1059 ..Default::default()
1060 }],
1061 },
1062 &[],
1063 cx,
1064 )
1065 .unwrap();
1066 });
1067
1068 buffer_a.update(cx, |buffer, _| {
1069 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1070 assert_eq!(
1071 chunks
1072 .iter()
1073 .map(|(s, d)| (s.as_str(), *d))
1074 .collect::<Vec<_>>(),
1075 &[
1076 ("let ", None),
1077 ("a", Some(DiagnosticSeverity::ERROR)),
1078 (" = 1;", None),
1079 ]
1080 );
1081 });
1082 buffer_b.update(cx, |buffer, _| {
1083 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1084 assert_eq!(
1085 chunks
1086 .iter()
1087 .map(|(s, d)| (s.as_str(), *d))
1088 .collect::<Vec<_>>(),
1089 &[
1090 ("let ", None),
1091 ("b", Some(DiagnosticSeverity::WARNING)),
1092 (" = 2;", None),
1093 ]
1094 );
1095 });
1096}
1097
1098#[gpui::test]
1099async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1100 init_test(cx);
1101
1102 let fs = FakeFs::new(cx.executor());
1103 fs.insert_tree(
1104 "/root",
1105 json!({
1106 "dir": {
1107 ".git": {
1108 "HEAD": "ref: refs/heads/main",
1109 },
1110 ".gitignore": "b.rs",
1111 "a.rs": "let a = 1;",
1112 "b.rs": "let b = 2;",
1113 },
1114 "other.rs": "let b = c;"
1115 }),
1116 )
1117 .await;
1118
1119 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1120 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1121 let (worktree, _) = project
1122 .update(cx, |project, cx| {
1123 project.find_or_create_worktree("/root/dir", true, cx)
1124 })
1125 .await
1126 .unwrap();
1127 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1128
1129 let (worktree, _) = project
1130 .update(cx, |project, cx| {
1131 project.find_or_create_worktree("/root/other.rs", false, cx)
1132 })
1133 .await
1134 .unwrap();
1135 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1136
1137 let server_id = LanguageServerId(0);
1138 lsp_store.update(cx, |lsp_store, cx| {
1139 lsp_store
1140 .update_diagnostics(
1141 server_id,
1142 lsp::PublishDiagnosticsParams {
1143 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1144 version: None,
1145 diagnostics: vec![lsp::Diagnostic {
1146 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1147 severity: Some(lsp::DiagnosticSeverity::ERROR),
1148 message: "unused variable 'b'".to_string(),
1149 ..Default::default()
1150 }],
1151 },
1152 &[],
1153 cx,
1154 )
1155 .unwrap();
1156 lsp_store
1157 .update_diagnostics(
1158 server_id,
1159 lsp::PublishDiagnosticsParams {
1160 uri: Url::from_file_path("/root/other.rs").unwrap(),
1161 version: None,
1162 diagnostics: vec![lsp::Diagnostic {
1163 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1164 severity: Some(lsp::DiagnosticSeverity::ERROR),
1165 message: "unknown variable 'c'".to_string(),
1166 ..Default::default()
1167 }],
1168 },
1169 &[],
1170 cx,
1171 )
1172 .unwrap();
1173 });
1174
1175 let main_ignored_buffer = project
1176 .update(cx, |project, cx| {
1177 project.open_buffer((main_worktree_id, "b.rs"), cx)
1178 })
1179 .await
1180 .unwrap();
1181 main_ignored_buffer.update(cx, |buffer, _| {
1182 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1183 assert_eq!(
1184 chunks
1185 .iter()
1186 .map(|(s, d)| (s.as_str(), *d))
1187 .collect::<Vec<_>>(),
1188 &[
1189 ("let ", None),
1190 ("b", Some(DiagnosticSeverity::ERROR)),
1191 (" = 2;", None),
1192 ],
1193 "Gigitnored buffers should still get in-buffer diagnostics",
1194 );
1195 });
1196 let other_buffer = project
1197 .update(cx, |project, cx| {
1198 project.open_buffer((other_worktree_id, ""), cx)
1199 })
1200 .await
1201 .unwrap();
1202 other_buffer.update(cx, |buffer, _| {
1203 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1204 assert_eq!(
1205 chunks
1206 .iter()
1207 .map(|(s, d)| (s.as_str(), *d))
1208 .collect::<Vec<_>>(),
1209 &[
1210 ("let b = ", None),
1211 ("c", Some(DiagnosticSeverity::ERROR)),
1212 (";", None),
1213 ],
1214 "Buffers from hidden projects should still get in-buffer diagnostics"
1215 );
1216 });
1217
1218 project.update(cx, |project, cx| {
1219 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1220 assert_eq!(
1221 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1222 vec![(
1223 ProjectPath {
1224 worktree_id: main_worktree_id,
1225 path: Arc::from(Path::new("b.rs")),
1226 },
1227 server_id,
1228 DiagnosticSummary {
1229 error_count: 1,
1230 warning_count: 0,
1231 }
1232 )]
1233 );
1234 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1235 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1236 });
1237}
1238
// Verifies the project's event stream around disk-based diagnostics: the
// Started/Finished events bracket a progress token, DiagnosticsUpdated fires
// for published diagnostics, and a redundant empty publish does not emit a
// second update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/b.rs", cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress on the configured token maps to a
    // DiskBasedDiagnosticsStarted event (after the inlay-hint refresh that
    // follows server startup).
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic for a.rs emits a DiagnosticsUpdated event for
    // that path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the progress token emits DiskBasedDiagnosticsFinished.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The published diagnostic is visible in the newly opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second empty publish is a no-op: after all tasks settle, the event
    // stream must still be pending.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1374
// Verifies that restarting a language server while its disk-based diagnostics
// progress is still open does not wedge the project: the replacement server
// (id 1) drives a fresh Started/Finished cycle, and the old server's
// never-ended progress token is discarded.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server (id 1) is tracked as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1462
// Verifies that diagnostics a server published before a restart are cleared
// once the server is restarted: both the in-buffer diagnostics and the
// project-level summary return to zero.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic is visible in the buffer and counted in the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1543
1544#[gpui::test]
1545async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1546 init_test(cx);
1547
1548 let fs = FakeFs::new(cx.executor());
1549 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1550
1551 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1552 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1553
1554 language_registry.add(rust_lang());
1555 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1556
1557 let (buffer, _handle) = project
1558 .update(cx, |project, cx| {
1559 project.open_local_buffer_with_lsp("/dir/a.rs", cx)
1560 })
1561 .await
1562 .unwrap();
1563
1564 // Before restarting the server, report diagnostics with an unknown buffer version.
1565 let fake_server = fake_servers.next().await.unwrap();
1566 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1567 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1568 version: Some(10000),
1569 diagnostics: Vec::new(),
1570 });
1571 cx.executor().run_until_parked();
1572
1573 project.update(cx, |project, cx| {
1574 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1575 });
1576 let mut fake_server = fake_servers.next().await.unwrap();
1577 let notification = fake_server
1578 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1579 .await
1580 .text_document;
1581 assert_eq!(notification.version, 0);
1582}
1583
// Verifies that cancelling language-server work for a buffer sends a
// WorkDoneProgressCancel notification only for tokens whose progress was
// begun as cancellable.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // Start two progress tokens: one non-cancellable, one cancellable.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token is cancelled; "another-token" is untouched.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1648
// Verifies that toggling `enable_language_server` in the per-language settings
// stops and (re)starts only the affected server: disabling Rust exits the Rust
// server, re-enabling it starts a fresh instance, and disabling JavaScript
// exits the JS server.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/b.js", cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The new Rust server instance re-opens the still-open buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1766
// Verifies that diagnostics published against an older buffer version are
// transformed through subsequent edits: their ranges move with the text,
// overlapping diagnostics are highlighted correctly, and group ids keep
// incrementing across publishes.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let _handle = lsp_store.update(cx, |lsp_store, cx| {
        lsp_store.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (The two inserted newlines shift every diagnostic by two rows.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Note: group ids continue from the previous publish (3 and 4); the
        // wider warning sorts before the nested error at the same start point.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    // The ranges reflect the latest edits ('A' shifted right by the indent and
    // signature change; 'BB' widened by the "xxx" insertion).
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2051
2052#[gpui::test]
2053async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2054 init_test(cx);
2055
2056 let text = concat!(
2057 "let one = ;\n", //
2058 "let two = \n",
2059 "let three = 3;\n",
2060 );
2061
2062 let fs = FakeFs::new(cx.executor());
2063 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2064
2065 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2066 let buffer = project
2067 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2068 .await
2069 .unwrap();
2070
2071 project.update(cx, |project, cx| {
2072 project.lsp_store.update(cx, |lsp_store, cx| {
2073 lsp_store
2074 .update_diagnostic_entries(
2075 LanguageServerId(0),
2076 PathBuf::from("/dir/a.rs"),
2077 None,
2078 vec![
2079 DiagnosticEntry {
2080 range: Unclipped(PointUtf16::new(0, 10))
2081 ..Unclipped(PointUtf16::new(0, 10)),
2082 diagnostic: Diagnostic {
2083 severity: DiagnosticSeverity::ERROR,
2084 message: "syntax error 1".to_string(),
2085 ..Default::default()
2086 },
2087 },
2088 DiagnosticEntry {
2089 range: Unclipped(PointUtf16::new(1, 10))
2090 ..Unclipped(PointUtf16::new(1, 10)),
2091 diagnostic: Diagnostic {
2092 severity: DiagnosticSeverity::ERROR,
2093 message: "syntax error 2".to_string(),
2094 ..Default::default()
2095 },
2096 },
2097 ],
2098 cx,
2099 )
2100 .unwrap();
2101 })
2102 });
2103
2104 // An empty range is extended forward to include the following character.
2105 // At the end of a line, an empty range is extended backward to include
2106 // the preceding character.
2107 buffer.update(cx, |buffer, _| {
2108 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2109 assert_eq!(
2110 chunks
2111 .iter()
2112 .map(|(s, d)| (s.as_str(), *d))
2113 .collect::<Vec<_>>(),
2114 &[
2115 ("let one = ", None),
2116 (";", Some(DiagnosticSeverity::ERROR)),
2117 ("\nlet two =", None),
2118 (" ", Some(DiagnosticSeverity::ERROR)),
2119 ("\nlet three = 3;\n", None)
2120 ]
2121 );
2122 });
2123}
2124
2125#[gpui::test]
2126async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2127 init_test(cx);
2128
2129 let fs = FakeFs::new(cx.executor());
2130 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2131 .await;
2132
2133 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2134 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2135
2136 lsp_store.update(cx, |lsp_store, cx| {
2137 lsp_store
2138 .update_diagnostic_entries(
2139 LanguageServerId(0),
2140 Path::new("/dir/a.rs").to_owned(),
2141 None,
2142 vec![DiagnosticEntry {
2143 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2144 diagnostic: Diagnostic {
2145 severity: DiagnosticSeverity::ERROR,
2146 is_primary: true,
2147 message: "syntax error a1".to_string(),
2148 ..Default::default()
2149 },
2150 }],
2151 cx,
2152 )
2153 .unwrap();
2154 lsp_store
2155 .update_diagnostic_entries(
2156 LanguageServerId(1),
2157 Path::new("/dir/a.rs").to_owned(),
2158 None,
2159 vec![DiagnosticEntry {
2160 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2161 diagnostic: Diagnostic {
2162 severity: DiagnosticSeverity::ERROR,
2163 is_primary: true,
2164 message: "syntax error b1".to_string(),
2165 ..Default::default()
2166 },
2167 }],
2168 cx,
2169 )
2170 .unwrap();
2171
2172 assert_eq!(
2173 lsp_store.diagnostic_summary(false, cx),
2174 DiagnosticSummary {
2175 error_count: 2,
2176 warning_count: 0,
2177 }
2178 );
2179 });
2180}
2181
// Verifies that LSP edits computed against an older document version are
// correctly rebased onto the buffer's current contents after the user makes
// intervening edits, preserving both the server's changes and the user's.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the edits
    // below will be tagged with this (now-stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Interpret edits that reference positions in the *old* document version;
    // they must be rebased across the user's intervening edits above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits must preserve both the server's changes and
    // the user's intervening comments.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2336
// Verifies that a large diff-shaped set of LSP edits (rewriting most of the
// file to express a small change) is minimized down to the edits that
// actually differ from the buffer contents.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The whole-file rewrite above collapses into just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2445
// Verifies that out-of-order, inverted, and out-of-bounds LSP edit ranges are
// tolerated: ranges are normalized/clipped and the resulting edits still
// produce the intended buffer contents.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end precedes start.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The messy input is normalized down to two well-formed edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2550
2551fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2552 buffer: &Buffer,
2553 range: Range<T>,
2554) -> Vec<(String, Option<DiagnosticSeverity>)> {
2555 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2556 for chunk in buffer.snapshot().chunks(range, true) {
2557 if chunks.last().map_or(false, |prev_chunk| {
2558 prev_chunk.1 == chunk.diagnostic_severity
2559 }) {
2560 chunks.last_mut().unwrap().0.push_str(chunk.text);
2561 } else {
2562 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2563 }
2564 }
2565 chunks
2566}
2567
// Verifies go-to-definition into a file outside the open worktree: the
// target file is added as an invisible worktree for the duration of the
// result, and is removed once the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside the worktree.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/b.rs", cx)
        })
        .await
        .unwrap();

    // The fake server resolves the definition to a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is alive, a.rs exists as an invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // Dropping the definition released the target buffer, and the invisible
    // worktree for a.rs goes away with it.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2663
// Verifies that completion items lacking an explicit text edit range fall
// back to replacing the word prefix before the cursor (and, for string-like
// contexts, the partial token inside quotes).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing after a word prefix ("fqn").
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the request first; the handler below answers it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item has insert_text but no text_edit, so the old range must be inferred.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers exactly the "fqn" prefix.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal ("atoms/cmp", cursor before
    // the closing quote).
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers the "cmp" segment, excluding the closing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2755
// Verifies that carriage returns in a completion's insert_text are normalized
// to plain newlines before the completion is surfaced.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the request first; the handler below answers it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert_text mixes bare "\r" and "\r\n" line endings.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both "\r" and "\r\n" are normalized to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2816
// Verifies the command-based code-action flow: a code action with no edits is
// resolved to a command, executing the command makes the server send a
// workspace/applyEdit request back, and those edits end up in the returned
// project transaction (and are undoable as a unit).
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // resolve_provider: true forces the action to go through codeAction/resolve.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The whole server-driven edit undoes as a single step.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2951
2952#[gpui::test(iterations = 10)]
2953async fn test_save_file(cx: &mut gpui::TestAppContext) {
2954 init_test(cx);
2955
2956 let fs = FakeFs::new(cx.executor());
2957 fs.insert_tree(
2958 "/dir",
2959 json!({
2960 "file1": "the old contents",
2961 }),
2962 )
2963 .await;
2964
2965 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2966 let buffer = project
2967 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2968 .await
2969 .unwrap();
2970 buffer.update(cx, |buffer, cx| {
2971 assert_eq!(buffer.text(), "the old contents");
2972 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2973 });
2974
2975 project
2976 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2977 .await
2978 .unwrap();
2979
2980 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2981 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2982}
2983
2984#[gpui::test(iterations = 30)]
2985async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2986 init_test(cx);
2987
2988 let fs = FakeFs::new(cx.executor().clone());
2989 fs.insert_tree(
2990 "/dir",
2991 json!({
2992 "file1": "the original contents",
2993 }),
2994 )
2995 .await;
2996
2997 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2998 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2999 let buffer = project
3000 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3001 .await
3002 .unwrap();
3003
3004 // Simulate buffer diffs being slow, so that they don't complete before
3005 // the next file change occurs.
3006 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3007
3008 // Change the buffer's file on disk, and then wait for the file change
3009 // to be detected by the worktree, so that the buffer starts reloading.
3010 fs.save(
3011 "/dir/file1".as_ref(),
3012 &"the first contents".into(),
3013 Default::default(),
3014 )
3015 .await
3016 .unwrap();
3017 worktree.next_event(cx).await;
3018
3019 // Change the buffer's file again. Depending on the random seed, the
3020 // previous file change may still be in progress.
3021 fs.save(
3022 "/dir/file1".as_ref(),
3023 &"the second contents".into(),
3024 Default::default(),
3025 )
3026 .await
3027 .unwrap();
3028 worktree.next_event(cx).await;
3029
3030 cx.executor().run_until_parked();
3031 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3032 buffer.read_with(cx, |buffer, _| {
3033 assert_eq!(buffer.text(), on_disk_text);
3034 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3035 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3036 });
3037}
3038
3039#[gpui::test(iterations = 30)]
3040async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3041 init_test(cx);
3042
3043 let fs = FakeFs::new(cx.executor().clone());
3044 fs.insert_tree(
3045 "/dir",
3046 json!({
3047 "file1": "the original contents",
3048 }),
3049 )
3050 .await;
3051
3052 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3053 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3054 let buffer = project
3055 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3056 .await
3057 .unwrap();
3058
3059 // Simulate buffer diffs being slow, so that they don't complete before
3060 // the next file change occurs.
3061 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3062
3063 // Change the buffer's file on disk, and then wait for the file change
3064 // to be detected by the worktree, so that the buffer starts reloading.
3065 fs.save(
3066 "/dir/file1".as_ref(),
3067 &"the first contents".into(),
3068 Default::default(),
3069 )
3070 .await
3071 .unwrap();
3072 worktree.next_event(cx).await;
3073
3074 cx.executor()
3075 .spawn(cx.executor().simulate_random_delay())
3076 .await;
3077
3078 // Perform a noop edit, causing the buffer's version to increase.
3079 buffer.update(cx, |buffer, cx| {
3080 buffer.edit([(0..0, " ")], None, cx);
3081 buffer.undo(cx);
3082 });
3083
3084 cx.executor().run_until_parked();
3085 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3086 buffer.read_with(cx, |buffer, _| {
3087 let buffer_text = buffer.text();
3088 if buffer_text == on_disk_text {
3089 assert!(
3090 !buffer.is_dirty() && !buffer.has_conflict(),
3091 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3092 );
3093 }
3094 // If the file change occurred while the buffer was processing the first
3095 // change, the buffer will be in a conflicting state.
3096 else {
3097 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3098 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3099 }
3100 });
3101}
3102
3103#[gpui::test]
3104async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3105 init_test(cx);
3106
3107 let fs = FakeFs::new(cx.executor());
3108 fs.insert_tree(
3109 "/dir",
3110 json!({
3111 "file1": "the old contents",
3112 }),
3113 )
3114 .await;
3115
3116 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
3117 let buffer = project
3118 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3119 .await
3120 .unwrap();
3121 buffer.update(cx, |buffer, cx| {
3122 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3123 });
3124
3125 project
3126 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3127 .await
3128 .unwrap();
3129
3130 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3131 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3132}
3133
// Verifies save-as for an untitled buffer: after saving to a path, the buffer
// is clean, picks up a language from the new file name, and re-opening that
// path yields the very same buffer (deduplication).
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // Start with an untitled buffer; it has no file, so it's Plain Text.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // The .rs extension caused the buffer's language to become Rust.
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the saved path must return the same buffer entity, not a copy.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3185
// Uses a real filesystem to rename/delete files and directories under a
// worktree, then verifies: entry ids survive renames, open buffers track
// their files' new paths (or report deletion), and a remote replica of the
// worktree converges to the same state after applying the streamed updates.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real-FS watching blocks, so parking must be permitted.
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Looks up the stable worktree entry id for a path (panics if absent).
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree streams out, to replay on the
    // remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ]
    .into_iter()
    .map(replace_path_separator)
    .collect::<Vec<_>>();

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids are stable across renames (including the directory rename).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers follow their files to the new paths...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // ...except the deleted file, whose buffer keeps its old path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3348
3349#[gpui::test(iterations = 10)]
3350async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3351 init_test(cx);
3352
3353 let fs = FakeFs::new(cx.executor());
3354 fs.insert_tree(
3355 "/dir",
3356 json!({
3357 "a": {
3358 "file1": "",
3359 }
3360 }),
3361 )
3362 .await;
3363
3364 let project = Project::test(fs, [Path::new("/dir")], cx).await;
3365 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3366 let tree_id = tree.update(cx, |tree, _| tree.id());
3367
3368 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3369 project.update(cx, |project, cx| {
3370 let tree = project.worktrees(cx).next().unwrap();
3371 tree.read(cx)
3372 .entry_for_path(path)
3373 .unwrap_or_else(|| panic!("no entry for path {}", path))
3374 .id
3375 })
3376 };
3377
3378 let dir_id = id_for_path("a", cx);
3379 let file_id = id_for_path("a/file1", cx);
3380 let buffer = project
3381 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3382 .await
3383 .unwrap();
3384 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3385
3386 project
3387 .update(cx, |project, cx| {
3388 project.rename_entry(dir_id, Path::new("b"), cx)
3389 })
3390 .unwrap()
3391 .await
3392 .to_included()
3393 .unwrap();
3394 cx.executor().run_until_parked();
3395
3396 assert_eq!(id_for_path("b", cx), dir_id);
3397 assert_eq!(id_for_path("b/file1", cx), file_id);
3398 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3399}
3400
3401#[gpui::test]
3402async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3403 init_test(cx);
3404
3405 let fs = FakeFs::new(cx.executor());
3406 fs.insert_tree(
3407 "/dir",
3408 json!({
3409 "a.txt": "a-contents",
3410 "b.txt": "b-contents",
3411 }),
3412 )
3413 .await;
3414
3415 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3416
3417 // Spawn multiple tasks to open paths, repeating some paths.
3418 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3419 (
3420 p.open_local_buffer("/dir/a.txt", cx),
3421 p.open_local_buffer("/dir/b.txt", cx),
3422 p.open_local_buffer("/dir/a.txt", cx),
3423 )
3424 });
3425
3426 let buffer_a_1 = buffer_a_1.await.unwrap();
3427 let buffer_a_2 = buffer_a_2.await.unwrap();
3428 let buffer_b = buffer_b.await.unwrap();
3429 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3430 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3431
3432 // There is only one buffer per path.
3433 let buffer_a_id = buffer_a_1.entity_id();
3434 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3435
3436 // Open the same path again while it is still open.
3437 drop(buffer_a_1);
3438 let buffer_a_3 = project
3439 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3440 .await
3441 .unwrap();
3442
3443 // There's still only one buffer per path.
3444 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3445}
3446
// Exercises dirty-state tracking: an edit marks a buffer dirty, saving clears
// the flag, editing back to the saved text clears it again, and deleting the
// backing file dirties a clean buffer. Also pins the exact event sequences
// (Edited / DirtyChanged / Saved / FileHandleChanged) emitted at each step.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    // Accumulates every non-Operation event emitted by buffer1.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                // Operation events are noise for this test; record the rest.
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version and disk mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only the first of the two consecutive edits flips the dirty
        // flag, so DirtyChanged appears once between the two Edited events.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer first, then delete its file.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3597
// Verifies how a buffer reacts to its file changing on disk: a clean buffer
// reloads (with anchors translated through the disk diff), while a dirty
// buffer keeps its edits and is flagged as conflicted instead.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the first three rows, so we can
    // check that anchors survive a reload from disk.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors should have moved with the lines they were attached to
        // ("aaa" moved to row 1, "bbbbb" to row 3; the deleted "c" line's
        // anchor lands at the end of the preceding surviving text).
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3678
3679#[gpui::test]
3680async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3681 init_test(cx);
3682
3683 let fs = FakeFs::new(cx.executor());
3684 fs.insert_tree(
3685 "/dir",
3686 json!({
3687 "file1": "a\nb\nc\n",
3688 "file2": "one\r\ntwo\r\nthree\r\n",
3689 }),
3690 )
3691 .await;
3692
3693 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3694 let buffer1 = project
3695 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3696 .await
3697 .unwrap();
3698 let buffer2 = project
3699 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3700 .await
3701 .unwrap();
3702
3703 buffer1.update(cx, |buffer, _| {
3704 assert_eq!(buffer.text(), "a\nb\nc\n");
3705 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3706 });
3707 buffer2.update(cx, |buffer, _| {
3708 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3709 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3710 });
3711
3712 // Change a file's line endings on disk from unix to windows. The buffer's
3713 // state updates correctly.
3714 fs.save(
3715 "/dir/file1".as_ref(),
3716 &"aaa\nb\nc\n".into(),
3717 LineEnding::Windows,
3718 )
3719 .await
3720 .unwrap();
3721 cx.executor().run_until_parked();
3722 buffer1.update(cx, |buffer, _| {
3723 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3724 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3725 });
3726
3727 // Save a file with windows line endings. The file is written correctly.
3728 buffer2.update(cx, |buffer, cx| {
3729 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3730 });
3731 project
3732 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3733 .await
3734 .unwrap();
3735 assert_eq!(
3736 fs.load("/dir/file2".as_ref()).await.unwrap(),
3737 "one\r\ntwo\r\nthree\r\nfour\r\n",
3738 );
3739}
3740
// Verifies that LSP diagnostics sharing `related_information` links are
// grouped: supplementary diagnostics (hints) are assigned the same group_id
// as their primary diagnostic, with `is_primary` distinguishing the two, and
// `diagnostic_group` returns each group's entries in buffer order.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Two diagnostic "families": a warning ("error 1") with one linked hint,
    // and an error ("error 2") with two linked hints. The links are expressed
    // in both directions via `related_information`.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics in buffer order: "error 2" and its hints share group 0,
    // "error 1" and its hint share group 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: "error 2" plus its two hints, ordered by position (hints on
    // line 1 precede the primary error on line 2).
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1: "error 1" plus its single hint, both at the same range.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3983
// Verifies that renaming a file through the project sends the LSP
// `workspace/willRenameFiles` request (whose returned WorkspaceEdit the fake
// server records) and then the `workspace/didRenameFiles` notification, to a
// server that registered matching file-operation filters.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server registers for rename notifications on *.rs files and on all
    // folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; the willRenameFiles round-trip happens before the
    // rename future resolves.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the server answers willRenameFiles with; its content is
    // arbitrary — the test only checks it is received and resolved.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str("file:///dir/two/two.rs").unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .handle_request::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request must describe the old and new file URIs.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, "file:///dir/one.rs");
                    assert_eq!(params.files[0].new_uri, "file:///dir/three.rs");
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server receives didRenameFiles with the
    // same URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, "file:///dir/one.rs");
            assert_eq!(params.files[0].new_uri, "file:///dir/three.rs");
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4112
4113#[gpui::test]
4114async fn test_rename(cx: &mut gpui::TestAppContext) {
4115 // hi
4116 init_test(cx);
4117
4118 let fs = FakeFs::new(cx.executor());
4119 fs.insert_tree(
4120 "/dir",
4121 json!({
4122 "one.rs": "const ONE: usize = 1;",
4123 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4124 }),
4125 )
4126 .await;
4127
4128 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4129
4130 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4131 language_registry.add(rust_lang());
4132 let mut fake_servers = language_registry.register_fake_lsp(
4133 "Rust",
4134 FakeLspAdapter {
4135 capabilities: lsp::ServerCapabilities {
4136 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
4137 prepare_provider: Some(true),
4138 work_done_progress_options: Default::default(),
4139 })),
4140 ..Default::default()
4141 },
4142 ..Default::default()
4143 },
4144 );
4145
4146 let (buffer, _handle) = project
4147 .update(cx, |project, cx| {
4148 project.open_local_buffer_with_lsp("/dir/one.rs", cx)
4149 })
4150 .await
4151 .unwrap();
4152
4153 let fake_server = fake_servers.next().await.unwrap();
4154
4155 let response = project.update(cx, |project, cx| {
4156 project.prepare_rename(buffer.clone(), 7, cx)
4157 });
4158 fake_server
4159 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
4160 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
4161 assert_eq!(params.position, lsp::Position::new(0, 7));
4162 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4163 lsp::Position::new(0, 6),
4164 lsp::Position::new(0, 9),
4165 ))))
4166 })
4167 .next()
4168 .await
4169 .unwrap();
4170 let response = response.await.unwrap();
4171 let PrepareRenameResponse::Success(range) = response else {
4172 panic!("{:?}", response);
4173 };
4174 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
4175 assert_eq!(range, 6..9);
4176
4177 let response = project.update(cx, |project, cx| {
4178 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
4179 });
4180 fake_server
4181 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
4182 assert_eq!(
4183 params.text_document_position.text_document.uri.as_str(),
4184 "file:///dir/one.rs"
4185 );
4186 assert_eq!(
4187 params.text_document_position.position,
4188 lsp::Position::new(0, 7)
4189 );
4190 assert_eq!(params.new_name, "THREE");
4191 Ok(Some(lsp::WorkspaceEdit {
4192 changes: Some(
4193 [
4194 (
4195 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
4196 vec![lsp::TextEdit::new(
4197 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
4198 "THREE".to_string(),
4199 )],
4200 ),
4201 (
4202 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
4203 vec![
4204 lsp::TextEdit::new(
4205 lsp::Range::new(
4206 lsp::Position::new(0, 24),
4207 lsp::Position::new(0, 27),
4208 ),
4209 "THREE".to_string(),
4210 ),
4211 lsp::TextEdit::new(
4212 lsp::Range::new(
4213 lsp::Position::new(0, 35),
4214 lsp::Position::new(0, 38),
4215 ),
4216 "THREE".to_string(),
4217 ),
4218 ],
4219 ),
4220 ]
4221 .into_iter()
4222 .collect(),
4223 ),
4224 ..Default::default()
4225 }))
4226 })
4227 .next()
4228 .await
4229 .unwrap();
4230 let mut transaction = response.await.unwrap().0;
4231 assert_eq!(transaction.len(), 2);
4232 assert_eq!(
4233 transaction
4234 .remove_entry(&buffer)
4235 .unwrap()
4236 .0
4237 .update(cx, |buffer, _| buffer.text()),
4238 "const THREE: usize = 1;"
4239 );
4240 assert_eq!(
4241 transaction
4242 .into_keys()
4243 .next()
4244 .unwrap()
4245 .update(cx, |buffer, _| buffer.text()),
4246 "const TWO: usize = one::THREE + one::THREE;"
4247 );
4248}
4249
4250#[gpui::test]
4251async fn test_search(cx: &mut gpui::TestAppContext) {
4252 init_test(cx);
4253
4254 let fs = FakeFs::new(cx.executor());
4255 fs.insert_tree(
4256 "/dir",
4257 json!({
4258 "one.rs": "const ONE: usize = 1;",
4259 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4260 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4261 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4262 }),
4263 )
4264 .await;
4265 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4266 assert_eq!(
4267 search(
4268 &project,
4269 SearchQuery::text(
4270 "TWO",
4271 false,
4272 true,
4273 false,
4274 Default::default(),
4275 Default::default(),
4276 None
4277 )
4278 .unwrap(),
4279 cx
4280 )
4281 .await
4282 .unwrap(),
4283 HashMap::from_iter([
4284 ("dir/two.rs".to_string(), vec![6..9]),
4285 ("dir/three.rs".to_string(), vec![37..40])
4286 ])
4287 );
4288
4289 let buffer_4 = project
4290 .update(cx, |project, cx| {
4291 project.open_local_buffer("/dir/four.rs", cx)
4292 })
4293 .await
4294 .unwrap();
4295 buffer_4.update(cx, |buffer, cx| {
4296 let text = "two::TWO";
4297 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4298 });
4299
4300 assert_eq!(
4301 search(
4302 &project,
4303 SearchQuery::text(
4304 "TWO",
4305 false,
4306 true,
4307 false,
4308 Default::default(),
4309 Default::default(),
4310 None,
4311 )
4312 .unwrap(),
4313 cx
4314 )
4315 .await
4316 .unwrap(),
4317 HashMap::from_iter([
4318 ("dir/two.rs".to_string(), vec![6..9]),
4319 ("dir/three.rs".to_string(), vec![37..40]),
4320 ("dir/four.rs".to_string(), vec![25..28, 36..39])
4321 ])
4322 );
4323}
4324
4325#[gpui::test]
4326async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4327 init_test(cx);
4328
4329 let search_query = "file";
4330
4331 let fs = FakeFs::new(cx.executor());
4332 fs.insert_tree(
4333 "/dir",
4334 json!({
4335 "one.rs": r#"// Rust file one"#,
4336 "one.ts": r#"// TypeScript file one"#,
4337 "two.rs": r#"// Rust file two"#,
4338 "two.ts": r#"// TypeScript file two"#,
4339 }),
4340 )
4341 .await;
4342 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4343
4344 assert!(
4345 search(
4346 &project,
4347 SearchQuery::text(
4348 search_query,
4349 false,
4350 true,
4351 false,
4352 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4353 Default::default(),
4354 None
4355 )
4356 .unwrap(),
4357 cx
4358 )
4359 .await
4360 .unwrap()
4361 .is_empty(),
4362 "If no inclusions match, no files should be returned"
4363 );
4364
4365 assert_eq!(
4366 search(
4367 &project,
4368 SearchQuery::text(
4369 search_query,
4370 false,
4371 true,
4372 false,
4373 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4374 Default::default(),
4375 None
4376 )
4377 .unwrap(),
4378 cx
4379 )
4380 .await
4381 .unwrap(),
4382 HashMap::from_iter([
4383 ("dir/one.rs".to_string(), vec![8..12]),
4384 ("dir/two.rs".to_string(), vec![8..12]),
4385 ]),
4386 "Rust only search should give only Rust files"
4387 );
4388
4389 assert_eq!(
4390 search(
4391 &project,
4392 SearchQuery::text(
4393 search_query,
4394 false,
4395 true,
4396 false,
4397
4398 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4399
4400 Default::default(),
4401 None,
4402 ).unwrap(),
4403 cx
4404 )
4405 .await
4406 .unwrap(),
4407 HashMap::from_iter([
4408 ("dir/one.ts".to_string(), vec![14..18]),
4409 ("dir/two.ts".to_string(), vec![14..18]),
4410 ]),
4411 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4412 );
4413
4414 assert_eq!(
4415 search(
4416 &project,
4417 SearchQuery::text(
4418 search_query,
4419 false,
4420 true,
4421 false,
4422
4423 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4424
4425 Default::default(),
4426 None,
4427 ).unwrap(),
4428 cx
4429 )
4430 .await
4431 .unwrap(),
4432 HashMap::from_iter([
4433 ("dir/two.ts".to_string(), vec![14..18]),
4434 ("dir/one.rs".to_string(), vec![8..12]),
4435 ("dir/one.ts".to_string(), vec![14..18]),
4436 ("dir/two.rs".to_string(), vec![8..12]),
4437 ]),
4438 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4439 );
4440}
4441
4442#[gpui::test]
4443async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4444 init_test(cx);
4445
4446 let search_query = "file";
4447
4448 let fs = FakeFs::new(cx.executor());
4449 fs.insert_tree(
4450 "/dir",
4451 json!({
4452 "one.rs": r#"// Rust file one"#,
4453 "one.ts": r#"// TypeScript file one"#,
4454 "two.rs": r#"// Rust file two"#,
4455 "two.ts": r#"// TypeScript file two"#,
4456 }),
4457 )
4458 .await;
4459 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4460
4461 assert_eq!(
4462 search(
4463 &project,
4464 SearchQuery::text(
4465 search_query,
4466 false,
4467 true,
4468 false,
4469 Default::default(),
4470 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4471 None,
4472 )
4473 .unwrap(),
4474 cx
4475 )
4476 .await
4477 .unwrap(),
4478 HashMap::from_iter([
4479 ("dir/one.rs".to_string(), vec![8..12]),
4480 ("dir/one.ts".to_string(), vec![14..18]),
4481 ("dir/two.rs".to_string(), vec![8..12]),
4482 ("dir/two.ts".to_string(), vec![14..18]),
4483 ]),
4484 "If no exclusions match, all files should be returned"
4485 );
4486
4487 assert_eq!(
4488 search(
4489 &project,
4490 SearchQuery::text(
4491 search_query,
4492 false,
4493 true,
4494 false,
4495 Default::default(),
4496 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4497 None,
4498 )
4499 .unwrap(),
4500 cx
4501 )
4502 .await
4503 .unwrap(),
4504 HashMap::from_iter([
4505 ("dir/one.ts".to_string(), vec![14..18]),
4506 ("dir/two.ts".to_string(), vec![14..18]),
4507 ]),
4508 "Rust exclusion search should give only TypeScript files"
4509 );
4510
4511 assert_eq!(
4512 search(
4513 &project,
4514 SearchQuery::text(
4515 search_query,
4516 false,
4517 true,
4518 false,
4519 Default::default(),
4520 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4521 None,
4522 ).unwrap(),
4523 cx
4524 )
4525 .await
4526 .unwrap(),
4527 HashMap::from_iter([
4528 ("dir/one.rs".to_string(), vec![8..12]),
4529 ("dir/two.rs".to_string(), vec![8..12]),
4530 ]),
4531 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4532 );
4533
4534 assert!(
4535 search(
4536 &project,
4537 SearchQuery::text(
4538 search_query,
4539 false,
4540 true,
4541 false,
4542 Default::default(),
4543
4544 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4545 None,
4546
4547 ).unwrap(),
4548 cx
4549 )
4550 .await
4551 .unwrap().is_empty(),
4552 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4553 );
4554}
4555
4556#[gpui::test]
4557async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4558 init_test(cx);
4559
4560 let search_query = "file";
4561
4562 let fs = FakeFs::new(cx.executor());
4563 fs.insert_tree(
4564 "/dir",
4565 json!({
4566 "one.rs": r#"// Rust file one"#,
4567 "one.ts": r#"// TypeScript file one"#,
4568 "two.rs": r#"// Rust file two"#,
4569 "two.ts": r#"// TypeScript file two"#,
4570 }),
4571 )
4572 .await;
4573 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4574
4575 assert!(
4576 search(
4577 &project,
4578 SearchQuery::text(
4579 search_query,
4580 false,
4581 true,
4582 false,
4583 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4584 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4585 None,
4586 )
4587 .unwrap(),
4588 cx
4589 )
4590 .await
4591 .unwrap()
4592 .is_empty(),
4593 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4594 );
4595
4596 assert!(
4597 search(
4598 &project,
4599 SearchQuery::text(
4600 search_query,
4601 false,
4602 true,
4603 false,
4604 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4605 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4606 None,
4607 ).unwrap(),
4608 cx
4609 )
4610 .await
4611 .unwrap()
4612 .is_empty(),
4613 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4614 );
4615
4616 assert!(
4617 search(
4618 &project,
4619 SearchQuery::text(
4620 search_query,
4621 false,
4622 true,
4623 false,
4624 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4625 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4626 None,
4627 )
4628 .unwrap(),
4629 cx
4630 )
4631 .await
4632 .unwrap()
4633 .is_empty(),
4634 "Non-matching inclusions and exclusions should not change that."
4635 );
4636
4637 assert_eq!(
4638 search(
4639 &project,
4640 SearchQuery::text(
4641 search_query,
4642 false,
4643 true,
4644 false,
4645 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4646 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4647 None,
4648 )
4649 .unwrap(),
4650 cx
4651 )
4652 .await
4653 .unwrap(),
4654 HashMap::from_iter([
4655 ("dir/one.ts".to_string(), vec![14..18]),
4656 ("dir/two.ts".to_string(), vec![14..18]),
4657 ]),
4658 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4659 );
4660}
4661
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two worktrees with identically-named files, so results must be
    // disambiguated by their worktree-prefixed full paths.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/worktree-a",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        "/worktree-b",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
        cx,
    )
    .await;

    // An inclusion glob prefixed with a worktree name should restrict the
    // search to that single worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // An unprefixed glob applies across all worktrees.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("worktree-a/haystack.ts".to_string(), vec![3..9]),
            ("worktree-b/haystack.ts".to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
4756
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // `target` and `node_modules` are gitignored; files inside them contain
    // the query too, so they only appear when ignored files are searched.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Default search (4th arg `include_ignored` = false) skips ignored dirs.
    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project is created for each case so previously-scanned ignored
    // entries don't leak between assertions.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusion and exclusion filters should still apply to ignored files.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4873
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The worktree root is /one/two/three; /one/two/c.rs lies outside it.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." is a valid file name (trailing dots are not path traversal).
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Verify the whole fake filesystem: only "b.." was created, the rejected
    // paths left no trace. NOTE: relies on fs.paths() returning a sorted list.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
4943
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    // Register four fake servers for the same language: three that advertise
    // hover support and one that does not. Hover requests must fan out to the
    // capable servers only.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    // No hover support: this server must never receive a
                    // hover request.
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all four servers.
    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler per started server, keyed by server name.
    // TypeScript/Tailwind answer with content, ESLint answers None, and the
    // no-capabilities server panics if it is ever asked.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue the hover, then wait until every registered handler has fired once.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // ESLint's `None` response is filtered out; only the two servers that
    // returned content contribute hovers.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5094
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server answers with hover parts that are all effectively empty:
    // an empty string and whitespace-only strings.
    let mut request_handled =
        fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        });

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Make sure the request actually reached the fake server before asserting.
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    // Whitespace-only hover parts must be dropped, yielding no hover at all.
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
5164
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions with different kinds; the request below
    // asks for only one of those kinds.
    let mut request_handled = fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
        move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        },
    );

    // Request only SOURCE_ORGANIZE_IMPORTS actions over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // The SOURCE_FIX_ALL action must be filtered out of the result.
    let code_actions = code_actions_task.await.unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.kind,
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
5242
5243#[gpui::test]
5244async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5245 init_test(cx);
5246
5247 let fs = FakeFs::new(cx.executor());
5248 fs.insert_tree(
5249 "/dir",
5250 json!({
5251 "a.tsx": "a",
5252 }),
5253 )
5254 .await;
5255
5256 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5257
5258 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5259 language_registry.add(tsx_lang());
5260 let language_server_names = [
5261 "TypeScriptServer",
5262 "TailwindServer",
5263 "ESLintServer",
5264 "NoActionsCapabilitiesServer",
5265 ];
5266
5267 let mut language_server_rxs = [
5268 language_registry.register_fake_lsp(
5269 "tsx",
5270 FakeLspAdapter {
5271 name: language_server_names[0],
5272 capabilities: lsp::ServerCapabilities {
5273 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5274 ..lsp::ServerCapabilities::default()
5275 },
5276 ..FakeLspAdapter::default()
5277 },
5278 ),
5279 language_registry.register_fake_lsp(
5280 "tsx",
5281 FakeLspAdapter {
5282 name: language_server_names[1],
5283 capabilities: lsp::ServerCapabilities {
5284 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5285 ..lsp::ServerCapabilities::default()
5286 },
5287 ..FakeLspAdapter::default()
5288 },
5289 ),
5290 language_registry.register_fake_lsp(
5291 "tsx",
5292 FakeLspAdapter {
5293 name: language_server_names[2],
5294 capabilities: lsp::ServerCapabilities {
5295 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5296 ..lsp::ServerCapabilities::default()
5297 },
5298 ..FakeLspAdapter::default()
5299 },
5300 ),
5301 language_registry.register_fake_lsp(
5302 "tsx",
5303 FakeLspAdapter {
5304 name: language_server_names[3],
5305 capabilities: lsp::ServerCapabilities {
5306 code_action_provider: None,
5307 ..lsp::ServerCapabilities::default()
5308 },
5309 ..FakeLspAdapter::default()
5310 },
5311 ),
5312 ];
5313
5314 let (buffer, _handle) = project
5315 .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.tsx", cx))
5316 .await
5317 .unwrap();
5318 cx.executor().run_until_parked();
5319
5320 let mut servers_with_actions_requests = HashMap::default();
5321 for i in 0..language_server_names.len() {
5322 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5323 panic!(
5324 "Failed to get language server #{i} with name {}",
5325 &language_server_names[i]
5326 )
5327 });
5328 let new_server_name = new_server.server.name();
5329
5330 assert!(
5331 !servers_with_actions_requests.contains_key(&new_server_name),
5332 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5333 );
5334 match new_server_name.0.as_ref() {
5335 "TailwindServer" | "TypeScriptServer" => {
5336 servers_with_actions_requests.insert(
5337 new_server_name.clone(),
5338 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5339 move |_, _| {
5340 let name = new_server_name.clone();
5341 async move {
5342 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5343 lsp::CodeAction {
5344 title: format!("{name} code action"),
5345 ..lsp::CodeAction::default()
5346 },
5347 )]))
5348 }
5349 },
5350 ),
5351 );
5352 }
5353 "ESLintServer" => {
5354 servers_with_actions_requests.insert(
5355 new_server_name,
5356 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5357 |_, _| async move { Ok(None) },
5358 ),
5359 );
5360 }
5361 "NoActionsCapabilitiesServer" => {
5362 let _never_handled = new_server
5363 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5364 panic!(
5365 "Should not call for code actions server with no corresponding capabilities"
5366 )
5367 });
5368 }
5369 unexpected => panic!("Unexpected server name: {unexpected}"),
5370 }
5371 }
5372
5373 let code_actions_task = project.update(cx, |project, cx| {
5374 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5375 });
5376
5377 // cx.run_until_parked();
5378 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5379 |mut code_actions_request| async move {
5380 code_actions_request
5381 .next()
5382 .await
5383 .expect("All code actions requests should have been triggered")
5384 },
5385 ))
5386 .await;
5387 assert_eq!(
5388 vec!["TailwindServer code action", "TypeScriptServer code action"],
5389 code_actions_task
5390 .await
5391 .unwrap()
5392 .into_iter()
5393 .map(|code_action| code_action.lsp_action.title)
5394 .sorted()
5395 .collect::<Vec<_>>(),
5396 "Should receive code actions responses from all related servers with hover capabilities"
5397 );
5398}
5399
5400#[gpui::test]
5401async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5402 init_test(cx);
5403
5404 let fs = FakeFs::new(cx.executor());
5405 fs.insert_tree(
5406 "/dir",
5407 json!({
5408 "a.rs": "let a = 1;",
5409 "b.rs": "let b = 2;",
5410 "c.rs": "let c = 2;",
5411 }),
5412 )
5413 .await;
5414
5415 let project = Project::test(
5416 fs,
5417 [
5418 "/dir/a.rs".as_ref(),
5419 "/dir/b.rs".as_ref(),
5420 "/dir/c.rs".as_ref(),
5421 ],
5422 cx,
5423 )
5424 .await;
5425
5426 // check the initial state and get the worktrees
5427 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5428 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5429 assert_eq!(worktrees.len(), 3);
5430
5431 let worktree_a = worktrees[0].read(cx);
5432 let worktree_b = worktrees[1].read(cx);
5433 let worktree_c = worktrees[2].read(cx);
5434
5435 // check they start in the right order
5436 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5437 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5438 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5439
5440 (
5441 worktrees[0].clone(),
5442 worktrees[1].clone(),
5443 worktrees[2].clone(),
5444 )
5445 });
5446
5447 // move first worktree to after the second
5448 // [a, b, c] -> [b, a, c]
5449 project
5450 .update(cx, |project, cx| {
5451 let first = worktree_a.read(cx);
5452 let second = worktree_b.read(cx);
5453 project.move_worktree(first.id(), second.id(), cx)
5454 })
5455 .expect("moving first after second");
5456
5457 // check the state after moving
5458 project.update(cx, |project, cx| {
5459 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5460 assert_eq!(worktrees.len(), 3);
5461
5462 let first = worktrees[0].read(cx);
5463 let second = worktrees[1].read(cx);
5464 let third = worktrees[2].read(cx);
5465
5466 // check they are now in the right order
5467 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5468 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5469 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5470 });
5471
5472 // move the second worktree to before the first
5473 // [b, a, c] -> [a, b, c]
5474 project
5475 .update(cx, |project, cx| {
5476 let second = worktree_a.read(cx);
5477 let first = worktree_b.read(cx);
5478 project.move_worktree(first.id(), second.id(), cx)
5479 })
5480 .expect("moving second before first");
5481
5482 // check the state after moving
5483 project.update(cx, |project, cx| {
5484 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5485 assert_eq!(worktrees.len(), 3);
5486
5487 let first = worktrees[0].read(cx);
5488 let second = worktrees[1].read(cx);
5489 let third = worktrees[2].read(cx);
5490
5491 // check they are now in the right order
5492 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5493 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5494 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5495 });
5496
5497 // move the second worktree to after the third
5498 // [a, b, c] -> [a, c, b]
5499 project
5500 .update(cx, |project, cx| {
5501 let second = worktree_b.read(cx);
5502 let third = worktree_c.read(cx);
5503 project.move_worktree(second.id(), third.id(), cx)
5504 })
5505 .expect("moving second after third");
5506
5507 // check the state after moving
5508 project.update(cx, |project, cx| {
5509 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5510 assert_eq!(worktrees.len(), 3);
5511
5512 let first = worktrees[0].read(cx);
5513 let second = worktrees[1].read(cx);
5514 let third = worktrees[2].read(cx);
5515
5516 // check they are now in the right order
5517 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5518 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5519 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5520 });
5521
5522 // move the third worktree to before the second
5523 // [a, c, b] -> [a, b, c]
5524 project
5525 .update(cx, |project, cx| {
5526 let third = worktree_c.read(cx);
5527 let second = worktree_b.read(cx);
5528 project.move_worktree(third.id(), second.id(), cx)
5529 })
5530 .expect("moving third before second");
5531
5532 // check the state after moving
5533 project.update(cx, |project, cx| {
5534 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5535 assert_eq!(worktrees.len(), 3);
5536
5537 let first = worktrees[0].read(cx);
5538 let second = worktrees[1].read(cx);
5539 let third = worktrees[2].read(cx);
5540
5541 // check they are now in the right order
5542 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5543 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5544 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5545 });
5546
5547 // move the first worktree to after the third
5548 // [a, b, c] -> [b, c, a]
5549 project
5550 .update(cx, |project, cx| {
5551 let first = worktree_a.read(cx);
5552 let third = worktree_c.read(cx);
5553 project.move_worktree(first.id(), third.id(), cx)
5554 })
5555 .expect("moving first after third");
5556
5557 // check the state after moving
5558 project.update(cx, |project, cx| {
5559 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5560 assert_eq!(worktrees.len(), 3);
5561
5562 let first = worktrees[0].read(cx);
5563 let second = worktrees[1].read(cx);
5564 let third = worktrees[2].read(cx);
5565
5566 // check they are now in the right order
5567 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5568 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5569 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5570 });
5571
5572 // move the third worktree to before the first
5573 // [b, c, a] -> [a, b, c]
5574 project
5575 .update(cx, |project, cx| {
5576 let third = worktree_a.read(cx);
5577 let first = worktree_b.read(cx);
5578 project.move_worktree(third.id(), first.id(), cx)
5579 })
5580 .expect("moving third before first");
5581
5582 // check the state after moving
5583 project.update(cx, |project, cx| {
5584 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5585 assert_eq!(worktrees.len(), 3);
5586
5587 let first = worktrees[0].read(cx);
5588 let second = worktrees[1].read(cx);
5589 let third = worktrees[2].read(cx);
5590
5591 // check they are now in the right order
5592 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5593 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5594 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5595 });
5596}
5597
#[gpui::test]
async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index (staged) content and working-copy content differ in two places:
    // an added comment line and a changed println argument.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[(Path::new("src/main.rs"), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_changes = project
        .update(cx, |project, cx| {
            project.open_unstaged_changes(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Let the diff state catch up, then expect an insertion hunk (the new
    // comment) and a modification hunk (the changed println line).
    cx.run_until_parked();
    unstaged_changes.update(cx, |unstaged_changes, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_changes.base_text.as_ref().unwrap().read(cx).text(),
            &[
                (0..1, "", "// print goodbye\n"),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                ),
            ],
        );
    });

    // Update the index so that only the println line still differs from the
    // working copy; the diff must shrink to a single insertion hunk.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[(Path::new("src/main.rs"), staged_contents)],
    );

    cx.run_until_parked();
    unstaged_changes.update(cx, |unstaged_changes, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_changes.base_text.as_ref().unwrap().read(cx).text(),
            &[(2..3, "", " println!(\"goodbye world\");\n")],
        );
    });
}
5689
5690async fn search(
5691 project: &Model<Project>,
5692 query: SearchQuery,
5693 cx: &mut gpui::TestAppContext,
5694) -> Result<HashMap<String, Vec<Range<usize>>>> {
5695 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
5696 let mut results = HashMap::default();
5697 while let Ok(search_result) = search_rx.recv().await {
5698 match search_result {
5699 SearchResult::Buffer { buffer, ranges } => {
5700 results.entry(buffer).or_insert(ranges);
5701 }
5702 SearchResult::LimitReached => {}
5703 }
5704 }
5705 Ok(results
5706 .into_iter()
5707 .map(|(buffer, ranges)| {
5708 buffer.update(cx, |buffer, cx| {
5709 let path = buffer
5710 .file()
5711 .unwrap()
5712 .full_path(cx)
5713 .to_string_lossy()
5714 .to_string();
5715 let ranges = ranges
5716 .into_iter()
5717 .map(|range| range.to_offset(buffer))
5718 .collect::<Vec<_>>();
5719 (path, ranges)
5720 })
5721 })
5722 .collect())
5723}
5724
5725pub fn init_test(cx: &mut gpui::TestAppContext) {
5726 if std::env::var("RUST_LOG").is_ok() {
5727 env_logger::try_init().ok();
5728 }
5729
5730 cx.update(|cx| {
5731 let settings_store = SettingsStore::test(cx);
5732 cx.set_global(settings_store);
5733 release_channel::init(SemanticVersion::default(), cx);
5734 language::init(cx);
5735 Project::init_settings(cx);
5736 });
5737}
5738
5739fn json_lang() -> Arc<Language> {
5740 Arc::new(Language::new(
5741 LanguageConfig {
5742 name: "JSON".into(),
5743 matcher: LanguageMatcher {
5744 path_suffixes: vec!["json".to_string()],
5745 ..Default::default()
5746 },
5747 ..Default::default()
5748 },
5749 None,
5750 ))
5751}
5752
5753fn js_lang() -> Arc<Language> {
5754 Arc::new(Language::new(
5755 LanguageConfig {
5756 name: "JavaScript".into(),
5757 matcher: LanguageMatcher {
5758 path_suffixes: vec!["js".to_string()],
5759 ..Default::default()
5760 },
5761 ..Default::default()
5762 },
5763 None,
5764 ))
5765}
5766
5767fn rust_lang() -> Arc<Language> {
5768 Arc::new(Language::new(
5769 LanguageConfig {
5770 name: "Rust".into(),
5771 matcher: LanguageMatcher {
5772 path_suffixes: vec!["rs".to_string()],
5773 ..Default::default()
5774 },
5775 ..Default::default()
5776 },
5777 Some(tree_sitter_rust::LANGUAGE.into()),
5778 ))
5779}
5780
5781fn typescript_lang() -> Arc<Language> {
5782 Arc::new(Language::new(
5783 LanguageConfig {
5784 name: "TypeScript".into(),
5785 matcher: LanguageMatcher {
5786 path_suffixes: vec!["ts".to_string()],
5787 ..Default::default()
5788 },
5789 ..Default::default()
5790 },
5791 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
5792 ))
5793}
5794
5795fn tsx_lang() -> Arc<Language> {
5796 Arc::new(Language::new(
5797 LanguageConfig {
5798 name: "tsx".into(),
5799 matcher: LanguageMatcher {
5800 path_suffixes: vec!["tsx".to_string()],
5801 ..Default::default()
5802 },
5803 ..Default::default()
5804 },
5805 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
5806 ))
5807}
5808
5809fn get_all_tasks(
5810 project: &Model<Project>,
5811 worktree_id: Option<WorktreeId>,
5812 task_context: &TaskContext,
5813 cx: &mut AppContext,
5814) -> Vec<(TaskSourceKind, ResolvedTask)> {
5815 let (mut old, new) = project.update(cx, |project, cx| {
5816 project
5817 .task_store
5818 .read(cx)
5819 .task_inventory()
5820 .unwrap()
5821 .read(cx)
5822 .used_and_current_resolved_tasks(worktree_id, None, task_context, cx)
5823 });
5824 old.extend(new);
5825 old
5826}