1use crate::{Event, *};
2use ::git::diff::assert_hunks;
3use fs::FakeFs;
4use futures::{future, StreamExt};
5use gpui::{AppContext, SemanticVersion, UpdateGlobal};
6use http_client::Url;
7use language::{
8 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
9 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
10 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
11 OffsetRangeExt, Point, ToPoint,
12};
13use lsp::{
14 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
15 NumberOrString, TextDocumentEdit, WillRenameFiles,
16};
17use parking_lot::Mutex;
18use pretty_assertions::{assert_eq, assert_matches};
19use serde_json::json;
20#[cfg(not(windows))]
21use std::os;
22use std::{str::FromStr, sync::OnceLock};
23
24use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
25use task::{ResolvedTask, TaskContext};
26use unindent::Unindent as _;
27use util::{
28 assert_set_eq,
29 paths::{replace_path_separator, PathMatcher},
30 test::temp_tree,
31 TryFutureExt as _,
32};
33
34#[gpui::test]
35async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
36 cx.executor().allow_parking();
37
38 let (tx, mut rx) = futures::channel::mpsc::unbounded();
39 let _thread = std::thread::spawn(move || {
40 std::fs::metadata("/tmp").unwrap();
41 std::thread::sleep(Duration::from_millis(1000));
42 tx.unbounded_send(1).unwrap();
43 });
44 rx.next().await.unwrap();
45}
46
47#[gpui::test]
48async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
49 cx.executor().allow_parking();
50
51 let io_task = smol::unblock(move || {
52 println!("sleeping on thread {:?}", std::thread::current().id());
53 std::thread::sleep(Duration::from_millis(10));
54 1
55 });
56
57 let task = cx.foreground_executor().spawn(async move {
58 io_task.await;
59 });
60
61 task.await;
62}
63
64#[cfg(not(windows))]
65#[gpui::test]
66async fn test_symlinks(cx: &mut gpui::TestAppContext) {
67 init_test(cx);
68 cx.executor().allow_parking();
69
70 let dir = temp_tree(json!({
71 "root": {
72 "apple": "",
73 "banana": {
74 "carrot": {
75 "date": "",
76 "endive": "",
77 }
78 },
79 "fennel": {
80 "grape": "",
81 }
82 }
83 }));
84
85 let root_link_path = dir.path().join("root_link");
86 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
87 os::unix::fs::symlink(
88 dir.path().join("root/fennel"),
89 dir.path().join("root/finnochio"),
90 )
91 .unwrap();
92
93 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
94
95 project.update(cx, |project, cx| {
96 let tree = project.worktrees(cx).next().unwrap().read(cx);
97 assert_eq!(tree.file_count(), 5);
98 assert_eq!(
99 tree.inode_for_path("fennel/grape"),
100 tree.inode_for_path("finnochio/grape")
101 );
102 });
103}
104
// Verifies .editorconfig support: .editorconfig settings override .zed
// settings, nested .editorconfig files override their parents, "tab_width"
// is used when "indent_size" is absent, and non-matching files are untouched.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let dir = temp_tree(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into a FakeFs so the project can watch it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let settings/worktree scanning finish before querying.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by the .editorconfig glob "*.rs",
        // so it keeps the tab_size from .zed/settings.json
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
194
// Verifies that per-directory `.zed` folders provide both settings and tasks:
// settings are resolved from the nearest `.zed/settings.json`, worktree tasks
// are discovered from every `.zed/tasks.json`, recently-scheduled tasks sort
// first, and global (file-based) tasks are appended last.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    // Wait for the worktree scan and settings load to complete.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // Source kind describing tasks that come from the root-level `.zed` folder.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // The root `.zed/settings.json` applies to `a/a.rs`, while the
            // nested `b/.zed/settings.json` overrides it for `b/b.rs`.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree `.zed/tasks.json` files are discovered.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root-level task as recently scheduled, and add a file-based
    // (global) task to the inventory.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // The recently-used task now sorts first; the global task is appended
    // last, with its args and env resolved.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
388
// End-to-end test of language-server lifecycle management: servers start when
// a matching buffer opens, buffers are configured from server capabilities,
// edits/saves/renames are routed to the right servers, diagnostics are cleared
// when a file changes language, and restarts re-open all relevant documents.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake Rust and JSON servers with distinct completion triggers
    // and save-notification support, so their effects can be told apart.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared when the buffer's
    // language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before being replaced.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
786
// Verifies `workspace/didChangeWatchedFiles` support: ignored directories are
// only scanned once a server registers a watcher inside them, and FS events
// are forwarded to the server only when they match its registered globs.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // FakeFs paths must be absolute; on Windows that requires a drive prefix.
    fn add_root_for_windows(path: &str) -> String {
        if cfg!(windows) {
            format!("C:{}", path)
        } else {
            path.to_string()
        }
    }

    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        add_root_for_windows("/the-root"),
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [add_root_for_windows("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(add_root_for_windows("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: a single file, a glob over src, and a recursive
    // glob inside the gitignored `target/y` directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(add_root_for_windows(
                                    "/the-root/Cargo.toml",
                                )),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(add_root_for_windows(
                                    "/the-root/src/*.{rs,c}",
                                )),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(add_root_for_windows(
                                    "/the-root/target/y/**/*.rs",
                                )),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Collect incoming DidChangeWatchedFiles notifications, sorted by URI so
    // assertions are order-independent.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // No spurious events yet; only the watched ignored subtree was rescanned.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(
        add_root_for_windows("/the-root/src/c.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        add_root_for_windows("/the-root/src/d.txt").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.remove_file(
        add_root_for_windows("/the-root/src/b.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        add_root_for_windows("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        add_root_for_windows("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(add_root_for_windows("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(add_root_for_windows("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(add_root_for_windows("/the-root/target/y/out/y2.rs"))
                    .unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
1004
1005#[gpui::test]
1006async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1007 init_test(cx);
1008
1009 let fs = FakeFs::new(cx.executor());
1010 fs.insert_tree(
1011 "/dir",
1012 json!({
1013 "a.rs": "let a = 1;",
1014 "b.rs": "let b = 2;"
1015 }),
1016 )
1017 .await;
1018
1019 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
1020 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1021
1022 let buffer_a = project
1023 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1024 .await
1025 .unwrap();
1026 let buffer_b = project
1027 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1028 .await
1029 .unwrap();
1030
1031 lsp_store.update(cx, |lsp_store, cx| {
1032 lsp_store
1033 .update_diagnostics(
1034 LanguageServerId(0),
1035 lsp::PublishDiagnosticsParams {
1036 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1037 version: None,
1038 diagnostics: vec![lsp::Diagnostic {
1039 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1040 severity: Some(lsp::DiagnosticSeverity::ERROR),
1041 message: "error 1".to_string(),
1042 ..Default::default()
1043 }],
1044 },
1045 &[],
1046 cx,
1047 )
1048 .unwrap();
1049 lsp_store
1050 .update_diagnostics(
1051 LanguageServerId(0),
1052 lsp::PublishDiagnosticsParams {
1053 uri: Url::from_file_path("/dir/b.rs").unwrap(),
1054 version: None,
1055 diagnostics: vec![lsp::Diagnostic {
1056 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1057 severity: Some(DiagnosticSeverity::WARNING),
1058 message: "error 2".to_string(),
1059 ..Default::default()
1060 }],
1061 },
1062 &[],
1063 cx,
1064 )
1065 .unwrap();
1066 });
1067
1068 buffer_a.update(cx, |buffer, _| {
1069 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1070 assert_eq!(
1071 chunks
1072 .iter()
1073 .map(|(s, d)| (s.as_str(), *d))
1074 .collect::<Vec<_>>(),
1075 &[
1076 ("let ", None),
1077 ("a", Some(DiagnosticSeverity::ERROR)),
1078 (" = 1;", None),
1079 ]
1080 );
1081 });
1082 buffer_b.update(cx, |buffer, _| {
1083 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1084 assert_eq!(
1085 chunks
1086 .iter()
1087 .map(|(s, d)| (s.as_str(), *d))
1088 .collect::<Vec<_>>(),
1089 &[
1090 ("let ", None),
1091 ("b", Some(DiagnosticSeverity::WARNING)),
1092 (" = 2;", None),
1093 ]
1094 );
1095 });
1096}
1097
1098#[gpui::test]
1099async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1100 init_test(cx);
1101
1102 let fs = FakeFs::new(cx.executor());
1103 fs.insert_tree(
1104 "/root",
1105 json!({
1106 "dir": {
1107 ".git": {
1108 "HEAD": "ref: refs/heads/main",
1109 },
1110 ".gitignore": "b.rs",
1111 "a.rs": "let a = 1;",
1112 "b.rs": "let b = 2;",
1113 },
1114 "other.rs": "let b = c;"
1115 }),
1116 )
1117 .await;
1118
1119 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1120 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1121 let (worktree, _) = project
1122 .update(cx, |project, cx| {
1123 project.find_or_create_worktree("/root/dir", true, cx)
1124 })
1125 .await
1126 .unwrap();
1127 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1128
1129 let (worktree, _) = project
1130 .update(cx, |project, cx| {
1131 project.find_or_create_worktree("/root/other.rs", false, cx)
1132 })
1133 .await
1134 .unwrap();
1135 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1136
1137 let server_id = LanguageServerId(0);
1138 lsp_store.update(cx, |lsp_store, cx| {
1139 lsp_store
1140 .update_diagnostics(
1141 server_id,
1142 lsp::PublishDiagnosticsParams {
1143 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1144 version: None,
1145 diagnostics: vec![lsp::Diagnostic {
1146 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1147 severity: Some(lsp::DiagnosticSeverity::ERROR),
1148 message: "unused variable 'b'".to_string(),
1149 ..Default::default()
1150 }],
1151 },
1152 &[],
1153 cx,
1154 )
1155 .unwrap();
1156 lsp_store
1157 .update_diagnostics(
1158 server_id,
1159 lsp::PublishDiagnosticsParams {
1160 uri: Url::from_file_path("/root/other.rs").unwrap(),
1161 version: None,
1162 diagnostics: vec![lsp::Diagnostic {
1163 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1164 severity: Some(lsp::DiagnosticSeverity::ERROR),
1165 message: "unknown variable 'c'".to_string(),
1166 ..Default::default()
1167 }],
1168 },
1169 &[],
1170 cx,
1171 )
1172 .unwrap();
1173 });
1174
1175 let main_ignored_buffer = project
1176 .update(cx, |project, cx| {
1177 project.open_buffer((main_worktree_id, "b.rs"), cx)
1178 })
1179 .await
1180 .unwrap();
1181 main_ignored_buffer.update(cx, |buffer, _| {
1182 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1183 assert_eq!(
1184 chunks
1185 .iter()
1186 .map(|(s, d)| (s.as_str(), *d))
1187 .collect::<Vec<_>>(),
1188 &[
1189 ("let ", None),
1190 ("b", Some(DiagnosticSeverity::ERROR)),
1191 (" = 2;", None),
1192 ],
1193 "Gigitnored buffers should still get in-buffer diagnostics",
1194 );
1195 });
1196 let other_buffer = project
1197 .update(cx, |project, cx| {
1198 project.open_buffer((other_worktree_id, ""), cx)
1199 })
1200 .await
1201 .unwrap();
1202 other_buffer.update(cx, |buffer, _| {
1203 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1204 assert_eq!(
1205 chunks
1206 .iter()
1207 .map(|(s, d)| (s.as_str(), *d))
1208 .collect::<Vec<_>>(),
1209 &[
1210 ("let b = ", None),
1211 ("c", Some(DiagnosticSeverity::ERROR)),
1212 (";", None),
1213 ],
1214 "Buffers from hidden projects should still get in-buffer diagnostics"
1215 );
1216 });
1217
1218 project.update(cx, |project, cx| {
1219 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1220 assert_eq!(
1221 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1222 vec![(
1223 ProjectPath {
1224 worktree_id: main_worktree_id,
1225 path: Arc::from(Path::new("b.rs")),
1226 },
1227 server_id,
1228 DiagnosticSummary {
1229 error_count: 1,
1230 warning_count: 0,
1231 }
1232 )]
1233 );
1234 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1235 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1236 });
1237}
1238
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies the project event sequence around a disk-based diagnostics
    // pass driven by LSP progress notifications:
    // LanguageServerAdded -> RefreshInlayHints -> DiskBasedDiagnosticsStarted
    // -> DiagnosticsUpdated -> DiskBasedDiagnosticsFinished, and that
    // re-publishing empty diagnostics does not emit a duplicate update event.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // Progress reported with `progress_token` is treated as a disk-based
    // diagnostics pass, mapping onto DiskBasedDiagnosticsStarted/Finished.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/b.rs", cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting progress under the disk-based token announces the pass.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics for a file emits a per-path update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending progress for the token finishes the disk-based pass.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The published diagnostic is visible in the freshly opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // The second empty publish is a no-op: no event should be pending.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1374
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Restarting a language server while its disk-based diagnostics pass is
    // still in flight must not leave the project stuck in a "diagnostics
    // running" state: only the new server instance's progress counts.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // Note the new server gets id 1; the abandoned one had id 0.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is tracked as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1462
1463#[gpui::test]
1464async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1465 init_test(cx);
1466
1467 let fs = FakeFs::new(cx.executor());
1468 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1469
1470 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1471
1472 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1473 language_registry.add(rust_lang());
1474 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1475
1476 let (buffer, _) = project
1477 .update(cx, |project, cx| {
1478 project.open_local_buffer_with_lsp("/dir/a.rs", cx)
1479 })
1480 .await
1481 .unwrap();
1482
1483 // Publish diagnostics
1484 let fake_server = fake_servers.next().await.unwrap();
1485 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1486 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1487 version: None,
1488 diagnostics: vec![lsp::Diagnostic {
1489 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1490 severity: Some(lsp::DiagnosticSeverity::ERROR),
1491 message: "the message".to_string(),
1492 ..Default::default()
1493 }],
1494 });
1495
1496 cx.executor().run_until_parked();
1497 buffer.update(cx, |buffer, _| {
1498 assert_eq!(
1499 buffer
1500 .snapshot()
1501 .diagnostics_in_range::<_, usize>(0..1, false)
1502 .map(|entry| entry.diagnostic.message.clone())
1503 .collect::<Vec<_>>(),
1504 ["the message".to_string()]
1505 );
1506 });
1507 project.update(cx, |project, cx| {
1508 assert_eq!(
1509 project.diagnostic_summary(false, cx),
1510 DiagnosticSummary {
1511 error_count: 1,
1512 warning_count: 0,
1513 }
1514 );
1515 });
1516
1517 project.update(cx, |project, cx| {
1518 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1519 });
1520
1521 // The diagnostics are cleared.
1522 cx.executor().run_until_parked();
1523 buffer.update(cx, |buffer, _| {
1524 assert_eq!(
1525 buffer
1526 .snapshot()
1527 .diagnostics_in_range::<_, usize>(0..1, false)
1528 .map(|entry| entry.diagnostic.message.clone())
1529 .collect::<Vec<_>>(),
1530 Vec::<String>::new(),
1531 );
1532 });
1533 project.update(cx, |project, cx| {
1534 assert_eq!(
1535 project.diagnostic_summary(false, cx),
1536 DiagnosticSummary {
1537 error_count: 0,
1538 warning_count: 0,
1539 }
1540 );
1541 });
1542}
1543
1544#[gpui::test]
1545async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1546 init_test(cx);
1547
1548 let fs = FakeFs::new(cx.executor());
1549 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1550
1551 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1552 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1553
1554 language_registry.add(rust_lang());
1555 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1556
1557 let (buffer, _handle) = project
1558 .update(cx, |project, cx| {
1559 project.open_local_buffer_with_lsp("/dir/a.rs", cx)
1560 })
1561 .await
1562 .unwrap();
1563
1564 // Before restarting the server, report diagnostics with an unknown buffer version.
1565 let fake_server = fake_servers.next().await.unwrap();
1566 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1567 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1568 version: Some(10000),
1569 diagnostics: Vec::new(),
1570 });
1571 cx.executor().run_until_parked();
1572
1573 project.update(cx, |project, cx| {
1574 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1575 });
1576 let mut fake_server = fake_servers.next().await.unwrap();
1577 let notification = fake_server
1578 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1579 .await
1580 .text_document;
1581 assert_eq!(notification.version, 0);
1582}
1583
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    // Cancelling language-server work for a buffer should send
    // WorkDoneProgressCancel only for tokens whose progress was begun with
    // `cancellable: Some(true)`.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // This token is explicitly non-cancellable: it must not be cancelled below.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // This token is cancellable, so it is the one cancellation should target.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Exactly one cancel notification arrives, and it names the cancellable token.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1648
1649#[gpui::test]
1650async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1651 init_test(cx);
1652
1653 let fs = FakeFs::new(cx.executor());
1654 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1655 .await;
1656
1657 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1658 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1659
1660 let mut fake_rust_servers = language_registry.register_fake_lsp(
1661 "Rust",
1662 FakeLspAdapter {
1663 name: "rust-lsp",
1664 ..Default::default()
1665 },
1666 );
1667 let mut fake_js_servers = language_registry.register_fake_lsp(
1668 "JavaScript",
1669 FakeLspAdapter {
1670 name: "js-lsp",
1671 ..Default::default()
1672 },
1673 );
1674 language_registry.add(rust_lang());
1675 language_registry.add(js_lang());
1676
1677 let _rs_buffer = project
1678 .update(cx, |project, cx| {
1679 project.open_local_buffer_with_lsp("/dir/a.rs", cx)
1680 })
1681 .await
1682 .unwrap();
1683 let _js_buffer = project
1684 .update(cx, |project, cx| {
1685 project.open_local_buffer_with_lsp("/dir/b.js", cx)
1686 })
1687 .await
1688 .unwrap();
1689
1690 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1691 assert_eq!(
1692 fake_rust_server_1
1693 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1694 .await
1695 .text_document
1696 .uri
1697 .as_str(),
1698 "file:///dir/a.rs"
1699 );
1700
1701 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1702 assert_eq!(
1703 fake_js_server
1704 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1705 .await
1706 .text_document
1707 .uri
1708 .as_str(),
1709 "file:///dir/b.js"
1710 );
1711
1712 // Disable Rust language server, ensuring only that server gets stopped.
1713 cx.update(|cx| {
1714 SettingsStore::update_global(cx, |settings, cx| {
1715 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1716 settings.languages.insert(
1717 "Rust".into(),
1718 LanguageSettingsContent {
1719 enable_language_server: Some(false),
1720 ..Default::default()
1721 },
1722 );
1723 });
1724 })
1725 });
1726 fake_rust_server_1
1727 .receive_notification::<lsp::notification::Exit>()
1728 .await;
1729
1730 // Enable Rust and disable JavaScript language servers, ensuring that the
1731 // former gets started again and that the latter stops.
1732 cx.update(|cx| {
1733 SettingsStore::update_global(cx, |settings, cx| {
1734 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1735 settings.languages.insert(
1736 LanguageName::new("Rust"),
1737 LanguageSettingsContent {
1738 enable_language_server: Some(true),
1739 ..Default::default()
1740 },
1741 );
1742 settings.languages.insert(
1743 LanguageName::new("JavaScript"),
1744 LanguageSettingsContent {
1745 enable_language_server: Some(false),
1746 ..Default::default()
1747 },
1748 );
1749 });
1750 })
1751 });
1752 let _rs_buffer = project
1753 .update(cx, |project, cx| {
1754 project.open_local_buffer_with_lsp("/dir/a.rs", cx)
1755 })
1756 .await
1757 .unwrap();
1758 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1759 assert_eq!(
1760 fake_rust_server_2
1761 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1762 .await
1763 .text_document
1764 .uri
1765 .as_str(),
1766 "file:///dir/a.rs"
1767 );
1768 fake_js_server
1769 .receive_notification::<lsp::notification::Exit>()
1770 .await;
1771}
1772
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    // Diagnostics published against an older document version must be
    // transformed through the buffer edits made since that version, so their
    // ranges land on the current text. Also covers overlapping diagnostics
    // and out-of-order publishes.
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let _handle = lsp_store.update(cx, |lsp_store, cx| {
        lsp_store.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (rows 1 and 2 in the published version map to rows 3 and 4 now)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The error nests inside the warning; chunks take the higher severity.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Ranges reflect the latest edits even though the diagnostics arrived
        // out of row order.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2057
2058#[gpui::test]
2059async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2060 init_test(cx);
2061
2062 let text = concat!(
2063 "let one = ;\n", //
2064 "let two = \n",
2065 "let three = 3;\n",
2066 );
2067
2068 let fs = FakeFs::new(cx.executor());
2069 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2070
2071 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2072 let buffer = project
2073 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2074 .await
2075 .unwrap();
2076
2077 project.update(cx, |project, cx| {
2078 project.lsp_store.update(cx, |lsp_store, cx| {
2079 lsp_store
2080 .update_diagnostic_entries(
2081 LanguageServerId(0),
2082 PathBuf::from("/dir/a.rs"),
2083 None,
2084 vec![
2085 DiagnosticEntry {
2086 range: Unclipped(PointUtf16::new(0, 10))
2087 ..Unclipped(PointUtf16::new(0, 10)),
2088 diagnostic: Diagnostic {
2089 severity: DiagnosticSeverity::ERROR,
2090 message: "syntax error 1".to_string(),
2091 ..Default::default()
2092 },
2093 },
2094 DiagnosticEntry {
2095 range: Unclipped(PointUtf16::new(1, 10))
2096 ..Unclipped(PointUtf16::new(1, 10)),
2097 diagnostic: Diagnostic {
2098 severity: DiagnosticSeverity::ERROR,
2099 message: "syntax error 2".to_string(),
2100 ..Default::default()
2101 },
2102 },
2103 ],
2104 cx,
2105 )
2106 .unwrap();
2107 })
2108 });
2109
2110 // An empty range is extended forward to include the following character.
2111 // At the end of a line, an empty range is extended backward to include
2112 // the preceding character.
2113 buffer.update(cx, |buffer, _| {
2114 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2115 assert_eq!(
2116 chunks
2117 .iter()
2118 .map(|(s, d)| (s.as_str(), *d))
2119 .collect::<Vec<_>>(),
2120 &[
2121 ("let one = ", None),
2122 (";", Some(DiagnosticSeverity::ERROR)),
2123 ("\nlet two =", None),
2124 (" ", Some(DiagnosticSeverity::ERROR)),
2125 ("\nlet three = 3;\n", None)
2126 ]
2127 );
2128 });
2129}
2130
2131#[gpui::test]
2132async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2133 init_test(cx);
2134
2135 let fs = FakeFs::new(cx.executor());
2136 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2137 .await;
2138
2139 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2140 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2141
2142 lsp_store.update(cx, |lsp_store, cx| {
2143 lsp_store
2144 .update_diagnostic_entries(
2145 LanguageServerId(0),
2146 Path::new("/dir/a.rs").to_owned(),
2147 None,
2148 vec![DiagnosticEntry {
2149 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2150 diagnostic: Diagnostic {
2151 severity: DiagnosticSeverity::ERROR,
2152 is_primary: true,
2153 message: "syntax error a1".to_string(),
2154 ..Default::default()
2155 },
2156 }],
2157 cx,
2158 )
2159 .unwrap();
2160 lsp_store
2161 .update_diagnostic_entries(
2162 LanguageServerId(1),
2163 Path::new("/dir/a.rs").to_owned(),
2164 None,
2165 vec![DiagnosticEntry {
2166 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2167 diagnostic: Diagnostic {
2168 severity: DiagnosticSeverity::ERROR,
2169 is_primary: true,
2170 message: "syntax error b1".to_string(),
2171 ..Default::default()
2172 },
2173 }],
2174 cx,
2175 )
2176 .unwrap();
2177
2178 assert_eq!(
2179 lsp_store.diagnostic_summary(false, cx),
2180 DiagnosticSummary {
2181 error_count: 2,
2182 warning_count: 0,
2183 }
2184 );
2185 });
2186}
2187
// Verifies that LSP edits computed against an *older* document version are
// transformed through the buffer edits that happened after that version was
// sent to the server, so they still apply cleanly to the current contents.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();

    // Capture the document version the server received at open time; the
    // edits below will be interpreted relative to this version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // All positions below are expressed in coordinates of the *old* document
    // version; `edits_from_lsp` must map them into the current buffer.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the transformed edits must preserve the interleaved local
    // edits (the comments) while landing the server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2342
// Verifies that when a server sends a small logical change as a very large
// diff (delete-and-reinsert of most of the file), `edits_from_lsp` minimizes
// it down to the actual differences.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The delete-and-reinsert above collapses to just two minimal edits:
        // rewriting the first import, and removing the now-duplicate line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2451
// Verifies that malformed LSP edits — unordered, with inverted ranges, or
// pointing past the end of the document — are normalized rather than causing
// a panic or corrupting the buffer.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end position precedes start position.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The resulting edits are sorted and minimal, with the inverted and
        // out-of-bounds ranges normalized to valid buffer coordinates.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2556
2557fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2558 buffer: &Buffer,
2559 range: Range<T>,
2560) -> Vec<(String, Option<DiagnosticSeverity>)> {
2561 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2562 for chunk in buffer.snapshot().chunks(range, true) {
2563 if chunks.last().map_or(false, |prev_chunk| {
2564 prev_chunk.1 == chunk.diagnostic_severity
2565 }) {
2566 chunks.last_mut().unwrap().0.push_str(chunk.text);
2567 } else {
2568 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2569 }
2570 }
2571 chunks
2572}
2573
// Verifies go-to-definition into a file outside the project: the target file
// is opened via an invisible, temporary worktree that is released once the
// definition result is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;
    // The definition target lives outside the project's only worktree.
    fs.insert_tree(
        "/another_dir",
        json!({
            "a.rs": "const fn a() { A }"}),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/b.rs", cx)
        })
        .await
        .unwrap();
    let fake_server = fake_servers.next().await.unwrap();
    // Respond to the definition request with a location in the external file.
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/another_dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/another_dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // The external file was added as a second, non-visible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                ("/another_dir/a.rs".as_ref(), false),
                ("/dir".as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition released the temporary worktree.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir".as_ref(), true)]);
    });

    // Helper: list each worktree's absolute path and visibility.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2674
// Verifies completions whose items carry no explicit text-edit range: the
// replacement range is computed locally from the text around the cursor.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Request completions at the end of "fqn"; start the request before
    // installing the server-side handler so the two can rendezvous.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item has `insert_text` but no `text_edit`, so no range is provided.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The computed range covers the 3-character word "fqn" before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Same flow inside a string literal, with the cursor before the closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The computed range covers "cmp", excluding the closing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2766
// Verifies that carriage returns in a completion's insert text ("\r" and
// "\r\n") are normalized to "\n" before the completion is applied.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Start the completion request before installing the server handler.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The insert text mixes a bare "\r" and a Windows-style "\r\n".
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both line-ending styles were normalized to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2827
// Verifies the command-based code-action flow: when resolving an action yields
// a command instead of edits, the command is executed, and edits arriving via
// the server's `workspace/applyEdit` request land in the returned transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Advertise lazy code-action resolution so the resolve request is used.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first (command-carrying) action.
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a single unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2962
2963#[gpui::test(iterations = 10)]
2964async fn test_save_file(cx: &mut gpui::TestAppContext) {
2965 init_test(cx);
2966
2967 let fs = FakeFs::new(cx.executor());
2968 fs.insert_tree(
2969 "/dir",
2970 json!({
2971 "file1": "the old contents",
2972 }),
2973 )
2974 .await;
2975
2976 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2977 let buffer = project
2978 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2979 .await
2980 .unwrap();
2981 buffer.update(cx, |buffer, cx| {
2982 assert_eq!(buffer.text(), "the old contents");
2983 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2984 });
2985
2986 project
2987 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2988 .await
2989 .unwrap();
2990
2991 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2992 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2993}
2994
2995#[gpui::test(iterations = 30)]
2996async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2997 init_test(cx);
2998
2999 let fs = FakeFs::new(cx.executor().clone());
3000 fs.insert_tree(
3001 "/dir",
3002 json!({
3003 "file1": "the original contents",
3004 }),
3005 )
3006 .await;
3007
3008 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3009 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3010 let buffer = project
3011 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3012 .await
3013 .unwrap();
3014
3015 // Simulate buffer diffs being slow, so that they don't complete before
3016 // the next file change occurs.
3017 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3018
3019 // Change the buffer's file on disk, and then wait for the file change
3020 // to be detected by the worktree, so that the buffer starts reloading.
3021 fs.save(
3022 "/dir/file1".as_ref(),
3023 &"the first contents".into(),
3024 Default::default(),
3025 )
3026 .await
3027 .unwrap();
3028 worktree.next_event(cx).await;
3029
3030 // Change the buffer's file again. Depending on the random seed, the
3031 // previous file change may still be in progress.
3032 fs.save(
3033 "/dir/file1".as_ref(),
3034 &"the second contents".into(),
3035 Default::default(),
3036 )
3037 .await
3038 .unwrap();
3039 worktree.next_event(cx).await;
3040
3041 cx.executor().run_until_parked();
3042 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3043 buffer.read_with(cx, |buffer, _| {
3044 assert_eq!(buffer.text(), on_disk_text);
3045 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3046 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3047 });
3048}
3049
3050#[gpui::test(iterations = 30)]
3051async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3052 init_test(cx);
3053
3054 let fs = FakeFs::new(cx.executor().clone());
3055 fs.insert_tree(
3056 "/dir",
3057 json!({
3058 "file1": "the original contents",
3059 }),
3060 )
3061 .await;
3062
3063 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3064 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3065 let buffer = project
3066 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3067 .await
3068 .unwrap();
3069
3070 // Simulate buffer diffs being slow, so that they don't complete before
3071 // the next file change occurs.
3072 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3073
3074 // Change the buffer's file on disk, and then wait for the file change
3075 // to be detected by the worktree, so that the buffer starts reloading.
3076 fs.save(
3077 "/dir/file1".as_ref(),
3078 &"the first contents".into(),
3079 Default::default(),
3080 )
3081 .await
3082 .unwrap();
3083 worktree.next_event(cx).await;
3084
3085 cx.executor()
3086 .spawn(cx.executor().simulate_random_delay())
3087 .await;
3088
3089 // Perform a noop edit, causing the buffer's version to increase.
3090 buffer.update(cx, |buffer, cx| {
3091 buffer.edit([(0..0, " ")], None, cx);
3092 buffer.undo(cx);
3093 });
3094
3095 cx.executor().run_until_parked();
3096 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3097 buffer.read_with(cx, |buffer, _| {
3098 let buffer_text = buffer.text();
3099 if buffer_text == on_disk_text {
3100 assert!(
3101 !buffer.is_dirty() && !buffer.has_conflict(),
3102 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3103 );
3104 }
3105 // If the file change occurred while the buffer was processing the first
3106 // change, the buffer will be in a conflicting state.
3107 else {
3108 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3109 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3110 }
3111 });
3112}
3113
3114#[gpui::test]
3115async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3116 init_test(cx);
3117
3118 let fs = FakeFs::new(cx.executor());
3119 fs.insert_tree(
3120 "/dir",
3121 json!({
3122 "file1": "the old contents",
3123 }),
3124 )
3125 .await;
3126
3127 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
3128 let buffer = project
3129 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3130 .await
3131 .unwrap();
3132 buffer.update(cx, |buffer, cx| {
3133 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3134 });
3135
3136 project
3137 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3138 .await
3139 .unwrap();
3140
3141 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3142 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3143}
3144
// Verifies save-as for an untitled buffer: the file is created on disk, the
// buffer becomes clean, the language is re-detected from the new extension,
// and reopening the path yields the same buffer instance.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts out dirty and as Plain Text.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // The ".rs" extension caused the language to be re-detected as Rust.
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the newly saved path returns the same buffer, not a copy.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3196
// Verifies worktree rescanning after real filesystem renames/deletions:
// entry ids and open buffers track their files across renames, and a remote
// replica fed the observed updates converges to the same set of paths.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    // Use a real temp directory (not FakeFs) so real FS events are exercised.
    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits, to replay them on the
    // remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ]
    .into_iter()
    .map(replace_path_separator)
    .collect::<Vec<_>>();

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids are stable across renames.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers follow their files to the new paths...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // ...while the deleted file's buffer keeps its last known path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3359
3360#[gpui::test(iterations = 10)]
3361async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3362 init_test(cx);
3363
3364 let fs = FakeFs::new(cx.executor());
3365 fs.insert_tree(
3366 "/dir",
3367 json!({
3368 "a": {
3369 "file1": "",
3370 }
3371 }),
3372 )
3373 .await;
3374
3375 let project = Project::test(fs, [Path::new("/dir")], cx).await;
3376 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3377 let tree_id = tree.update(cx, |tree, _| tree.id());
3378
3379 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3380 project.update(cx, |project, cx| {
3381 let tree = project.worktrees(cx).next().unwrap();
3382 tree.read(cx)
3383 .entry_for_path(path)
3384 .unwrap_or_else(|| panic!("no entry for path {}", path))
3385 .id
3386 })
3387 };
3388
3389 let dir_id = id_for_path("a", cx);
3390 let file_id = id_for_path("a/file1", cx);
3391 let buffer = project
3392 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3393 .await
3394 .unwrap();
3395 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3396
3397 project
3398 .update(cx, |project, cx| {
3399 project.rename_entry(dir_id, Path::new("b"), cx)
3400 })
3401 .unwrap()
3402 .await
3403 .to_included()
3404 .unwrap();
3405 cx.executor().run_until_parked();
3406
3407 assert_eq!(id_for_path("b", cx), dir_id);
3408 assert_eq!(id_for_path("b/file1", cx), file_id);
3409 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3410}
3411
3412#[gpui::test]
3413async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3414 init_test(cx);
3415
3416 let fs = FakeFs::new(cx.executor());
3417 fs.insert_tree(
3418 "/dir",
3419 json!({
3420 "a.txt": "a-contents",
3421 "b.txt": "b-contents",
3422 }),
3423 )
3424 .await;
3425
3426 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3427
3428 // Spawn multiple tasks to open paths, repeating some paths.
3429 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3430 (
3431 p.open_local_buffer("/dir/a.txt", cx),
3432 p.open_local_buffer("/dir/b.txt", cx),
3433 p.open_local_buffer("/dir/a.txt", cx),
3434 )
3435 });
3436
3437 let buffer_a_1 = buffer_a_1.await.unwrap();
3438 let buffer_a_2 = buffer_a_2.await.unwrap();
3439 let buffer_b = buffer_b.await.unwrap();
3440 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3441 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3442
3443 // There is only one buffer per path.
3444 let buffer_a_id = buffer_a_1.entity_id();
3445 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3446
3447 // Open the same path again while it is still open.
3448 drop(buffer_a_1);
3449 let buffer_a_3 = project
3450 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3451 .await
3452 .unwrap();
3453
3454 // There's still only one buffer per path.
3455 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3456}
3457
// Verifies the buffer dirty-state lifecycle: an edit marks the buffer dirty,
// saving clears it, reverting to the previously-saved text clears it, and
// deleting the backing file dirties a clean buffer — each step emitting the
// expected `BufferEvent`s.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    // Records every buffer event except internal `Operation`s, which would
    // add noise to the event-sequence assertions below.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version and the
        // file's current mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only the first of the two consecutive edits flips the dirty
        // bit, so DirtyChanged appears once between the two Edited events.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3608
// Verifies on-disk change handling: a clean buffer is silently reloaded
// (edited via a diff so anchors keep sensible positions), while a dirty
// buffer keeps its local edits and is flagged as being in conflict.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the first three rows, to check
    // below that reloading preserves anchor positions across the diff.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3689
3690#[gpui::test]
3691async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3692 init_test(cx);
3693
3694 let fs = FakeFs::new(cx.executor());
3695 fs.insert_tree(
3696 "/dir",
3697 json!({
3698 "file1": "a\nb\nc\n",
3699 "file2": "one\r\ntwo\r\nthree\r\n",
3700 }),
3701 )
3702 .await;
3703
3704 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3705 let buffer1 = project
3706 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3707 .await
3708 .unwrap();
3709 let buffer2 = project
3710 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3711 .await
3712 .unwrap();
3713
3714 buffer1.update(cx, |buffer, _| {
3715 assert_eq!(buffer.text(), "a\nb\nc\n");
3716 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3717 });
3718 buffer2.update(cx, |buffer, _| {
3719 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3720 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3721 });
3722
3723 // Change a file's line endings on disk from unix to windows. The buffer's
3724 // state updates correctly.
3725 fs.save(
3726 "/dir/file1".as_ref(),
3727 &"aaa\nb\nc\n".into(),
3728 LineEnding::Windows,
3729 )
3730 .await
3731 .unwrap();
3732 cx.executor().run_until_parked();
3733 buffer1.update(cx, |buffer, _| {
3734 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3735 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3736 });
3737
3738 // Save a file with windows line endings. The file is written correctly.
3739 buffer2.update(cx, |buffer, cx| {
3740 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3741 });
3742 project
3743 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3744 .await
3745 .unwrap();
3746 assert_eq!(
3747 fs.load("/dir/file2".as_ref()).await.unwrap(),
3748 "one\r\ntwo\r\nthree\r\nfour\r\n",
3749 );
3750}
3751
// Verifies diagnostic grouping: hint diagnostics whose `related_information`
// points back at a primary diagnostic are assigned the primary's group id,
// and `diagnostic_group` yields every member of a group in position order.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Two diagnostic "families": "error 1" (warning) with a single hint, and
    // "error 2" (error) with two hints. Each hint's related_information links
    // back to its primary via the "original diagnostic" message.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Ingest the diagnostics as if published by language server 0.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries in buffer order: group 1 is the "error 1" family (primary
    // warning + one hint), group 0 is the "error 2" family (two hints + the
    // primary error, which sits on a later row).
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: the complete "error 2" family.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1: the complete "error 1" family.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3994
// Verifies that renaming a worktree entry notifies a language server that
// registered for file operations: `workspace/willRenameFiles` is sent first
// (and the WorkspaceEdit it returns is applied), then `workspace/didRenameFiles`
// is sent after the rename completes.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server registers interest in renames of `*.rs` files and of
    // any folder.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Start the rename but don't await it yet: it blocks on the server's
    // willRenameFiles response, which is provided below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the server returns from willRenameFiles; the project is
    // expected to apply it as part of the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str("file:///dir/two/two.rs").unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit the server handed back, so we can assert below that
    // the willRenameFiles request was actually handled.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .handle_request::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, "file:///dir/one.rs");
                    assert_eq!(params.files[0].new_uri, "file:///dir/three.rs");
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename resolves, the server receives didRenameFiles with the
    // same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, "file:///dir/one.rs");
            assert_eq!(params.files[0].new_uri, "file:///dir/three.rs");
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4123
// Verifies LSP symbol renaming: `prepare_rename` surfaces the range returned
// by the server, and `perform_rename` applies the server's WorkspaceEdit to
// both the defining buffer and a referencing buffer.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Advertise rename support with prepare_provider, so prepare_rename goes
    // through the server rather than being synthesized locally.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the server reports that
    // the symbol occupies bytes 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server returns edits spanning two files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each edited buffer to its undo state: the already
    // open one.rs, plus two.rs which the rename opened to apply its edits.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4260
4261#[gpui::test]
4262async fn test_search(cx: &mut gpui::TestAppContext) {
4263 init_test(cx);
4264
4265 let fs = FakeFs::new(cx.executor());
4266 fs.insert_tree(
4267 "/dir",
4268 json!({
4269 "one.rs": "const ONE: usize = 1;",
4270 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4271 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4272 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4273 }),
4274 )
4275 .await;
4276 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4277 assert_eq!(
4278 search(
4279 &project,
4280 SearchQuery::text(
4281 "TWO",
4282 false,
4283 true,
4284 false,
4285 Default::default(),
4286 Default::default(),
4287 None
4288 )
4289 .unwrap(),
4290 cx
4291 )
4292 .await
4293 .unwrap(),
4294 HashMap::from_iter([
4295 ("dir/two.rs".to_string(), vec![6..9]),
4296 ("dir/three.rs".to_string(), vec![37..40])
4297 ])
4298 );
4299
4300 let buffer_4 = project
4301 .update(cx, |project, cx| {
4302 project.open_local_buffer("/dir/four.rs", cx)
4303 })
4304 .await
4305 .unwrap();
4306 buffer_4.update(cx, |buffer, cx| {
4307 let text = "two::TWO";
4308 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4309 });
4310
4311 assert_eq!(
4312 search(
4313 &project,
4314 SearchQuery::text(
4315 "TWO",
4316 false,
4317 true,
4318 false,
4319 Default::default(),
4320 Default::default(),
4321 None,
4322 )
4323 .unwrap(),
4324 cx
4325 )
4326 .await
4327 .unwrap(),
4328 HashMap::from_iter([
4329 ("dir/two.rs".to_string(), vec![6..9]),
4330 ("dir/three.rs".to_string(), vec![37..40]),
4331 ("dir/four.rs".to_string(), vec![25..28, 36..39])
4332 ])
4333 );
4334}
4335
4336#[gpui::test]
4337async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4338 init_test(cx);
4339
4340 let search_query = "file";
4341
4342 let fs = FakeFs::new(cx.executor());
4343 fs.insert_tree(
4344 "/dir",
4345 json!({
4346 "one.rs": r#"// Rust file one"#,
4347 "one.ts": r#"// TypeScript file one"#,
4348 "two.rs": r#"// Rust file two"#,
4349 "two.ts": r#"// TypeScript file two"#,
4350 }),
4351 )
4352 .await;
4353 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4354
4355 assert!(
4356 search(
4357 &project,
4358 SearchQuery::text(
4359 search_query,
4360 false,
4361 true,
4362 false,
4363 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4364 Default::default(),
4365 None
4366 )
4367 .unwrap(),
4368 cx
4369 )
4370 .await
4371 .unwrap()
4372 .is_empty(),
4373 "If no inclusions match, no files should be returned"
4374 );
4375
4376 assert_eq!(
4377 search(
4378 &project,
4379 SearchQuery::text(
4380 search_query,
4381 false,
4382 true,
4383 false,
4384 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4385 Default::default(),
4386 None
4387 )
4388 .unwrap(),
4389 cx
4390 )
4391 .await
4392 .unwrap(),
4393 HashMap::from_iter([
4394 ("dir/one.rs".to_string(), vec![8..12]),
4395 ("dir/two.rs".to_string(), vec![8..12]),
4396 ]),
4397 "Rust only search should give only Rust files"
4398 );
4399
4400 assert_eq!(
4401 search(
4402 &project,
4403 SearchQuery::text(
4404 search_query,
4405 false,
4406 true,
4407 false,
4408
4409 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4410
4411 Default::default(),
4412 None,
4413 ).unwrap(),
4414 cx
4415 )
4416 .await
4417 .unwrap(),
4418 HashMap::from_iter([
4419 ("dir/one.ts".to_string(), vec![14..18]),
4420 ("dir/two.ts".to_string(), vec![14..18]),
4421 ]),
4422 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4423 );
4424
4425 assert_eq!(
4426 search(
4427 &project,
4428 SearchQuery::text(
4429 search_query,
4430 false,
4431 true,
4432 false,
4433
4434 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4435
4436 Default::default(),
4437 None,
4438 ).unwrap(),
4439 cx
4440 )
4441 .await
4442 .unwrap(),
4443 HashMap::from_iter([
4444 ("dir/two.ts".to_string(), vec![14..18]),
4445 ("dir/one.rs".to_string(), vec![8..12]),
4446 ("dir/one.ts".to_string(), vec![14..18]),
4447 ("dir/two.rs".to_string(), vec![8..12]),
4448 ]),
4449 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4450 );
4451}
4452
4453#[gpui::test]
4454async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4455 init_test(cx);
4456
4457 let search_query = "file";
4458
4459 let fs = FakeFs::new(cx.executor());
4460 fs.insert_tree(
4461 "/dir",
4462 json!({
4463 "one.rs": r#"// Rust file one"#,
4464 "one.ts": r#"// TypeScript file one"#,
4465 "two.rs": r#"// Rust file two"#,
4466 "two.ts": r#"// TypeScript file two"#,
4467 }),
4468 )
4469 .await;
4470 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4471
4472 assert_eq!(
4473 search(
4474 &project,
4475 SearchQuery::text(
4476 search_query,
4477 false,
4478 true,
4479 false,
4480 Default::default(),
4481 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4482 None,
4483 )
4484 .unwrap(),
4485 cx
4486 )
4487 .await
4488 .unwrap(),
4489 HashMap::from_iter([
4490 ("dir/one.rs".to_string(), vec![8..12]),
4491 ("dir/one.ts".to_string(), vec![14..18]),
4492 ("dir/two.rs".to_string(), vec![8..12]),
4493 ("dir/two.ts".to_string(), vec![14..18]),
4494 ]),
4495 "If no exclusions match, all files should be returned"
4496 );
4497
4498 assert_eq!(
4499 search(
4500 &project,
4501 SearchQuery::text(
4502 search_query,
4503 false,
4504 true,
4505 false,
4506 Default::default(),
4507 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4508 None,
4509 )
4510 .unwrap(),
4511 cx
4512 )
4513 .await
4514 .unwrap(),
4515 HashMap::from_iter([
4516 ("dir/one.ts".to_string(), vec![14..18]),
4517 ("dir/two.ts".to_string(), vec![14..18]),
4518 ]),
4519 "Rust exclusion search should give only TypeScript files"
4520 );
4521
4522 assert_eq!(
4523 search(
4524 &project,
4525 SearchQuery::text(
4526 search_query,
4527 false,
4528 true,
4529 false,
4530 Default::default(),
4531 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4532 None,
4533 ).unwrap(),
4534 cx
4535 )
4536 .await
4537 .unwrap(),
4538 HashMap::from_iter([
4539 ("dir/one.rs".to_string(), vec![8..12]),
4540 ("dir/two.rs".to_string(), vec![8..12]),
4541 ]),
4542 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4543 );
4544
4545 assert!(
4546 search(
4547 &project,
4548 SearchQuery::text(
4549 search_query,
4550 false,
4551 true,
4552 false,
4553 Default::default(),
4554
4555 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4556 None,
4557
4558 ).unwrap(),
4559 cx
4560 )
4561 .await
4562 .unwrap().is_empty(),
4563 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4564 );
4565}
4566
4567#[gpui::test]
4568async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4569 init_test(cx);
4570
4571 let search_query = "file";
4572
4573 let fs = FakeFs::new(cx.executor());
4574 fs.insert_tree(
4575 "/dir",
4576 json!({
4577 "one.rs": r#"// Rust file one"#,
4578 "one.ts": r#"// TypeScript file one"#,
4579 "two.rs": r#"// Rust file two"#,
4580 "two.ts": r#"// TypeScript file two"#,
4581 }),
4582 )
4583 .await;
4584 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4585
4586 assert!(
4587 search(
4588 &project,
4589 SearchQuery::text(
4590 search_query,
4591 false,
4592 true,
4593 false,
4594 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4595 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4596 None,
4597 )
4598 .unwrap(),
4599 cx
4600 )
4601 .await
4602 .unwrap()
4603 .is_empty(),
4604 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4605 );
4606
4607 assert!(
4608 search(
4609 &project,
4610 SearchQuery::text(
4611 search_query,
4612 false,
4613 true,
4614 false,
4615 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4616 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4617 None,
4618 ).unwrap(),
4619 cx
4620 )
4621 .await
4622 .unwrap()
4623 .is_empty(),
4624 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4625 );
4626
4627 assert!(
4628 search(
4629 &project,
4630 SearchQuery::text(
4631 search_query,
4632 false,
4633 true,
4634 false,
4635 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4636 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4637 None,
4638 )
4639 .unwrap(),
4640 cx
4641 )
4642 .await
4643 .unwrap()
4644 .is_empty(),
4645 "Non-matching inclusions and exclusions should not change that."
4646 );
4647
4648 assert_eq!(
4649 search(
4650 &project,
4651 SearchQuery::text(
4652 search_query,
4653 false,
4654 true,
4655 false,
4656 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4657 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4658 None,
4659 )
4660 .unwrap(),
4661 cx
4662 )
4663 .await
4664 .unwrap(),
4665 HashMap::from_iter([
4666 ("dir/one.ts".to_string(), vec![14..18]),
4667 ("dir/two.ts".to_string(), vec![14..18]),
4668 ]),
4669 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4670 );
4671}
4672
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies that inclusion patterns are matched against paths prefixed
    // with the worktree root name, so an inclusion can target one specific
    // worktree or span all of them.
    init_test(cx);

    // Two worktrees with identically named files so inclusion patterns are
    // the only thing distinguishing the results.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/worktree-a",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        "/worktree-b",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
        cx,
    )
    .await;

    // Inclusion scoped to worktree-a matches only its Rust file.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Same pattern scoped to worktree-b matches only that worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // An inclusion without a worktree prefix applies across all worktrees.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("worktree-a/haystack.ts".to_string(), vec![3..9]),
            ("worktree-b/haystack.ts".to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
4767
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Verifies how project search treats gitignored entries: they are
    // skipped by default, searched when the include-ignored flag is set, and
    // still subject to include/exclude path filters.
    init_test(cx);

    // `target` and `node_modules` are covered by the .gitignore; only the
    // top-level package.json is tracked content containing the query.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
    // Default query (fourth flag false): ignored directories are not searched.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project is created for each variant — presumably to avoid any
    // state carried over from the previous search; TODO confirm.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    // Fourth flag set: ignored entries are now searched too.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include/exclude filters still apply inside ignored directories.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4884
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    // Verifies path validation in `Project::create_entry` and
    // `Project::open_buffer`: entries can be created inside the worktree,
    // but any path escaping it (via `..`) is rejected.
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // Only "three" is the worktree root; "/one/two" exists but is outside it.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." is a legal file name — trailing dots are not path traversal.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // The filesystem contains the original tree plus the one entry that was
    // successfully created ("b.."), and nothing from the rejected attempts.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
4954
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // Verifies that `Project::hover` fans out to every language server that
    // advertises hover support, merges their non-empty responses, and never
    // queries a server lacking the hover capability.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Four fake servers registered for the same language; the last one does
    // NOT advertise hover support and must never be queried.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler on each server as it initializes, BEFORE the
    // hover task is awaited, so every request can be observed.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two respond with actual hover content.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            // This one is queried but returns no hover.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // No hover capability: receiving a request here is a test failure.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Wait until every hover-capable server has actually received a request.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned content contribute to the result.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5105
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // Verifies that hover responses consisting solely of empty or
    // whitespace-only markdown strings are filtered out entirely.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Respond with three hover parts that are all effectively blank.
    let mut request_handled =
        fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String("       ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        });

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Ensure the server actually received the hover request before asserting.
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
5175
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // Verifies that passing a kind filter to `Project::code_actions` returns
    // only actions of the requested kinds, even when the server offers more.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions of different kinds; only one kind will
    // be requested below.
    let mut request_handled = fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
        move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        },
    );

    // Request only the organize-imports kind over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    // Ensure the server actually received the request before asserting.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // The SOURCE_FIX_ALL action must have been filtered out.
    let code_actions = code_actions_task.await.unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.kind,
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
5253
5254#[gpui::test]
5255async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5256 init_test(cx);
5257
5258 let fs = FakeFs::new(cx.executor());
5259 fs.insert_tree(
5260 "/dir",
5261 json!({
5262 "a.tsx": "a",
5263 }),
5264 )
5265 .await;
5266
5267 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5268
5269 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5270 language_registry.add(tsx_lang());
5271 let language_server_names = [
5272 "TypeScriptServer",
5273 "TailwindServer",
5274 "ESLintServer",
5275 "NoActionsCapabilitiesServer",
5276 ];
5277
5278 let mut language_server_rxs = [
5279 language_registry.register_fake_lsp(
5280 "tsx",
5281 FakeLspAdapter {
5282 name: language_server_names[0],
5283 capabilities: lsp::ServerCapabilities {
5284 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5285 ..lsp::ServerCapabilities::default()
5286 },
5287 ..FakeLspAdapter::default()
5288 },
5289 ),
5290 language_registry.register_fake_lsp(
5291 "tsx",
5292 FakeLspAdapter {
5293 name: language_server_names[1],
5294 capabilities: lsp::ServerCapabilities {
5295 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5296 ..lsp::ServerCapabilities::default()
5297 },
5298 ..FakeLspAdapter::default()
5299 },
5300 ),
5301 language_registry.register_fake_lsp(
5302 "tsx",
5303 FakeLspAdapter {
5304 name: language_server_names[2],
5305 capabilities: lsp::ServerCapabilities {
5306 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5307 ..lsp::ServerCapabilities::default()
5308 },
5309 ..FakeLspAdapter::default()
5310 },
5311 ),
5312 language_registry.register_fake_lsp(
5313 "tsx",
5314 FakeLspAdapter {
5315 name: language_server_names[3],
5316 capabilities: lsp::ServerCapabilities {
5317 code_action_provider: None,
5318 ..lsp::ServerCapabilities::default()
5319 },
5320 ..FakeLspAdapter::default()
5321 },
5322 ),
5323 ];
5324
5325 let (buffer, _handle) = project
5326 .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.tsx", cx))
5327 .await
5328 .unwrap();
5329 cx.executor().run_until_parked();
5330
5331 let mut servers_with_actions_requests = HashMap::default();
5332 for i in 0..language_server_names.len() {
5333 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5334 panic!(
5335 "Failed to get language server #{i} with name {}",
5336 &language_server_names[i]
5337 )
5338 });
5339 let new_server_name = new_server.server.name();
5340
5341 assert!(
5342 !servers_with_actions_requests.contains_key(&new_server_name),
5343 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5344 );
5345 match new_server_name.0.as_ref() {
5346 "TailwindServer" | "TypeScriptServer" => {
5347 servers_with_actions_requests.insert(
5348 new_server_name.clone(),
5349 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5350 move |_, _| {
5351 let name = new_server_name.clone();
5352 async move {
5353 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5354 lsp::CodeAction {
5355 title: format!("{name} code action"),
5356 ..lsp::CodeAction::default()
5357 },
5358 )]))
5359 }
5360 },
5361 ),
5362 );
5363 }
5364 "ESLintServer" => {
5365 servers_with_actions_requests.insert(
5366 new_server_name,
5367 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5368 |_, _| async move { Ok(None) },
5369 ),
5370 );
5371 }
5372 "NoActionsCapabilitiesServer" => {
5373 let _never_handled = new_server
5374 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5375 panic!(
5376 "Should not call for code actions server with no corresponding capabilities"
5377 )
5378 });
5379 }
5380 unexpected => panic!("Unexpected server name: {unexpected}"),
5381 }
5382 }
5383
5384 let code_actions_task = project.update(cx, |project, cx| {
5385 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5386 });
5387
5388 // cx.run_until_parked();
5389 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5390 |mut code_actions_request| async move {
5391 code_actions_request
5392 .next()
5393 .await
5394 .expect("All code actions requests should have been triggered")
5395 },
5396 ))
5397 .await;
5398 assert_eq!(
5399 vec!["TailwindServer code action", "TypeScriptServer code action"],
5400 code_actions_task
5401 .await
5402 .unwrap()
5403 .into_iter()
5404 .map(|code_action| code_action.lsp_action.title)
5405 .sorted()
5406 .collect::<Vec<_>>(),
5407 "Should receive code actions responses from all related servers with hover capabilities"
5408 );
5409}
5410
5411#[gpui::test]
5412async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5413 init_test(cx);
5414
5415 let fs = FakeFs::new(cx.executor());
5416 fs.insert_tree(
5417 "/dir",
5418 json!({
5419 "a.rs": "let a = 1;",
5420 "b.rs": "let b = 2;",
5421 "c.rs": "let c = 2;",
5422 }),
5423 )
5424 .await;
5425
5426 let project = Project::test(
5427 fs,
5428 [
5429 "/dir/a.rs".as_ref(),
5430 "/dir/b.rs".as_ref(),
5431 "/dir/c.rs".as_ref(),
5432 ],
5433 cx,
5434 )
5435 .await;
5436
5437 // check the initial state and get the worktrees
5438 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5439 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5440 assert_eq!(worktrees.len(), 3);
5441
5442 let worktree_a = worktrees[0].read(cx);
5443 let worktree_b = worktrees[1].read(cx);
5444 let worktree_c = worktrees[2].read(cx);
5445
5446 // check they start in the right order
5447 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5448 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5449 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5450
5451 (
5452 worktrees[0].clone(),
5453 worktrees[1].clone(),
5454 worktrees[2].clone(),
5455 )
5456 });
5457
5458 // move first worktree to after the second
5459 // [a, b, c] -> [b, a, c]
5460 project
5461 .update(cx, |project, cx| {
5462 let first = worktree_a.read(cx);
5463 let second = worktree_b.read(cx);
5464 project.move_worktree(first.id(), second.id(), cx)
5465 })
5466 .expect("moving first after second");
5467
5468 // check the state after moving
5469 project.update(cx, |project, cx| {
5470 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5471 assert_eq!(worktrees.len(), 3);
5472
5473 let first = worktrees[0].read(cx);
5474 let second = worktrees[1].read(cx);
5475 let third = worktrees[2].read(cx);
5476
5477 // check they are now in the right order
5478 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5479 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5480 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5481 });
5482
5483 // move the second worktree to before the first
5484 // [b, a, c] -> [a, b, c]
5485 project
5486 .update(cx, |project, cx| {
5487 let second = worktree_a.read(cx);
5488 let first = worktree_b.read(cx);
5489 project.move_worktree(first.id(), second.id(), cx)
5490 })
5491 .expect("moving second before first");
5492
5493 // check the state after moving
5494 project.update(cx, |project, cx| {
5495 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5496 assert_eq!(worktrees.len(), 3);
5497
5498 let first = worktrees[0].read(cx);
5499 let second = worktrees[1].read(cx);
5500 let third = worktrees[2].read(cx);
5501
5502 // check they are now in the right order
5503 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5504 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5505 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5506 });
5507
5508 // move the second worktree to after the third
5509 // [a, b, c] -> [a, c, b]
5510 project
5511 .update(cx, |project, cx| {
5512 let second = worktree_b.read(cx);
5513 let third = worktree_c.read(cx);
5514 project.move_worktree(second.id(), third.id(), cx)
5515 })
5516 .expect("moving second after third");
5517
5518 // check the state after moving
5519 project.update(cx, |project, cx| {
5520 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5521 assert_eq!(worktrees.len(), 3);
5522
5523 let first = worktrees[0].read(cx);
5524 let second = worktrees[1].read(cx);
5525 let third = worktrees[2].read(cx);
5526
5527 // check they are now in the right order
5528 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5529 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5530 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5531 });
5532
5533 // move the third worktree to before the second
5534 // [a, c, b] -> [a, b, c]
5535 project
5536 .update(cx, |project, cx| {
5537 let third = worktree_c.read(cx);
5538 let second = worktree_b.read(cx);
5539 project.move_worktree(third.id(), second.id(), cx)
5540 })
5541 .expect("moving third before second");
5542
5543 // check the state after moving
5544 project.update(cx, |project, cx| {
5545 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5546 assert_eq!(worktrees.len(), 3);
5547
5548 let first = worktrees[0].read(cx);
5549 let second = worktrees[1].read(cx);
5550 let third = worktrees[2].read(cx);
5551
5552 // check they are now in the right order
5553 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5554 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5555 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5556 });
5557
5558 // move the first worktree to after the third
5559 // [a, b, c] -> [b, c, a]
5560 project
5561 .update(cx, |project, cx| {
5562 let first = worktree_a.read(cx);
5563 let third = worktree_c.read(cx);
5564 project.move_worktree(first.id(), third.id(), cx)
5565 })
5566 .expect("moving first after third");
5567
5568 // check the state after moving
5569 project.update(cx, |project, cx| {
5570 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5571 assert_eq!(worktrees.len(), 3);
5572
5573 let first = worktrees[0].read(cx);
5574 let second = worktrees[1].read(cx);
5575 let third = worktrees[2].read(cx);
5576
5577 // check they are now in the right order
5578 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5579 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5580 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5581 });
5582
5583 // move the third worktree to before the first
5584 // [b, c, a] -> [a, b, c]
5585 project
5586 .update(cx, |project, cx| {
5587 let third = worktree_a.read(cx);
5588 let first = worktree_b.read(cx);
5589 project.move_worktree(third.id(), first.id(), cx)
5590 })
5591 .expect("moving third before first");
5592
5593 // check the state after moving
5594 project.update(cx, |project, cx| {
5595 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5596 assert_eq!(worktrees.len(), 3);
5597
5598 let first = worktrees[0].read(cx);
5599 let second = worktrees[1].read(cx);
5600 let third = worktrees[2].read(cx);
5601
5602 // check they are now in the right order
5603 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5604 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5605 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5606 });
5607}
5608
#[gpui::test]
async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that `Project::open_unstaged_changes` diffs a buffer against
    // the git index contents, and that the diff updates when the index
    // changes out from under it.
    init_test(cx);

    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // Seed the fake repository's index so it differs from the working copy.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[(Path::new("src/main.rs"), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_changes = project
        .update(cx, |project, cx| {
            project.open_unstaged_changes(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Initial diff: one added comment line and one modified println line.
    cx.run_until_parked();
    unstaged_changes.update(cx, |unstaged_changes, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_changes.base_text.as_ref().unwrap().read(cx).text(),
            &[
                (0..1, "", "// print goodbye\n"),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                ),
            ],
        );
    });

    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    // Re-stage with the comment included: only the println line remains
    // unstaged.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[(Path::new("src/main.rs"), staged_contents)],
    );

    cx.run_until_parked();
    unstaged_changes.update(cx, |unstaged_changes, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_changes.base_text.as_ref().unwrap().read(cx).text(),
            &[(2..3, "", "    println!(\"goodbye world\");\n")],
        );
    });
}
5700
5701async fn search(
5702 project: &Model<Project>,
5703 query: SearchQuery,
5704 cx: &mut gpui::TestAppContext,
5705) -> Result<HashMap<String, Vec<Range<usize>>>> {
5706 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
5707 let mut results = HashMap::default();
5708 while let Ok(search_result) = search_rx.recv().await {
5709 match search_result {
5710 SearchResult::Buffer { buffer, ranges } => {
5711 results.entry(buffer).or_insert(ranges);
5712 }
5713 SearchResult::LimitReached => {}
5714 }
5715 }
5716 Ok(results
5717 .into_iter()
5718 .map(|(buffer, ranges)| {
5719 buffer.update(cx, |buffer, cx| {
5720 let path = buffer
5721 .file()
5722 .unwrap()
5723 .full_path(cx)
5724 .to_string_lossy()
5725 .to_string();
5726 let ranges = ranges
5727 .into_iter()
5728 .map(|range| range.to_offset(buffer))
5729 .collect::<Vec<_>>();
5730 (path, ranges)
5731 })
5732 })
5733 .collect())
5734}
5735
5736pub fn init_test(cx: &mut gpui::TestAppContext) {
5737 if std::env::var("RUST_LOG").is_ok() {
5738 env_logger::try_init().ok();
5739 }
5740
5741 cx.update(|cx| {
5742 let settings_store = SettingsStore::test(cx);
5743 cx.set_global(settings_store);
5744 release_channel::init(SemanticVersion::default(), cx);
5745 language::init(cx);
5746 Project::init_settings(cx);
5747 });
5748}
5749
5750fn json_lang() -> Arc<Language> {
5751 Arc::new(Language::new(
5752 LanguageConfig {
5753 name: "JSON".into(),
5754 matcher: LanguageMatcher {
5755 path_suffixes: vec!["json".to_string()],
5756 ..Default::default()
5757 },
5758 ..Default::default()
5759 },
5760 None,
5761 ))
5762}
5763
5764fn js_lang() -> Arc<Language> {
5765 Arc::new(Language::new(
5766 LanguageConfig {
5767 name: "JavaScript".into(),
5768 matcher: LanguageMatcher {
5769 path_suffixes: vec!["js".to_string()],
5770 ..Default::default()
5771 },
5772 ..Default::default()
5773 },
5774 None,
5775 ))
5776}
5777
5778fn rust_lang() -> Arc<Language> {
5779 Arc::new(Language::new(
5780 LanguageConfig {
5781 name: "Rust".into(),
5782 matcher: LanguageMatcher {
5783 path_suffixes: vec!["rs".to_string()],
5784 ..Default::default()
5785 },
5786 ..Default::default()
5787 },
5788 Some(tree_sitter_rust::LANGUAGE.into()),
5789 ))
5790}
5791
5792fn typescript_lang() -> Arc<Language> {
5793 Arc::new(Language::new(
5794 LanguageConfig {
5795 name: "TypeScript".into(),
5796 matcher: LanguageMatcher {
5797 path_suffixes: vec!["ts".to_string()],
5798 ..Default::default()
5799 },
5800 ..Default::default()
5801 },
5802 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
5803 ))
5804}
5805
5806fn tsx_lang() -> Arc<Language> {
5807 Arc::new(Language::new(
5808 LanguageConfig {
5809 name: "tsx".into(),
5810 matcher: LanguageMatcher {
5811 path_suffixes: vec!["tsx".to_string()],
5812 ..Default::default()
5813 },
5814 ..Default::default()
5815 },
5816 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
5817 ))
5818}
5819
5820fn get_all_tasks(
5821 project: &Model<Project>,
5822 worktree_id: Option<WorktreeId>,
5823 task_context: &TaskContext,
5824 cx: &mut AppContext,
5825) -> Vec<(TaskSourceKind, ResolvedTask)> {
5826 let (mut old, new) = project.update(cx, |project, cx| {
5827 project
5828 .task_store
5829 .read(cx)
5830 .task_inventory()
5831 .unwrap()
5832 .read(cx)
5833 .used_and_current_resolved_tasks(worktree_id, None, task_context, cx)
5834 });
5835 old.extend(new);
5836 old
5837}