1use crate::{Event, *};
2use ::git::diff::assert_hunks;
3use fs::FakeFs;
4use futures::{future, StreamExt};
5use gpui::{App, SemanticVersion, UpdateGlobal};
6use http_client::Url;
7use language::{
8 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
9 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
10 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
11 OffsetRangeExt, Point, ToPoint,
12};
13use lsp::{
14 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
15 NumberOrString, TextDocumentEdit, WillRenameFiles,
16};
17use parking_lot::Mutex;
18use pretty_assertions::{assert_eq, assert_matches};
19use serde_json::json;
20#[cfg(not(windows))]
21use std::os;
22use std::{str::FromStr, sync::OnceLock};
23
24use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
25use task::{ResolvedTask, TaskContext};
26use unindent::Unindent as _;
27use util::{
28 assert_set_eq,
29 paths::{replace_path_separator, PathMatcher},
30 test::TempTree,
31 TryFutureExt as _,
32};
33
34#[gpui::test]
35async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
36 cx.executor().allow_parking();
37
38 let (tx, mut rx) = futures::channel::mpsc::unbounded();
39 let _thread = std::thread::spawn(move || {
40 std::fs::metadata("/tmp").unwrap();
41 std::thread::sleep(Duration::from_millis(1000));
42 tx.unbounded_send(1).unwrap();
43 });
44 rx.next().await.unwrap();
45}
46
47#[gpui::test]
48async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
49 cx.executor().allow_parking();
50
51 let io_task = smol::unblock(move || {
52 println!("sleeping on thread {:?}", std::thread::current().id());
53 std::thread::sleep(Duration::from_millis(10));
54 1
55 });
56
57 let task = cx.foreground_executor().spawn(async move {
58 io_task.await;
59 });
60
61 task.await;
62}
63
64#[cfg(not(windows))]
65#[gpui::test]
66async fn test_symlinks(cx: &mut gpui::TestAppContext) {
67 init_test(cx);
68 cx.executor().allow_parking();
69
70 let dir = TempTree::new(json!({
71 "root": {
72 "apple": "",
73 "banana": {
74 "carrot": {
75 "date": "",
76 "endive": "",
77 }
78 },
79 "fennel": {
80 "grape": "",
81 }
82 }
83 }));
84
85 let root_link_path = dir.path().join("root_link");
86 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
87 os::unix::fs::symlink(
88 dir.path().join("root/fennel"),
89 dir.path().join("root/finnochio"),
90 )
91 .unwrap();
92
93 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
94
95 project.update(cx, |project, cx| {
96 let tree = project.worktrees(cx).next().unwrap().read(cx);
97 assert_eq!(tree.file_count(), 5);
98 assert_eq!(
99 tree.inode_for_path("fennel/grape"),
100 tree.inode_for_path("finnochio/grape")
101 );
102 });
103}
104
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Fixture: a root .editorconfig, project settings in .zed/, and a nested
    // .editorconfig in b/ that partially overrides the root one.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the on-disk tree into a FakeFs so the project sees it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let settings and worktree scans settle before querying.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for a file in this worktree.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by .editorconfig's glob "*.rs",
        // so the .zed/settings.json tab_size applies.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
194
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Fixture: worktree-level settings/tasks in .zed/, plus a nested b/.zed
    // that defines its own settings and tasks.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    // Wait for settings files to be scanned and applied.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // Source kind for tasks declared in the top-level .zed directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolve per-file: a/ gets the root settings, b/ gets
            // the nested override.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files contribute; nested directory tasks come first.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the top-level task as most recently scheduled, and add a global
    // tasks.json entry via the file-based task source.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task is promoted to the front; the new global
    // task (with its env) appears last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
388
// End-to-end check of language-server lifecycle management: servers start
// lazily when a matching buffer opens, buffers are configured from server
// capabilities, edits/saves/renames are routed only to the matching servers,
// and restarts reopen the relevant documents.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Two fake servers with distinct completion triggers so we can tell
    // which server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic on the renamed buffer so we can verify it is cleared
    // when the buffer later changes language.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
786
// Verifies that `workspace/didChangeWatchedFiles` registrations cause ignored
// directories to be loaded, and that only FS mutations matching the watch
// globs are forwarded to the language server.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // FakeFs paths need a drive prefix on Windows.
    fn add_root_for_windows(path: &str) -> String {
        if cfg!(windows) {
            format!("C:{}", path)
        } else {
            path.to_string()
        }
    }

    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        add_root_for_windows("/the-root"),
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [add_root_for_windows("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(add_root_for_windows("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(add_root_for_windows(
                                    "/the-root/Cargo.toml",
                                )),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(add_root_for_windows(
                                    "/the-root/src/*.{rs,c}",
                                )),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(add_root_for_windows(
                                    "/the-root/target/y/**/*.rs",
                                )),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Collect all reported file events, sorted by URI for deterministic assertions.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering the watchers alone produces no change events, but does
    // trigger additional directory scans of the ignored `target` subtree.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(
        add_root_for_windows("/the-root/src/c.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        add_root_for_windows("/the-root/src/d.txt").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.remove_file(
        add_root_for_windows("/the-root/src/b.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        add_root_for_windows("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        add_root_for_windows("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(add_root_for_windows("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(add_root_for_windows("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(add_root_for_windows("/the-root/target/y/out/y2.rs"))
                    .unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
1004
// Diagnostics published for one single-file worktree must not leak into a
// sibling single-file worktree rooted in the same directory.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Two single-file worktrees sharing the same parent directory.
    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Publish one diagnostic per file from the same (fake) server id.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer sees exactly its own diagnostic, with the right severity.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1097
1098#[gpui::test]
1099async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1100 init_test(cx);
1101
1102 let fs = FakeFs::new(cx.executor());
1103 fs.insert_tree(
1104 "/root",
1105 json!({
1106 "dir": {
1107 ".git": {
1108 "HEAD": "ref: refs/heads/main",
1109 },
1110 ".gitignore": "b.rs",
1111 "a.rs": "let a = 1;",
1112 "b.rs": "let b = 2;",
1113 },
1114 "other.rs": "let b = c;"
1115 }),
1116 )
1117 .await;
1118
1119 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1120 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1121 let (worktree, _) = project
1122 .update(cx, |project, cx| {
1123 project.find_or_create_worktree("/root/dir", true, cx)
1124 })
1125 .await
1126 .unwrap();
1127 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1128
1129 let (worktree, _) = project
1130 .update(cx, |project, cx| {
1131 project.find_or_create_worktree("/root/other.rs", false, cx)
1132 })
1133 .await
1134 .unwrap();
1135 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1136
1137 let server_id = LanguageServerId(0);
1138 lsp_store.update(cx, |lsp_store, cx| {
1139 lsp_store
1140 .update_diagnostics(
1141 server_id,
1142 lsp::PublishDiagnosticsParams {
1143 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1144 version: None,
1145 diagnostics: vec![lsp::Diagnostic {
1146 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1147 severity: Some(lsp::DiagnosticSeverity::ERROR),
1148 message: "unused variable 'b'".to_string(),
1149 ..Default::default()
1150 }],
1151 },
1152 &[],
1153 cx,
1154 )
1155 .unwrap();
1156 lsp_store
1157 .update_diagnostics(
1158 server_id,
1159 lsp::PublishDiagnosticsParams {
1160 uri: Url::from_file_path("/root/other.rs").unwrap(),
1161 version: None,
1162 diagnostics: vec![lsp::Diagnostic {
1163 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1164 severity: Some(lsp::DiagnosticSeverity::ERROR),
1165 message: "unknown variable 'c'".to_string(),
1166 ..Default::default()
1167 }],
1168 },
1169 &[],
1170 cx,
1171 )
1172 .unwrap();
1173 });
1174
1175 let main_ignored_buffer = project
1176 .update(cx, |project, cx| {
1177 project.open_buffer((main_worktree_id, "b.rs"), cx)
1178 })
1179 .await
1180 .unwrap();
1181 main_ignored_buffer.update(cx, |buffer, _| {
1182 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1183 assert_eq!(
1184 chunks
1185 .iter()
1186 .map(|(s, d)| (s.as_str(), *d))
1187 .collect::<Vec<_>>(),
1188 &[
1189 ("let ", None),
1190 ("b", Some(DiagnosticSeverity::ERROR)),
1191 (" = 2;", None),
1192 ],
1193 "Gigitnored buffers should still get in-buffer diagnostics",
1194 );
1195 });
1196 let other_buffer = project
1197 .update(cx, |project, cx| {
1198 project.open_buffer((other_worktree_id, ""), cx)
1199 })
1200 .await
1201 .unwrap();
1202 other_buffer.update(cx, |buffer, _| {
1203 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1204 assert_eq!(
1205 chunks
1206 .iter()
1207 .map(|(s, d)| (s.as_str(), *d))
1208 .collect::<Vec<_>>(),
1209 &[
1210 ("let b = ", None),
1211 ("c", Some(DiagnosticSeverity::ERROR)),
1212 (";", None),
1213 ],
1214 "Buffers from hidden projects should still get in-buffer diagnostics"
1215 );
1216 });
1217
1218 project.update(cx, |project, cx| {
1219 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1220 assert_eq!(
1221 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1222 vec![(
1223 ProjectPath {
1224 worktree_id: main_worktree_id,
1225 path: Arc::from(Path::new("b.rs")),
1226 },
1227 server_id,
1228 DiagnosticSummary {
1229 error_count: 1,
1230 warning_count: 0,
1231 }
1232 )]
1233 );
1234 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1235 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1236 });
1237}
1238
// Verifies the event sequence emitted while a language server reports
// disk-based diagnostics under a work-done progress token:
// LanguageServerAdded -> RefreshInlayHints -> DiskBasedDiagnosticsStarted ->
// DiagnosticsUpdated -> DiskBasedDiagnosticsFinished, and that re-publishing
// identical empty diagnostics produces no additional event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/b.rs", cx)
        })
        .await
        .unwrap();

    // Subscribe to project events before the server reports anything,
    // so the full sequence below is observed.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // The token is prefixed with the registered disk-based progress token;
    // the "/0" suffix presumably distinguishes individual runs — matching
    // appears to be by prefix (TODO confirm against the progress handling).
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic for a.rs while progress is running emits a
    // per-path DiagnosticsUpdated event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the progress token marks disk-based diagnostics as finished.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The published diagnostic is present in the buffer snapshot.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // The second identical empty publish is a no-op: no further events arrive.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1374
// Restarting a language server while its disk-based diagnostics are still
// in flight: the replacement server is tracked under a new server id, and
// ending its progress marks diagnostics done even though the old server's
// progress token was never ended.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The replacement server is assigned the next id (1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server id is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1462
// Restarting a language server clears the diagnostics it had published,
// both from the open buffer and from the project-level summary.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // After the notification is processed, the diagnostic is visible in the
    // buffer and counted in the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1543
1544#[gpui::test]
1545async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1546 init_test(cx);
1547
1548 let fs = FakeFs::new(cx.executor());
1549 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1550
1551 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1552 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1553
1554 language_registry.add(rust_lang());
1555 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1556
1557 let (buffer, _handle) = project
1558 .update(cx, |project, cx| {
1559 project.open_local_buffer_with_lsp("/dir/a.rs", cx)
1560 })
1561 .await
1562 .unwrap();
1563
1564 // Before restarting the server, report diagnostics with an unknown buffer version.
1565 let fake_server = fake_servers.next().await.unwrap();
1566 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1567 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1568 version: Some(10000),
1569 diagnostics: Vec::new(),
1570 });
1571 cx.executor().run_until_parked();
1572
1573 project.update(cx, |project, cx| {
1574 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1575 });
1576 let mut fake_server = fake_servers.next().await.unwrap();
1577 let notification = fake_server
1578 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1579 .await
1580 .text_document;
1581 assert_eq!(notification.version, 0);
1582}
1583
1584#[gpui::test]
1585async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
1586 init_test(cx);
1587
1588 let progress_token = "the-progress-token";
1589
1590 let fs = FakeFs::new(cx.executor());
1591 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1592
1593 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1594
1595 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1596 language_registry.add(rust_lang());
1597 let mut fake_servers = language_registry.register_fake_lsp(
1598 "Rust",
1599 FakeLspAdapter {
1600 name: "the-language-server",
1601 disk_based_diagnostics_sources: vec!["disk".into()],
1602 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1603 ..Default::default()
1604 },
1605 );
1606
1607 let (buffer, _handle) = project
1608 .update(cx, |project, cx| {
1609 project.open_local_buffer_with_lsp("/dir/a.rs", cx)
1610 })
1611 .await
1612 .unwrap();
1613
1614 // Simulate diagnostics starting to update.
1615 let mut fake_server = fake_servers.next().await.unwrap();
1616 fake_server
1617 .start_progress_with(
1618 "another-token",
1619 lsp::WorkDoneProgressBegin {
1620 cancellable: Some(false),
1621 ..Default::default()
1622 },
1623 )
1624 .await;
1625 fake_server
1626 .start_progress_with(
1627 progress_token,
1628 lsp::WorkDoneProgressBegin {
1629 cancellable: Some(true),
1630 ..Default::default()
1631 },
1632 )
1633 .await;
1634 cx.executor().run_until_parked();
1635
1636 project.update(cx, |project, cx| {
1637 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
1638 });
1639
1640 let cancel_notification = fake_server
1641 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
1642 .await;
1643 assert_eq!(
1644 cancel_notification.token,
1645 NumberOrString::String(progress_token.into())
1646 );
1647}
1648
// Verifies that toggling the per-language `enable_language_server` setting
// stops and starts only the server for that language, leaving other
// languages' servers untouched.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts each language's server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/b.js", cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server is told to exit; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // Re-opening the Rust buffer spawns a fresh Rust server instance...
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    // ...while the JavaScript server shuts down.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1772
// Verifies that diagnostics published against an older buffer version are
// translated through the edits made since that version, that overlapping
// diagnostics are both surfaced when chunking the buffer, and that ranges
// arriving out of (row) order from the server are handled correctly.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Registering the buffer with language servers starts the fake server.
    let _handle = lsp_store.update(cx, |lsp_store, cx| {
        lsp_store.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Rows shifted by the two inserted newlines: row 1 -> 3, row 2 -> 4.
        // Note: group ids appear to be allocated sequentially per published
        // diagnostic — TODO confirm against the diagnostic-set implementation.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider warning is returned before the nested error; the error's
        // severity takes precedence within the overlapping span when chunking.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Entries come back sorted by position, with ranges adjusted for the
        // edits made after the published version.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2057
2058#[gpui::test]
2059async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2060 init_test(cx);
2061
2062 let text = concat!(
2063 "let one = ;\n", //
2064 "let two = \n",
2065 "let three = 3;\n",
2066 );
2067
2068 let fs = FakeFs::new(cx.executor());
2069 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2070
2071 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2072 let buffer = project
2073 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2074 .await
2075 .unwrap();
2076
2077 project.update(cx, |project, cx| {
2078 project.lsp_store.update(cx, |lsp_store, cx| {
2079 lsp_store
2080 .update_diagnostic_entries(
2081 LanguageServerId(0),
2082 PathBuf::from("/dir/a.rs"),
2083 None,
2084 vec![
2085 DiagnosticEntry {
2086 range: Unclipped(PointUtf16::new(0, 10))
2087 ..Unclipped(PointUtf16::new(0, 10)),
2088 diagnostic: Diagnostic {
2089 severity: DiagnosticSeverity::ERROR,
2090 message: "syntax error 1".to_string(),
2091 ..Default::default()
2092 },
2093 },
2094 DiagnosticEntry {
2095 range: Unclipped(PointUtf16::new(1, 10))
2096 ..Unclipped(PointUtf16::new(1, 10)),
2097 diagnostic: Diagnostic {
2098 severity: DiagnosticSeverity::ERROR,
2099 message: "syntax error 2".to_string(),
2100 ..Default::default()
2101 },
2102 },
2103 ],
2104 cx,
2105 )
2106 .unwrap();
2107 })
2108 });
2109
2110 // An empty range is extended forward to include the following character.
2111 // At the end of a line, an empty range is extended backward to include
2112 // the preceding character.
2113 buffer.update(cx, |buffer, _| {
2114 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2115 assert_eq!(
2116 chunks
2117 .iter()
2118 .map(|(s, d)| (s.as_str(), *d))
2119 .collect::<Vec<_>>(),
2120 &[
2121 ("let one = ", None),
2122 (";", Some(DiagnosticSeverity::ERROR)),
2123 ("\nlet two =", None),
2124 (" ", Some(DiagnosticSeverity::ERROR)),
2125 ("\nlet three = 3;\n", None)
2126 ]
2127 );
2128 });
2129}
2130
2131#[gpui::test]
2132async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2133 init_test(cx);
2134
2135 let fs = FakeFs::new(cx.executor());
2136 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2137 .await;
2138
2139 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2140 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2141
2142 lsp_store.update(cx, |lsp_store, cx| {
2143 lsp_store
2144 .update_diagnostic_entries(
2145 LanguageServerId(0),
2146 Path::new("/dir/a.rs").to_owned(),
2147 None,
2148 vec![DiagnosticEntry {
2149 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2150 diagnostic: Diagnostic {
2151 severity: DiagnosticSeverity::ERROR,
2152 is_primary: true,
2153 message: "syntax error a1".to_string(),
2154 ..Default::default()
2155 },
2156 }],
2157 cx,
2158 )
2159 .unwrap();
2160 lsp_store
2161 .update_diagnostic_entries(
2162 LanguageServerId(1),
2163 Path::new("/dir/a.rs").to_owned(),
2164 None,
2165 vec![DiagnosticEntry {
2166 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2167 diagnostic: Diagnostic {
2168 severity: DiagnosticSeverity::ERROR,
2169 is_primary: true,
2170 message: "syntax error b1".to_string(),
2171 ..Default::default()
2172 },
2173 }],
2174 cx,
2175 )
2176 .unwrap();
2177
2178 assert_eq!(
2179 lsp_store.diagnostic_summary(false, cx),
2180 DiagnosticSummary {
2181 error_count: 2,
2182 warning_count: 0,
2183 }
2184 );
2185 });
2186}
2187
// Verifies that `edits_from_lsp` correctly applies edits that a language
// server computed against an *older* version of the document. The buffer is
// edited after the server's snapshot (identified by the version from the
// DidOpen notification), and the resulting edits must still land on the code
// the server intended to change, while preserving the user's newer edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();

    // Capture the document version the fake server saw at open time; the
    // LSP edits below are expressed against this version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f1();
            }
            fn b() {
                // inside second function f2();
            }
            fn c() {
                f3();
            }
            "
            .unindent()
        );
    });

    // Interpret the server's edits, passing the stale document version so the
    // store can map the ranges through the buffer's newer edits.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must change the intended code while
    // keeping the comments that were inserted after the server's snapshot.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f10();
            }
            fn b() {
                // inside second function f200();
            }
            fn c() {
                f4000();
            }
            "
            .unindent()
        );
    });
}
2342
// Verifies that `edits_from_lsp` minimizes a large server-sent diff: a
// whole-file rewrite (as rust-analyzer emits for a merge-imports action) is
// reduced to the small set of edits that actually change text, so only two
// edits remain after normalization.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the anchor ranges to points so they can be compared.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The big delete/reinsert diff collapses to just these two edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2451
// Verifies that `edits_from_lsp` tolerates malformed server edits: edits
// arriving out of order, with inverted (end-before-start) ranges, or with
// positions past the end of the file. All of these must be normalized into
// the same minimal, well-ordered edit list as the valid-diff case above.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end column precedes start column.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Out-of-bounds range: line 99 does not exist.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the anchor ranges to points so they can be compared.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the normalized result matches the
        // valid-diff case exactly.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2556
2557fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2558 buffer: &Buffer,
2559 range: Range<T>,
2560) -> Vec<(String, Option<DiagnosticSeverity>)> {
2561 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2562 for chunk in buffer.snapshot().chunks(range, true) {
2563 if chunks.last().map_or(false, |prev_chunk| {
2564 prev_chunk.1 == chunk.diagnostic_severity
2565 }) {
2566 chunks.last_mut().unwrap().0.push_str(chunk.text);
2567 } else {
2568 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2569 }
2570 }
2571 chunks
2572}
2573
// Verifies go-to-definition into a file outside the project: the target is
// opened in a new *invisible* worktree, no additional language server is
// started for it, and the temporary worktree is released once the last
// reference to the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;
    // The definition target lives outside the project's worktree.
    fs.insert_tree(
        "/another_dir",
        json!({
            "a.rs": "const fn a() { A }"}),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/b.rs", cx)
        })
        .await
        .unwrap();
    // Respond to the definition request with a location in the external file.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/another_dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/another_dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // The external file appears as an extra worktree that is not visible.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                ("/another_dir/a.rs".as_ref(), false),
                ("/dir".as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir".as_ref(), true)]);
    });

    // Returns each worktree's absolute path paired with its visibility.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2671
// Verifies that when a completion item has no text-edit range, the old range
// to replace is inferred from the text around the cursor: the partial word
// being typed, or the partial token inside a string literal.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completion with `insert_text` but no edit range. The old range
    // should cover the partial word "fqn" before the cursor.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal. The old range should cover
    // the partial segment "cmp" but exclude the closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2763
// Verifies that carriage returns in a completion's `insert_text` (both bare
// "\r" and "\r\n") are normalized to "\n" in the resulting `new_text`.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert text mixes "\r" and "\r\n" line endings.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both CR variants were normalized to plain newlines.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2824
// Verifies the command-based code-action flow: when resolving an action
// yields a command instead of edits, the editor executes the command, the
// server responds with a `workspace/applyEdit` request, and those edits end
// up in the returned project transaction (and are undoable).
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        // The server supports lazily resolving actions.
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2959
2960#[gpui::test(iterations = 10)]
2961async fn test_save_file(cx: &mut gpui::TestAppContext) {
2962 init_test(cx);
2963
2964 let fs = FakeFs::new(cx.executor());
2965 fs.insert_tree(
2966 "/dir",
2967 json!({
2968 "file1": "the old contents",
2969 }),
2970 )
2971 .await;
2972
2973 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2974 let buffer = project
2975 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2976 .await
2977 .unwrap();
2978 buffer.update(cx, |buffer, cx| {
2979 assert_eq!(buffer.text(), "the old contents");
2980 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2981 });
2982
2983 project
2984 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2985 .await
2986 .unwrap();
2987
2988 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2989 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2990}
2991
2992#[gpui::test(iterations = 30)]
2993async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2994 init_test(cx);
2995
2996 let fs = FakeFs::new(cx.executor().clone());
2997 fs.insert_tree(
2998 "/dir",
2999 json!({
3000 "file1": "the original contents",
3001 }),
3002 )
3003 .await;
3004
3005 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3006 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3007 let buffer = project
3008 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3009 .await
3010 .unwrap();
3011
3012 // Simulate buffer diffs being slow, so that they don't complete before
3013 // the next file change occurs.
3014 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3015
3016 // Change the buffer's file on disk, and then wait for the file change
3017 // to be detected by the worktree, so that the buffer starts reloading.
3018 fs.save(
3019 "/dir/file1".as_ref(),
3020 &"the first contents".into(),
3021 Default::default(),
3022 )
3023 .await
3024 .unwrap();
3025 worktree.next_event(cx).await;
3026
3027 // Change the buffer's file again. Depending on the random seed, the
3028 // previous file change may still be in progress.
3029 fs.save(
3030 "/dir/file1".as_ref(),
3031 &"the second contents".into(),
3032 Default::default(),
3033 )
3034 .await
3035 .unwrap();
3036 worktree.next_event(cx).await;
3037
3038 cx.executor().run_until_parked();
3039 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3040 buffer.read_with(cx, |buffer, _| {
3041 assert_eq!(buffer.text(), on_disk_text);
3042 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3043 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3044 });
3045}
3046
3047#[gpui::test(iterations = 30)]
3048async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3049 init_test(cx);
3050
3051 let fs = FakeFs::new(cx.executor().clone());
3052 fs.insert_tree(
3053 "/dir",
3054 json!({
3055 "file1": "the original contents",
3056 }),
3057 )
3058 .await;
3059
3060 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3061 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3062 let buffer = project
3063 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3064 .await
3065 .unwrap();
3066
3067 // Simulate buffer diffs being slow, so that they don't complete before
3068 // the next file change occurs.
3069 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3070
3071 // Change the buffer's file on disk, and then wait for the file change
3072 // to be detected by the worktree, so that the buffer starts reloading.
3073 fs.save(
3074 "/dir/file1".as_ref(),
3075 &"the first contents".into(),
3076 Default::default(),
3077 )
3078 .await
3079 .unwrap();
3080 worktree.next_event(cx).await;
3081
3082 cx.executor()
3083 .spawn(cx.executor().simulate_random_delay())
3084 .await;
3085
3086 // Perform a noop edit, causing the buffer's version to increase.
3087 buffer.update(cx, |buffer, cx| {
3088 buffer.edit([(0..0, " ")], None, cx);
3089 buffer.undo(cx);
3090 });
3091
3092 cx.executor().run_until_parked();
3093 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3094 buffer.read_with(cx, |buffer, _| {
3095 let buffer_text = buffer.text();
3096 if buffer_text == on_disk_text {
3097 assert!(
3098 !buffer.is_dirty() && !buffer.has_conflict(),
3099 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3100 );
3101 }
3102 // If the file change occurred while the buffer was processing the first
3103 // change, the buffer will be in a conflicting state.
3104 else {
3105 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3106 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3107 }
3108 });
3109}
3110
3111#[gpui::test]
3112async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3113 init_test(cx);
3114
3115 let fs = FakeFs::new(cx.executor());
3116 fs.insert_tree(
3117 "/dir",
3118 json!({
3119 "file1": "the old contents",
3120 }),
3121 )
3122 .await;
3123
3124 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
3125 let buffer = project
3126 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3127 .await
3128 .unwrap();
3129 buffer.update(cx, |buffer, cx| {
3130 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3131 });
3132
3133 project
3134 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3135 .await
3136 .unwrap();
3137
3138 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3139 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3140}
3141
// Verifies `save_buffer_as` on an untitled buffer: the buffer gains a file,
// its dirty flag clears, its language is re-detected from the new extension,
// and a subsequent open of the same path returns the identical buffer entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts out dirty with the Plain Text language.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    // After saving, the buffer is clean and the language is re-detected
    // from the ".rs" extension.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the saved path must return the same buffer entity, not a copy.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3193
// Uses a real filesystem (TempTree + RealFs) to verify that after renames and
// deletions on disk: entry ids are preserved across renames, open buffers
// track their files' new paths and disk state, and a remote replica of the
// worktree converges to the same contents when the buffered updates are
// replayed into it.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so it can be replayed
    // into the remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ]
    .into_iter()
    .map(replace_path_separator)
    .collect::<Vec<_>>();

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids survive both direct renames and renames of parent directories.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers follow their files to the new paths; the deleted
        // file's buffer keeps its old path but reports DiskState::Deleted.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3356
3357#[gpui::test(iterations = 10)]
3358async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3359 init_test(cx);
3360
3361 let fs = FakeFs::new(cx.executor());
3362 fs.insert_tree(
3363 "/dir",
3364 json!({
3365 "a": {
3366 "file1": "",
3367 }
3368 }),
3369 )
3370 .await;
3371
3372 let project = Project::test(fs, [Path::new("/dir")], cx).await;
3373 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3374 let tree_id = tree.update(cx, |tree, _| tree.id());
3375
3376 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3377 project.update(cx, |project, cx| {
3378 let tree = project.worktrees(cx).next().unwrap();
3379 tree.read(cx)
3380 .entry_for_path(path)
3381 .unwrap_or_else(|| panic!("no entry for path {}", path))
3382 .id
3383 })
3384 };
3385
3386 let dir_id = id_for_path("a", cx);
3387 let file_id = id_for_path("a/file1", cx);
3388 let buffer = project
3389 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3390 .await
3391 .unwrap();
3392 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3393
3394 project
3395 .update(cx, |project, cx| {
3396 project.rename_entry(dir_id, Path::new("b"), cx)
3397 })
3398 .unwrap()
3399 .await
3400 .to_included()
3401 .unwrap();
3402 cx.executor().run_until_parked();
3403
3404 assert_eq!(id_for_path("b", cx), dir_id);
3405 assert_eq!(id_for_path("b/file1", cx), file_id);
3406 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3407}
3408
3409#[gpui::test]
3410async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3411 init_test(cx);
3412
3413 let fs = FakeFs::new(cx.executor());
3414 fs.insert_tree(
3415 "/dir",
3416 json!({
3417 "a.txt": "a-contents",
3418 "b.txt": "b-contents",
3419 }),
3420 )
3421 .await;
3422
3423 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3424
3425 // Spawn multiple tasks to open paths, repeating some paths.
3426 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3427 (
3428 p.open_local_buffer("/dir/a.txt", cx),
3429 p.open_local_buffer("/dir/b.txt", cx),
3430 p.open_local_buffer("/dir/a.txt", cx),
3431 )
3432 });
3433
3434 let buffer_a_1 = buffer_a_1.await.unwrap();
3435 let buffer_a_2 = buffer_a_2.await.unwrap();
3436 let buffer_b = buffer_b.await.unwrap();
3437 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3438 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3439
3440 // There is only one buffer per path.
3441 let buffer_a_id = buffer_a_1.entity_id();
3442 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3443
3444 // Open the same path again while it is still open.
3445 drop(buffer_a_1);
3446 let buffer_a_3 = project
3447 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3448 .await
3449 .unwrap();
3450
3451 // There's still only one buffer per path.
3452 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3453}
3454
3455#[gpui::test]
3456async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3457 init_test(cx);
3458
3459 let fs = FakeFs::new(cx.executor());
3460 fs.insert_tree(
3461 "/dir",
3462 json!({
3463 "file1": "abc",
3464 "file2": "def",
3465 "file3": "ghi",
3466 }),
3467 )
3468 .await;
3469
3470 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3471
3472 let buffer1 = project
3473 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3474 .await
3475 .unwrap();
3476 let events = Arc::new(Mutex::new(Vec::new()));
3477
3478 // initially, the buffer isn't dirty.
3479 buffer1.update(cx, |buffer, cx| {
3480 cx.subscribe(&buffer1, {
3481 let events = events.clone();
3482 move |_, _, event, _| match event {
3483 BufferEvent::Operation { .. } => {}
3484 _ => events.lock().push(event.clone()),
3485 }
3486 })
3487 .detach();
3488
3489 assert!(!buffer.is_dirty());
3490 assert!(events.lock().is_empty());
3491
3492 buffer.edit([(1..2, "")], None, cx);
3493 });
3494
3495 // after the first edit, the buffer is dirty, and emits a dirtied event.
3496 buffer1.update(cx, |buffer, cx| {
3497 assert!(buffer.text() == "ac");
3498 assert!(buffer.is_dirty());
3499 assert_eq!(
3500 *events.lock(),
3501 &[
3502 language::BufferEvent::Edited,
3503 language::BufferEvent::DirtyChanged
3504 ]
3505 );
3506 events.lock().clear();
3507 buffer.did_save(
3508 buffer.version(),
3509 buffer.file().unwrap().disk_state().mtime(),
3510 cx,
3511 );
3512 });
3513
3514 // after saving, the buffer is not dirty, and emits a saved event.
3515 buffer1.update(cx, |buffer, cx| {
3516 assert!(!buffer.is_dirty());
3517 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
3518 events.lock().clear();
3519
3520 buffer.edit([(1..1, "B")], None, cx);
3521 buffer.edit([(2..2, "D")], None, cx);
3522 });
3523
3524 // after editing again, the buffer is dirty, and emits another dirty event.
3525 buffer1.update(cx, |buffer, cx| {
3526 assert!(buffer.text() == "aBDc");
3527 assert!(buffer.is_dirty());
3528 assert_eq!(
3529 *events.lock(),
3530 &[
3531 language::BufferEvent::Edited,
3532 language::BufferEvent::DirtyChanged,
3533 language::BufferEvent::Edited,
3534 ],
3535 );
3536 events.lock().clear();
3537
3538 // After restoring the buffer to its previously-saved state,
3539 // the buffer is not considered dirty anymore.
3540 buffer.edit([(1..3, "")], None, cx);
3541 assert!(buffer.text() == "ac");
3542 assert!(!buffer.is_dirty());
3543 });
3544
3545 assert_eq!(
3546 *events.lock(),
3547 &[
3548 language::BufferEvent::Edited,
3549 language::BufferEvent::DirtyChanged
3550 ]
3551 );
3552
3553 // When a file is deleted, the buffer is considered dirty.
3554 let events = Arc::new(Mutex::new(Vec::new()));
3555 let buffer2 = project
3556 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3557 .await
3558 .unwrap();
3559 buffer2.update(cx, |_, cx| {
3560 cx.subscribe(&buffer2, {
3561 let events = events.clone();
3562 move |_, _, event, _| events.lock().push(event.clone())
3563 })
3564 .detach();
3565 });
3566
3567 fs.remove_file("/dir/file2".as_ref(), Default::default())
3568 .await
3569 .unwrap();
3570 cx.executor().run_until_parked();
3571 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3572 assert_eq!(
3573 *events.lock(),
3574 &[
3575 language::BufferEvent::DirtyChanged,
3576 language::BufferEvent::FileHandleChanged
3577 ]
3578 );
3579
3580 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3581 let events = Arc::new(Mutex::new(Vec::new()));
3582 let buffer3 = project
3583 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3584 .await
3585 .unwrap();
3586 buffer3.update(cx, |_, cx| {
3587 cx.subscribe(&buffer3, {
3588 let events = events.clone();
3589 move |_, _, event, _| events.lock().push(event.clone())
3590 })
3591 .detach();
3592 });
3593
3594 buffer3.update(cx, |buffer, cx| {
3595 buffer.edit([(0..0, "x")], None, cx);
3596 });
3597 events.lock().clear();
3598 fs.remove_file("/dir/file3".as_ref(), Default::default())
3599 .await
3600 .unwrap();
3601 cx.executor().run_until_parked();
3602 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
3603 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3604}
3605
3606#[gpui::test]
3607async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3608 init_test(cx);
3609
3610 let initial_contents = "aaa\nbbbbb\nc\n";
3611 let fs = FakeFs::new(cx.executor());
3612 fs.insert_tree(
3613 "/dir",
3614 json!({
3615 "the-file": initial_contents,
3616 }),
3617 )
3618 .await;
3619 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3620 let buffer = project
3621 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3622 .await
3623 .unwrap();
3624
3625 let anchors = (0..3)
3626 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3627 .collect::<Vec<_>>();
3628
3629 // Change the file on disk, adding two new lines of text, and removing
3630 // one line.
3631 buffer.update(cx, |buffer, _| {
3632 assert!(!buffer.is_dirty());
3633 assert!(!buffer.has_conflict());
3634 });
3635 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3636 fs.save(
3637 "/dir/the-file".as_ref(),
3638 &new_contents.into(),
3639 LineEnding::Unix,
3640 )
3641 .await
3642 .unwrap();
3643
3644 // Because the buffer was not modified, it is reloaded from disk. Its
3645 // contents are edited according to the diff between the old and new
3646 // file contents.
3647 cx.executor().run_until_parked();
3648 buffer.update(cx, |buffer, _| {
3649 assert_eq!(buffer.text(), new_contents);
3650 assert!(!buffer.is_dirty());
3651 assert!(!buffer.has_conflict());
3652
3653 let anchor_positions = anchors
3654 .iter()
3655 .map(|anchor| anchor.to_point(&*buffer))
3656 .collect::<Vec<_>>();
3657 assert_eq!(
3658 anchor_positions,
3659 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3660 );
3661 });
3662
3663 // Modify the buffer
3664 buffer.update(cx, |buffer, cx| {
3665 buffer.edit([(0..0, " ")], None, cx);
3666 assert!(buffer.is_dirty());
3667 assert!(!buffer.has_conflict());
3668 });
3669
3670 // Change the file on disk again, adding blank lines to the beginning.
3671 fs.save(
3672 "/dir/the-file".as_ref(),
3673 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3674 LineEnding::Unix,
3675 )
3676 .await
3677 .unwrap();
3678
3679 // Because the buffer is modified, it doesn't reload from disk, but is
3680 // marked as having a conflict.
3681 cx.executor().run_until_parked();
3682 buffer.update(cx, |buffer, _| {
3683 assert!(buffer.has_conflict());
3684 });
3685}
3686
3687#[gpui::test]
3688async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3689 init_test(cx);
3690
3691 let fs = FakeFs::new(cx.executor());
3692 fs.insert_tree(
3693 "/dir",
3694 json!({
3695 "file1": "a\nb\nc\n",
3696 "file2": "one\r\ntwo\r\nthree\r\n",
3697 }),
3698 )
3699 .await;
3700
3701 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3702 let buffer1 = project
3703 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3704 .await
3705 .unwrap();
3706 let buffer2 = project
3707 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3708 .await
3709 .unwrap();
3710
3711 buffer1.update(cx, |buffer, _| {
3712 assert_eq!(buffer.text(), "a\nb\nc\n");
3713 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3714 });
3715 buffer2.update(cx, |buffer, _| {
3716 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3717 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3718 });
3719
3720 // Change a file's line endings on disk from unix to windows. The buffer's
3721 // state updates correctly.
3722 fs.save(
3723 "/dir/file1".as_ref(),
3724 &"aaa\nb\nc\n".into(),
3725 LineEnding::Windows,
3726 )
3727 .await
3728 .unwrap();
3729 cx.executor().run_until_parked();
3730 buffer1.update(cx, |buffer, _| {
3731 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3732 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3733 });
3734
3735 // Save a file with windows line endings. The file is written correctly.
3736 buffer2.update(cx, |buffer, cx| {
3737 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3738 });
3739 project
3740 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3741 .await
3742 .unwrap();
3743 assert_eq!(
3744 fs.load("/dir/file2".as_ref()).await.unwrap(),
3745 "one\r\ntwo\r\nthree\r\nfour\r\n",
3746 );
3747}
3748
#[gpui::test]
// Verifies that diagnostics published with LSP `relatedInformation` are
// grouped: each primary diagnostic and its supplemental hints share a
// `group_id`, and `diagnostic_group` returns a group's entries in order.
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Publish five diagnostics that form two groups. Primaries carry
    // related-information entries pointing at their hints; hints point back
    // at their primary via a "original diagnostic" entry.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Group "error 1": warning primary with one hint.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Supplemental hint belonging to "error 1".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Group "error 2": error primary with two hints at a different range.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First supplemental hint belonging to "error 2".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second supplemental hint belonging to "error 2".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries, ordered by position. "error 2"'s group (id 0) sorts its
    // hint ranges before the primary's range; primaries have
    // `is_primary: true`.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Fetching group 0 returns only the "error 2" entries.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Fetching group 1 returns only the "error 1" entries.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3991
#[gpui::test]
// Verifies that renaming a worktree entry sends `workspace/willRenameFiles`
// to a language server that registered for file operations, applies the
// workspace edit the server returns, and then sends
// `workspace/didRenameFiles` after the rename completes.
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server registers interest in renames of Rust files and of
    // all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; it stays pending until the server answers
    // `willRenameFiles` below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the server returns from `willRenameFiles`; a recognizable
    // change to a different file (two/two.rs) so its application is
    // observable.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str("file:///dir/two/two.rs").unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .handle_request::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, "file:///dir/one.rs");
                    assert_eq!(params.files[0].new_uri, "file:///dir/three.rs");
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server is notified via
    // `didRenameFiles` with the same old/new pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, "file:///dir/one.rs");
            assert_eq!(params.files[0].new_uri, "file:///dir/three.rs");
        })
        .next()
        .await
        .unwrap();
    // The handler observed (and recorded) exactly the edit we returned.
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4120
#[gpui::test]
// Covers symbol rename through LSP: `textDocument/prepareRename` resolves
// the renameable range, then `textDocument/rename` returns a multi-file
// workspace edit that is applied to the corresponding buffers.
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the server reports the
    // renameable range 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server responds with edits spanning both
    // one.rs and two.rs.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction contains one entry per edited buffer; both buffers
    // reflect the applied edits.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4257
4258#[gpui::test]
4259async fn test_search(cx: &mut gpui::TestAppContext) {
4260 init_test(cx);
4261
4262 let fs = FakeFs::new(cx.executor());
4263 fs.insert_tree(
4264 "/dir",
4265 json!({
4266 "one.rs": "const ONE: usize = 1;",
4267 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4268 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4269 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4270 }),
4271 )
4272 .await;
4273 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4274 assert_eq!(
4275 search(
4276 &project,
4277 SearchQuery::text(
4278 "TWO",
4279 false,
4280 true,
4281 false,
4282 Default::default(),
4283 Default::default(),
4284 None
4285 )
4286 .unwrap(),
4287 cx
4288 )
4289 .await
4290 .unwrap(),
4291 HashMap::from_iter([
4292 ("dir/two.rs".to_string(), vec![6..9]),
4293 ("dir/three.rs".to_string(), vec![37..40])
4294 ])
4295 );
4296
4297 let buffer_4 = project
4298 .update(cx, |project, cx| {
4299 project.open_local_buffer("/dir/four.rs", cx)
4300 })
4301 .await
4302 .unwrap();
4303 buffer_4.update(cx, |buffer, cx| {
4304 let text = "two::TWO";
4305 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4306 });
4307
4308 assert_eq!(
4309 search(
4310 &project,
4311 SearchQuery::text(
4312 "TWO",
4313 false,
4314 true,
4315 false,
4316 Default::default(),
4317 Default::default(),
4318 None,
4319 )
4320 .unwrap(),
4321 cx
4322 )
4323 .await
4324 .unwrap(),
4325 HashMap::from_iter([
4326 ("dir/two.rs".to_string(), vec![6..9]),
4327 ("dir/three.rs".to_string(), vec![37..40]),
4328 ("dir/four.rs".to_string(), vec![25..28, 36..39])
4329 ])
4330 );
4331}
4332
4333#[gpui::test]
4334async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4335 init_test(cx);
4336
4337 let search_query = "file";
4338
4339 let fs = FakeFs::new(cx.executor());
4340 fs.insert_tree(
4341 "/dir",
4342 json!({
4343 "one.rs": r#"// Rust file one"#,
4344 "one.ts": r#"// TypeScript file one"#,
4345 "two.rs": r#"// Rust file two"#,
4346 "two.ts": r#"// TypeScript file two"#,
4347 }),
4348 )
4349 .await;
4350 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4351
4352 assert!(
4353 search(
4354 &project,
4355 SearchQuery::text(
4356 search_query,
4357 false,
4358 true,
4359 false,
4360 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4361 Default::default(),
4362 None
4363 )
4364 .unwrap(),
4365 cx
4366 )
4367 .await
4368 .unwrap()
4369 .is_empty(),
4370 "If no inclusions match, no files should be returned"
4371 );
4372
4373 assert_eq!(
4374 search(
4375 &project,
4376 SearchQuery::text(
4377 search_query,
4378 false,
4379 true,
4380 false,
4381 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4382 Default::default(),
4383 None
4384 )
4385 .unwrap(),
4386 cx
4387 )
4388 .await
4389 .unwrap(),
4390 HashMap::from_iter([
4391 ("dir/one.rs".to_string(), vec![8..12]),
4392 ("dir/two.rs".to_string(), vec![8..12]),
4393 ]),
4394 "Rust only search should give only Rust files"
4395 );
4396
4397 assert_eq!(
4398 search(
4399 &project,
4400 SearchQuery::text(
4401 search_query,
4402 false,
4403 true,
4404 false,
4405
4406 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4407
4408 Default::default(),
4409 None,
4410 ).unwrap(),
4411 cx
4412 )
4413 .await
4414 .unwrap(),
4415 HashMap::from_iter([
4416 ("dir/one.ts".to_string(), vec![14..18]),
4417 ("dir/two.ts".to_string(), vec![14..18]),
4418 ]),
4419 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4420 );
4421
4422 assert_eq!(
4423 search(
4424 &project,
4425 SearchQuery::text(
4426 search_query,
4427 false,
4428 true,
4429 false,
4430
4431 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4432
4433 Default::default(),
4434 None,
4435 ).unwrap(),
4436 cx
4437 )
4438 .await
4439 .unwrap(),
4440 HashMap::from_iter([
4441 ("dir/two.ts".to_string(), vec![14..18]),
4442 ("dir/one.rs".to_string(), vec![8..12]),
4443 ("dir/one.ts".to_string(), vec![14..18]),
4444 ("dir/two.rs".to_string(), vec![8..12]),
4445 ]),
4446 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4447 );
4448}
4449
4450#[gpui::test]
4451async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4452 init_test(cx);
4453
4454 let search_query = "file";
4455
4456 let fs = FakeFs::new(cx.executor());
4457 fs.insert_tree(
4458 "/dir",
4459 json!({
4460 "one.rs": r#"// Rust file one"#,
4461 "one.ts": r#"// TypeScript file one"#,
4462 "two.rs": r#"// Rust file two"#,
4463 "two.ts": r#"// TypeScript file two"#,
4464 }),
4465 )
4466 .await;
4467 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4468
4469 assert_eq!(
4470 search(
4471 &project,
4472 SearchQuery::text(
4473 search_query,
4474 false,
4475 true,
4476 false,
4477 Default::default(),
4478 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4479 None,
4480 )
4481 .unwrap(),
4482 cx
4483 )
4484 .await
4485 .unwrap(),
4486 HashMap::from_iter([
4487 ("dir/one.rs".to_string(), vec![8..12]),
4488 ("dir/one.ts".to_string(), vec![14..18]),
4489 ("dir/two.rs".to_string(), vec![8..12]),
4490 ("dir/two.ts".to_string(), vec![14..18]),
4491 ]),
4492 "If no exclusions match, all files should be returned"
4493 );
4494
4495 assert_eq!(
4496 search(
4497 &project,
4498 SearchQuery::text(
4499 search_query,
4500 false,
4501 true,
4502 false,
4503 Default::default(),
4504 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4505 None,
4506 )
4507 .unwrap(),
4508 cx
4509 )
4510 .await
4511 .unwrap(),
4512 HashMap::from_iter([
4513 ("dir/one.ts".to_string(), vec![14..18]),
4514 ("dir/two.ts".to_string(), vec![14..18]),
4515 ]),
4516 "Rust exclusion search should give only TypeScript files"
4517 );
4518
4519 assert_eq!(
4520 search(
4521 &project,
4522 SearchQuery::text(
4523 search_query,
4524 false,
4525 true,
4526 false,
4527 Default::default(),
4528 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4529 None,
4530 ).unwrap(),
4531 cx
4532 )
4533 .await
4534 .unwrap(),
4535 HashMap::from_iter([
4536 ("dir/one.rs".to_string(), vec![8..12]),
4537 ("dir/two.rs".to_string(), vec![8..12]),
4538 ]),
4539 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4540 );
4541
4542 assert!(
4543 search(
4544 &project,
4545 SearchQuery::text(
4546 search_query,
4547 false,
4548 true,
4549 false,
4550 Default::default(),
4551
4552 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4553 None,
4554
4555 ).unwrap(),
4556 cx
4557 )
4558 .await
4559 .unwrap().is_empty(),
4560 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4561 );
4562}
4563
4564#[gpui::test]
4565async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4566 init_test(cx);
4567
4568 let search_query = "file";
4569
4570 let fs = FakeFs::new(cx.executor());
4571 fs.insert_tree(
4572 "/dir",
4573 json!({
4574 "one.rs": r#"// Rust file one"#,
4575 "one.ts": r#"// TypeScript file one"#,
4576 "two.rs": r#"// Rust file two"#,
4577 "two.ts": r#"// TypeScript file two"#,
4578 }),
4579 )
4580 .await;
4581 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4582
4583 assert!(
4584 search(
4585 &project,
4586 SearchQuery::text(
4587 search_query,
4588 false,
4589 true,
4590 false,
4591 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4592 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4593 None,
4594 )
4595 .unwrap(),
4596 cx
4597 )
4598 .await
4599 .unwrap()
4600 .is_empty(),
4601 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4602 );
4603
4604 assert!(
4605 search(
4606 &project,
4607 SearchQuery::text(
4608 search_query,
4609 false,
4610 true,
4611 false,
4612 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4613 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4614 None,
4615 ).unwrap(),
4616 cx
4617 )
4618 .await
4619 .unwrap()
4620 .is_empty(),
4621 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4622 );
4623
4624 assert!(
4625 search(
4626 &project,
4627 SearchQuery::text(
4628 search_query,
4629 false,
4630 true,
4631 false,
4632 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4633 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4634 None,
4635 )
4636 .unwrap(),
4637 cx
4638 )
4639 .await
4640 .unwrap()
4641 .is_empty(),
4642 "Non-matching inclusions and exclusions should not change that."
4643 );
4644
4645 assert_eq!(
4646 search(
4647 &project,
4648 SearchQuery::text(
4649 search_query,
4650 false,
4651 true,
4652 false,
4653 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4654 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4655 None,
4656 )
4657 .unwrap(),
4658 cx
4659 )
4660 .await
4661 .unwrap(),
4662 HashMap::from_iter([
4663 ("dir/one.ts".to_string(), vec![14..18]),
4664 ("dir/two.ts".to_string(), vec![14..18]),
4665 ]),
4666 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4667 );
4668}
4669
4670#[gpui::test]
4671async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4672 init_test(cx);
4673
4674 let fs = FakeFs::new(cx.executor());
4675 fs.insert_tree(
4676 "/worktree-a",
4677 json!({
4678 "haystack.rs": r#"// NEEDLE"#,
4679 "haystack.ts": r#"// NEEDLE"#,
4680 }),
4681 )
4682 .await;
4683 fs.insert_tree(
4684 "/worktree-b",
4685 json!({
4686 "haystack.rs": r#"// NEEDLE"#,
4687 "haystack.ts": r#"// NEEDLE"#,
4688 }),
4689 )
4690 .await;
4691
4692 let project = Project::test(
4693 fs.clone(),
4694 ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
4695 cx,
4696 )
4697 .await;
4698
4699 assert_eq!(
4700 search(
4701 &project,
4702 SearchQuery::text(
4703 "NEEDLE",
4704 false,
4705 true,
4706 false,
4707 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4708 Default::default(),
4709 None,
4710 )
4711 .unwrap(),
4712 cx
4713 )
4714 .await
4715 .unwrap(),
4716 HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
4717 "should only return results from included worktree"
4718 );
4719 assert_eq!(
4720 search(
4721 &project,
4722 SearchQuery::text(
4723 "NEEDLE",
4724 false,
4725 true,
4726 false,
4727 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4728 Default::default(),
4729 None,
4730 )
4731 .unwrap(),
4732 cx
4733 )
4734 .await
4735 .unwrap(),
4736 HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
4737 "should only return results from included worktree"
4738 );
4739
4740 assert_eq!(
4741 search(
4742 &project,
4743 SearchQuery::text(
4744 "NEEDLE",
4745 false,
4746 true,
4747 false,
4748 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4749 Default::default(),
4750 None,
4751 )
4752 .unwrap(),
4753 cx
4754 )
4755 .await
4756 .unwrap(),
4757 HashMap::from_iter([
4758 ("worktree-a/haystack.ts".to_string(), vec![3..9]),
4759 ("worktree-b/haystack.ts".to_string(), vec![3..9])
4760 ]),
4761 "should return results from both worktrees"
4762 );
4763}
4764
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Worktree layout: a `.git` repo whose `.gitignore` hides `target/` and
    // `node_modules/`, plus a single non-ignored `package.json` at the root.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
    // With the include-ignored flag off, matches inside gitignored
    // directories should not be reported.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project over the same tree, now searching with the
    // include-ignored flag on: every file containing the query shows up.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Ignored files combined with inclusion and exclusion filters: include
    // only the ignored prettier directory, then exclude its TS file.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4881
4882#[gpui::test]
4883async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4884 init_test(cx);
4885
4886 let fs = FakeFs::new(cx.executor().clone());
4887 fs.insert_tree(
4888 "/one/two",
4889 json!({
4890 "three": {
4891 "a.txt": "",
4892 "four": {}
4893 },
4894 "c.rs": ""
4895 }),
4896 )
4897 .await;
4898
4899 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
4900 project
4901 .update(cx, |project, cx| {
4902 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4903 project.create_entry((id, "b.."), true, cx)
4904 })
4905 .await
4906 .unwrap()
4907 .to_included()
4908 .unwrap();
4909
4910 // Can't create paths outside the project
4911 let result = project
4912 .update(cx, |project, cx| {
4913 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4914 project.create_entry((id, "../../boop"), true, cx)
4915 })
4916 .await;
4917 assert!(result.is_err());
4918
4919 // Can't create paths with '..'
4920 let result = project
4921 .update(cx, |project, cx| {
4922 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4923 project.create_entry((id, "four/../beep"), true, cx)
4924 })
4925 .await;
4926 assert!(result.is_err());
4927
4928 assert_eq!(
4929 fs.paths(true),
4930 vec![
4931 PathBuf::from("/"),
4932 PathBuf::from("/one"),
4933 PathBuf::from("/one/two"),
4934 PathBuf::from("/one/two/c.rs"),
4935 PathBuf::from("/one/two/three"),
4936 PathBuf::from("/one/two/three/a.txt"),
4937 PathBuf::from("/one/two/three/b.."),
4938 PathBuf::from("/one/two/three/four"),
4939 ]
4940 );
4941
4942 // And we cannot open buffers with '..'
4943 let result = project
4944 .update(cx, |project, cx| {
4945 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4946 project.open_buffer((id, "../c.rs"), cx)
4947 })
4948 .await;
4949 assert!(result.is_err())
4950}
4951
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    // Register four fake servers for the same language: three that advertise
    // hover support and one ("NoHoverCapabilitiesServer") that does not.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all four servers for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Wire up each server's hover handler:
    // - Tailwind/TypeScript return a real hover payload,
    // - ESLint answers with no hover,
    // - NoHoverCapabilitiesServer panics if asked (it must never be asked,
    //   since it declared no hover capability).
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue a single hover request, then wait for every capable server to
    // receive it before collecting the merged results.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // ESLint's `None` answer and the capability-less server are filtered out;
    // only the two servers that produced content appear.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5102
5103#[gpui::test]
5104async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5105 init_test(cx);
5106
5107 let fs = FakeFs::new(cx.executor());
5108 fs.insert_tree(
5109 "/dir",
5110 json!({
5111 "a.ts": "a",
5112 }),
5113 )
5114 .await;
5115
5116 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5117
5118 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5119 language_registry.add(typescript_lang());
5120 let mut fake_language_servers = language_registry.register_fake_lsp(
5121 "TypeScript",
5122 FakeLspAdapter {
5123 capabilities: lsp::ServerCapabilities {
5124 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5125 ..lsp::ServerCapabilities::default()
5126 },
5127 ..FakeLspAdapter::default()
5128 },
5129 );
5130
5131 let (buffer, _handle) = project
5132 .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
5133 .await
5134 .unwrap();
5135 cx.executor().run_until_parked();
5136
5137 let fake_server = fake_language_servers
5138 .next()
5139 .await
5140 .expect("failed to get the language server");
5141
5142 let mut request_handled =
5143 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
5144 Ok(Some(lsp::Hover {
5145 contents: lsp::HoverContents::Array(vec![
5146 lsp::MarkedString::String("".to_string()),
5147 lsp::MarkedString::String(" ".to_string()),
5148 lsp::MarkedString::String("\n\n\n".to_string()),
5149 ]),
5150 range: None,
5151 }))
5152 });
5153
5154 let hover_task = project.update(cx, |project, cx| {
5155 project.hover(&buffer, Point::new(0, 0), cx)
5156 });
5157 let () = request_handled
5158 .next()
5159 .await
5160 .expect("All hover requests should have been triggered");
5161 assert_eq!(
5162 Vec::<String>::new(),
5163 hover_task
5164 .await
5165 .into_iter()
5166 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5167 .sorted()
5168 .collect::<Vec<_>>(),
5169 "Empty hover parts should be ignored"
5170 );
5171}
5172
5173#[gpui::test]
5174async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
5175 init_test(cx);
5176
5177 let fs = FakeFs::new(cx.executor());
5178 fs.insert_tree(
5179 "/dir",
5180 json!({
5181 "a.ts": "a",
5182 }),
5183 )
5184 .await;
5185
5186 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5187
5188 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5189 language_registry.add(typescript_lang());
5190 let mut fake_language_servers = language_registry.register_fake_lsp(
5191 "TypeScript",
5192 FakeLspAdapter {
5193 capabilities: lsp::ServerCapabilities {
5194 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5195 ..lsp::ServerCapabilities::default()
5196 },
5197 ..FakeLspAdapter::default()
5198 },
5199 );
5200
5201 let (buffer, _handle) = project
5202 .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
5203 .await
5204 .unwrap();
5205 cx.executor().run_until_parked();
5206
5207 let fake_server = fake_language_servers
5208 .next()
5209 .await
5210 .expect("failed to get the language server");
5211
5212 let mut request_handled = fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5213 move |_, _| async move {
5214 Ok(Some(vec![
5215 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5216 title: "organize imports".to_string(),
5217 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
5218 ..lsp::CodeAction::default()
5219 }),
5220 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5221 title: "fix code".to_string(),
5222 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
5223 ..lsp::CodeAction::default()
5224 }),
5225 ]))
5226 },
5227 );
5228
5229 let code_actions_task = project.update(cx, |project, cx| {
5230 project.code_actions(
5231 &buffer,
5232 0..buffer.read(cx).len(),
5233 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
5234 cx,
5235 )
5236 });
5237
5238 let () = request_handled
5239 .next()
5240 .await
5241 .expect("The code action request should have been triggered");
5242
5243 let code_actions = code_actions_task.await.unwrap();
5244 assert_eq!(code_actions.len(), 1);
5245 assert_eq!(
5246 code_actions[0].lsp_action.kind,
5247 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
5248 );
5249}
5250
#[gpui::test]
async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    // Register four fake servers for the same language: three advertising
    // code-action support and one ("NoActionsCapabilitiesServer") without it.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoActionsCapabilitiesServer",
    ];

    let mut language_server_rxs = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    code_action_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all four servers for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Wire up each server's code-action handler:
    // - Tailwind/TypeScript return one action each,
    // - ESLint answers with no actions,
    // - NoActionsCapabilitiesServer panics if asked (it must never be asked,
    //   since it declared no code-action capability).
    let mut servers_with_actions_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();

        assert!(
            !servers_with_actions_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.0.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_actions_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
                                    lsp::CodeAction {
                                        title: format!("{name} code action"),
                                        ..lsp::CodeAction::default()
                                    },
                                )]))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                servers_with_actions_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoActionsCapabilitiesServer" => {
                let _never_handled = new_server
                    .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for code actions server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue a single code-actions request, wait until every capable server has
    // received it, then collect the merged results.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
    });

    let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
        |mut code_actions_request| async move {
            code_actions_request
                .next()
                .await
                .expect("All code actions requests should have been triggered")
        },
    ))
    .await;
    // ESLint's `None` answer and the capability-less server contribute
    // nothing; only the two servers that produced actions appear.
    assert_eq!(
        vec!["TailwindServer code action", "TypeScriptServer code action"],
        code_actions_task
            .await
            .unwrap()
            .into_iter()
            .map(|code_action| code_action.lsp_action.title)
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive code actions responses from all related servers with hover capabilities"
    );
}
5407
5408#[gpui::test]
5409async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5410 init_test(cx);
5411
5412 let fs = FakeFs::new(cx.executor());
5413 fs.insert_tree(
5414 "/dir",
5415 json!({
5416 "a.rs": "let a = 1;",
5417 "b.rs": "let b = 2;",
5418 "c.rs": "let c = 2;",
5419 }),
5420 )
5421 .await;
5422
5423 let project = Project::test(
5424 fs,
5425 [
5426 "/dir/a.rs".as_ref(),
5427 "/dir/b.rs".as_ref(),
5428 "/dir/c.rs".as_ref(),
5429 ],
5430 cx,
5431 )
5432 .await;
5433
5434 // check the initial state and get the worktrees
5435 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5436 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5437 assert_eq!(worktrees.len(), 3);
5438
5439 let worktree_a = worktrees[0].read(cx);
5440 let worktree_b = worktrees[1].read(cx);
5441 let worktree_c = worktrees[2].read(cx);
5442
5443 // check they start in the right order
5444 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5445 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5446 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5447
5448 (
5449 worktrees[0].clone(),
5450 worktrees[1].clone(),
5451 worktrees[2].clone(),
5452 )
5453 });
5454
5455 // move first worktree to after the second
5456 // [a, b, c] -> [b, a, c]
5457 project
5458 .update(cx, |project, cx| {
5459 let first = worktree_a.read(cx);
5460 let second = worktree_b.read(cx);
5461 project.move_worktree(first.id(), second.id(), cx)
5462 })
5463 .expect("moving first after second");
5464
5465 // check the state after moving
5466 project.update(cx, |project, cx| {
5467 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5468 assert_eq!(worktrees.len(), 3);
5469
5470 let first = worktrees[0].read(cx);
5471 let second = worktrees[1].read(cx);
5472 let third = worktrees[2].read(cx);
5473
5474 // check they are now in the right order
5475 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5476 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5477 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5478 });
5479
5480 // move the second worktree to before the first
5481 // [b, a, c] -> [a, b, c]
5482 project
5483 .update(cx, |project, cx| {
5484 let second = worktree_a.read(cx);
5485 let first = worktree_b.read(cx);
5486 project.move_worktree(first.id(), second.id(), cx)
5487 })
5488 .expect("moving second before first");
5489
5490 // check the state after moving
5491 project.update(cx, |project, cx| {
5492 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5493 assert_eq!(worktrees.len(), 3);
5494
5495 let first = worktrees[0].read(cx);
5496 let second = worktrees[1].read(cx);
5497 let third = worktrees[2].read(cx);
5498
5499 // check they are now in the right order
5500 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5501 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5502 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5503 });
5504
5505 // move the second worktree to after the third
5506 // [a, b, c] -> [a, c, b]
5507 project
5508 .update(cx, |project, cx| {
5509 let second = worktree_b.read(cx);
5510 let third = worktree_c.read(cx);
5511 project.move_worktree(second.id(), third.id(), cx)
5512 })
5513 .expect("moving second after third");
5514
5515 // check the state after moving
5516 project.update(cx, |project, cx| {
5517 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5518 assert_eq!(worktrees.len(), 3);
5519
5520 let first = worktrees[0].read(cx);
5521 let second = worktrees[1].read(cx);
5522 let third = worktrees[2].read(cx);
5523
5524 // check they are now in the right order
5525 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5526 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5527 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5528 });
5529
5530 // move the third worktree to before the second
5531 // [a, c, b] -> [a, b, c]
5532 project
5533 .update(cx, |project, cx| {
5534 let third = worktree_c.read(cx);
5535 let second = worktree_b.read(cx);
5536 project.move_worktree(third.id(), second.id(), cx)
5537 })
5538 .expect("moving third before second");
5539
5540 // check the state after moving
5541 project.update(cx, |project, cx| {
5542 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5543 assert_eq!(worktrees.len(), 3);
5544
5545 let first = worktrees[0].read(cx);
5546 let second = worktrees[1].read(cx);
5547 let third = worktrees[2].read(cx);
5548
5549 // check they are now in the right order
5550 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5551 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5552 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5553 });
5554
5555 // move the first worktree to after the third
5556 // [a, b, c] -> [b, c, a]
5557 project
5558 .update(cx, |project, cx| {
5559 let first = worktree_a.read(cx);
5560 let third = worktree_c.read(cx);
5561 project.move_worktree(first.id(), third.id(), cx)
5562 })
5563 .expect("moving first after third");
5564
5565 // check the state after moving
5566 project.update(cx, |project, cx| {
5567 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5568 assert_eq!(worktrees.len(), 3);
5569
5570 let first = worktrees[0].read(cx);
5571 let second = worktrees[1].read(cx);
5572 let third = worktrees[2].read(cx);
5573
5574 // check they are now in the right order
5575 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5576 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5577 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5578 });
5579
5580 // move the third worktree to before the first
5581 // [b, c, a] -> [a, b, c]
5582 project
5583 .update(cx, |project, cx| {
5584 let third = worktree_a.read(cx);
5585 let first = worktree_b.read(cx);
5586 project.move_worktree(third.id(), first.id(), cx)
5587 })
5588 .expect("moving third before first");
5589
5590 // check the state after moving
5591 project.update(cx, |project, cx| {
5592 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5593 assert_eq!(worktrees.len(), 3);
5594
5595 let first = worktrees[0].read(cx);
5596 let second = worktrees[1].read(cx);
5597 let third = worktrees[2].read(cx);
5598
5599 // check they are now in the right order
5600 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5601 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5602 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5603 });
5604}
5605
#[gpui::test]
async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index (staged) version lacks the comment and prints "hello";
    // the working copy adds the comment and prints "goodbye".
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[(Path::new("src/main.rs"), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_changes = project
        .update(cx, |project, cx| {
            project.open_unstaged_changes(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Two hunks: the inserted comment line, and the modified println line.
    cx.run_until_parked();
    unstaged_changes.update(cx, |unstaged_changes, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_changes.base_text.as_ref().unwrap().text(),
            &[
                (0..1, "", "// print goodbye\n"),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                ),
            ],
        );
    });

    // Update the index so the staged version now contains the comment but
    // not the println; the diff should recompute to a single insertion hunk.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[(Path::new("src/main.rs"), staged_contents)],
    );

    cx.run_until_parked();
    unstaged_changes.update(cx, |unstaged_changes, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_changes.base_text.as_ref().unwrap().text(),
            &[(2..3, "", " println!(\"goodbye world\");\n")],
        );
    });
}
5697
5698async fn search(
5699 project: &Entity<Project>,
5700 query: SearchQuery,
5701 cx: &mut gpui::TestAppContext,
5702) -> Result<HashMap<String, Vec<Range<usize>>>> {
5703 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
5704 let mut results = HashMap::default();
5705 while let Ok(search_result) = search_rx.recv().await {
5706 match search_result {
5707 SearchResult::Buffer { buffer, ranges } => {
5708 results.entry(buffer).or_insert(ranges);
5709 }
5710 SearchResult::LimitReached => {}
5711 }
5712 }
5713 Ok(results
5714 .into_iter()
5715 .map(|(buffer, ranges)| {
5716 buffer.update(cx, |buffer, cx| {
5717 let path = buffer
5718 .file()
5719 .unwrap()
5720 .full_path(cx)
5721 .to_string_lossy()
5722 .to_string();
5723 let ranges = ranges
5724 .into_iter()
5725 .map(|range| range.to_offset(buffer))
5726 .collect::<Vec<_>>();
5727 (path, ranges)
5728 })
5729 })
5730 .collect())
5731}
5732
5733pub fn init_test(cx: &mut gpui::TestAppContext) {
5734 if std::env::var("RUST_LOG").is_ok() {
5735 env_logger::try_init().ok();
5736 }
5737
5738 cx.update(|cx| {
5739 let settings_store = SettingsStore::test(cx);
5740 cx.set_global(settings_store);
5741 release_channel::init(SemanticVersion::default(), cx);
5742 language::init(cx);
5743 Project::init_settings(cx);
5744 });
5745}
5746
5747fn json_lang() -> Arc<Language> {
5748 Arc::new(Language::new(
5749 LanguageConfig {
5750 name: "JSON".into(),
5751 matcher: LanguageMatcher {
5752 path_suffixes: vec!["json".to_string()],
5753 ..Default::default()
5754 },
5755 ..Default::default()
5756 },
5757 None,
5758 ))
5759}
5760
5761fn js_lang() -> Arc<Language> {
5762 Arc::new(Language::new(
5763 LanguageConfig {
5764 name: "JavaScript".into(),
5765 matcher: LanguageMatcher {
5766 path_suffixes: vec!["js".to_string()],
5767 ..Default::default()
5768 },
5769 ..Default::default()
5770 },
5771 None,
5772 ))
5773}
5774
5775fn rust_lang() -> Arc<Language> {
5776 Arc::new(Language::new(
5777 LanguageConfig {
5778 name: "Rust".into(),
5779 matcher: LanguageMatcher {
5780 path_suffixes: vec!["rs".to_string()],
5781 ..Default::default()
5782 },
5783 ..Default::default()
5784 },
5785 Some(tree_sitter_rust::LANGUAGE.into()),
5786 ))
5787}
5788
5789fn typescript_lang() -> Arc<Language> {
5790 Arc::new(Language::new(
5791 LanguageConfig {
5792 name: "TypeScript".into(),
5793 matcher: LanguageMatcher {
5794 path_suffixes: vec!["ts".to_string()],
5795 ..Default::default()
5796 },
5797 ..Default::default()
5798 },
5799 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
5800 ))
5801}
5802
5803fn tsx_lang() -> Arc<Language> {
5804 Arc::new(Language::new(
5805 LanguageConfig {
5806 name: "tsx".into(),
5807 matcher: LanguageMatcher {
5808 path_suffixes: vec!["tsx".to_string()],
5809 ..Default::default()
5810 },
5811 ..Default::default()
5812 },
5813 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
5814 ))
5815}
5816
5817fn get_all_tasks(
5818 project: &Entity<Project>,
5819 worktree_id: Option<WorktreeId>,
5820 task_context: &TaskContext,
5821 cx: &mut App,
5822) -> Vec<(TaskSourceKind, ResolvedTask)> {
5823 let (mut old, new) = project.update(cx, |project, cx| {
5824 project
5825 .task_store
5826 .read(cx)
5827 .task_inventory()
5828 .unwrap()
5829 .read(cx)
5830 .used_and_current_resolved_tasks(worktree_id, None, task_context, cx)
5831 });
5832 old.extend(new);
5833 old
5834}