1use crate::{Event, *};
2use ::git::diff::assert_hunks;
3use fs::FakeFs;
4use futures::{future, StreamExt};
5use gpui::{App, SemanticVersion, UpdateGlobal};
6use http_client::Url;
7use language::{
8 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
9 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
10 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
11 OffsetRangeExt, Point, ToPoint,
12};
13use lsp::{
14 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
15 NumberOrString, TextDocumentEdit, WillRenameFiles,
16};
17use parking_lot::Mutex;
18use pretty_assertions::{assert_eq, assert_matches};
19use serde_json::json;
20#[cfg(not(windows))]
21use std::os;
22use std::{str::FromStr, sync::OnceLock};
23
24use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
25use task::{ResolvedTask, TaskContext};
26use unindent::Unindent as _;
27use util::{
28 assert_set_eq,
29 paths::{replace_path_separator, PathMatcher},
30 test::TempTree,
31 TryFutureExt as _,
32};
33
34#[gpui::test]
35async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
36 cx.executor().allow_parking();
37
38 let (tx, mut rx) = futures::channel::mpsc::unbounded();
39 let _thread = std::thread::spawn(move || {
40 std::fs::metadata("/tmp").unwrap();
41 std::thread::sleep(Duration::from_millis(1000));
42 tx.unbounded_send(1).unwrap();
43 });
44 rx.next().await.unwrap();
45}
46
47#[gpui::test]
48async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
49 cx.executor().allow_parking();
50
51 let io_task = smol::unblock(move || {
52 println!("sleeping on thread {:?}", std::thread::current().id());
53 std::thread::sleep(Duration::from_millis(10));
54 1
55 });
56
57 let task = cx.foreground_executor().spawn(async move {
58 io_task.await;
59 });
60
61 task.await;
62}
63
64#[cfg(not(windows))]
65#[gpui::test]
66async fn test_symlinks(cx: &mut gpui::TestAppContext) {
67 init_test(cx);
68 cx.executor().allow_parking();
69
70 let dir = TempTree::new(json!({
71 "root": {
72 "apple": "",
73 "banana": {
74 "carrot": {
75 "date": "",
76 "endive": "",
77 }
78 },
79 "fennel": {
80 "grape": "",
81 }
82 }
83 }));
84
85 let root_link_path = dir.path().join("root_link");
86 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
87 os::unix::fs::symlink(
88 dir.path().join("root/fennel"),
89 dir.path().join("root/finnochio"),
90 )
91 .unwrap();
92
93 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
94
95 project.update(cx, |project, cx| {
96 let tree = project.worktrees(cx).next().unwrap().read(cx);
97 assert_eq!(tree.file_count(), 5);
98 assert_eq!(
99 tree.inode_for_path("fennel/grape"),
100 tree.inode_for_path("finnochio/grape")
101 );
102 });
103}
104
/// End-to-end check of `.editorconfig` support:
/// - `.editorconfig` settings override `.zed/settings.json`,
/// - a nested `.editorconfig` overrides its parent,
/// - `tab_width` is used when `indent_size` is absent,
/// - globs only apply to matching file names.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Fixture: root `.editorconfig` targets *.rs and *.js; `.zed/settings.json`
    // sets conflicting defaults; `b/.editorconfig` overrides indent_size.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the on-disk tree into a FakeFs so the test executor controls I/O.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let the worktree scan finish so all config files are discovered.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a path in the worktree.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by the .editorconfig glob "*.rs",
        // so it keeps the .zed/settings.json tab size.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
194
/// Verifies per-directory `.zed/settings.json` and `.zed/tasks.json` handling:
/// the nearest settings file wins per path, tasks from every `.zed` directory
/// are listed, and scheduling history plus file-based (global) tasks affect
/// the ordering of the resolved task list.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    // Let the worktree finish scanning so both `.zed` directories are indexed.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // Identifies the task source for the worktree-root `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    // Settings resolve against the closest `.zed/settings.json`; also collect
    // every resolved task as (source, label, args, env) tuples for comparison.
    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            // a/a.rs sees the root settings; b/b.rs sees b/.zed's override.
            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the top-level task as recently scheduled, and register a global
    // file-based task definition in the inventory.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // The recently-scheduled task is now listed first, followed by the other
    // worktree task and then the newly-added global task (with its env).
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
388
/// Exercises the full language-server lifecycle across multiple buffers and
/// languages: server startup, capability-based buffer configuration,
/// change/save/rename notifications, diagnostic clearing when a file changes
/// language, server restarts, and close notifications.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake Rust and JSON servers with distinct completion trigger
    // characters, so capability propagation can be told apart below.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed the renamed buffer with a diagnostic so we can check below that it
    // is cleared when the buffer later switches language.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is not guaranteed, hence the set
    // comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
786
/// Verifies `workspace/didChangeWatchedFiles` support: registering watchers
/// causes ignored directories matching a watch pattern to be loaded, and
/// subsequent filesystem mutations are reported to the server only when they
/// match one of its registered glob patterns.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // FakeFs paths need a drive prefix on Windows to be absolute.
    fn add_root_for_windows(path: &str) -> String {
        if cfg!(windows) {
            format!("C:{}", path)
        } else {
            path.to_string()
        }
    }

    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        add_root_for_windows("/the-root"),
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [add_root_for_windows("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(add_root_for_windows("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: an exact path, a brace glob under src/, and a
    // recursive glob inside the git-ignored target/y directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(add_root_for_windows(
                                    "/the-root/Cargo.toml",
                                )),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(add_root_for_windows(
                                    "/the-root/src/*.{rs,c}",
                                )),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(add_root_for_windows(
                                    "/the-root/target/y/**/*.rs",
                                )),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            // Sort by URI so assertions below are order-independent.
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering watchers alone produces no change events, but loading the
    // newly-watched ignored directories costs extra read_dir calls.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(
        add_root_for_windows("/the-root/src/c.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        add_root_for_windows("/the-root/src/d.txt").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.remove_file(
        add_root_for_windows("/the-root/src/b.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        add_root_for_windows("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        add_root_for_windows("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(add_root_for_windows("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(add_root_for_windows("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(add_root_for_windows("/the-root/target/y/out/y2.rs"))
                    .unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
1004
1005#[gpui::test]
1006async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1007 init_test(cx);
1008
1009 let fs = FakeFs::new(cx.executor());
1010 fs.insert_tree(
1011 "/dir",
1012 json!({
1013 "a.rs": "let a = 1;",
1014 "b.rs": "let b = 2;"
1015 }),
1016 )
1017 .await;
1018
1019 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
1020 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1021
1022 let buffer_a = project
1023 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1024 .await
1025 .unwrap();
1026 let buffer_b = project
1027 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1028 .await
1029 .unwrap();
1030
1031 lsp_store.update(cx, |lsp_store, cx| {
1032 lsp_store
1033 .update_diagnostics(
1034 LanguageServerId(0),
1035 lsp::PublishDiagnosticsParams {
1036 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1037 version: None,
1038 diagnostics: vec![lsp::Diagnostic {
1039 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1040 severity: Some(lsp::DiagnosticSeverity::ERROR),
1041 message: "error 1".to_string(),
1042 ..Default::default()
1043 }],
1044 },
1045 &[],
1046 cx,
1047 )
1048 .unwrap();
1049 lsp_store
1050 .update_diagnostics(
1051 LanguageServerId(0),
1052 lsp::PublishDiagnosticsParams {
1053 uri: Url::from_file_path("/dir/b.rs").unwrap(),
1054 version: None,
1055 diagnostics: vec![lsp::Diagnostic {
1056 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1057 severity: Some(DiagnosticSeverity::WARNING),
1058 message: "error 2".to_string(),
1059 ..Default::default()
1060 }],
1061 },
1062 &[],
1063 cx,
1064 )
1065 .unwrap();
1066 });
1067
1068 buffer_a.update(cx, |buffer, _| {
1069 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1070 assert_eq!(
1071 chunks
1072 .iter()
1073 .map(|(s, d)| (s.as_str(), *d))
1074 .collect::<Vec<_>>(),
1075 &[
1076 ("let ", None),
1077 ("a", Some(DiagnosticSeverity::ERROR)),
1078 (" = 1;", None),
1079 ]
1080 );
1081 });
1082 buffer_b.update(cx, |buffer, _| {
1083 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1084 assert_eq!(
1085 chunks
1086 .iter()
1087 .map(|(s, d)| (s.as_str(), *d))
1088 .collect::<Vec<_>>(),
1089 &[
1090 ("let ", None),
1091 ("b", Some(DiagnosticSeverity::WARNING)),
1092 (" = 2;", None),
1093 ]
1094 );
1095 });
1096}
1097
1098#[gpui::test]
1099async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1100 init_test(cx);
1101
1102 let fs = FakeFs::new(cx.executor());
1103 fs.insert_tree(
1104 "/root",
1105 json!({
1106 "dir": {
1107 ".git": {
1108 "HEAD": "ref: refs/heads/main",
1109 },
1110 ".gitignore": "b.rs",
1111 "a.rs": "let a = 1;",
1112 "b.rs": "let b = 2;",
1113 },
1114 "other.rs": "let b = c;"
1115 }),
1116 )
1117 .await;
1118
1119 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1120 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1121 let (worktree, _) = project
1122 .update(cx, |project, cx| {
1123 project.find_or_create_worktree("/root/dir", true, cx)
1124 })
1125 .await
1126 .unwrap();
1127 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1128
1129 let (worktree, _) = project
1130 .update(cx, |project, cx| {
1131 project.find_or_create_worktree("/root/other.rs", false, cx)
1132 })
1133 .await
1134 .unwrap();
1135 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1136
1137 let server_id = LanguageServerId(0);
1138 lsp_store.update(cx, |lsp_store, cx| {
1139 lsp_store
1140 .update_diagnostics(
1141 server_id,
1142 lsp::PublishDiagnosticsParams {
1143 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1144 version: None,
1145 diagnostics: vec![lsp::Diagnostic {
1146 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1147 severity: Some(lsp::DiagnosticSeverity::ERROR),
1148 message: "unused variable 'b'".to_string(),
1149 ..Default::default()
1150 }],
1151 },
1152 &[],
1153 cx,
1154 )
1155 .unwrap();
1156 lsp_store
1157 .update_diagnostics(
1158 server_id,
1159 lsp::PublishDiagnosticsParams {
1160 uri: Url::from_file_path("/root/other.rs").unwrap(),
1161 version: None,
1162 diagnostics: vec![lsp::Diagnostic {
1163 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1164 severity: Some(lsp::DiagnosticSeverity::ERROR),
1165 message: "unknown variable 'c'".to_string(),
1166 ..Default::default()
1167 }],
1168 },
1169 &[],
1170 cx,
1171 )
1172 .unwrap();
1173 });
1174
1175 let main_ignored_buffer = project
1176 .update(cx, |project, cx| {
1177 project.open_buffer((main_worktree_id, "b.rs"), cx)
1178 })
1179 .await
1180 .unwrap();
1181 main_ignored_buffer.update(cx, |buffer, _| {
1182 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1183 assert_eq!(
1184 chunks
1185 .iter()
1186 .map(|(s, d)| (s.as_str(), *d))
1187 .collect::<Vec<_>>(),
1188 &[
1189 ("let ", None),
1190 ("b", Some(DiagnosticSeverity::ERROR)),
1191 (" = 2;", None),
1192 ],
1193 "Gigitnored buffers should still get in-buffer diagnostics",
1194 );
1195 });
1196 let other_buffer = project
1197 .update(cx, |project, cx| {
1198 project.open_buffer((other_worktree_id, ""), cx)
1199 })
1200 .await
1201 .unwrap();
1202 other_buffer.update(cx, |buffer, _| {
1203 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1204 assert_eq!(
1205 chunks
1206 .iter()
1207 .map(|(s, d)| (s.as_str(), *d))
1208 .collect::<Vec<_>>(),
1209 &[
1210 ("let b = ", None),
1211 ("c", Some(DiagnosticSeverity::ERROR)),
1212 (";", None),
1213 ],
1214 "Buffers from hidden projects should still get in-buffer diagnostics"
1215 );
1216 });
1217
1218 project.update(cx, |project, cx| {
1219 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1220 assert_eq!(
1221 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1222 vec![(
1223 ProjectPath {
1224 worktree_id: main_worktree_id,
1225 path: Arc::from(Path::new("b.rs")),
1226 },
1227 server_id,
1228 DiagnosticSummary {
1229 error_count: 1,
1230 warning_count: 0,
1231 }
1232 )]
1233 );
1234 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1235 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1236 });
1237}
1238
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies the event sequence emitted while a language server reports
    // disk-based diagnostics: started -> per-file updates -> finished, and that
    // re-publishing identical empty diagnostics produces no extra events.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The adapter's progress token marks which $/progress reports count as
    // disk-based diagnostic work.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/b.rs", cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress on the disk-based token triggers the "started" event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics for a file emits a DiagnosticsUpdated event for it.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending progress on the token triggers the "finished" event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The published diagnostic is visible in the newly opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // The second identical empty publish must not produce another event.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1374
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Restarting a language server while its disk-based diagnostics pass is
    // still in progress must not leave the project stuck in the "running"
    // state: only the new server's progress is tracked afterwards.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the replacement server (id 1) is reported as running diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1462
1463#[gpui::test]
1464async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1465 init_test(cx);
1466
1467 let fs = FakeFs::new(cx.executor());
1468 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1469
1470 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1471
1472 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1473 language_registry.add(rust_lang());
1474 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1475
1476 let (buffer, _) = project
1477 .update(cx, |project, cx| {
1478 project.open_local_buffer_with_lsp("/dir/a.rs", cx)
1479 })
1480 .await
1481 .unwrap();
1482
1483 // Publish diagnostics
1484 let fake_server = fake_servers.next().await.unwrap();
1485 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1486 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1487 version: None,
1488 diagnostics: vec![lsp::Diagnostic {
1489 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1490 severity: Some(lsp::DiagnosticSeverity::ERROR),
1491 message: "the message".to_string(),
1492 ..Default::default()
1493 }],
1494 });
1495
1496 cx.executor().run_until_parked();
1497 buffer.update(cx, |buffer, _| {
1498 assert_eq!(
1499 buffer
1500 .snapshot()
1501 .diagnostics_in_range::<_, usize>(0..1, false)
1502 .map(|entry| entry.diagnostic.message.clone())
1503 .collect::<Vec<_>>(),
1504 ["the message".to_string()]
1505 );
1506 });
1507 project.update(cx, |project, cx| {
1508 assert_eq!(
1509 project.diagnostic_summary(false, cx),
1510 DiagnosticSummary {
1511 error_count: 1,
1512 warning_count: 0,
1513 }
1514 );
1515 });
1516
1517 project.update(cx, |project, cx| {
1518 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1519 });
1520
1521 // The diagnostics are cleared.
1522 cx.executor().run_until_parked();
1523 buffer.update(cx, |buffer, _| {
1524 assert_eq!(
1525 buffer
1526 .snapshot()
1527 .diagnostics_in_range::<_, usize>(0..1, false)
1528 .map(|entry| entry.diagnostic.message.clone())
1529 .collect::<Vec<_>>(),
1530 Vec::<String>::new(),
1531 );
1532 });
1533 project.update(cx, |project, cx| {
1534 assert_eq!(
1535 project.diagnostic_summary(false, cx),
1536 DiagnosticSummary {
1537 error_count: 0,
1538 warning_count: 0,
1539 }
1540 );
1541 });
1542}
1543
1544#[gpui::test]
1545async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1546 init_test(cx);
1547
1548 let fs = FakeFs::new(cx.executor());
1549 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1550
1551 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1552 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1553
1554 language_registry.add(rust_lang());
1555 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1556
1557 let (buffer, _handle) = project
1558 .update(cx, |project, cx| {
1559 project.open_local_buffer_with_lsp("/dir/a.rs", cx)
1560 })
1561 .await
1562 .unwrap();
1563
1564 // Before restarting the server, report diagnostics with an unknown buffer version.
1565 let fake_server = fake_servers.next().await.unwrap();
1566 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1567 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1568 version: Some(10000),
1569 diagnostics: Vec::new(),
1570 });
1571 cx.executor().run_until_parked();
1572
1573 project.update(cx, |project, cx| {
1574 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1575 });
1576 let mut fake_server = fake_servers.next().await.unwrap();
1577 let notification = fake_server
1578 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1579 .await
1580 .text_document;
1581 assert_eq!(notification.version, 0);
1582}
1583
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    // Cancelling language-server work for a buffer should send a
    // WorkDoneProgressCancel only for tokens whose progress began as
    // cancellable.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // A non-cancellable token: cancellation must skip this one.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // A cancellable token: cancellation should target this one.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token receives a cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1648
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    // Toggling the `enable_language_server` setting per language must stop or
    // start only the matching server, leaving the other language's server
    // untouched.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/b.js", cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The re-enabled Rust server re-opens the buffer from scratch...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    // ...while the JavaScript server is told to exit.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1766
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    // Diagnostics published against an older document version must be
    // translated through the edits made since that version, so their ranges
    // land on the current buffer content. Also covers overlapping diagnostics
    // and out-of-order publishes.
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let _handle = lsp_store.update(cx, |lsp_store, cx| {
        lsp_store.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Ranges shifted down two lines, tracking the "\n\n" insertion above.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the higher severity (ERROR)
        // wins for highlighting.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2051
2052#[gpui::test]
2053async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2054 init_test(cx);
2055
2056 let text = concat!(
2057 "let one = ;\n", //
2058 "let two = \n",
2059 "let three = 3;\n",
2060 );
2061
2062 let fs = FakeFs::new(cx.executor());
2063 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2064
2065 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2066 let buffer = project
2067 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2068 .await
2069 .unwrap();
2070
2071 project.update(cx, |project, cx| {
2072 project.lsp_store.update(cx, |lsp_store, cx| {
2073 lsp_store
2074 .update_diagnostic_entries(
2075 LanguageServerId(0),
2076 PathBuf::from("/dir/a.rs"),
2077 None,
2078 vec![
2079 DiagnosticEntry {
2080 range: Unclipped(PointUtf16::new(0, 10))
2081 ..Unclipped(PointUtf16::new(0, 10)),
2082 diagnostic: Diagnostic {
2083 severity: DiagnosticSeverity::ERROR,
2084 message: "syntax error 1".to_string(),
2085 ..Default::default()
2086 },
2087 },
2088 DiagnosticEntry {
2089 range: Unclipped(PointUtf16::new(1, 10))
2090 ..Unclipped(PointUtf16::new(1, 10)),
2091 diagnostic: Diagnostic {
2092 severity: DiagnosticSeverity::ERROR,
2093 message: "syntax error 2".to_string(),
2094 ..Default::default()
2095 },
2096 },
2097 ],
2098 cx,
2099 )
2100 .unwrap();
2101 })
2102 });
2103
2104 // An empty range is extended forward to include the following character.
2105 // At the end of a line, an empty range is extended backward to include
2106 // the preceding character.
2107 buffer.update(cx, |buffer, _| {
2108 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2109 assert_eq!(
2110 chunks
2111 .iter()
2112 .map(|(s, d)| (s.as_str(), *d))
2113 .collect::<Vec<_>>(),
2114 &[
2115 ("let one = ", None),
2116 (";", Some(DiagnosticSeverity::ERROR)),
2117 ("\nlet two =", None),
2118 (" ", Some(DiagnosticSeverity::ERROR)),
2119 ("\nlet three = 3;\n", None)
2120 ]
2121 );
2122 });
2123}
2124
2125#[gpui::test]
2126async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2127 init_test(cx);
2128
2129 let fs = FakeFs::new(cx.executor());
2130 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2131 .await;
2132
2133 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2134 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2135
2136 lsp_store.update(cx, |lsp_store, cx| {
2137 lsp_store
2138 .update_diagnostic_entries(
2139 LanguageServerId(0),
2140 Path::new("/dir/a.rs").to_owned(),
2141 None,
2142 vec![DiagnosticEntry {
2143 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2144 diagnostic: Diagnostic {
2145 severity: DiagnosticSeverity::ERROR,
2146 is_primary: true,
2147 message: "syntax error a1".to_string(),
2148 ..Default::default()
2149 },
2150 }],
2151 cx,
2152 )
2153 .unwrap();
2154 lsp_store
2155 .update_diagnostic_entries(
2156 LanguageServerId(1),
2157 Path::new("/dir/a.rs").to_owned(),
2158 None,
2159 vec![DiagnosticEntry {
2160 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2161 diagnostic: Diagnostic {
2162 severity: DiagnosticSeverity::ERROR,
2163 is_primary: true,
2164 message: "syntax error b1".to_string(),
2165 ..Default::default()
2166 },
2167 }],
2168 cx,
2169 )
2170 .unwrap();
2171
2172 assert_eq!(
2173 lsp_store.diagnostic_summary(false, cx),
2174 DiagnosticSummary {
2175 error_count: 2,
2176 warning_count: 0,
2177 }
2178 );
2179 });
2180}
2181
// Verifies that `edits_from_lsp` rebases edits that a language server computed
// against an older document version: the buffer is edited *after* the server's
// snapshot (captured via the DidOpen version), and the returned edits must
// still land on the code they originally targeted.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw when the buffer was opened;
    // the LSP edits below will be tagged with this (now-stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The edit positions below are in the *old* (pre-edit) coordinate space;
    // `edits_from_lsp` must translate them into the current buffer.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits must preserve the user's interleaved edits
    // while still performing the server's intended changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2336
// Verifies that `edits_from_lsp` minimizes a large, redundant diff (as sent by
// servers like rust-analyzer for merge-imports) down to the small set of edits
// that actually change the buffer.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four raw LSP edits collapse into just two minimal edits:
        // the import rewrite and the removal of the now-duplicated line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2445
// Verifies that `edits_from_lsp` tolerates malformed server edits: unordered
// edits, an inverted (end-before-start) range, and a range extending past the
// end of the file. All should be normalized into valid, minimal buffer edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Out-of-bounds range: line 99 does not exist in the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same minimal pair of
        // edits that the well-formed variant of this diff would produce.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2550
2551fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2552 buffer: &Buffer,
2553 range: Range<T>,
2554) -> Vec<(String, Option<DiagnosticSeverity>)> {
2555 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2556 for chunk in buffer.snapshot().chunks(range, true) {
2557 if chunks.last().map_or(false, |prev_chunk| {
2558 prev_chunk.1 == chunk.diagnostic_severity
2559 }) {
2560 chunks.last_mut().unwrap().0.push_str(chunk.text);
2561 } else {
2562 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2563 }
2564 }
2565 chunks
2566}
2567
// Verifies go-to-definition across files: resolving a definition in a file
// outside the project's worktrees adds a hidden (non-visible) worktree for the
// target file, which is dropped once the definition result is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside its worktrees.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/b.rs", cx)
        })
        .await
        .unwrap();

    // The fake server answers the definition request with a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was added as an invisible worktree to host the target buffer.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: snapshot the project's worktrees as (abs_path, is_visible) pairs.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2660
// Verifies that when a server's completion items carry no explicit text-edit
// range, the replaced range is inferred from the text around the cursor
// (the word being completed, or the quoted string contents).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing an identifier suffix ("fqn") at the end of the line.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item supplies insert_text but no TextEdit, so no range is given.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers the partial word "fqn" (last 3 characters).
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal, just before the closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers "cmp" inside the quotes (not the quote itself).
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2752
// Verifies that carriage returns in a server-provided completion's insert text
// ("\r" and "\r\n") are normalized to plain "\n" line endings.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The insert text mixes a bare "\r" and a Windows-style "\r\n".
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both kinds of carriage return were converted to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2813
// Verifies the command-based code-action path: when resolving an action yields
// a command instead of edits, applying the action executes the command, and
// edits the server pushes back via `workspace/applyEdit` during that execution
// end up in the resulting project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        // The server requires a resolve round-trip before apply.
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable, restoring the original text.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2948
2949#[gpui::test(iterations = 10)]
2950async fn test_save_file(cx: &mut gpui::TestAppContext) {
2951 init_test(cx);
2952
2953 let fs = FakeFs::new(cx.executor());
2954 fs.insert_tree(
2955 "/dir",
2956 json!({
2957 "file1": "the old contents",
2958 }),
2959 )
2960 .await;
2961
2962 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2963 let buffer = project
2964 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2965 .await
2966 .unwrap();
2967 buffer.update(cx, |buffer, cx| {
2968 assert_eq!(buffer.text(), "the old contents");
2969 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2970 });
2971
2972 project
2973 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2974 .await
2975 .unwrap();
2976
2977 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2978 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2979}
2980
2981#[gpui::test(iterations = 30)]
2982async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2983 init_test(cx);
2984
2985 let fs = FakeFs::new(cx.executor().clone());
2986 fs.insert_tree(
2987 "/dir",
2988 json!({
2989 "file1": "the original contents",
2990 }),
2991 )
2992 .await;
2993
2994 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2995 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2996 let buffer = project
2997 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2998 .await
2999 .unwrap();
3000
3001 // Simulate buffer diffs being slow, so that they don't complete before
3002 // the next file change occurs.
3003 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3004
3005 // Change the buffer's file on disk, and then wait for the file change
3006 // to be detected by the worktree, so that the buffer starts reloading.
3007 fs.save(
3008 "/dir/file1".as_ref(),
3009 &"the first contents".into(),
3010 Default::default(),
3011 )
3012 .await
3013 .unwrap();
3014 worktree.next_event(cx).await;
3015
3016 // Change the buffer's file again. Depending on the random seed, the
3017 // previous file change may still be in progress.
3018 fs.save(
3019 "/dir/file1".as_ref(),
3020 &"the second contents".into(),
3021 Default::default(),
3022 )
3023 .await
3024 .unwrap();
3025 worktree.next_event(cx).await;
3026
3027 cx.executor().run_until_parked();
3028 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3029 buffer.read_with(cx, |buffer, _| {
3030 assert_eq!(buffer.text(), on_disk_text);
3031 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3032 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3033 });
3034}
3035
3036#[gpui::test(iterations = 30)]
3037async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3038 init_test(cx);
3039
3040 let fs = FakeFs::new(cx.executor().clone());
3041 fs.insert_tree(
3042 "/dir",
3043 json!({
3044 "file1": "the original contents",
3045 }),
3046 )
3047 .await;
3048
3049 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3050 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3051 let buffer = project
3052 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3053 .await
3054 .unwrap();
3055
3056 // Simulate buffer diffs being slow, so that they don't complete before
3057 // the next file change occurs.
3058 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3059
3060 // Change the buffer's file on disk, and then wait for the file change
3061 // to be detected by the worktree, so that the buffer starts reloading.
3062 fs.save(
3063 "/dir/file1".as_ref(),
3064 &"the first contents".into(),
3065 Default::default(),
3066 )
3067 .await
3068 .unwrap();
3069 worktree.next_event(cx).await;
3070
3071 cx.executor()
3072 .spawn(cx.executor().simulate_random_delay())
3073 .await;
3074
3075 // Perform a noop edit, causing the buffer's version to increase.
3076 buffer.update(cx, |buffer, cx| {
3077 buffer.edit([(0..0, " ")], None, cx);
3078 buffer.undo(cx);
3079 });
3080
3081 cx.executor().run_until_parked();
3082 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3083 buffer.read_with(cx, |buffer, _| {
3084 let buffer_text = buffer.text();
3085 if buffer_text == on_disk_text {
3086 assert!(
3087 !buffer.is_dirty() && !buffer.has_conflict(),
3088 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3089 );
3090 }
3091 // If the file change occurred while the buffer was processing the first
3092 // change, the buffer will be in a conflicting state.
3093 else {
3094 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3095 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3096 }
3097 });
3098}
3099
// Same as `test_save_file`, but with a worktree rooted at the file itself
// rather than at its parent directory.
#[gpui::test]
async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "the old contents",
        }),
    )
    .await;

    // The worktree root is the single file, not the containing directory.
    let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
    });

    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();

    // After saving, the on-disk contents match the buffer.
    let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
    assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
}
3130
// Verifies save-as for an untitled buffer: the buffer gains a file, becomes
// clean, is re-detected as Rust from the new extension, and re-opening the
// saved path yields the same buffer entity (no duplicate).
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts out dirty, with the Plain Text language.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    // After save-as: path assigned, clean state, language re-detected as Rust.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the saved path must return the same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3182
// Verifies (against the real filesystem) that entry ids and open buffers track
// files across renames/moves/deletions, and that a remote replica of the
// worktree converges to the same paths after applying the streamed updates.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a path, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits, to replay on the remote.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ]
    .into_iter()
    .map(replace_path_separator)
    .collect::<Vec<_>>();

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids are stable across renames and directory moves.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers follow their files to the new paths; the deleted
        // file's buffer keeps its last-known path.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3345
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    // Renaming a directory must preserve the identity (entry ids) of the
    // directory and the files inside it, and an open buffer must keep
    // tracking its file across the rename without becoming dirty.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up the worktree entry id for a path, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    // Capture ids before the rename so we can verify they survive it.
    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory "a" -> "b" through the project API and wait for
    // the resulting filesystem events to settle.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids are unchanged and the open buffer is still clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3397
3398#[gpui::test]
3399async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3400 init_test(cx);
3401
3402 let fs = FakeFs::new(cx.executor());
3403 fs.insert_tree(
3404 "/dir",
3405 json!({
3406 "a.txt": "a-contents",
3407 "b.txt": "b-contents",
3408 }),
3409 )
3410 .await;
3411
3412 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3413
3414 // Spawn multiple tasks to open paths, repeating some paths.
3415 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3416 (
3417 p.open_local_buffer("/dir/a.txt", cx),
3418 p.open_local_buffer("/dir/b.txt", cx),
3419 p.open_local_buffer("/dir/a.txt", cx),
3420 )
3421 });
3422
3423 let buffer_a_1 = buffer_a_1.await.unwrap();
3424 let buffer_a_2 = buffer_a_2.await.unwrap();
3425 let buffer_b = buffer_b.await.unwrap();
3426 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3427 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3428
3429 // There is only one buffer per path.
3430 let buffer_a_id = buffer_a_1.entity_id();
3431 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3432
3433 // Open the same path again while it is still open.
3434 drop(buffer_a_1);
3435 let buffer_a_3 = project
3436 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3437 .await
3438 .unwrap();
3439
3440 // There's still only one buffer per path.
3441 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3442}
3443
3444#[gpui::test]
3445async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3446 init_test(cx);
3447
3448 let fs = FakeFs::new(cx.executor());
3449 fs.insert_tree(
3450 "/dir",
3451 json!({
3452 "file1": "abc",
3453 "file2": "def",
3454 "file3": "ghi",
3455 }),
3456 )
3457 .await;
3458
3459 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3460
3461 let buffer1 = project
3462 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3463 .await
3464 .unwrap();
3465 let events = Arc::new(Mutex::new(Vec::new()));
3466
3467 // initially, the buffer isn't dirty.
3468 buffer1.update(cx, |buffer, cx| {
3469 cx.subscribe(&buffer1, {
3470 let events = events.clone();
3471 move |_, _, event, _| match event {
3472 BufferEvent::Operation { .. } => {}
3473 _ => events.lock().push(event.clone()),
3474 }
3475 })
3476 .detach();
3477
3478 assert!(!buffer.is_dirty());
3479 assert!(events.lock().is_empty());
3480
3481 buffer.edit([(1..2, "")], None, cx);
3482 });
3483
3484 // after the first edit, the buffer is dirty, and emits a dirtied event.
3485 buffer1.update(cx, |buffer, cx| {
3486 assert!(buffer.text() == "ac");
3487 assert!(buffer.is_dirty());
3488 assert_eq!(
3489 *events.lock(),
3490 &[
3491 language::BufferEvent::Edited,
3492 language::BufferEvent::DirtyChanged
3493 ]
3494 );
3495 events.lock().clear();
3496 buffer.did_save(
3497 buffer.version(),
3498 buffer.file().unwrap().disk_state().mtime(),
3499 cx,
3500 );
3501 });
3502
3503 // after saving, the buffer is not dirty, and emits a saved event.
3504 buffer1.update(cx, |buffer, cx| {
3505 assert!(!buffer.is_dirty());
3506 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
3507 events.lock().clear();
3508
3509 buffer.edit([(1..1, "B")], None, cx);
3510 buffer.edit([(2..2, "D")], None, cx);
3511 });
3512
3513 // after editing again, the buffer is dirty, and emits another dirty event.
3514 buffer1.update(cx, |buffer, cx| {
3515 assert!(buffer.text() == "aBDc");
3516 assert!(buffer.is_dirty());
3517 assert_eq!(
3518 *events.lock(),
3519 &[
3520 language::BufferEvent::Edited,
3521 language::BufferEvent::DirtyChanged,
3522 language::BufferEvent::Edited,
3523 ],
3524 );
3525 events.lock().clear();
3526
3527 // After restoring the buffer to its previously-saved state,
3528 // the buffer is not considered dirty anymore.
3529 buffer.edit([(1..3, "")], None, cx);
3530 assert!(buffer.text() == "ac");
3531 assert!(!buffer.is_dirty());
3532 });
3533
3534 assert_eq!(
3535 *events.lock(),
3536 &[
3537 language::BufferEvent::Edited,
3538 language::BufferEvent::DirtyChanged
3539 ]
3540 );
3541
3542 // When a file is deleted, the buffer is considered dirty.
3543 let events = Arc::new(Mutex::new(Vec::new()));
3544 let buffer2 = project
3545 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3546 .await
3547 .unwrap();
3548 buffer2.update(cx, |_, cx| {
3549 cx.subscribe(&buffer2, {
3550 let events = events.clone();
3551 move |_, _, event, _| events.lock().push(event.clone())
3552 })
3553 .detach();
3554 });
3555
3556 fs.remove_file("/dir/file2".as_ref(), Default::default())
3557 .await
3558 .unwrap();
3559 cx.executor().run_until_parked();
3560 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3561 assert_eq!(
3562 *events.lock(),
3563 &[
3564 language::BufferEvent::DirtyChanged,
3565 language::BufferEvent::FileHandleChanged
3566 ]
3567 );
3568
3569 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3570 let events = Arc::new(Mutex::new(Vec::new()));
3571 let buffer3 = project
3572 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3573 .await
3574 .unwrap();
3575 buffer3.update(cx, |_, cx| {
3576 cx.subscribe(&buffer3, {
3577 let events = events.clone();
3578 move |_, _, event, _| events.lock().push(event.clone())
3579 })
3580 .detach();
3581 });
3582
3583 buffer3.update(cx, |buffer, cx| {
3584 buffer.edit([(0..0, "x")], None, cx);
3585 });
3586 events.lock().clear();
3587 fs.remove_file("/dir/file3".as_ref(), Default::default())
3588 .await
3589 .unwrap();
3590 cx.executor().run_until_parked();
3591 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
3592 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3593}
3594
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Covers how an open buffer reacts to external edits on disk: a clean
    // buffer reloads via a diff against the new contents (so anchors survive),
    // while a dirty buffer keeps its local edits and is marked conflicted.
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor after the first character of each of the three lines,
    // so we can verify anchors are remapped correctly after the reload.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the lines they were attached to: "aaa" is
        // now row 1, "bbbbb" is row 3, and the deleted line's anchor landed
        // at the nearest surviving position.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3675
3676#[gpui::test]
3677async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3678 init_test(cx);
3679
3680 let fs = FakeFs::new(cx.executor());
3681 fs.insert_tree(
3682 "/dir",
3683 json!({
3684 "file1": "a\nb\nc\n",
3685 "file2": "one\r\ntwo\r\nthree\r\n",
3686 }),
3687 )
3688 .await;
3689
3690 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3691 let buffer1 = project
3692 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3693 .await
3694 .unwrap();
3695 let buffer2 = project
3696 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3697 .await
3698 .unwrap();
3699
3700 buffer1.update(cx, |buffer, _| {
3701 assert_eq!(buffer.text(), "a\nb\nc\n");
3702 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3703 });
3704 buffer2.update(cx, |buffer, _| {
3705 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3706 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3707 });
3708
3709 // Change a file's line endings on disk from unix to windows. The buffer's
3710 // state updates correctly.
3711 fs.save(
3712 "/dir/file1".as_ref(),
3713 &"aaa\nb\nc\n".into(),
3714 LineEnding::Windows,
3715 )
3716 .await
3717 .unwrap();
3718 cx.executor().run_until_parked();
3719 buffer1.update(cx, |buffer, _| {
3720 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3721 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3722 });
3723
3724 // Save a file with windows line endings. The file is written correctly.
3725 buffer2.update(cx, |buffer, cx| {
3726 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3727 });
3728 project
3729 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3730 .await
3731 .unwrap();
3732 assert_eq!(
3733 fs.load("/dir/file2".as_ref()).await.unwrap(),
3734 "one\r\ntwo\r\nthree\r\nfour\r\n",
3735 );
3736}
3737
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP diagnostics linked through `relatedInformation` are
    // grouped: a primary diagnostic and its hints share a group id, hints are
    // marked non-primary, and `diagnostic_group` returns each group's entries
    // in position order.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Fixture: two diagnostic groups published in one message.
    // - "error 1" (warning) with one hint; the hint points back at the
    //   primary via a related-information entry labeled "original diagnostic".
    // - "error 2" (error) with two hints, linked the same way.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, in position order. "error 2" and its hints share
    // group 0; "error 1" and its hint share group 1. Only the original
    // diagnostics are primary.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 alone: "error 2"'s hints (on line 1) precede the primary
    // (on line 2) because entries come back in position order.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 alone: "error 1" and its hint, both at the same range.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3980
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the LSP file-operation protocol around a project rename: the
    // server receives `workspace/willRenameFiles` (and its returned workspace
    // edit is resolved) followed by a `workspace/didRenameFiles` notification,
    // for files matching the server's registered file-operation filters.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server registers interest in `*.rs` files and in all folders, so
    // renaming `one.rs` must trigger both will/did notifications.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Start the rename but don't await it yet: the server-side handlers
    // below must be installed to answer the requests it will send.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The workspace edit the server will return from willRenameFiles; the
    // client is expected to resolve (apply) it as part of the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str("file:///dir/two/two.rs").unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set exactly once inside the request handler; checked at the end to
    // prove the willRenameFiles round-trip actually happened.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .handle_request::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, "file:///dir/one.rs");
                    assert_eq!(params.files[0].new_uri, "file:///dir/three.rs");
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles
    // with the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, "file:///dir/one.rs");
            assert_eq!(params.files[0].new_uri, "file:///dir/three.rs");
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4109
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Covers the two-phase LSP symbol rename: `prepare_rename` resolves the
    // renameable range, then `perform_rename` applies a multi-file workspace
    // edit returned by the server.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Phase 1: prepare_rename at offset 7 (inside "ONE"). The handler is
    // installed after the request is kicked off, then answers it.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    // The server's range (0,6)-(0,9) maps to buffer offsets 6..9 ("ONE").
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Phase 2: perform the rename. The fake server responds with edits in
    // both files; the project must apply all of them.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each affected buffer to its edits; both files
    // must reflect the rename.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4246
4247#[gpui::test]
4248async fn test_search(cx: &mut gpui::TestAppContext) {
4249 init_test(cx);
4250
4251 let fs = FakeFs::new(cx.executor());
4252 fs.insert_tree(
4253 "/dir",
4254 json!({
4255 "one.rs": "const ONE: usize = 1;",
4256 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4257 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4258 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4259 }),
4260 )
4261 .await;
4262 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4263 assert_eq!(
4264 search(
4265 &project,
4266 SearchQuery::text(
4267 "TWO",
4268 false,
4269 true,
4270 false,
4271 Default::default(),
4272 Default::default(),
4273 None
4274 )
4275 .unwrap(),
4276 cx
4277 )
4278 .await
4279 .unwrap(),
4280 HashMap::from_iter([
4281 ("dir/two.rs".to_string(), vec![6..9]),
4282 ("dir/three.rs".to_string(), vec![37..40])
4283 ])
4284 );
4285
4286 let buffer_4 = project
4287 .update(cx, |project, cx| {
4288 project.open_local_buffer("/dir/four.rs", cx)
4289 })
4290 .await
4291 .unwrap();
4292 buffer_4.update(cx, |buffer, cx| {
4293 let text = "two::TWO";
4294 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4295 });
4296
4297 assert_eq!(
4298 search(
4299 &project,
4300 SearchQuery::text(
4301 "TWO",
4302 false,
4303 true,
4304 false,
4305 Default::default(),
4306 Default::default(),
4307 None,
4308 )
4309 .unwrap(),
4310 cx
4311 )
4312 .await
4313 .unwrap(),
4314 HashMap::from_iter([
4315 ("dir/two.rs".to_string(), vec![6..9]),
4316 ("dir/three.rs".to_string(), vec![37..40]),
4317 ("dir/four.rs".to_string(), vec![25..28, 36..39])
4318 ])
4319 );
4320}
4321
4322#[gpui::test]
4323async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4324 init_test(cx);
4325
4326 let search_query = "file";
4327
4328 let fs = FakeFs::new(cx.executor());
4329 fs.insert_tree(
4330 "/dir",
4331 json!({
4332 "one.rs": r#"// Rust file one"#,
4333 "one.ts": r#"// TypeScript file one"#,
4334 "two.rs": r#"// Rust file two"#,
4335 "two.ts": r#"// TypeScript file two"#,
4336 }),
4337 )
4338 .await;
4339 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4340
4341 assert!(
4342 search(
4343 &project,
4344 SearchQuery::text(
4345 search_query,
4346 false,
4347 true,
4348 false,
4349 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4350 Default::default(),
4351 None
4352 )
4353 .unwrap(),
4354 cx
4355 )
4356 .await
4357 .unwrap()
4358 .is_empty(),
4359 "If no inclusions match, no files should be returned"
4360 );
4361
4362 assert_eq!(
4363 search(
4364 &project,
4365 SearchQuery::text(
4366 search_query,
4367 false,
4368 true,
4369 false,
4370 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4371 Default::default(),
4372 None
4373 )
4374 .unwrap(),
4375 cx
4376 )
4377 .await
4378 .unwrap(),
4379 HashMap::from_iter([
4380 ("dir/one.rs".to_string(), vec![8..12]),
4381 ("dir/two.rs".to_string(), vec![8..12]),
4382 ]),
4383 "Rust only search should give only Rust files"
4384 );
4385
4386 assert_eq!(
4387 search(
4388 &project,
4389 SearchQuery::text(
4390 search_query,
4391 false,
4392 true,
4393 false,
4394
4395 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4396
4397 Default::default(),
4398 None,
4399 ).unwrap(),
4400 cx
4401 )
4402 .await
4403 .unwrap(),
4404 HashMap::from_iter([
4405 ("dir/one.ts".to_string(), vec![14..18]),
4406 ("dir/two.ts".to_string(), vec![14..18]),
4407 ]),
4408 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4409 );
4410
4411 assert_eq!(
4412 search(
4413 &project,
4414 SearchQuery::text(
4415 search_query,
4416 false,
4417 true,
4418 false,
4419
4420 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4421
4422 Default::default(),
4423 None,
4424 ).unwrap(),
4425 cx
4426 )
4427 .await
4428 .unwrap(),
4429 HashMap::from_iter([
4430 ("dir/two.ts".to_string(), vec![14..18]),
4431 ("dir/one.rs".to_string(), vec![8..12]),
4432 ("dir/one.ts".to_string(), vec![14..18]),
4433 ("dir/two.rs".to_string(), vec![8..12]),
4434 ]),
4435 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4436 );
4437}
4438
4439#[gpui::test]
4440async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4441 init_test(cx);
4442
4443 let search_query = "file";
4444
4445 let fs = FakeFs::new(cx.executor());
4446 fs.insert_tree(
4447 "/dir",
4448 json!({
4449 "one.rs": r#"// Rust file one"#,
4450 "one.ts": r#"// TypeScript file one"#,
4451 "two.rs": r#"// Rust file two"#,
4452 "two.ts": r#"// TypeScript file two"#,
4453 }),
4454 )
4455 .await;
4456 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4457
4458 assert_eq!(
4459 search(
4460 &project,
4461 SearchQuery::text(
4462 search_query,
4463 false,
4464 true,
4465 false,
4466 Default::default(),
4467 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4468 None,
4469 )
4470 .unwrap(),
4471 cx
4472 )
4473 .await
4474 .unwrap(),
4475 HashMap::from_iter([
4476 ("dir/one.rs".to_string(), vec![8..12]),
4477 ("dir/one.ts".to_string(), vec![14..18]),
4478 ("dir/two.rs".to_string(), vec![8..12]),
4479 ("dir/two.ts".to_string(), vec![14..18]),
4480 ]),
4481 "If no exclusions match, all files should be returned"
4482 );
4483
4484 assert_eq!(
4485 search(
4486 &project,
4487 SearchQuery::text(
4488 search_query,
4489 false,
4490 true,
4491 false,
4492 Default::default(),
4493 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4494 None,
4495 )
4496 .unwrap(),
4497 cx
4498 )
4499 .await
4500 .unwrap(),
4501 HashMap::from_iter([
4502 ("dir/one.ts".to_string(), vec![14..18]),
4503 ("dir/two.ts".to_string(), vec![14..18]),
4504 ]),
4505 "Rust exclusion search should give only TypeScript files"
4506 );
4507
4508 assert_eq!(
4509 search(
4510 &project,
4511 SearchQuery::text(
4512 search_query,
4513 false,
4514 true,
4515 false,
4516 Default::default(),
4517 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4518 None,
4519 ).unwrap(),
4520 cx
4521 )
4522 .await
4523 .unwrap(),
4524 HashMap::from_iter([
4525 ("dir/one.rs".to_string(), vec![8..12]),
4526 ("dir/two.rs".to_string(), vec![8..12]),
4527 ]),
4528 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4529 );
4530
4531 assert!(
4532 search(
4533 &project,
4534 SearchQuery::text(
4535 search_query,
4536 false,
4537 true,
4538 false,
4539 Default::default(),
4540
4541 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4542 None,
4543
4544 ).unwrap(),
4545 cx
4546 )
4547 .await
4548 .unwrap().is_empty(),
4549 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4550 );
4551}
4552
4553#[gpui::test]
4554async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4555 init_test(cx);
4556
4557 let search_query = "file";
4558
4559 let fs = FakeFs::new(cx.executor());
4560 fs.insert_tree(
4561 "/dir",
4562 json!({
4563 "one.rs": r#"// Rust file one"#,
4564 "one.ts": r#"// TypeScript file one"#,
4565 "two.rs": r#"// Rust file two"#,
4566 "two.ts": r#"// TypeScript file two"#,
4567 }),
4568 )
4569 .await;
4570 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4571
4572 assert!(
4573 search(
4574 &project,
4575 SearchQuery::text(
4576 search_query,
4577 false,
4578 true,
4579 false,
4580 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4581 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4582 None,
4583 )
4584 .unwrap(),
4585 cx
4586 )
4587 .await
4588 .unwrap()
4589 .is_empty(),
4590 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4591 );
4592
4593 assert!(
4594 search(
4595 &project,
4596 SearchQuery::text(
4597 search_query,
4598 false,
4599 true,
4600 false,
4601 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4602 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4603 None,
4604 ).unwrap(),
4605 cx
4606 )
4607 .await
4608 .unwrap()
4609 .is_empty(),
4610 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4611 );
4612
4613 assert!(
4614 search(
4615 &project,
4616 SearchQuery::text(
4617 search_query,
4618 false,
4619 true,
4620 false,
4621 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4622 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4623 None,
4624 )
4625 .unwrap(),
4626 cx
4627 )
4628 .await
4629 .unwrap()
4630 .is_empty(),
4631 "Non-matching inclusions and exclusions should not change that."
4632 );
4633
4634 assert_eq!(
4635 search(
4636 &project,
4637 SearchQuery::text(
4638 search_query,
4639 false,
4640 true,
4641 false,
4642 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4643 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4644 None,
4645 )
4646 .unwrap(),
4647 cx
4648 )
4649 .await
4650 .unwrap(),
4651 HashMap::from_iter([
4652 ("dir/one.ts".to_string(), vec![14..18]),
4653 ("dir/two.ts".to_string(), vec![14..18]),
4654 ]),
4655 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4656 );
4657}
4658
4659#[gpui::test]
4660async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4661 init_test(cx);
4662
4663 let fs = FakeFs::new(cx.executor());
4664 fs.insert_tree(
4665 "/worktree-a",
4666 json!({
4667 "haystack.rs": r#"// NEEDLE"#,
4668 "haystack.ts": r#"// NEEDLE"#,
4669 }),
4670 )
4671 .await;
4672 fs.insert_tree(
4673 "/worktree-b",
4674 json!({
4675 "haystack.rs": r#"// NEEDLE"#,
4676 "haystack.ts": r#"// NEEDLE"#,
4677 }),
4678 )
4679 .await;
4680
4681 let project = Project::test(
4682 fs.clone(),
4683 ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
4684 cx,
4685 )
4686 .await;
4687
4688 assert_eq!(
4689 search(
4690 &project,
4691 SearchQuery::text(
4692 "NEEDLE",
4693 false,
4694 true,
4695 false,
4696 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4697 Default::default(),
4698 None,
4699 )
4700 .unwrap(),
4701 cx
4702 )
4703 .await
4704 .unwrap(),
4705 HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
4706 "should only return results from included worktree"
4707 );
4708 assert_eq!(
4709 search(
4710 &project,
4711 SearchQuery::text(
4712 "NEEDLE",
4713 false,
4714 true,
4715 false,
4716 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4717 Default::default(),
4718 None,
4719 )
4720 .unwrap(),
4721 cx
4722 )
4723 .await
4724 .unwrap(),
4725 HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
4726 "should only return results from included worktree"
4727 );
4728
4729 assert_eq!(
4730 search(
4731 &project,
4732 SearchQuery::text(
4733 "NEEDLE",
4734 false,
4735 true,
4736 false,
4737 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4738 Default::default(),
4739 None,
4740 )
4741 .unwrap(),
4742 cx
4743 )
4744 .await
4745 .unwrap(),
4746 HashMap::from_iter([
4747 ("worktree-a/haystack.ts".to_string(), vec![3..9]),
4748 ("worktree-b/haystack.ts".to_string(), vec![3..9])
4749 ]),
4750 "should return results from both worktrees"
4751 );
4752}
4753
/// Text search should skip gitignored files by default, include them when the
/// query opts into ignored entries, and still honor explicit include/exclude
/// path filters within ignored directories.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    // `target/` and `node_modules/` are gitignored; only the root
    // `package.json` is an unignored candidate for the query below.
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
    // Default query (fourth argument `false`): gitignored entries are skipped.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): a fresh project is created per query, presumably so each
    // search starts from a clean worktree scan — confirm.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    // Same query, but with the fourth argument `true`, ignored files are
    // searched as well.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include/exclude filters still apply on top of the ignored-files search.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4870
4871#[gpui::test]
4872async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4873 init_test(cx);
4874
4875 let fs = FakeFs::new(cx.executor().clone());
4876 fs.insert_tree(
4877 "/one/two",
4878 json!({
4879 "three": {
4880 "a.txt": "",
4881 "four": {}
4882 },
4883 "c.rs": ""
4884 }),
4885 )
4886 .await;
4887
4888 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
4889 project
4890 .update(cx, |project, cx| {
4891 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4892 project.create_entry((id, "b.."), true, cx)
4893 })
4894 .await
4895 .unwrap()
4896 .to_included()
4897 .unwrap();
4898
4899 // Can't create paths outside the project
4900 let result = project
4901 .update(cx, |project, cx| {
4902 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4903 project.create_entry((id, "../../boop"), true, cx)
4904 })
4905 .await;
4906 assert!(result.is_err());
4907
4908 // Can't create paths with '..'
4909 let result = project
4910 .update(cx, |project, cx| {
4911 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4912 project.create_entry((id, "four/../beep"), true, cx)
4913 })
4914 .await;
4915 assert!(result.is_err());
4916
4917 assert_eq!(
4918 fs.paths(true),
4919 vec![
4920 PathBuf::from("/"),
4921 PathBuf::from("/one"),
4922 PathBuf::from("/one/two"),
4923 PathBuf::from("/one/two/c.rs"),
4924 PathBuf::from("/one/two/three"),
4925 PathBuf::from("/one/two/three/a.txt"),
4926 PathBuf::from("/one/two/three/b.."),
4927 PathBuf::from("/one/two/three/four"),
4928 ]
4929 );
4930
4931 // And we cannot open buffers with '..'
4932 let result = project
4933 .update(cx, |project, cx| {
4934 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4935 project.open_buffer((id, "../c.rs"), cx)
4936 })
4937 .await;
4938 assert!(result.is_err())
4939}
4940
/// A hover request should fan out to every language server attached to the
/// buffer that advertises hover support; `None` responses are dropped, and a
/// server without the hover capability must never receive the request.
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four fake servers for the same language: two that answer hovers, one
    // that answers with `None`, and one with no hover capability at all.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts the registered language servers.
    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler on each server as it comes up, keyed by server
    // name so we can later await the requests actually arriving.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two respond with real hover content.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            // This one is queried but returns no hover.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // This one must never be queried — it declared no hover support.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Start the hover first; the per-server requests only fire once this task
    // is running, so await the handlers afterwards.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned content contribute hover blocks.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5091
/// A hover response whose parts are all empty or whitespace-only should be
/// discarded entirely rather than surfaced as an empty hover.
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Single fake server that advertises hover support.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts the language server.
    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server answers with hover content consisting only of blank parts.
    let mut request_handled =
        fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String("   ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        });

    // Start the hover first, then wait for the request to reach the server.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
5161
/// When a code-action request restricts the action kinds, actions of other
/// kinds returned by the server must be filtered out of the result.
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Single fake server that advertises code-action support.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts the language server.
    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions of different kinds; only one of them
    // matches the kinds filter used below.
    let mut request_handled = fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
        move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        },
    );

    // Request only SOURCE_ORGANIZE_IMPORTS actions for the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the organize-imports action survives the kinds filter.
    let code_actions = code_actions_task.await.unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.kind,
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
5239
5240#[gpui::test]
5241async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5242 init_test(cx);
5243
5244 let fs = FakeFs::new(cx.executor());
5245 fs.insert_tree(
5246 "/dir",
5247 json!({
5248 "a.tsx": "a",
5249 }),
5250 )
5251 .await;
5252
5253 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5254
5255 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5256 language_registry.add(tsx_lang());
5257 let language_server_names = [
5258 "TypeScriptServer",
5259 "TailwindServer",
5260 "ESLintServer",
5261 "NoActionsCapabilitiesServer",
5262 ];
5263
5264 let mut language_server_rxs = [
5265 language_registry.register_fake_lsp(
5266 "tsx",
5267 FakeLspAdapter {
5268 name: language_server_names[0],
5269 capabilities: lsp::ServerCapabilities {
5270 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5271 ..lsp::ServerCapabilities::default()
5272 },
5273 ..FakeLspAdapter::default()
5274 },
5275 ),
5276 language_registry.register_fake_lsp(
5277 "tsx",
5278 FakeLspAdapter {
5279 name: language_server_names[1],
5280 capabilities: lsp::ServerCapabilities {
5281 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5282 ..lsp::ServerCapabilities::default()
5283 },
5284 ..FakeLspAdapter::default()
5285 },
5286 ),
5287 language_registry.register_fake_lsp(
5288 "tsx",
5289 FakeLspAdapter {
5290 name: language_server_names[2],
5291 capabilities: lsp::ServerCapabilities {
5292 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5293 ..lsp::ServerCapabilities::default()
5294 },
5295 ..FakeLspAdapter::default()
5296 },
5297 ),
5298 language_registry.register_fake_lsp(
5299 "tsx",
5300 FakeLspAdapter {
5301 name: language_server_names[3],
5302 capabilities: lsp::ServerCapabilities {
5303 code_action_provider: None,
5304 ..lsp::ServerCapabilities::default()
5305 },
5306 ..FakeLspAdapter::default()
5307 },
5308 ),
5309 ];
5310
5311 let (buffer, _handle) = project
5312 .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.tsx", cx))
5313 .await
5314 .unwrap();
5315 cx.executor().run_until_parked();
5316
5317 let mut servers_with_actions_requests = HashMap::default();
5318 for i in 0..language_server_names.len() {
5319 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5320 panic!(
5321 "Failed to get language server #{i} with name {}",
5322 &language_server_names[i]
5323 )
5324 });
5325 let new_server_name = new_server.server.name();
5326
5327 assert!(
5328 !servers_with_actions_requests.contains_key(&new_server_name),
5329 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5330 );
5331 match new_server_name.0.as_ref() {
5332 "TailwindServer" | "TypeScriptServer" => {
5333 servers_with_actions_requests.insert(
5334 new_server_name.clone(),
5335 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5336 move |_, _| {
5337 let name = new_server_name.clone();
5338 async move {
5339 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5340 lsp::CodeAction {
5341 title: format!("{name} code action"),
5342 ..lsp::CodeAction::default()
5343 },
5344 )]))
5345 }
5346 },
5347 ),
5348 );
5349 }
5350 "ESLintServer" => {
5351 servers_with_actions_requests.insert(
5352 new_server_name,
5353 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5354 |_, _| async move { Ok(None) },
5355 ),
5356 );
5357 }
5358 "NoActionsCapabilitiesServer" => {
5359 let _never_handled = new_server
5360 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5361 panic!(
5362 "Should not call for code actions server with no corresponding capabilities"
5363 )
5364 });
5365 }
5366 unexpected => panic!("Unexpected server name: {unexpected}"),
5367 }
5368 }
5369
5370 let code_actions_task = project.update(cx, |project, cx| {
5371 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5372 });
5373
5374 // cx.run_until_parked();
5375 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5376 |mut code_actions_request| async move {
5377 code_actions_request
5378 .next()
5379 .await
5380 .expect("All code actions requests should have been triggered")
5381 },
5382 ))
5383 .await;
5384 assert_eq!(
5385 vec!["TailwindServer code action", "TypeScriptServer code action"],
5386 code_actions_task
5387 .await
5388 .unwrap()
5389 .into_iter()
5390 .map(|code_action| code_action.lsp_action.title)
5391 .sorted()
5392 .collect::<Vec<_>>(),
5393 "Should receive code actions responses from all related servers with hover capabilities"
5394 );
5395}
5396
5397#[gpui::test]
5398async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5399 init_test(cx);
5400
5401 let fs = FakeFs::new(cx.executor());
5402 fs.insert_tree(
5403 "/dir",
5404 json!({
5405 "a.rs": "let a = 1;",
5406 "b.rs": "let b = 2;",
5407 "c.rs": "let c = 2;",
5408 }),
5409 )
5410 .await;
5411
5412 let project = Project::test(
5413 fs,
5414 [
5415 "/dir/a.rs".as_ref(),
5416 "/dir/b.rs".as_ref(),
5417 "/dir/c.rs".as_ref(),
5418 ],
5419 cx,
5420 )
5421 .await;
5422
5423 // check the initial state and get the worktrees
5424 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5425 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5426 assert_eq!(worktrees.len(), 3);
5427
5428 let worktree_a = worktrees[0].read(cx);
5429 let worktree_b = worktrees[1].read(cx);
5430 let worktree_c = worktrees[2].read(cx);
5431
5432 // check they start in the right order
5433 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5434 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5435 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5436
5437 (
5438 worktrees[0].clone(),
5439 worktrees[1].clone(),
5440 worktrees[2].clone(),
5441 )
5442 });
5443
5444 // move first worktree to after the second
5445 // [a, b, c] -> [b, a, c]
5446 project
5447 .update(cx, |project, cx| {
5448 let first = worktree_a.read(cx);
5449 let second = worktree_b.read(cx);
5450 project.move_worktree(first.id(), second.id(), cx)
5451 })
5452 .expect("moving first after second");
5453
5454 // check the state after moving
5455 project.update(cx, |project, cx| {
5456 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5457 assert_eq!(worktrees.len(), 3);
5458
5459 let first = worktrees[0].read(cx);
5460 let second = worktrees[1].read(cx);
5461 let third = worktrees[2].read(cx);
5462
5463 // check they are now in the right order
5464 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5465 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5466 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5467 });
5468
5469 // move the second worktree to before the first
5470 // [b, a, c] -> [a, b, c]
5471 project
5472 .update(cx, |project, cx| {
5473 let second = worktree_a.read(cx);
5474 let first = worktree_b.read(cx);
5475 project.move_worktree(first.id(), second.id(), cx)
5476 })
5477 .expect("moving second before first");
5478
5479 // check the state after moving
5480 project.update(cx, |project, cx| {
5481 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5482 assert_eq!(worktrees.len(), 3);
5483
5484 let first = worktrees[0].read(cx);
5485 let second = worktrees[1].read(cx);
5486 let third = worktrees[2].read(cx);
5487
5488 // check they are now in the right order
5489 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5490 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5491 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5492 });
5493
5494 // move the second worktree to after the third
5495 // [a, b, c] -> [a, c, b]
5496 project
5497 .update(cx, |project, cx| {
5498 let second = worktree_b.read(cx);
5499 let third = worktree_c.read(cx);
5500 project.move_worktree(second.id(), third.id(), cx)
5501 })
5502 .expect("moving second after third");
5503
5504 // check the state after moving
5505 project.update(cx, |project, cx| {
5506 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5507 assert_eq!(worktrees.len(), 3);
5508
5509 let first = worktrees[0].read(cx);
5510 let second = worktrees[1].read(cx);
5511 let third = worktrees[2].read(cx);
5512
5513 // check they are now in the right order
5514 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5515 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5516 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5517 });
5518
5519 // move the third worktree to before the second
5520 // [a, c, b] -> [a, b, c]
5521 project
5522 .update(cx, |project, cx| {
5523 let third = worktree_c.read(cx);
5524 let second = worktree_b.read(cx);
5525 project.move_worktree(third.id(), second.id(), cx)
5526 })
5527 .expect("moving third before second");
5528
5529 // check the state after moving
5530 project.update(cx, |project, cx| {
5531 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5532 assert_eq!(worktrees.len(), 3);
5533
5534 let first = worktrees[0].read(cx);
5535 let second = worktrees[1].read(cx);
5536 let third = worktrees[2].read(cx);
5537
5538 // check they are now in the right order
5539 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5540 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5541 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5542 });
5543
5544 // move the first worktree to after the third
5545 // [a, b, c] -> [b, c, a]
5546 project
5547 .update(cx, |project, cx| {
5548 let first = worktree_a.read(cx);
5549 let third = worktree_c.read(cx);
5550 project.move_worktree(first.id(), third.id(), cx)
5551 })
5552 .expect("moving first after third");
5553
5554 // check the state after moving
5555 project.update(cx, |project, cx| {
5556 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5557 assert_eq!(worktrees.len(), 3);
5558
5559 let first = worktrees[0].read(cx);
5560 let second = worktrees[1].read(cx);
5561 let third = worktrees[2].read(cx);
5562
5563 // check they are now in the right order
5564 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5565 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5566 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5567 });
5568
5569 // move the third worktree to before the first
5570 // [b, c, a] -> [a, b, c]
5571 project
5572 .update(cx, |project, cx| {
5573 let third = worktree_a.read(cx);
5574 let first = worktree_b.read(cx);
5575 project.move_worktree(third.id(), first.id(), cx)
5576 })
5577 .expect("moving third before first");
5578
5579 // check the state after moving
5580 project.update(cx, |project, cx| {
5581 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5582 assert_eq!(worktrees.len(), 3);
5583
5584 let first = worktrees[0].read(cx);
5585 let second = worktrees[1].read(cx);
5586 let third = worktrees[2].read(cx);
5587
5588 // check they are now in the right order
5589 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5590 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5591 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5592 });
5593}
5594
/// Opening unstaged changes for a buffer should diff the buffer contents
/// against the git index, and the diff should update when the index changes.
#[gpui::test]
async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index (staged) version of the file.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy version: one added line, one modified line.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // Seed the fake repo's index with the staged contents.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[(Path::new("src/main.rs"), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_changes = project
        .update(cx, |project, cx| {
            project.open_unstaged_changes(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Let the diff computation settle, then check the two expected hunks:
    // each tuple is (buffer row range, base text, buffer text).
    cx.run_until_parked();
    unstaged_changes.update(cx, |unstaged_changes, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_changes.base_text.as_ref().unwrap().text(),
            &[
                (0..1, "", "// print goodbye\n"),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                ),
            ],
        );
    });

    // Update the index so only the println line still differs from the buffer.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[(Path::new("src/main.rs"), staged_contents)],
    );

    // After the index change is picked up, a single added-line hunk remains.
    cx.run_until_parked();
    unstaged_changes.update(cx, |unstaged_changes, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_changes.base_text.as_ref().unwrap().text(),
            &[(2..3, "", "    println!(\"goodbye world\");\n")],
        );
    });
}
5686
5687async fn search(
5688 project: &Entity<Project>,
5689 query: SearchQuery,
5690 cx: &mut gpui::TestAppContext,
5691) -> Result<HashMap<String, Vec<Range<usize>>>> {
5692 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
5693 let mut results = HashMap::default();
5694 while let Ok(search_result) = search_rx.recv().await {
5695 match search_result {
5696 SearchResult::Buffer { buffer, ranges } => {
5697 results.entry(buffer).or_insert(ranges);
5698 }
5699 SearchResult::LimitReached => {}
5700 }
5701 }
5702 Ok(results
5703 .into_iter()
5704 .map(|(buffer, ranges)| {
5705 buffer.update(cx, |buffer, cx| {
5706 let path = buffer
5707 .file()
5708 .unwrap()
5709 .full_path(cx)
5710 .to_string_lossy()
5711 .to_string();
5712 let ranges = ranges
5713 .into_iter()
5714 .map(|range| range.to_offset(buffer))
5715 .collect::<Vec<_>>();
5716 (path, ranges)
5717 })
5718 })
5719 .collect())
5720}
5721
5722pub fn init_test(cx: &mut gpui::TestAppContext) {
5723 if std::env::var("RUST_LOG").is_ok() {
5724 env_logger::try_init().ok();
5725 }
5726
5727 cx.update(|cx| {
5728 let settings_store = SettingsStore::test(cx);
5729 cx.set_global(settings_store);
5730 release_channel::init(SemanticVersion::default(), cx);
5731 language::init(cx);
5732 Project::init_settings(cx);
5733 });
5734}
5735
5736fn json_lang() -> Arc<Language> {
5737 Arc::new(Language::new(
5738 LanguageConfig {
5739 name: "JSON".into(),
5740 matcher: LanguageMatcher {
5741 path_suffixes: vec!["json".to_string()],
5742 ..Default::default()
5743 },
5744 ..Default::default()
5745 },
5746 None,
5747 ))
5748}
5749
5750fn js_lang() -> Arc<Language> {
5751 Arc::new(Language::new(
5752 LanguageConfig {
5753 name: "JavaScript".into(),
5754 matcher: LanguageMatcher {
5755 path_suffixes: vec!["js".to_string()],
5756 ..Default::default()
5757 },
5758 ..Default::default()
5759 },
5760 None,
5761 ))
5762}
5763
5764fn rust_lang() -> Arc<Language> {
5765 Arc::new(Language::new(
5766 LanguageConfig {
5767 name: "Rust".into(),
5768 matcher: LanguageMatcher {
5769 path_suffixes: vec!["rs".to_string()],
5770 ..Default::default()
5771 },
5772 ..Default::default()
5773 },
5774 Some(tree_sitter_rust::LANGUAGE.into()),
5775 ))
5776}
5777
5778fn typescript_lang() -> Arc<Language> {
5779 Arc::new(Language::new(
5780 LanguageConfig {
5781 name: "TypeScript".into(),
5782 matcher: LanguageMatcher {
5783 path_suffixes: vec!["ts".to_string()],
5784 ..Default::default()
5785 },
5786 ..Default::default()
5787 },
5788 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
5789 ))
5790}
5791
5792fn tsx_lang() -> Arc<Language> {
5793 Arc::new(Language::new(
5794 LanguageConfig {
5795 name: "tsx".into(),
5796 matcher: LanguageMatcher {
5797 path_suffixes: vec!["tsx".to_string()],
5798 ..Default::default()
5799 },
5800 ..Default::default()
5801 },
5802 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
5803 ))
5804}
5805
5806fn get_all_tasks(
5807 project: &Entity<Project>,
5808 worktree_id: Option<WorktreeId>,
5809 task_context: &TaskContext,
5810 cx: &mut App,
5811) -> Vec<(TaskSourceKind, ResolvedTask)> {
5812 let (mut old, new) = project.update(cx, |project, cx| {
5813 project
5814 .task_store
5815 .read(cx)
5816 .task_inventory()
5817 .unwrap()
5818 .read(cx)
5819 .used_and_current_resolved_tasks(worktree_id, None, task_context, cx)
5820 });
5821 old.extend(new);
5822 old
5823}