1use crate::{Event, *};
2use ::git::diff::assert_hunks;
3use fs::FakeFs;
4use futures::{future, StreamExt};
5use gpui::{AppContext, SemanticVersion, UpdateGlobal};
6use http_client::Url;
7use language::{
8 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
9 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
10 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
11 OffsetRangeExt, Point, ToPoint,
12};
13use lsp::{
14 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
15 NumberOrString, TextDocumentEdit, WillRenameFiles,
16};
17use parking_lot::Mutex;
18use pretty_assertions::{assert_eq, assert_matches};
19use serde_json::json;
20#[cfg(not(windows))]
21use std::os;
22use std::{str::FromStr, sync::OnceLock};
23
24use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
25use task::{ResolvedTask, TaskContext};
26use unindent::Unindent as _;
27use util::{assert_set_eq, paths::PathMatcher, test::temp_tree, TryFutureExt as _};
28
29#[gpui::test]
30async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
31 cx.executor().allow_parking();
32
33 let (tx, mut rx) = futures::channel::mpsc::unbounded();
34 let _thread = std::thread::spawn(move || {
35 std::fs::metadata("/tmp").unwrap();
36 std::thread::sleep(Duration::from_millis(1000));
37 tx.unbounded_send(1).unwrap();
38 });
39 rx.next().await.unwrap();
40}
41
42#[gpui::test]
43async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
44 cx.executor().allow_parking();
45
46 let io_task = smol::unblock(move || {
47 println!("sleeping on thread {:?}", std::thread::current().id());
48 std::thread::sleep(Duration::from_millis(10));
49 1
50 });
51
52 let task = cx.foreground_executor().spawn(async move {
53 io_task.await;
54 });
55
56 task.await;
57}
58
59#[cfg(not(windows))]
60#[gpui::test]
61async fn test_symlinks(cx: &mut gpui::TestAppContext) {
62 init_test(cx);
63 cx.executor().allow_parking();
64
65 let dir = temp_tree(json!({
66 "root": {
67 "apple": "",
68 "banana": {
69 "carrot": {
70 "date": "",
71 "endive": "",
72 }
73 },
74 "fennel": {
75 "grape": "",
76 }
77 }
78 }));
79
80 let root_link_path = dir.path().join("root_link");
81 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
82 os::unix::fs::symlink(
83 dir.path().join("root/fennel"),
84 dir.path().join("root/finnochio"),
85 )
86 .unwrap();
87
88 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
89
90 project.update(cx, |project, cx| {
91 let tree = project.worktrees(cx).next().unwrap().read(cx);
92 assert_eq!(tree.file_count(), 5);
93 assert_eq!(
94 tree.inode_for_path("fennel/grape"),
95 tree.inode_for_path("finnochio/grape")
96 );
97 });
98}
99
100#[gpui::test]
101async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
102 init_test(cx);
103
104 let dir = temp_tree(json!({
105 ".editorconfig": r#"
106 root = true
107 [*.rs]
108 indent_style = tab
109 indent_size = 3
110 end_of_line = lf
111 insert_final_newline = true
112 trim_trailing_whitespace = true
113 [*.js]
114 tab_width = 10
115 "#,
116 ".zed": {
117 "settings.json": r#"{
118 "tab_size": 8,
119 "hard_tabs": false,
120 "ensure_final_newline_on_save": false,
121 "remove_trailing_whitespace_on_save": false,
122 "soft_wrap": "editor_width"
123 }"#,
124 },
125 "a.rs": "fn a() {\n A\n}",
126 "b": {
127 ".editorconfig": r#"
128 [*.rs]
129 indent_size = 2
130 "#,
131 "b.rs": "fn b() {\n B\n}",
132 },
133 "c.js": "def c\n C\nend",
134 "README.json": "tabs are better\n",
135 }));
136
137 let path = dir.path();
138 let fs = FakeFs::new(cx.executor());
139 fs.insert_tree_from_real_fs(path, path).await;
140 let project = Project::test(fs, [path], cx).await;
141
142 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
143 language_registry.add(js_lang());
144 language_registry.add(json_lang());
145 language_registry.add(rust_lang());
146
147 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
148
149 cx.executor().run_until_parked();
150
151 cx.update(|cx| {
152 let tree = worktree.read(cx);
153 let settings_for = |path: &str| {
154 let file_entry = tree.entry_for_path(path).unwrap().clone();
155 let file = File::for_entry(file_entry, worktree.clone());
156 let file_language = project
157 .read(cx)
158 .languages()
159 .language_for_file_path(file.path.as_ref());
160 let file_language = cx
161 .background_executor()
162 .block(file_language)
163 .expect("Failed to get file language");
164 let file = file as _;
165 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
166 };
167
168 let settings_a = settings_for("a.rs");
169 let settings_b = settings_for("b/b.rs");
170 let settings_c = settings_for("c.js");
171 let settings_readme = settings_for("README.json");
172
173 // .editorconfig overrides .zed/settings
174 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
175 assert_eq!(settings_a.hard_tabs, true);
176 assert_eq!(settings_a.ensure_final_newline_on_save, true);
177 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
178
179 // .editorconfig in b/ overrides .editorconfig in root
180 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
181
182 // "indent_size" is not set, so "tab_width" is used
183 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
184
185 // README.md should not be affected by .editorconfig's globe "*.rs"
186 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
187 });
188}
189
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Checks per-directory .zed configuration: settings.json values apply to
    // files beneath the directory that declares them (inner overrides outer),
    // and tasks.json entries from nested, root, and global sources are all
    // surfaced with the expected ordering.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // Identifies tasks sourced from the root-level .zed directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolution: a/a.rs gets the root tab_size (8), while
            // b/b.rs is overridden by b/.zed/settings.json (2).
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree tasks.json files contribute; the nested one comes first.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as recently scheduled, and register a global
    // (file-based) tasks.json entry with an env var attached.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The just-scheduled root task now leads, followed by the nested worktree
    // task, with the global task (carrying its env) listed last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
383
// End-to-end check of language-server lifecycle management: servers start
// lazily when a matching buffer is opened, buffers are routed only to the
// servers for their language, renames can migrate a buffer between servers,
// and restarting servers re-opens the relevant documents.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust and JSON servers with distinct completion trigger characters,
    // so we can tell which server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Attach a diagnostic to the renamed buffer so we can check below that it
    // is cleared when the file later changes language.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
781
// Verifies that filesystem events are forwarded to a language server only for
// paths matching its registered `workspace/didChangeWatchedFiles` globs, and
// that watching an ignored directory forces it to be loaded.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // Prefix absolute paths with a drive letter so they are valid on Windows.
    fn add_root_for_windows(path: &str) -> String {
        if cfg!(windows) {
            format!("C:{}", path)
        } else {
            path.to_string()
        }
    }

    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        add_root_for_windows("/the-root"),
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [add_root_for_windows("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(add_root_for_windows("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(add_root_for_windows(
                                    "/the-root/Cargo.toml",
                                )),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(add_root_for_windows(
                                    "/the-root/src/*.{rs,c}",
                                )),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(add_root_for_windows(
                                    "/the-root/target/y/**/*.rs",
                                )),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            // Sort by URI so the assertions below don't depend on the order
            // in which events arrive.
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, three of which match the watched patterns
    // (create src/c.rs, delete src/b.rs, create target/y/out/y2.rs), and two of
    // which do not (create src/d.txt, create target/x/out/x2.rs).
    fs.create_file(
        add_root_for_windows("/the-root/src/c.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        add_root_for_windows("/the-root/src/d.txt").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.remove_file(
        add_root_for_windows("/the-root/src/b.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        add_root_for_windows("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        add_root_for_windows("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(add_root_for_windows("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(add_root_for_windows("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(add_root_for_windows("/the-root/target/y/out/y2.rs"))
                    .unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
999
1000#[gpui::test]
1001async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1002 init_test(cx);
1003
1004 let fs = FakeFs::new(cx.executor());
1005 fs.insert_tree(
1006 "/dir",
1007 json!({
1008 "a.rs": "let a = 1;",
1009 "b.rs": "let b = 2;"
1010 }),
1011 )
1012 .await;
1013
1014 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
1015 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1016
1017 let buffer_a = project
1018 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1019 .await
1020 .unwrap();
1021 let buffer_b = project
1022 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1023 .await
1024 .unwrap();
1025
1026 lsp_store.update(cx, |lsp_store, cx| {
1027 lsp_store
1028 .update_diagnostics(
1029 LanguageServerId(0),
1030 lsp::PublishDiagnosticsParams {
1031 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1032 version: None,
1033 diagnostics: vec![lsp::Diagnostic {
1034 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1035 severity: Some(lsp::DiagnosticSeverity::ERROR),
1036 message: "error 1".to_string(),
1037 ..Default::default()
1038 }],
1039 },
1040 &[],
1041 cx,
1042 )
1043 .unwrap();
1044 lsp_store
1045 .update_diagnostics(
1046 LanguageServerId(0),
1047 lsp::PublishDiagnosticsParams {
1048 uri: Url::from_file_path("/dir/b.rs").unwrap(),
1049 version: None,
1050 diagnostics: vec![lsp::Diagnostic {
1051 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1052 severity: Some(DiagnosticSeverity::WARNING),
1053 message: "error 2".to_string(),
1054 ..Default::default()
1055 }],
1056 },
1057 &[],
1058 cx,
1059 )
1060 .unwrap();
1061 });
1062
1063 buffer_a.update(cx, |buffer, _| {
1064 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1065 assert_eq!(
1066 chunks
1067 .iter()
1068 .map(|(s, d)| (s.as_str(), *d))
1069 .collect::<Vec<_>>(),
1070 &[
1071 ("let ", None),
1072 ("a", Some(DiagnosticSeverity::ERROR)),
1073 (" = 1;", None),
1074 ]
1075 );
1076 });
1077 buffer_b.update(cx, |buffer, _| {
1078 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1079 assert_eq!(
1080 chunks
1081 .iter()
1082 .map(|(s, d)| (s.as_str(), *d))
1083 .collect::<Vec<_>>(),
1084 &[
1085 ("let ", None),
1086 ("b", Some(DiagnosticSeverity::WARNING)),
1087 (" = 2;", None),
1088 ]
1089 );
1090 });
1091}
1092
1093#[gpui::test]
1094async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1095 init_test(cx);
1096
1097 let fs = FakeFs::new(cx.executor());
1098 fs.insert_tree(
1099 "/root",
1100 json!({
1101 "dir": {
1102 ".git": {
1103 "HEAD": "ref: refs/heads/main",
1104 },
1105 ".gitignore": "b.rs",
1106 "a.rs": "let a = 1;",
1107 "b.rs": "let b = 2;",
1108 },
1109 "other.rs": "let b = c;"
1110 }),
1111 )
1112 .await;
1113
1114 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1115 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1116 let (worktree, _) = project
1117 .update(cx, |project, cx| {
1118 project.find_or_create_worktree("/root/dir", true, cx)
1119 })
1120 .await
1121 .unwrap();
1122 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1123
1124 let (worktree, _) = project
1125 .update(cx, |project, cx| {
1126 project.find_or_create_worktree("/root/other.rs", false, cx)
1127 })
1128 .await
1129 .unwrap();
1130 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1131
1132 let server_id = LanguageServerId(0);
1133 lsp_store.update(cx, |lsp_store, cx| {
1134 lsp_store
1135 .update_diagnostics(
1136 server_id,
1137 lsp::PublishDiagnosticsParams {
1138 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1139 version: None,
1140 diagnostics: vec![lsp::Diagnostic {
1141 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1142 severity: Some(lsp::DiagnosticSeverity::ERROR),
1143 message: "unused variable 'b'".to_string(),
1144 ..Default::default()
1145 }],
1146 },
1147 &[],
1148 cx,
1149 )
1150 .unwrap();
1151 lsp_store
1152 .update_diagnostics(
1153 server_id,
1154 lsp::PublishDiagnosticsParams {
1155 uri: Url::from_file_path("/root/other.rs").unwrap(),
1156 version: None,
1157 diagnostics: vec![lsp::Diagnostic {
1158 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1159 severity: Some(lsp::DiagnosticSeverity::ERROR),
1160 message: "unknown variable 'c'".to_string(),
1161 ..Default::default()
1162 }],
1163 },
1164 &[],
1165 cx,
1166 )
1167 .unwrap();
1168 });
1169
1170 let main_ignored_buffer = project
1171 .update(cx, |project, cx| {
1172 project.open_buffer((main_worktree_id, "b.rs"), cx)
1173 })
1174 .await
1175 .unwrap();
1176 main_ignored_buffer.update(cx, |buffer, _| {
1177 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1178 assert_eq!(
1179 chunks
1180 .iter()
1181 .map(|(s, d)| (s.as_str(), *d))
1182 .collect::<Vec<_>>(),
1183 &[
1184 ("let ", None),
1185 ("b", Some(DiagnosticSeverity::ERROR)),
1186 (" = 2;", None),
1187 ],
1188 "Gigitnored buffers should still get in-buffer diagnostics",
1189 );
1190 });
1191 let other_buffer = project
1192 .update(cx, |project, cx| {
1193 project.open_buffer((other_worktree_id, ""), cx)
1194 })
1195 .await
1196 .unwrap();
1197 other_buffer.update(cx, |buffer, _| {
1198 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1199 assert_eq!(
1200 chunks
1201 .iter()
1202 .map(|(s, d)| (s.as_str(), *d))
1203 .collect::<Vec<_>>(),
1204 &[
1205 ("let b = ", None),
1206 ("c", Some(DiagnosticSeverity::ERROR)),
1207 (";", None),
1208 ],
1209 "Buffers from hidden projects should still get in-buffer diagnostics"
1210 );
1211 });
1212
1213 project.update(cx, |project, cx| {
1214 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1215 assert_eq!(
1216 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1217 vec![(
1218 ProjectPath {
1219 worktree_id: main_worktree_id,
1220 path: Arc::from(Path::new("b.rs")),
1221 },
1222 server_id,
1223 DiagnosticSummary {
1224 error_count: 1,
1225 warning_count: 0,
1226 }
1227 )]
1228 );
1229 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1230 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1231 });
1232}
1233
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // End-to-end check of the project event sequence emitted while a language
    // server reports disk-based diagnostics via work-done progress:
    // LanguageServerAdded -> RefreshInlayHints -> DiskBasedDiagnosticsStarted
    // -> DiagnosticsUpdated -> DiskBasedDiagnosticsFinished, in that order.
    // Also verifies that re-publishing an identical (empty) diagnostic set
    // produces no duplicate update event.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/b.rs", cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress on the configured token marks the start of a
    // disk-based diagnostics pass.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Diagnostics published mid-pass surface as a DiagnosticsUpdated event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending progress on the token completes the pass.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The published diagnostic is present in the opened buffer's snapshot.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // The second identical (empty) publish must not emit another event.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1369
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Restarting a language server while its disk-based diagnostics pass is
    // still in flight must not wedge the project: the replacement server's
    // progress lifecycle alone determines the "diagnostics running" state.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // Note the new server gets the next id (1).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1457
1458#[gpui::test]
1459async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1460 init_test(cx);
1461
1462 let fs = FakeFs::new(cx.executor());
1463 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1464
1465 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1466
1467 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1468 language_registry.add(rust_lang());
1469 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1470
1471 let (buffer, _) = project
1472 .update(cx, |project, cx| {
1473 project.open_local_buffer_with_lsp("/dir/a.rs", cx)
1474 })
1475 .await
1476 .unwrap();
1477
1478 // Publish diagnostics
1479 let fake_server = fake_servers.next().await.unwrap();
1480 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1481 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1482 version: None,
1483 diagnostics: vec![lsp::Diagnostic {
1484 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1485 severity: Some(lsp::DiagnosticSeverity::ERROR),
1486 message: "the message".to_string(),
1487 ..Default::default()
1488 }],
1489 });
1490
1491 cx.executor().run_until_parked();
1492 buffer.update(cx, |buffer, _| {
1493 assert_eq!(
1494 buffer
1495 .snapshot()
1496 .diagnostics_in_range::<_, usize>(0..1, false)
1497 .map(|entry| entry.diagnostic.message.clone())
1498 .collect::<Vec<_>>(),
1499 ["the message".to_string()]
1500 );
1501 });
1502 project.update(cx, |project, cx| {
1503 assert_eq!(
1504 project.diagnostic_summary(false, cx),
1505 DiagnosticSummary {
1506 error_count: 1,
1507 warning_count: 0,
1508 }
1509 );
1510 });
1511
1512 project.update(cx, |project, cx| {
1513 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1514 });
1515
1516 // The diagnostics are cleared.
1517 cx.executor().run_until_parked();
1518 buffer.update(cx, |buffer, _| {
1519 assert_eq!(
1520 buffer
1521 .snapshot()
1522 .diagnostics_in_range::<_, usize>(0..1, false)
1523 .map(|entry| entry.diagnostic.message.clone())
1524 .collect::<Vec<_>>(),
1525 Vec::<String>::new(),
1526 );
1527 });
1528 project.update(cx, |project, cx| {
1529 assert_eq!(
1530 project.diagnostic_summary(false, cx),
1531 DiagnosticSummary {
1532 error_count: 0,
1533 warning_count: 0,
1534 }
1535 );
1536 });
1537}
1538
1539#[gpui::test]
1540async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1541 init_test(cx);
1542
1543 let fs = FakeFs::new(cx.executor());
1544 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1545
1546 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1547 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1548
1549 language_registry.add(rust_lang());
1550 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1551
1552 let (buffer, _handle) = project
1553 .update(cx, |project, cx| {
1554 project.open_local_buffer_with_lsp("/dir/a.rs", cx)
1555 })
1556 .await
1557 .unwrap();
1558
1559 // Before restarting the server, report diagnostics with an unknown buffer version.
1560 let fake_server = fake_servers.next().await.unwrap();
1561 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1562 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1563 version: Some(10000),
1564 diagnostics: Vec::new(),
1565 });
1566 cx.executor().run_until_parked();
1567
1568 project.update(cx, |project, cx| {
1569 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1570 });
1571 let mut fake_server = fake_servers.next().await.unwrap();
1572 let notification = fake_server
1573 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1574 .await
1575 .text_document;
1576 assert_eq!(notification.version, 0);
1577}
1578
1579#[gpui::test]
1580async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
1581 init_test(cx);
1582
1583 let progress_token = "the-progress-token";
1584
1585 let fs = FakeFs::new(cx.executor());
1586 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1587
1588 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1589
1590 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1591 language_registry.add(rust_lang());
1592 let mut fake_servers = language_registry.register_fake_lsp(
1593 "Rust",
1594 FakeLspAdapter {
1595 name: "the-language-server",
1596 disk_based_diagnostics_sources: vec!["disk".into()],
1597 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1598 ..Default::default()
1599 },
1600 );
1601
1602 let (buffer, _handle) = project
1603 .update(cx, |project, cx| {
1604 project.open_local_buffer_with_lsp("/dir/a.rs", cx)
1605 })
1606 .await
1607 .unwrap();
1608
1609 // Simulate diagnostics starting to update.
1610 let mut fake_server = fake_servers.next().await.unwrap();
1611 fake_server
1612 .start_progress_with(
1613 "another-token",
1614 lsp::WorkDoneProgressBegin {
1615 cancellable: Some(false),
1616 ..Default::default()
1617 },
1618 )
1619 .await;
1620 fake_server
1621 .start_progress_with(
1622 progress_token,
1623 lsp::WorkDoneProgressBegin {
1624 cancellable: Some(true),
1625 ..Default::default()
1626 },
1627 )
1628 .await;
1629 cx.executor().run_until_parked();
1630
1631 project.update(cx, |project, cx| {
1632 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
1633 });
1634
1635 let cancel_notification = fake_server
1636 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
1637 .await;
1638 assert_eq!(
1639 cancel_notification.token,
1640 NumberOrString::String(progress_token.into())
1641 );
1642}
1643
1644#[gpui::test]
1645async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1646 init_test(cx);
1647
1648 let fs = FakeFs::new(cx.executor());
1649 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1650 .await;
1651
1652 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1653 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1654
1655 let mut fake_rust_servers = language_registry.register_fake_lsp(
1656 "Rust",
1657 FakeLspAdapter {
1658 name: "rust-lsp",
1659 ..Default::default()
1660 },
1661 );
1662 let mut fake_js_servers = language_registry.register_fake_lsp(
1663 "JavaScript",
1664 FakeLspAdapter {
1665 name: "js-lsp",
1666 ..Default::default()
1667 },
1668 );
1669 language_registry.add(rust_lang());
1670 language_registry.add(js_lang());
1671
1672 let _rs_buffer = project
1673 .update(cx, |project, cx| {
1674 project.open_local_buffer_with_lsp("/dir/a.rs", cx)
1675 })
1676 .await
1677 .unwrap();
1678 let _js_buffer = project
1679 .update(cx, |project, cx| {
1680 project.open_local_buffer_with_lsp("/dir/b.js", cx)
1681 })
1682 .await
1683 .unwrap();
1684
1685 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1686 assert_eq!(
1687 fake_rust_server_1
1688 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1689 .await
1690 .text_document
1691 .uri
1692 .as_str(),
1693 "file:///dir/a.rs"
1694 );
1695
1696 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1697 assert_eq!(
1698 fake_js_server
1699 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1700 .await
1701 .text_document
1702 .uri
1703 .as_str(),
1704 "file:///dir/b.js"
1705 );
1706
1707 // Disable Rust language server, ensuring only that server gets stopped.
1708 cx.update(|cx| {
1709 SettingsStore::update_global(cx, |settings, cx| {
1710 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1711 settings.languages.insert(
1712 "Rust".into(),
1713 LanguageSettingsContent {
1714 enable_language_server: Some(false),
1715 ..Default::default()
1716 },
1717 );
1718 });
1719 })
1720 });
1721 fake_rust_server_1
1722 .receive_notification::<lsp::notification::Exit>()
1723 .await;
1724
1725 // Enable Rust and disable JavaScript language servers, ensuring that the
1726 // former gets started again and that the latter stops.
1727 cx.update(|cx| {
1728 SettingsStore::update_global(cx, |settings, cx| {
1729 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1730 settings.languages.insert(
1731 LanguageName::new("Rust"),
1732 LanguageSettingsContent {
1733 enable_language_server: Some(true),
1734 ..Default::default()
1735 },
1736 );
1737 settings.languages.insert(
1738 LanguageName::new("JavaScript"),
1739 LanguageSettingsContent {
1740 enable_language_server: Some(false),
1741 ..Default::default()
1742 },
1743 );
1744 });
1745 })
1746 });
1747 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1748 assert_eq!(
1749 fake_rust_server_2
1750 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1751 .await
1752 .text_document
1753 .uri
1754 .as_str(),
1755 "file:///dir/a.rs"
1756 );
1757 fake_js_server
1758 .receive_notification::<lsp::notification::Exit>()
1759 .await;
1760}
1761
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    // Diagnostics arrive tagged with a document version. This test verifies
    // that diagnostics published against an older version are mapped through
    // the edits made since then, that overlapping diagnostics are highlighted
    // correctly, and that group ids keep increasing across publishes.
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let _handle = lsp_store.update(cx, |lsp_store, cx| {
        lsp_store.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // Rows 0-2 in the published version correspond to rows 2-4 now.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Entries are ordered with the wider/earlier-starting range first;
        // group ids continue from the previous publish (3, 4).
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // The error takes highlight precedence where the two ranges overlap.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Ranges reflect the latest edits (indentation + parameter insertion
        // on the 'A' line, "xxx" inserted inside 'BB').
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2046
2047#[gpui::test]
2048async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2049 init_test(cx);
2050
2051 let text = concat!(
2052 "let one = ;\n", //
2053 "let two = \n",
2054 "let three = 3;\n",
2055 );
2056
2057 let fs = FakeFs::new(cx.executor());
2058 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2059
2060 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2061 let buffer = project
2062 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2063 .await
2064 .unwrap();
2065
2066 project.update(cx, |project, cx| {
2067 project.lsp_store.update(cx, |lsp_store, cx| {
2068 lsp_store
2069 .update_diagnostic_entries(
2070 LanguageServerId(0),
2071 PathBuf::from("/dir/a.rs"),
2072 None,
2073 vec![
2074 DiagnosticEntry {
2075 range: Unclipped(PointUtf16::new(0, 10))
2076 ..Unclipped(PointUtf16::new(0, 10)),
2077 diagnostic: Diagnostic {
2078 severity: DiagnosticSeverity::ERROR,
2079 message: "syntax error 1".to_string(),
2080 ..Default::default()
2081 },
2082 },
2083 DiagnosticEntry {
2084 range: Unclipped(PointUtf16::new(1, 10))
2085 ..Unclipped(PointUtf16::new(1, 10)),
2086 diagnostic: Diagnostic {
2087 severity: DiagnosticSeverity::ERROR,
2088 message: "syntax error 2".to_string(),
2089 ..Default::default()
2090 },
2091 },
2092 ],
2093 cx,
2094 )
2095 .unwrap();
2096 })
2097 });
2098
2099 // An empty range is extended forward to include the following character.
2100 // At the end of a line, an empty range is extended backward to include
2101 // the preceding character.
2102 buffer.update(cx, |buffer, _| {
2103 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2104 assert_eq!(
2105 chunks
2106 .iter()
2107 .map(|(s, d)| (s.as_str(), *d))
2108 .collect::<Vec<_>>(),
2109 &[
2110 ("let one = ", None),
2111 (";", Some(DiagnosticSeverity::ERROR)),
2112 ("\nlet two =", None),
2113 (" ", Some(DiagnosticSeverity::ERROR)),
2114 ("\nlet three = 3;\n", None)
2115 ]
2116 );
2117 });
2118}
2119
2120#[gpui::test]
2121async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2122 init_test(cx);
2123
2124 let fs = FakeFs::new(cx.executor());
2125 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2126 .await;
2127
2128 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2129 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2130
2131 lsp_store.update(cx, |lsp_store, cx| {
2132 lsp_store
2133 .update_diagnostic_entries(
2134 LanguageServerId(0),
2135 Path::new("/dir/a.rs").to_owned(),
2136 None,
2137 vec![DiagnosticEntry {
2138 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2139 diagnostic: Diagnostic {
2140 severity: DiagnosticSeverity::ERROR,
2141 is_primary: true,
2142 message: "syntax error a1".to_string(),
2143 ..Default::default()
2144 },
2145 }],
2146 cx,
2147 )
2148 .unwrap();
2149 lsp_store
2150 .update_diagnostic_entries(
2151 LanguageServerId(1),
2152 Path::new("/dir/a.rs").to_owned(),
2153 None,
2154 vec![DiagnosticEntry {
2155 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2156 diagnostic: Diagnostic {
2157 severity: DiagnosticSeverity::ERROR,
2158 is_primary: true,
2159 message: "syntax error b1".to_string(),
2160 ..Default::default()
2161 },
2162 }],
2163 cx,
2164 )
2165 .unwrap();
2166
2167 assert_eq!(
2168 lsp_store.diagnostic_summary(false, cx),
2169 DiagnosticSummary {
2170 error_count: 2,
2171 warning_count: 0,
2172 }
2173 );
2174 });
2175}
2176
// Verifies that LSP edits computed against an older version of a document
// (identified by the `version` captured from `didOpen`) are translated through
// the buffer edits made since that version before being applied.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw when the buffer was opened.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Interpret the server's edits, whose positions refer to the stale
    // document version, relative to the current buffer contents.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits preserves both the server's changes and
    // the user's intervening edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2331
// Verifies that a semantically small change expressed by the server as a large
// diff (replace + reinsert + delete) is minimized by `edits_from_lsp` down to
// just the edits that actually change the buffer.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The large diff is reduced to the two edits that actually change
        // the buffer contents.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2440
// Verifies that `edits_from_lsp` tolerates malformed server edits: unordered
// edits, an inverted range, and an end position past the end of the file are
// normalized (sorted and clipped) before being minimized.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // An inverted range: the start lies after the end.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // A range whose end lies far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The edits come back sorted, clipped, and minimized.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2545
2546fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2547 buffer: &Buffer,
2548 range: Range<T>,
2549) -> Vec<(String, Option<DiagnosticSeverity>)> {
2550 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2551 for chunk in buffer.snapshot().chunks(range, true) {
2552 if chunks.last().map_or(false, |prev_chunk| {
2553 prev_chunk.1 == chunk.diagnostic_severity
2554 }) {
2555 chunks.last_mut().unwrap().0.push_str(chunk.text);
2556 } else {
2557 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2558 }
2559 }
2560 chunks
2561}
2562
// Verifies go-to-definition: a response location in a file outside the project
// opens that file in a new, invisible worktree, and dropping the definition
// releases that worktree.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the (visible) project worktree.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/b.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        // Point at a location in `a.rs`, which lies outside the project.
        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // The target file was added as an invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Lists each worktree's absolute path along with its visibility flag.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2658
// Verifies that completion items lacking an explicit edit range fall back to
// replacing the fragment preceding the completion position.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: the item's `insert_text` replaces the word before the cursor.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range replaces "fqn", the three characters before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range replaces "cmp", stopping before the closing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2750
// Verifies that carriage returns in a completion item's `insert_text` are
// normalized to plain `\n` newlines in the resulting completion text.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert text contains both a bare `\r` and a `\r\n` sequence.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both `\r` and `\r\n` are normalized to `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2811
// Verifies applying a code action that provides a command rather than edits:
// the action must be resolved, its command executed, and the resulting buffer
// changes arrive via the server's `workspace/applyEdit` request and are
// captured in the returned project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2946
2947#[gpui::test(iterations = 10)]
2948async fn test_save_file(cx: &mut gpui::TestAppContext) {
2949 init_test(cx);
2950
2951 let fs = FakeFs::new(cx.executor());
2952 fs.insert_tree(
2953 "/dir",
2954 json!({
2955 "file1": "the old contents",
2956 }),
2957 )
2958 .await;
2959
2960 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2961 let buffer = project
2962 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2963 .await
2964 .unwrap();
2965 buffer.update(cx, |buffer, cx| {
2966 assert_eq!(buffer.text(), "the old contents");
2967 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2968 });
2969
2970 project
2971 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2972 .await
2973 .unwrap();
2974
2975 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2976 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2977}
2978
2979#[gpui::test(iterations = 30)]
2980async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2981 init_test(cx);
2982
2983 let fs = FakeFs::new(cx.executor().clone());
2984 fs.insert_tree(
2985 "/dir",
2986 json!({
2987 "file1": "the original contents",
2988 }),
2989 )
2990 .await;
2991
2992 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2993 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2994 let buffer = project
2995 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2996 .await
2997 .unwrap();
2998
2999 // Simulate buffer diffs being slow, so that they don't complete before
3000 // the next file change occurs.
3001 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3002
3003 // Change the buffer's file on disk, and then wait for the file change
3004 // to be detected by the worktree, so that the buffer starts reloading.
3005 fs.save(
3006 "/dir/file1".as_ref(),
3007 &"the first contents".into(),
3008 Default::default(),
3009 )
3010 .await
3011 .unwrap();
3012 worktree.next_event(cx).await;
3013
3014 // Change the buffer's file again. Depending on the random seed, the
3015 // previous file change may still be in progress.
3016 fs.save(
3017 "/dir/file1".as_ref(),
3018 &"the second contents".into(),
3019 Default::default(),
3020 )
3021 .await
3022 .unwrap();
3023 worktree.next_event(cx).await;
3024
3025 cx.executor().run_until_parked();
3026 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3027 buffer.read_with(cx, |buffer, _| {
3028 assert_eq!(buffer.text(), on_disk_text);
3029 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3030 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3031 });
3032}
3033
3034#[gpui::test(iterations = 30)]
3035async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3036 init_test(cx);
3037
3038 let fs = FakeFs::new(cx.executor().clone());
3039 fs.insert_tree(
3040 "/dir",
3041 json!({
3042 "file1": "the original contents",
3043 }),
3044 )
3045 .await;
3046
3047 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3048 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3049 let buffer = project
3050 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3051 .await
3052 .unwrap();
3053
3054 // Simulate buffer diffs being slow, so that they don't complete before
3055 // the next file change occurs.
3056 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3057
3058 // Change the buffer's file on disk, and then wait for the file change
3059 // to be detected by the worktree, so that the buffer starts reloading.
3060 fs.save(
3061 "/dir/file1".as_ref(),
3062 &"the first contents".into(),
3063 Default::default(),
3064 )
3065 .await
3066 .unwrap();
3067 worktree.next_event(cx).await;
3068
3069 cx.executor()
3070 .spawn(cx.executor().simulate_random_delay())
3071 .await;
3072
3073 // Perform a noop edit, causing the buffer's version to increase.
3074 buffer.update(cx, |buffer, cx| {
3075 buffer.edit([(0..0, " ")], None, cx);
3076 buffer.undo(cx);
3077 });
3078
3079 cx.executor().run_until_parked();
3080 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3081 buffer.read_with(cx, |buffer, _| {
3082 let buffer_text = buffer.text();
3083 if buffer_text == on_disk_text {
3084 assert!(
3085 !buffer.is_dirty() && !buffer.has_conflict(),
3086 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3087 );
3088 }
3089 // If the file change occurred while the buffer was processing the first
3090 // change, the buffer will be in a conflicting state.
3091 else {
3092 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3093 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3094 }
3095 });
3096}
3097
3098#[gpui::test]
3099async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3100 init_test(cx);
3101
3102 let fs = FakeFs::new(cx.executor());
3103 fs.insert_tree(
3104 "/dir",
3105 json!({
3106 "file1": "the old contents",
3107 }),
3108 )
3109 .await;
3110
3111 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
3112 let buffer = project
3113 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3114 .await
3115 .unwrap();
3116 buffer.update(cx, |buffer, cx| {
3117 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3118 });
3119
3120 project
3121 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3122 .await
3123 .unwrap();
3124
3125 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3126 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3127}
3128
// Verifies `save_buffer_as` on an untitled buffer: it writes the file, clears
// the dirty flag, re-detects the language from the new extension, and later
// opens of the same path return the identical buffer.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An unsaved buffer starts out dirty and as plain text.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    // Saving assigned the buffer a file, cleaned it, and re-detected Rust
    // from the `.rs` extension.
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the saved path must yield the same buffer, not a copy.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3180
// Uses the real file system to verify that a rescan after renames/deletions
// preserves worktree entry ids and updates open buffers' paths and disk
// state, and that a remote worktree replica converges on the same snapshot
// when the observed update stream is replayed.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Collect every update the local worktree broadcasts so they can be
    // replayed on the remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids are preserved across renames, including renames of a parent
    // directory.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers track their files' new paths; the deleted file's buffer
    // keeps its old path but reports a deleted disk state.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3340
3341#[gpui::test(iterations = 10)]
3342async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3343 init_test(cx);
3344
3345 let fs = FakeFs::new(cx.executor());
3346 fs.insert_tree(
3347 "/dir",
3348 json!({
3349 "a": {
3350 "file1": "",
3351 }
3352 }),
3353 )
3354 .await;
3355
3356 let project = Project::test(fs, [Path::new("/dir")], cx).await;
3357 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3358 let tree_id = tree.update(cx, |tree, _| tree.id());
3359
3360 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3361 project.update(cx, |project, cx| {
3362 let tree = project.worktrees(cx).next().unwrap();
3363 tree.read(cx)
3364 .entry_for_path(path)
3365 .unwrap_or_else(|| panic!("no entry for path {}", path))
3366 .id
3367 })
3368 };
3369
3370 let dir_id = id_for_path("a", cx);
3371 let file_id = id_for_path("a/file1", cx);
3372 let buffer = project
3373 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3374 .await
3375 .unwrap();
3376 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3377
3378 project
3379 .update(cx, |project, cx| {
3380 project.rename_entry(dir_id, Path::new("b"), cx)
3381 })
3382 .unwrap()
3383 .await
3384 .to_included()
3385 .unwrap();
3386 cx.executor().run_until_parked();
3387
3388 assert_eq!(id_for_path("b", cx), dir_id);
3389 assert_eq!(id_for_path("b/file1", cx), file_id);
3390 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3391}
3392
3393#[gpui::test]
3394async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3395 init_test(cx);
3396
3397 let fs = FakeFs::new(cx.executor());
3398 fs.insert_tree(
3399 "/dir",
3400 json!({
3401 "a.txt": "a-contents",
3402 "b.txt": "b-contents",
3403 }),
3404 )
3405 .await;
3406
3407 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3408
3409 // Spawn multiple tasks to open paths, repeating some paths.
3410 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3411 (
3412 p.open_local_buffer("/dir/a.txt", cx),
3413 p.open_local_buffer("/dir/b.txt", cx),
3414 p.open_local_buffer("/dir/a.txt", cx),
3415 )
3416 });
3417
3418 let buffer_a_1 = buffer_a_1.await.unwrap();
3419 let buffer_a_2 = buffer_a_2.await.unwrap();
3420 let buffer_b = buffer_b.await.unwrap();
3421 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3422 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3423
3424 // There is only one buffer per path.
3425 let buffer_a_id = buffer_a_1.entity_id();
3426 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3427
3428 // Open the same path again while it is still open.
3429 drop(buffer_a_1);
3430 let buffer_a_3 = project
3431 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3432 .await
3433 .unwrap();
3434
3435 // There's still only one buffer per path.
3436 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3437}
3438
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    // Verifies the buffer dirty-state lifecycle: edits mark a buffer dirty,
    // saving clears it, restoring the saved text clears it again, and deleting
    // the backing file dirties a clean buffer. Also pins the exact sequence of
    // `BufferEvent`s emitted at each step.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        // Record every buffer event except `Operation`, which fires on each
        // edit and would add noise to the sequence assertions below.
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a successful save by recording the current version as the
        // saved state, keeping the file's current mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Only the first edit after a save toggles `DirtyChanged`; the second
        // edit emits `Edited` alone.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer before the deletion.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3589
3590#[gpui::test]
3591async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3592 init_test(cx);
3593
3594 let initial_contents = "aaa\nbbbbb\nc\n";
3595 let fs = FakeFs::new(cx.executor());
3596 fs.insert_tree(
3597 "/dir",
3598 json!({
3599 "the-file": initial_contents,
3600 }),
3601 )
3602 .await;
3603 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3604 let buffer = project
3605 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3606 .await
3607 .unwrap();
3608
3609 let anchors = (0..3)
3610 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3611 .collect::<Vec<_>>();
3612
3613 // Change the file on disk, adding two new lines of text, and removing
3614 // one line.
3615 buffer.update(cx, |buffer, _| {
3616 assert!(!buffer.is_dirty());
3617 assert!(!buffer.has_conflict());
3618 });
3619 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3620 fs.save(
3621 "/dir/the-file".as_ref(),
3622 &new_contents.into(),
3623 LineEnding::Unix,
3624 )
3625 .await
3626 .unwrap();
3627
3628 // Because the buffer was not modified, it is reloaded from disk. Its
3629 // contents are edited according to the diff between the old and new
3630 // file contents.
3631 cx.executor().run_until_parked();
3632 buffer.update(cx, |buffer, _| {
3633 assert_eq!(buffer.text(), new_contents);
3634 assert!(!buffer.is_dirty());
3635 assert!(!buffer.has_conflict());
3636
3637 let anchor_positions = anchors
3638 .iter()
3639 .map(|anchor| anchor.to_point(&*buffer))
3640 .collect::<Vec<_>>();
3641 assert_eq!(
3642 anchor_positions,
3643 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3644 );
3645 });
3646
3647 // Modify the buffer
3648 buffer.update(cx, |buffer, cx| {
3649 buffer.edit([(0..0, " ")], None, cx);
3650 assert!(buffer.is_dirty());
3651 assert!(!buffer.has_conflict());
3652 });
3653
3654 // Change the file on disk again, adding blank lines to the beginning.
3655 fs.save(
3656 "/dir/the-file".as_ref(),
3657 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3658 LineEnding::Unix,
3659 )
3660 .await
3661 .unwrap();
3662
3663 // Because the buffer is modified, it doesn't reload from disk, but is
3664 // marked as having a conflict.
3665 cx.executor().run_until_parked();
3666 buffer.update(cx, |buffer, _| {
3667 assert!(buffer.has_conflict());
3668 });
3669}
3670
3671#[gpui::test]
3672async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3673 init_test(cx);
3674
3675 let fs = FakeFs::new(cx.executor());
3676 fs.insert_tree(
3677 "/dir",
3678 json!({
3679 "file1": "a\nb\nc\n",
3680 "file2": "one\r\ntwo\r\nthree\r\n",
3681 }),
3682 )
3683 .await;
3684
3685 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3686 let buffer1 = project
3687 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3688 .await
3689 .unwrap();
3690 let buffer2 = project
3691 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3692 .await
3693 .unwrap();
3694
3695 buffer1.update(cx, |buffer, _| {
3696 assert_eq!(buffer.text(), "a\nb\nc\n");
3697 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3698 });
3699 buffer2.update(cx, |buffer, _| {
3700 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3701 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3702 });
3703
3704 // Change a file's line endings on disk from unix to windows. The buffer's
3705 // state updates correctly.
3706 fs.save(
3707 "/dir/file1".as_ref(),
3708 &"aaa\nb\nc\n".into(),
3709 LineEnding::Windows,
3710 )
3711 .await
3712 .unwrap();
3713 cx.executor().run_until_parked();
3714 buffer1.update(cx, |buffer, _| {
3715 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3716 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3717 });
3718
3719 // Save a file with windows line endings. The file is written correctly.
3720 buffer2.update(cx, |buffer, cx| {
3721 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3722 });
3723 project
3724 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3725 .await
3726 .unwrap();
3727 assert_eq!(
3728 fs.load("/dir/file2".as_ref()).await.unwrap(),
3729 "one\r\ntwo\r\nthree\r\nfour\r\n",
3730 );
3731}
3732
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics published with `relatedInformation` are
    // grouped: each primary diagnostic and the hint diagnostics that point
    // back at it share a `group_id`, and `diagnostic_group` returns every
    // member of a group.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Publish five diagnostics forming two groups:
    // - "error 1" (WARNING) with one related hint, plus that hint published
    //   as a standalone HINT diagnostic pointing back at it;
    // - "error 2" (ERROR) with two related hints, each also published as a
    //   standalone HINT diagnostic pointing back at the original.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics in the buffer, ordered by position. Each hint carries
    // the group_id of the primary diagnostic it relates to, with
    // `is_primary: false`.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: "error 2" plus its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1: "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3975
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the LSP file-operation protocol around a workspace entry
    // rename: a `workspace/willRenameFiles` request is sent before the rename
    // (and its returned `WorkspaceEdit` is captured), and a
    // `workspace/didRenameFiles` notification is sent afterwards, both with
    // the old and new file URIs.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // File-operation filters the fake server registers: all `.rs` files and
    // all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename. The `willRenameFiles` handler below is registered
    // before `response` is awaited, so the in-flight request can complete.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the fake server returns from `willRenameFiles`.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str("file:///dir/two/two.rs").unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit the handler produced, to prove the handler ran.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .handle_request::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request names the renamed file's old and new URIs.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, "file:///dir/one.rs");
                    assert_eq!(params.files[0].new_uri, "file:///dir/three.rs");
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server receives `didRenameFiles` with
    // the same URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, "file:///dir/one.rs");
            assert_eq!(params.files[0].new_uri, "file:///dir/three.rs");
        })
        .next()
        .await
        .unwrap();
    // The `willRenameFiles` handler ran exactly once and produced the edit.
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4104
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Verifies symbol renaming through LSP: `prepare_rename` resolves the
    // range the server says is editable, and `perform_rename` applies the
    // server's `WorkspaceEdit` to every affected buffer, returning a
    // transaction keyed by buffer.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server advertises rename support with prepare-rename enabled.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside `ONE`); the server reports the
    // editable range 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server returns edits spanning both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction contains both edited buffers with the edits applied.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4241
4242#[gpui::test]
4243async fn test_search(cx: &mut gpui::TestAppContext) {
4244 init_test(cx);
4245
4246 let fs = FakeFs::new(cx.executor());
4247 fs.insert_tree(
4248 "/dir",
4249 json!({
4250 "one.rs": "const ONE: usize = 1;",
4251 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4252 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4253 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4254 }),
4255 )
4256 .await;
4257 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4258 assert_eq!(
4259 search(
4260 &project,
4261 SearchQuery::text(
4262 "TWO",
4263 false,
4264 true,
4265 false,
4266 Default::default(),
4267 Default::default(),
4268 None
4269 )
4270 .unwrap(),
4271 cx
4272 )
4273 .await
4274 .unwrap(),
4275 HashMap::from_iter([
4276 ("dir/two.rs".to_string(), vec![6..9]),
4277 ("dir/three.rs".to_string(), vec![37..40])
4278 ])
4279 );
4280
4281 let buffer_4 = project
4282 .update(cx, |project, cx| {
4283 project.open_local_buffer("/dir/four.rs", cx)
4284 })
4285 .await
4286 .unwrap();
4287 buffer_4.update(cx, |buffer, cx| {
4288 let text = "two::TWO";
4289 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4290 });
4291
4292 assert_eq!(
4293 search(
4294 &project,
4295 SearchQuery::text(
4296 "TWO",
4297 false,
4298 true,
4299 false,
4300 Default::default(),
4301 Default::default(),
4302 None,
4303 )
4304 .unwrap(),
4305 cx
4306 )
4307 .await
4308 .unwrap(),
4309 HashMap::from_iter([
4310 ("dir/two.rs".to_string(), vec![6..9]),
4311 ("dir/three.rs".to_string(), vec![37..40]),
4312 ("dir/four.rs".to_string(), vec![25..28, 36..39])
4313 ])
4314 );
4315}
4316
4317#[gpui::test]
4318async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4319 init_test(cx);
4320
4321 let search_query = "file";
4322
4323 let fs = FakeFs::new(cx.executor());
4324 fs.insert_tree(
4325 "/dir",
4326 json!({
4327 "one.rs": r#"// Rust file one"#,
4328 "one.ts": r#"// TypeScript file one"#,
4329 "two.rs": r#"// Rust file two"#,
4330 "two.ts": r#"// TypeScript file two"#,
4331 }),
4332 )
4333 .await;
4334 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4335
4336 assert!(
4337 search(
4338 &project,
4339 SearchQuery::text(
4340 search_query,
4341 false,
4342 true,
4343 false,
4344 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4345 Default::default(),
4346 None
4347 )
4348 .unwrap(),
4349 cx
4350 )
4351 .await
4352 .unwrap()
4353 .is_empty(),
4354 "If no inclusions match, no files should be returned"
4355 );
4356
4357 assert_eq!(
4358 search(
4359 &project,
4360 SearchQuery::text(
4361 search_query,
4362 false,
4363 true,
4364 false,
4365 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4366 Default::default(),
4367 None
4368 )
4369 .unwrap(),
4370 cx
4371 )
4372 .await
4373 .unwrap(),
4374 HashMap::from_iter([
4375 ("dir/one.rs".to_string(), vec![8..12]),
4376 ("dir/two.rs".to_string(), vec![8..12]),
4377 ]),
4378 "Rust only search should give only Rust files"
4379 );
4380
4381 assert_eq!(
4382 search(
4383 &project,
4384 SearchQuery::text(
4385 search_query,
4386 false,
4387 true,
4388 false,
4389
4390 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4391
4392 Default::default(),
4393 None,
4394 ).unwrap(),
4395 cx
4396 )
4397 .await
4398 .unwrap(),
4399 HashMap::from_iter([
4400 ("dir/one.ts".to_string(), vec![14..18]),
4401 ("dir/two.ts".to_string(), vec![14..18]),
4402 ]),
4403 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4404 );
4405
4406 assert_eq!(
4407 search(
4408 &project,
4409 SearchQuery::text(
4410 search_query,
4411 false,
4412 true,
4413 false,
4414
4415 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4416
4417 Default::default(),
4418 None,
4419 ).unwrap(),
4420 cx
4421 )
4422 .await
4423 .unwrap(),
4424 HashMap::from_iter([
4425 ("dir/two.ts".to_string(), vec![14..18]),
4426 ("dir/one.rs".to_string(), vec![8..12]),
4427 ("dir/one.ts".to_string(), vec![14..18]),
4428 ("dir/two.rs".to_string(), vec![8..12]),
4429 ]),
4430 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4431 );
4432}
4433
4434#[gpui::test]
4435async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4436 init_test(cx);
4437
4438 let search_query = "file";
4439
4440 let fs = FakeFs::new(cx.executor());
4441 fs.insert_tree(
4442 "/dir",
4443 json!({
4444 "one.rs": r#"// Rust file one"#,
4445 "one.ts": r#"// TypeScript file one"#,
4446 "two.rs": r#"// Rust file two"#,
4447 "two.ts": r#"// TypeScript file two"#,
4448 }),
4449 )
4450 .await;
4451 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4452
4453 assert_eq!(
4454 search(
4455 &project,
4456 SearchQuery::text(
4457 search_query,
4458 false,
4459 true,
4460 false,
4461 Default::default(),
4462 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4463 None,
4464 )
4465 .unwrap(),
4466 cx
4467 )
4468 .await
4469 .unwrap(),
4470 HashMap::from_iter([
4471 ("dir/one.rs".to_string(), vec![8..12]),
4472 ("dir/one.ts".to_string(), vec![14..18]),
4473 ("dir/two.rs".to_string(), vec![8..12]),
4474 ("dir/two.ts".to_string(), vec![14..18]),
4475 ]),
4476 "If no exclusions match, all files should be returned"
4477 );
4478
4479 assert_eq!(
4480 search(
4481 &project,
4482 SearchQuery::text(
4483 search_query,
4484 false,
4485 true,
4486 false,
4487 Default::default(),
4488 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4489 None,
4490 )
4491 .unwrap(),
4492 cx
4493 )
4494 .await
4495 .unwrap(),
4496 HashMap::from_iter([
4497 ("dir/one.ts".to_string(), vec![14..18]),
4498 ("dir/two.ts".to_string(), vec![14..18]),
4499 ]),
4500 "Rust exclusion search should give only TypeScript files"
4501 );
4502
4503 assert_eq!(
4504 search(
4505 &project,
4506 SearchQuery::text(
4507 search_query,
4508 false,
4509 true,
4510 false,
4511 Default::default(),
4512 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4513 None,
4514 ).unwrap(),
4515 cx
4516 )
4517 .await
4518 .unwrap(),
4519 HashMap::from_iter([
4520 ("dir/one.rs".to_string(), vec![8..12]),
4521 ("dir/two.rs".to_string(), vec![8..12]),
4522 ]),
4523 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4524 );
4525
4526 assert!(
4527 search(
4528 &project,
4529 SearchQuery::text(
4530 search_query,
4531 false,
4532 true,
4533 false,
4534 Default::default(),
4535
4536 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4537 None,
4538
4539 ).unwrap(),
4540 cx
4541 )
4542 .await
4543 .unwrap().is_empty(),
4544 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4545 );
4546}
4547
4548#[gpui::test]
4549async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4550 init_test(cx);
4551
4552 let search_query = "file";
4553
4554 let fs = FakeFs::new(cx.executor());
4555 fs.insert_tree(
4556 "/dir",
4557 json!({
4558 "one.rs": r#"// Rust file one"#,
4559 "one.ts": r#"// TypeScript file one"#,
4560 "two.rs": r#"// Rust file two"#,
4561 "two.ts": r#"// TypeScript file two"#,
4562 }),
4563 )
4564 .await;
4565 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4566
4567 assert!(
4568 search(
4569 &project,
4570 SearchQuery::text(
4571 search_query,
4572 false,
4573 true,
4574 false,
4575 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4576 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4577 None,
4578 )
4579 .unwrap(),
4580 cx
4581 )
4582 .await
4583 .unwrap()
4584 .is_empty(),
4585 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4586 );
4587
4588 assert!(
4589 search(
4590 &project,
4591 SearchQuery::text(
4592 search_query,
4593 false,
4594 true,
4595 false,
4596 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4597 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4598 None,
4599 ).unwrap(),
4600 cx
4601 )
4602 .await
4603 .unwrap()
4604 .is_empty(),
4605 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4606 );
4607
4608 assert!(
4609 search(
4610 &project,
4611 SearchQuery::text(
4612 search_query,
4613 false,
4614 true,
4615 false,
4616 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4617 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4618 None,
4619 )
4620 .unwrap(),
4621 cx
4622 )
4623 .await
4624 .unwrap()
4625 .is_empty(),
4626 "Non-matching inclusions and exclusions should not change that."
4627 );
4628
4629 assert_eq!(
4630 search(
4631 &project,
4632 SearchQuery::text(
4633 search_query,
4634 false,
4635 true,
4636 false,
4637 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4638 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4639 None,
4640 )
4641 .unwrap(),
4642 cx
4643 )
4644 .await
4645 .unwrap(),
4646 HashMap::from_iter([
4647 ("dir/one.ts".to_string(), vec![14..18]),
4648 ("dir/two.ts".to_string(), vec![14..18]),
4649 ]),
4650 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4651 );
4652}
4653
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Checks that `files_to_include` patterns can target a single worktree
    // (via a worktree-name path prefix) or span all worktrees (bare glob).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // Two worktrees with identically named files, so the inclusion pattern is
    // the only thing distinguishing the result sets.
    fs.insert_tree(
        "/worktree-a",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        "/worktree-b",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
        cx,
    )
    .await;

    // Inclusion prefixed with `worktree-a/` should only hit the first worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Same, targeting the second worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // A bare `*.ts` glob (no worktree prefix) matches in every worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("worktree-a/haystack.ts".to_string(), vec![3..9]),
            ("worktree-b/haystack.ts".to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
4748
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Verifies that project search skips gitignored entries by default, that
    // the include-ignored flag (4th positional bool below, per the assertions)
    // surfaces them, and that include/exclude matchers still apply to ignored
    // files when they are searched.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    // `target` and `node_modules` are ignored via .gitignore; only the
    // top-level package.json is tracked content containing the query.
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
    // Default query: ignored directories are not searched.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project per query — presumably to avoid reusing the previous
    // scan's state; TODO confirm whether this is strictly required.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    // Same query with the include-ignored flag set: everything is searched.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include/exclude matchers still narrow down results among ignored files.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4865
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    // Verifies path validation in `Project::create_entry` / `open_buffer`:
    // unusual-but-valid names are allowed, while paths escaping the worktree
    // (or containing `..` components) are rejected.
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The worktree root is the nested `three` directory, so `..` would
    // point outside the project.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." merely *contains* dots — it is a legal file name, not a parent
    // reference, and creation must succeed.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only the successful "b.." creation should have touched the filesystem.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
4935
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // When several language servers are registered for one language, a hover
    // request should fan out to every server that advertises hover support,
    // skip servers without the capability, and aggregate non-empty responses.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Four fake servers for "tsx": the first three advertise hover support,
    // the last one deliberately does not.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer triggers startup of all registered servers.
    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Wait for each server to come up and install a hover handler:
    // - TypeScript/Tailwind answer with a named hover,
    // - ESLint answers with None (should be dropped from the results),
    // - the capability-less server panics if it is ever queried.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue one hover request and confirm every hover-capable server saw it.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned Some(hover) contribute results.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5086
5087#[gpui::test]
5088async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5089 init_test(cx);
5090
5091 let fs = FakeFs::new(cx.executor());
5092 fs.insert_tree(
5093 "/dir",
5094 json!({
5095 "a.ts": "a",
5096 }),
5097 )
5098 .await;
5099
5100 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5101
5102 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5103 language_registry.add(typescript_lang());
5104 let mut fake_language_servers = language_registry.register_fake_lsp(
5105 "TypeScript",
5106 FakeLspAdapter {
5107 capabilities: lsp::ServerCapabilities {
5108 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5109 ..lsp::ServerCapabilities::default()
5110 },
5111 ..FakeLspAdapter::default()
5112 },
5113 );
5114
5115 let (buffer, _handle) = project
5116 .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
5117 .await
5118 .unwrap();
5119 cx.executor().run_until_parked();
5120
5121 let fake_server = fake_language_servers
5122 .next()
5123 .await
5124 .expect("failed to get the language server");
5125
5126 let mut request_handled =
5127 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
5128 Ok(Some(lsp::Hover {
5129 contents: lsp::HoverContents::Array(vec![
5130 lsp::MarkedString::String("".to_string()),
5131 lsp::MarkedString::String(" ".to_string()),
5132 lsp::MarkedString::String("\n\n\n".to_string()),
5133 ]),
5134 range: None,
5135 }))
5136 });
5137
5138 let hover_task = project.update(cx, |project, cx| {
5139 project.hover(&buffer, Point::new(0, 0), cx)
5140 });
5141 let () = request_handled
5142 .next()
5143 .await
5144 .expect("All hover requests should have been triggered");
5145 assert_eq!(
5146 Vec::<String>::new(),
5147 hover_task
5148 .await
5149 .into_iter()
5150 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5151 .sorted()
5152 .collect::<Vec<_>>(),
5153 "Empty hover parts should be ignored"
5154 );
5155}
5156
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // When a code-action request specifies a set of kinds, only actions of
    // those kinds should be returned, even if the server offers more.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    // One fake TypeScript server advertising code-action support.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server always offers two actions of different kinds.
    let mut request_handled = fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
        move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        },
    );

    // Request only SOURCE_ORGANIZE_IMPORTS actions for the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // The SOURCE_FIX_ALL action must have been filtered out.
    let code_actions = code_actions_task.await.unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.kind,
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
5234
5235#[gpui::test]
5236async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5237 init_test(cx);
5238
5239 let fs = FakeFs::new(cx.executor());
5240 fs.insert_tree(
5241 "/dir",
5242 json!({
5243 "a.tsx": "a",
5244 }),
5245 )
5246 .await;
5247
5248 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5249
5250 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5251 language_registry.add(tsx_lang());
5252 let language_server_names = [
5253 "TypeScriptServer",
5254 "TailwindServer",
5255 "ESLintServer",
5256 "NoActionsCapabilitiesServer",
5257 ];
5258
5259 let mut language_server_rxs = [
5260 language_registry.register_fake_lsp(
5261 "tsx",
5262 FakeLspAdapter {
5263 name: language_server_names[0],
5264 capabilities: lsp::ServerCapabilities {
5265 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5266 ..lsp::ServerCapabilities::default()
5267 },
5268 ..FakeLspAdapter::default()
5269 },
5270 ),
5271 language_registry.register_fake_lsp(
5272 "tsx",
5273 FakeLspAdapter {
5274 name: language_server_names[1],
5275 capabilities: lsp::ServerCapabilities {
5276 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5277 ..lsp::ServerCapabilities::default()
5278 },
5279 ..FakeLspAdapter::default()
5280 },
5281 ),
5282 language_registry.register_fake_lsp(
5283 "tsx",
5284 FakeLspAdapter {
5285 name: language_server_names[2],
5286 capabilities: lsp::ServerCapabilities {
5287 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5288 ..lsp::ServerCapabilities::default()
5289 },
5290 ..FakeLspAdapter::default()
5291 },
5292 ),
5293 language_registry.register_fake_lsp(
5294 "tsx",
5295 FakeLspAdapter {
5296 name: language_server_names[3],
5297 capabilities: lsp::ServerCapabilities {
5298 code_action_provider: None,
5299 ..lsp::ServerCapabilities::default()
5300 },
5301 ..FakeLspAdapter::default()
5302 },
5303 ),
5304 ];
5305
5306 let (buffer, _handle) = project
5307 .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.tsx", cx))
5308 .await
5309 .unwrap();
5310 cx.executor().run_until_parked();
5311
5312 let mut servers_with_actions_requests = HashMap::default();
5313 for i in 0..language_server_names.len() {
5314 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5315 panic!(
5316 "Failed to get language server #{i} with name {}",
5317 &language_server_names[i]
5318 )
5319 });
5320 let new_server_name = new_server.server.name();
5321
5322 assert!(
5323 !servers_with_actions_requests.contains_key(&new_server_name),
5324 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5325 );
5326 match new_server_name.0.as_ref() {
5327 "TailwindServer" | "TypeScriptServer" => {
5328 servers_with_actions_requests.insert(
5329 new_server_name.clone(),
5330 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5331 move |_, _| {
5332 let name = new_server_name.clone();
5333 async move {
5334 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5335 lsp::CodeAction {
5336 title: format!("{name} code action"),
5337 ..lsp::CodeAction::default()
5338 },
5339 )]))
5340 }
5341 },
5342 ),
5343 );
5344 }
5345 "ESLintServer" => {
5346 servers_with_actions_requests.insert(
5347 new_server_name,
5348 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5349 |_, _| async move { Ok(None) },
5350 ),
5351 );
5352 }
5353 "NoActionsCapabilitiesServer" => {
5354 let _never_handled = new_server
5355 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5356 panic!(
5357 "Should not call for code actions server with no corresponding capabilities"
5358 )
5359 });
5360 }
5361 unexpected => panic!("Unexpected server name: {unexpected}"),
5362 }
5363 }
5364
5365 let code_actions_task = project.update(cx, |project, cx| {
5366 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5367 });
5368
5369 // cx.run_until_parked();
5370 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5371 |mut code_actions_request| async move {
5372 code_actions_request
5373 .next()
5374 .await
5375 .expect("All code actions requests should have been triggered")
5376 },
5377 ))
5378 .await;
5379 assert_eq!(
5380 vec!["TailwindServer code action", "TypeScriptServer code action"],
5381 code_actions_task
5382 .await
5383 .unwrap()
5384 .into_iter()
5385 .map(|code_action| code_action.lsp_action.title)
5386 .sorted()
5387 .collect::<Vec<_>>(),
5388 "Should receive code actions responses from all related servers with hover capabilities"
5389 );
5390}
5391
5392#[gpui::test]
5393async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5394 init_test(cx);
5395
5396 let fs = FakeFs::new(cx.executor());
5397 fs.insert_tree(
5398 "/dir",
5399 json!({
5400 "a.rs": "let a = 1;",
5401 "b.rs": "let b = 2;",
5402 "c.rs": "let c = 2;",
5403 }),
5404 )
5405 .await;
5406
5407 let project = Project::test(
5408 fs,
5409 [
5410 "/dir/a.rs".as_ref(),
5411 "/dir/b.rs".as_ref(),
5412 "/dir/c.rs".as_ref(),
5413 ],
5414 cx,
5415 )
5416 .await;
5417
5418 // check the initial state and get the worktrees
5419 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5420 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5421 assert_eq!(worktrees.len(), 3);
5422
5423 let worktree_a = worktrees[0].read(cx);
5424 let worktree_b = worktrees[1].read(cx);
5425 let worktree_c = worktrees[2].read(cx);
5426
5427 // check they start in the right order
5428 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5429 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5430 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5431
5432 (
5433 worktrees[0].clone(),
5434 worktrees[1].clone(),
5435 worktrees[2].clone(),
5436 )
5437 });
5438
5439 // move first worktree to after the second
5440 // [a, b, c] -> [b, a, c]
5441 project
5442 .update(cx, |project, cx| {
5443 let first = worktree_a.read(cx);
5444 let second = worktree_b.read(cx);
5445 project.move_worktree(first.id(), second.id(), cx)
5446 })
5447 .expect("moving first after second");
5448
5449 // check the state after moving
5450 project.update(cx, |project, cx| {
5451 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5452 assert_eq!(worktrees.len(), 3);
5453
5454 let first = worktrees[0].read(cx);
5455 let second = worktrees[1].read(cx);
5456 let third = worktrees[2].read(cx);
5457
5458 // check they are now in the right order
5459 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5460 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5461 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5462 });
5463
5464 // move the second worktree to before the first
5465 // [b, a, c] -> [a, b, c]
5466 project
5467 .update(cx, |project, cx| {
5468 let second = worktree_a.read(cx);
5469 let first = worktree_b.read(cx);
5470 project.move_worktree(first.id(), second.id(), cx)
5471 })
5472 .expect("moving second before first");
5473
5474 // check the state after moving
5475 project.update(cx, |project, cx| {
5476 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5477 assert_eq!(worktrees.len(), 3);
5478
5479 let first = worktrees[0].read(cx);
5480 let second = worktrees[1].read(cx);
5481 let third = worktrees[2].read(cx);
5482
5483 // check they are now in the right order
5484 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5485 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5486 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5487 });
5488
5489 // move the second worktree to after the third
5490 // [a, b, c] -> [a, c, b]
5491 project
5492 .update(cx, |project, cx| {
5493 let second = worktree_b.read(cx);
5494 let third = worktree_c.read(cx);
5495 project.move_worktree(second.id(), third.id(), cx)
5496 })
5497 .expect("moving second after third");
5498
5499 // check the state after moving
5500 project.update(cx, |project, cx| {
5501 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5502 assert_eq!(worktrees.len(), 3);
5503
5504 let first = worktrees[0].read(cx);
5505 let second = worktrees[1].read(cx);
5506 let third = worktrees[2].read(cx);
5507
5508 // check they are now in the right order
5509 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5510 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5511 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5512 });
5513
5514 // move the third worktree to before the second
5515 // [a, c, b] -> [a, b, c]
5516 project
5517 .update(cx, |project, cx| {
5518 let third = worktree_c.read(cx);
5519 let second = worktree_b.read(cx);
5520 project.move_worktree(third.id(), second.id(), cx)
5521 })
5522 .expect("moving third before second");
5523
5524 // check the state after moving
5525 project.update(cx, |project, cx| {
5526 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5527 assert_eq!(worktrees.len(), 3);
5528
5529 let first = worktrees[0].read(cx);
5530 let second = worktrees[1].read(cx);
5531 let third = worktrees[2].read(cx);
5532
5533 // check they are now in the right order
5534 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5535 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5536 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5537 });
5538
5539 // move the first worktree to after the third
5540 // [a, b, c] -> [b, c, a]
5541 project
5542 .update(cx, |project, cx| {
5543 let first = worktree_a.read(cx);
5544 let third = worktree_c.read(cx);
5545 project.move_worktree(first.id(), third.id(), cx)
5546 })
5547 .expect("moving first after third");
5548
5549 // check the state after moving
5550 project.update(cx, |project, cx| {
5551 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5552 assert_eq!(worktrees.len(), 3);
5553
5554 let first = worktrees[0].read(cx);
5555 let second = worktrees[1].read(cx);
5556 let third = worktrees[2].read(cx);
5557
5558 // check they are now in the right order
5559 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5560 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5561 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5562 });
5563
5564 // move the third worktree to before the first
5565 // [b, c, a] -> [a, b, c]
5566 project
5567 .update(cx, |project, cx| {
5568 let third = worktree_a.read(cx);
5569 let first = worktree_b.read(cx);
5570 project.move_worktree(third.id(), first.id(), cx)
5571 })
5572 .expect("moving third before first");
5573
5574 // check the state after moving
5575 project.update(cx, |project, cx| {
5576 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5577 assert_eq!(worktrees.len(), 3);
5578
5579 let first = worktrees[0].read(cx);
5580 let second = worktrees[1].read(cx);
5581 let third = worktrees[2].read(cx);
5582
5583 // check they are now in the right order
5584 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5585 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5586 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5587 });
5588}
5589
#[gpui::test]
async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that a buffer's unstaged-changes diff is computed against the
    // git index contents and is recomputed when the index changes on disk.
    init_test(cx);

    // Index (staged) version lacks the comment and prints "hello".
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy version adds a comment and prints "goodbye".
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[(Path::new("src/main.rs"), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_changes = project
        .update(cx, |project, cx| {
            project.open_unstaged_changes(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Expect one inserted line (the comment) and one modified line.
    cx.run_until_parked();
    unstaged_changes.update(cx, |unstaged_changes, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_changes.base_text.as_ref().unwrap().read(cx).text(),
            &[
                (0..1, "", "// print goodbye\n"),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                ),
            ],
        );
    });

    // Update the index so it now includes the comment but not the println.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[(Path::new("src/main.rs"), staged_contents)],
    );

    // The diff should shrink to a single insertion of the println line.
    cx.run_until_parked();
    unstaged_changes.update(cx, |unstaged_changes, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_changes.base_text.as_ref().unwrap().read(cx).text(),
            &[(2..3, "", " println!(\"goodbye world\");\n")],
        );
    });
}
5681
5682async fn search(
5683 project: &Model<Project>,
5684 query: SearchQuery,
5685 cx: &mut gpui::TestAppContext,
5686) -> Result<HashMap<String, Vec<Range<usize>>>> {
5687 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
5688 let mut results = HashMap::default();
5689 while let Ok(search_result) = search_rx.recv().await {
5690 match search_result {
5691 SearchResult::Buffer { buffer, ranges } => {
5692 results.entry(buffer).or_insert(ranges);
5693 }
5694 SearchResult::LimitReached => {}
5695 }
5696 }
5697 Ok(results
5698 .into_iter()
5699 .map(|(buffer, ranges)| {
5700 buffer.update(cx, |buffer, cx| {
5701 let path = buffer
5702 .file()
5703 .unwrap()
5704 .full_path(cx)
5705 .to_string_lossy()
5706 .to_string();
5707 let ranges = ranges
5708 .into_iter()
5709 .map(|range| range.to_offset(buffer))
5710 .collect::<Vec<_>>();
5711 (path, ranges)
5712 })
5713 })
5714 .collect())
5715}
5716
5717pub fn init_test(cx: &mut gpui::TestAppContext) {
5718 if std::env::var("RUST_LOG").is_ok() {
5719 env_logger::try_init().ok();
5720 }
5721
5722 cx.update(|cx| {
5723 let settings_store = SettingsStore::test(cx);
5724 cx.set_global(settings_store);
5725 release_channel::init(SemanticVersion::default(), cx);
5726 language::init(cx);
5727 Project::init_settings(cx);
5728 });
5729}
5730
5731fn json_lang() -> Arc<Language> {
5732 Arc::new(Language::new(
5733 LanguageConfig {
5734 name: "JSON".into(),
5735 matcher: LanguageMatcher {
5736 path_suffixes: vec!["json".to_string()],
5737 ..Default::default()
5738 },
5739 ..Default::default()
5740 },
5741 None,
5742 ))
5743}
5744
5745fn js_lang() -> Arc<Language> {
5746 Arc::new(Language::new(
5747 LanguageConfig {
5748 name: "JavaScript".into(),
5749 matcher: LanguageMatcher {
5750 path_suffixes: vec!["js".to_string()],
5751 ..Default::default()
5752 },
5753 ..Default::default()
5754 },
5755 None,
5756 ))
5757}
5758
5759fn rust_lang() -> Arc<Language> {
5760 Arc::new(Language::new(
5761 LanguageConfig {
5762 name: "Rust".into(),
5763 matcher: LanguageMatcher {
5764 path_suffixes: vec!["rs".to_string()],
5765 ..Default::default()
5766 },
5767 ..Default::default()
5768 },
5769 Some(tree_sitter_rust::LANGUAGE.into()),
5770 ))
5771}
5772
5773fn typescript_lang() -> Arc<Language> {
5774 Arc::new(Language::new(
5775 LanguageConfig {
5776 name: "TypeScript".into(),
5777 matcher: LanguageMatcher {
5778 path_suffixes: vec!["ts".to_string()],
5779 ..Default::default()
5780 },
5781 ..Default::default()
5782 },
5783 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
5784 ))
5785}
5786
5787fn tsx_lang() -> Arc<Language> {
5788 Arc::new(Language::new(
5789 LanguageConfig {
5790 name: "tsx".into(),
5791 matcher: LanguageMatcher {
5792 path_suffixes: vec!["tsx".to_string()],
5793 ..Default::default()
5794 },
5795 ..Default::default()
5796 },
5797 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
5798 ))
5799}
5800
5801fn get_all_tasks(
5802 project: &Model<Project>,
5803 worktree_id: Option<WorktreeId>,
5804 task_context: &TaskContext,
5805 cx: &mut AppContext,
5806) -> Vec<(TaskSourceKind, ResolvedTask)> {
5807 let (mut old, new) = project.update(cx, |project, cx| {
5808 project
5809 .task_store
5810 .read(cx)
5811 .task_inventory()
5812 .unwrap()
5813 .read(cx)
5814 .used_and_current_resolved_tasks(worktree_id, None, task_context, cx)
5815 });
5816 old.extend(new);
5817 old
5818}