1#![allow(clippy::format_collect)]
2
3use crate::{task_inventory::TaskContexts, task_store::TaskSettingsLocation, Event, *};
4use buffer_diff::{
5 assert_hunks, BufferDiffEvent, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
6};
7use fs::FakeFs;
8use futures::{future, StreamExt};
9use git::repository::RepoPath;
10use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
11use http_client::Url;
12use language::{
13 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
14 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
15 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
16 OffsetRangeExt, Point, ToPoint,
17};
18use lsp::{
19 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
20 NumberOrString, TextDocumentEdit, WillRenameFiles,
21};
22use parking_lot::Mutex;
23use paths::tasks_file;
24use pretty_assertions::{assert_eq, assert_matches};
25use serde_json::json;
26#[cfg(not(windows))]
27use std::os;
28use std::{mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
29use task::{ResolvedTask, TaskContext};
30use unindent::Unindent as _;
31use util::{
32 assert_set_eq, path,
33 paths::PathMatcher,
34 separator,
35 test::{marked_text_offsets, TempTree},
36 uri, TryFutureExt as _,
37};
38use worktree::WorktreeModelHandle as _;
39
40#[gpui::test]
41async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
42 cx.executor().allow_parking();
43
44 let (tx, mut rx) = futures::channel::mpsc::unbounded();
45 let _thread = std::thread::spawn(move || {
46 #[cfg(not(target_os = "windows"))]
47 std::fs::metadata("/tmp").unwrap();
48 #[cfg(target_os = "windows")]
49 std::fs::metadata("C:/Windows").unwrap();
50 std::thread::sleep(Duration::from_millis(1000));
51 tx.unbounded_send(1).unwrap();
52 });
53 rx.next().await.unwrap();
54}
55
56#[gpui::test]
57async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
58 cx.executor().allow_parking();
59
60 let io_task = smol::unblock(move || {
61 println!("sleeping on thread {:?}", std::thread::current().id());
62 std::thread::sleep(Duration::from_millis(10));
63 1
64 });
65
66 let task = cx.foreground_executor().spawn(async move {
67 io_task.await;
68 });
69
70 task.await;
71}
72
// Verifies that a project opened through a symlinked root resolves entries
// correctly, and that a symlinked directory inside the worktree points at the
// same inodes as its target. Unix-only: uses `std::os::unix::fs::symlink`.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // This test uses the real filesystem (TempTree + RealFs), so allow the
    // executor to block on actual I/O.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // Symlink the worktree root itself, and symlink "fennel" to a second name
    // ("finnochio") inside the tree.
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project via the symlinked root path.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // apple, date, endive, grape, and grape-via-symlink.
        assert_eq!(tree.file_count(), 5);
        // The symlinked directory and its target resolve to the same inode.
        assert_eq!(
            tree.inode_for_path("fennel/grape"),
            tree.inode_for_path("finnochio/grape")
        );
    });
}
118
// Verifies how .editorconfig files interact with Zed's own settings:
// .editorconfig values override .zed/settings.json, nested .editorconfig
// files override ancestor ones, and globs only apply to matching files.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into a FakeFs so the project watches it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Helper: resolve the effective language settings for a worktree path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            // Block on language detection; the registry was populated above.
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by .editorconfig's glob "*.rs",
        // so the .zed/settings.json tab_size of 8 applies.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
208
// Verifies directory-scoped .zed configuration: settings.json values apply to
// files under their directory, tasks from nested .zed/tasks.json are surfaced
// alongside root tasks, scheduling a task reorders it to the front, and
// global (file-based) tasks are appended after worktree tasks.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let settings/tasks files be discovered and parsed.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against a default context for the active worktree.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    // Source kind for tasks declared in the worktree-root .zed directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            // a/ inherits the root .zed settings; b/ has its own override.
            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree tasks resolve; the nested (b/.zed) task sorts first.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as most-recently scheduled, and register an
    // additional global tasks file.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                    settings::TaskKind::Script,
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task now sorts first; the global task (with its
    // env) is appended after the worktree tasks.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
414
415#[gpui::test]
416async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
417 init_test(cx);
418 TaskStore::init(None);
419
420 let fs = FakeFs::new(cx.executor());
421 fs.insert_tree(
422 path!("/dir"),
423 json!({
424 ".zed": {
425 "tasks.json": r#"[{
426 "label": "test worktree root",
427 "command": "echo $ZED_WORKTREE_ROOT"
428 }]"#,
429 },
430 "a": {
431 "a.rs": "fn a() {\n A\n}"
432 },
433 }),
434 )
435 .await;
436
437 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
438 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
439
440 cx.executor().run_until_parked();
441 let worktree_id = cx.update(|cx| {
442 project.update(cx, |project, cx| {
443 project.worktrees(cx).next().unwrap().read(cx).id()
444 })
445 });
446
447 let active_non_worktree_item_tasks = cx.update(|cx| {
448 get_all_tasks(
449 &project,
450 &TaskContexts {
451 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
452 active_worktree_context: None,
453 other_worktree_contexts: Vec::new(),
454 },
455 cx,
456 )
457 });
458 assert!(
459 active_non_worktree_item_tasks.is_empty(),
460 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
461 );
462
463 let active_worktree_tasks = cx.update(|cx| {
464 get_all_tasks(
465 &project,
466 &TaskContexts {
467 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
468 active_worktree_context: Some((worktree_id, {
469 let mut worktree_context = TaskContext::default();
470 worktree_context
471 .task_variables
472 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
473 worktree_context
474 })),
475 other_worktree_contexts: Vec::new(),
476 },
477 cx,
478 )
479 });
480 assert_eq!(
481 active_worktree_tasks
482 .into_iter()
483 .map(|(source_kind, task)| {
484 let resolved = task.resolved.unwrap();
485 (source_kind, resolved.command)
486 })
487 .collect::<Vec<_>>(),
488 vec![(
489 TaskSourceKind::Worktree {
490 id: worktree_id,
491 directory_in_worktree: PathBuf::from(separator!(".zed")),
492 id_base: if cfg!(windows) {
493 "local worktree tasks from directory \".zed\"".into()
494 } else {
495 "local worktree tasks from directory \".zed\"".into()
496 },
497 },
498 "echo /dir".to_string(),
499 )]
500 );
501}
502
// End-to-end check of language-server lifecycle management: servers start
// lazily per language, buffers are opened/edited/saved/closed on the matching
// server only, renames move documents between servers when the extension (and
// thus language) changes, diagnostics are cleared on language change, and
// restarting servers reopens all relevant documents.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register a fake Rust server that advertises completion triggers and
    // save notifications.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // And a fake JSON server with a different completion trigger.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // The rename is observed by the rust server as a close of the old path
    // followed by an open of the new path, preserving the buffer's content.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic on the buffer so we can verify it is cleared when the
    // buffer's language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before being replaced.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
904
// Verifies workspace/didChangeWatchedFiles handling: ignored directories are
// not scanned until a language server registers a watcher covering them, and
// only FS mutations matching the registered glob patterns are reported back
// to the server.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for asserting how many extra directory scans the watcher
    // registration below triggers.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: an exact file, a src glob, and a glob inside
    // the gitignored target/y directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Record every didChangeWatchedFiles notification, sorted by URI so the
    // assertions below are deterministic.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering the watchers alone produces no change events, only scans.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
1104
// Verifies that diagnostics published for two single-file worktrees are
// routed to the correct buffers, each with its own severity.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open each file as its own (single-file) worktree.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Publish one diagnostic per file from the same (fake) server id:
    // an ERROR on `a` in a.rs and a WARNING on `b` in b.rs.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer carries only its own diagnostic, at the expected range.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1206
1207#[gpui::test]
1208async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1209 init_test(cx);
1210
1211 let fs = FakeFs::new(cx.executor());
1212 fs.insert_tree(
1213 path!("/root"),
1214 json!({
1215 "dir": {
1216 ".git": {
1217 "HEAD": "ref: refs/heads/main",
1218 },
1219 ".gitignore": "b.rs",
1220 "a.rs": "let a = 1;",
1221 "b.rs": "let b = 2;",
1222 },
1223 "other.rs": "let b = c;"
1224 }),
1225 )
1226 .await;
1227
1228 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1229 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1230 let (worktree, _) = project
1231 .update(cx, |project, cx| {
1232 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1233 })
1234 .await
1235 .unwrap();
1236 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1237
1238 let (worktree, _) = project
1239 .update(cx, |project, cx| {
1240 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1241 })
1242 .await
1243 .unwrap();
1244 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1245
1246 let server_id = LanguageServerId(0);
1247 lsp_store.update(cx, |lsp_store, cx| {
1248 lsp_store
1249 .update_diagnostics(
1250 server_id,
1251 lsp::PublishDiagnosticsParams {
1252 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1253 version: None,
1254 diagnostics: vec![lsp::Diagnostic {
1255 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1256 severity: Some(lsp::DiagnosticSeverity::ERROR),
1257 message: "unused variable 'b'".to_string(),
1258 ..Default::default()
1259 }],
1260 },
1261 &[],
1262 cx,
1263 )
1264 .unwrap();
1265 lsp_store
1266 .update_diagnostics(
1267 server_id,
1268 lsp::PublishDiagnosticsParams {
1269 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1270 version: None,
1271 diagnostics: vec![lsp::Diagnostic {
1272 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1273 severity: Some(lsp::DiagnosticSeverity::ERROR),
1274 message: "unknown variable 'c'".to_string(),
1275 ..Default::default()
1276 }],
1277 },
1278 &[],
1279 cx,
1280 )
1281 .unwrap();
1282 });
1283
1284 let main_ignored_buffer = project
1285 .update(cx, |project, cx| {
1286 project.open_buffer((main_worktree_id, "b.rs"), cx)
1287 })
1288 .await
1289 .unwrap();
1290 main_ignored_buffer.update(cx, |buffer, _| {
1291 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1292 assert_eq!(
1293 chunks
1294 .iter()
1295 .map(|(s, d)| (s.as_str(), *d))
1296 .collect::<Vec<_>>(),
1297 &[
1298 ("let ", None),
1299 ("b", Some(DiagnosticSeverity::ERROR)),
1300 (" = 2;", None),
1301 ],
1302 "Gigitnored buffers should still get in-buffer diagnostics",
1303 );
1304 });
1305 let other_buffer = project
1306 .update(cx, |project, cx| {
1307 project.open_buffer((other_worktree_id, ""), cx)
1308 })
1309 .await
1310 .unwrap();
1311 other_buffer.update(cx, |buffer, _| {
1312 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1313 assert_eq!(
1314 chunks
1315 .iter()
1316 .map(|(s, d)| (s.as_str(), *d))
1317 .collect::<Vec<_>>(),
1318 &[
1319 ("let b = ", None),
1320 ("c", Some(DiagnosticSeverity::ERROR)),
1321 (";", None),
1322 ],
1323 "Buffers from hidden projects should still get in-buffer diagnostics"
1324 );
1325 });
1326
1327 project.update(cx, |project, cx| {
1328 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1329 assert_eq!(
1330 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1331 vec![(
1332 ProjectPath {
1333 worktree_id: main_worktree_id,
1334 path: Arc::from(Path::new("b.rs")),
1335 },
1336 server_id,
1337 DiagnosticSummary {
1338 error_count: 1,
1339 warning_count: 0,
1340 }
1341 )]
1342 );
1343 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1344 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1345 });
1346}
1347
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Exercises the project event sequence around disk-based diagnostics:
    // server added -> progress started -> diagnostics updated -> progress
    // finished, and that republishing identical empty diagnostics does not
    // emit a redundant update event.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting progress under the registered disk-based token triggers the
    // "disk based diagnostics started" event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic while progress is running emits an update event
    // for the affected path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The buffer opened afterwards carries the previously published diagnostic.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Second identical empty publish: no further event should be emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1483
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Restart a language server while its disk-based diagnostics progress is
    // still open: the replacement server's progress lifecycle must be tracked,
    // and the old server's never-finished task must not leave the project in a
    // "diagnostics running" state.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // Note: the replacement server is assigned the next id (1).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1570
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    // Restarting a language server clears the diagnostics it had published,
    // both from the buffer and from the project-level summary.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic is visible in the buffer and counted in the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1651
1652#[gpui::test]
1653async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1654 init_test(cx);
1655
1656 let fs = FakeFs::new(cx.executor());
1657 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1658
1659 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1660 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1661
1662 language_registry.add(rust_lang());
1663 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1664
1665 let (buffer, _handle) = project
1666 .update(cx, |project, cx| {
1667 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1668 })
1669 .await
1670 .unwrap();
1671
1672 // Before restarting the server, report diagnostics with an unknown buffer version.
1673 let fake_server = fake_servers.next().await.unwrap();
1674 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1675 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1676 version: Some(10000),
1677 diagnostics: Vec::new(),
1678 });
1679 cx.executor().run_until_parked();
1680 project.update(cx, |project, cx| {
1681 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1682 });
1683
1684 let mut fake_server = fake_servers.next().await.unwrap();
1685 let notification = fake_server
1686 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1687 .await
1688 .text_document;
1689 assert_eq!(notification.version, 0);
1690}
1691
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    // Cancelling language-server work for a buffer sends a
    // WorkDoneProgressCancel only for the progress task that was started as
    // cancellable, not for the non-cancellable one.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    // Start two progress tasks: one non-cancellable, one cancellable.
    let mut fake_server = fake_servers.next().await.unwrap();
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable task's token is targeted by the cancel request.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1756
1757#[gpui::test]
1758async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1759 init_test(cx);
1760
1761 let fs = FakeFs::new(cx.executor());
1762 fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
1763 .await;
1764
1765 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1766 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1767
1768 let mut fake_rust_servers = language_registry.register_fake_lsp(
1769 "Rust",
1770 FakeLspAdapter {
1771 name: "rust-lsp",
1772 ..Default::default()
1773 },
1774 );
1775 let mut fake_js_servers = language_registry.register_fake_lsp(
1776 "JavaScript",
1777 FakeLspAdapter {
1778 name: "js-lsp",
1779 ..Default::default()
1780 },
1781 );
1782 language_registry.add(rust_lang());
1783 language_registry.add(js_lang());
1784
1785 let _rs_buffer = project
1786 .update(cx, |project, cx| {
1787 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1788 })
1789 .await
1790 .unwrap();
1791 let _js_buffer = project
1792 .update(cx, |project, cx| {
1793 project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
1794 })
1795 .await
1796 .unwrap();
1797
1798 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1799 assert_eq!(
1800 fake_rust_server_1
1801 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1802 .await
1803 .text_document
1804 .uri
1805 .as_str(),
1806 uri!("file:///dir/a.rs")
1807 );
1808
1809 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1810 assert_eq!(
1811 fake_js_server
1812 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1813 .await
1814 .text_document
1815 .uri
1816 .as_str(),
1817 uri!("file:///dir/b.js")
1818 );
1819
1820 // Disable Rust language server, ensuring only that server gets stopped.
1821 cx.update(|cx| {
1822 SettingsStore::update_global(cx, |settings, cx| {
1823 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1824 settings.languages.insert(
1825 "Rust".into(),
1826 LanguageSettingsContent {
1827 enable_language_server: Some(false),
1828 ..Default::default()
1829 },
1830 );
1831 });
1832 })
1833 });
1834 fake_rust_server_1
1835 .receive_notification::<lsp::notification::Exit>()
1836 .await;
1837
1838 // Enable Rust and disable JavaScript language servers, ensuring that the
1839 // former gets started again and that the latter stops.
1840 cx.update(|cx| {
1841 SettingsStore::update_global(cx, |settings, cx| {
1842 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1843 settings.languages.insert(
1844 LanguageName::new("Rust"),
1845 LanguageSettingsContent {
1846 enable_language_server: Some(true),
1847 ..Default::default()
1848 },
1849 );
1850 settings.languages.insert(
1851 LanguageName::new("JavaScript"),
1852 LanguageSettingsContent {
1853 enable_language_server: Some(false),
1854 ..Default::default()
1855 },
1856 );
1857 });
1858 })
1859 });
1860 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1861 assert_eq!(
1862 fake_rust_server_2
1863 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1864 .await
1865 .text_document
1866 .uri
1867 .as_str(),
1868 uri!("file:///dir/a.rs")
1869 );
1870 fake_js_server
1871 .receive_notification::<lsp::notification::Exit>()
1872 .await;
1873}
1874
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    // Diagnostics published against an older buffer version must be
    // transformed through subsequent edits: positions move with insertions,
    // overlapping ranges highlight correctly, and disk-based diagnostics are
    // translated across unsaved changes.
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (The two inserted newlines shift every row by 2.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider WARNING is returned before the nested ERROR.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // The ERROR takes precedence where the two ranges overlap.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2160
// Verifies how zero-width diagnostic ranges are expanded for display:
// an empty range mid-line is extended forward over the next character,
// and an empty range at end-of-line is extended backward over the
// preceding character, so the diagnostic always highlights something.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Inject two zero-width (empty-range) diagnostics directly into the LSP
    // store: one mid-line at (0, 10) and one at the end of line 1.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                ..Default::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                ..Default::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2233
// Verifies that diagnostics reported by two distinct language servers
// (different `LanguageServerId`s) for the same file are counted
// independently in the project-wide diagnostic summary.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Server 0 reports one error for the word "one".
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // Server 1 reports a second error over the same range; it must not
        // replace or merge with server 0's diagnostic.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' errors contribute to the summary.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2290
// Verifies that LSP edits computed against an *older* document version are
// correctly transformed through edits the user made after that version was
// sent to the server: the buffer is edited locally, then edits tagged with
// the pre-edit document version are applied and must land in the right
// (shifted) locations.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the version the server saw at open time; edits below are
    // tagged with this version even though the buffer changes afterwards.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Edits below use coordinates in the *original* (pre-edit) text; passing
    // `lsp_document_version` tells `edits_from_lsp` to interpret them against
    // that snapshot and rebase them onto the current buffer contents.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits must preserve the user's interleaved edits
    // while still performing the server's intended changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2445
// Verifies that a "small change expressed as a huge diff" — the style of
// edit rust-analyzer emits for merge-imports — is minimized by
// `edits_from_lsp` into the equivalent small set of buffer edits instead of
// rewriting the whole file.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The big delete-and-reinsert collapses to just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2556
// Verifies that `edits_from_lsp` tolerates malformed server edits:
// unordered edits, an inverted range (end before start), and a range whose
// end line lies past the end of the file. They must be normalized/clipped
// into the same minimal, well-ordered edits as the well-formed case.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position is far past the end of the file and must be clipped.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Same minimal result as the well-formed adjacent-lines test.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2663
2664fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2665 buffer: &Buffer,
2666 range: Range<T>,
2667) -> Vec<(String, Option<DiagnosticSeverity>)> {
2668 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2669 for chunk in buffer.snapshot().chunks(range, true) {
2670 if chunks.last().map_or(false, |prev_chunk| {
2671 prev_chunk.1 == chunk.diagnostic_severity
2672 }) {
2673 chunks.last_mut().unwrap().0.push_str(chunk.text);
2674 } else {
2675 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2676 }
2677 }
2678 chunks
2679}
2680
// Verifies go-to-definition into a file outside the project: the target
// file is opened in a new *invisible* worktree, no additional language
// server is started for it, and dropping the definition releases that
// worktree.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server resolves the definition to a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was opened in an invisible worktree alongside the visible b.rs one.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path together with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2778
// Verifies that when a completion item carries an explicit `text_edit`, its
// range and new text take precedence over both `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server answers with an item whose text_edit replaces the last
    // three characters ("fqn") of the line.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
2856
// Verifies completion-list item defaults (`itemDefaults.editRange`): when an
// item omits `text_edit`, the default edit range is used, and the new text
// falls back from `insert_text` to `label`.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions.await.unwrap().unwrap();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // insert_text wins over label; the default edit range is applied.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].old_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions.await.unwrap().unwrap();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no insert_text either, the label itself is inserted.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].old_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
2982
// Verifies the replacement range inferred when neither a `text_edit` nor a
// default `edit_range` is provided: the range is derived from the word (or
// quoted fragment) surrounding the cursor, and the new text falls back from
// `insert_text` to `label`.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The word "fqn" before the cursor is the inferred replacement range.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just inside the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // "cmp" (before the closing quote) is replaced by the label.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3078
// Verifies that carriage returns in a completion's insert text are
// normalized to plain newlines before insertion, so CRs never end up in the
// buffer.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // insert_text mixes a lone "\r" and a "\r\n" sequence.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    assert_eq!(completions.len(), 1);
    // Both line-ending styles collapse to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3141
// Verifies applying a code action whose resolved form contains a *command*
// rather than edits: the command is executed on the server, the server
// responds with a `workspace/applyEdit` request, and those edits end up in
// the returned project transaction (and are undoable).
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server pushes an edit that prepends "X" to a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3282
// Verifies that saving a buffer (including a large, multi-megabyte edit)
// writes the buffer contents to disk exactly, modulo platform line endings.
#[gpui::test(iterations = 10)]
async fn test_save_file(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "the old contents",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "the old contents");
        // Prepend ~160 KB of text to exercise the large-file save path.
        buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
    });

    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();

    // Normalize CRLF so the comparison is line-ending agnostic on Windows.
    let new_text = fs
        .load(Path::new(path!("/dir/file1")))
        .await
        .unwrap()
        .replace("\r\n", "\n");
    assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
}
3318
3319#[gpui::test(iterations = 30)]
3320async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3321 init_test(cx);
3322
3323 let fs = FakeFs::new(cx.executor().clone());
3324 fs.insert_tree(
3325 path!("/dir"),
3326 json!({
3327 "file1": "the original contents",
3328 }),
3329 )
3330 .await;
3331
3332 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3333 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3334 let buffer = project
3335 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3336 .await
3337 .unwrap();
3338
3339 // Simulate buffer diffs being slow, so that they don't complete before
3340 // the next file change occurs.
3341 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3342
3343 // Change the buffer's file on disk, and then wait for the file change
3344 // to be detected by the worktree, so that the buffer starts reloading.
3345 fs.save(
3346 path!("/dir/file1").as_ref(),
3347 &"the first contents".into(),
3348 Default::default(),
3349 )
3350 .await
3351 .unwrap();
3352 worktree.next_event(cx).await;
3353
3354 // Change the buffer's file again. Depending on the random seed, the
3355 // previous file change may still be in progress.
3356 fs.save(
3357 path!("/dir/file1").as_ref(),
3358 &"the second contents".into(),
3359 Default::default(),
3360 )
3361 .await
3362 .unwrap();
3363 worktree.next_event(cx).await;
3364
3365 cx.executor().run_until_parked();
3366 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3367 buffer.read_with(cx, |buffer, _| {
3368 assert_eq!(buffer.text(), on_disk_text);
3369 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3370 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3371 });
3372}
3373
3374#[gpui::test(iterations = 30)]
3375async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3376 init_test(cx);
3377
3378 let fs = FakeFs::new(cx.executor().clone());
3379 fs.insert_tree(
3380 path!("/dir"),
3381 json!({
3382 "file1": "the original contents",
3383 }),
3384 )
3385 .await;
3386
3387 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3388 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3389 let buffer = project
3390 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3391 .await
3392 .unwrap();
3393
3394 // Simulate buffer diffs being slow, so that they don't complete before
3395 // the next file change occurs.
3396 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3397
3398 // Change the buffer's file on disk, and then wait for the file change
3399 // to be detected by the worktree, so that the buffer starts reloading.
3400 fs.save(
3401 path!("/dir/file1").as_ref(),
3402 &"the first contents".into(),
3403 Default::default(),
3404 )
3405 .await
3406 .unwrap();
3407 worktree.next_event(cx).await;
3408
3409 cx.executor()
3410 .spawn(cx.executor().simulate_random_delay())
3411 .await;
3412
3413 // Perform a noop edit, causing the buffer's version to increase.
3414 buffer.update(cx, |buffer, cx| {
3415 buffer.edit([(0..0, " ")], None, cx);
3416 buffer.undo(cx);
3417 });
3418
3419 cx.executor().run_until_parked();
3420 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3421 buffer.read_with(cx, |buffer, _| {
3422 let buffer_text = buffer.text();
3423 if buffer_text == on_disk_text {
3424 assert!(
3425 !buffer.is_dirty() && !buffer.has_conflict(),
3426 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3427 );
3428 }
3429 // If the file change occurred while the buffer was processing the first
3430 // change, the buffer will be in a conflicting state.
3431 else {
3432 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3433 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3434 }
3435 });
3436}
3437
3438#[gpui::test]
3439async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3440 init_test(cx);
3441
3442 let fs = FakeFs::new(cx.executor());
3443 fs.insert_tree(
3444 path!("/dir"),
3445 json!({
3446 "file1": "the old contents",
3447 }),
3448 )
3449 .await;
3450
3451 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3452 let buffer = project
3453 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3454 .await
3455 .unwrap();
3456 buffer.update(cx, |buffer, cx| {
3457 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3458 });
3459
3460 project
3461 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3462 .await
3463 .unwrap();
3464
3465 let new_text = fs
3466 .load(Path::new(path!("/dir/file1")))
3467 .await
3468 .unwrap()
3469 .replace("\r\n", "\n");
3470 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3471}
3472
3473#[gpui::test]
3474async fn test_save_as(cx: &mut gpui::TestAppContext) {
3475 init_test(cx);
3476
3477 let fs = FakeFs::new(cx.executor());
3478 fs.insert_tree("/dir", json!({})).await;
3479
3480 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3481
3482 let languages = project.update(cx, |project, _| project.languages().clone());
3483 languages.add(rust_lang());
3484
3485 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3486 buffer.update(cx, |buffer, cx| {
3487 buffer.edit([(0..0, "abc")], None, cx);
3488 assert!(buffer.is_dirty());
3489 assert!(!buffer.has_conflict());
3490 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3491 });
3492 project
3493 .update(cx, |project, cx| {
3494 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3495 let path = ProjectPath {
3496 worktree_id,
3497 path: Arc::from(Path::new("file1.rs")),
3498 };
3499 project.save_buffer_as(buffer.clone(), path, cx)
3500 })
3501 .await
3502 .unwrap();
3503 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3504
3505 cx.executor().run_until_parked();
3506 buffer.update(cx, |buffer, cx| {
3507 assert_eq!(
3508 buffer.file().unwrap().full_path(cx),
3509 Path::new("dir/file1.rs")
3510 );
3511 assert!(!buffer.is_dirty());
3512 assert!(!buffer.has_conflict());
3513 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3514 });
3515
3516 let opened_buffer = project
3517 .update(cx, |project, cx| {
3518 project.open_local_buffer("/dir/file1.rs", cx)
3519 })
3520 .await
3521 .unwrap();
3522 assert_eq!(opened_buffer, buffer);
3523}
3524
// Verifies, against a real file system, that (1) worktree entry ids remain
// stable across renames/moves/deletions on disk, (2) open buffers track their
// files' new paths and disk states, and (3) a remote replica of the worktree
// converges to the same paths once the observed updates are applied to it.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp-tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: the worktree entry id currently associated with a path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree emits so they can be replayed
    // onto the remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // After the rescan, the worktree reflects the new on-disk layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids are preserved across renames and directory moves.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Each open buffer's file now reports the post-rename path; the
        // deleted file's buffer keeps its last known path.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3690
3691#[gpui::test(iterations = 10)]
3692async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3693 init_test(cx);
3694
3695 let fs = FakeFs::new(cx.executor());
3696 fs.insert_tree(
3697 path!("/dir"),
3698 json!({
3699 "a": {
3700 "file1": "",
3701 }
3702 }),
3703 )
3704 .await;
3705
3706 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3707 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3708 let tree_id = tree.update(cx, |tree, _| tree.id());
3709
3710 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3711 project.update(cx, |project, cx| {
3712 let tree = project.worktrees(cx).next().unwrap();
3713 tree.read(cx)
3714 .entry_for_path(path)
3715 .unwrap_or_else(|| panic!("no entry for path {}", path))
3716 .id
3717 })
3718 };
3719
3720 let dir_id = id_for_path("a", cx);
3721 let file_id = id_for_path("a/file1", cx);
3722 let buffer = project
3723 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3724 .await
3725 .unwrap();
3726 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3727
3728 project
3729 .update(cx, |project, cx| {
3730 project.rename_entry(dir_id, Path::new("b"), cx)
3731 })
3732 .unwrap()
3733 .await
3734 .to_included()
3735 .unwrap();
3736 cx.executor().run_until_parked();
3737
3738 assert_eq!(id_for_path("b", cx), dir_id);
3739 assert_eq!(id_for_path("b/file1", cx), file_id);
3740 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3741}
3742
3743#[gpui::test]
3744async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3745 init_test(cx);
3746
3747 let fs = FakeFs::new(cx.executor());
3748 fs.insert_tree(
3749 "/dir",
3750 json!({
3751 "a.txt": "a-contents",
3752 "b.txt": "b-contents",
3753 }),
3754 )
3755 .await;
3756
3757 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3758
3759 // Spawn multiple tasks to open paths, repeating some paths.
3760 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3761 (
3762 p.open_local_buffer("/dir/a.txt", cx),
3763 p.open_local_buffer("/dir/b.txt", cx),
3764 p.open_local_buffer("/dir/a.txt", cx),
3765 )
3766 });
3767
3768 let buffer_a_1 = buffer_a_1.await.unwrap();
3769 let buffer_a_2 = buffer_a_2.await.unwrap();
3770 let buffer_b = buffer_b.await.unwrap();
3771 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3772 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3773
3774 // There is only one buffer per path.
3775 let buffer_a_id = buffer_a_1.entity_id();
3776 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3777
3778 // Open the same path again while it is still open.
3779 drop(buffer_a_1);
3780 let buffer_a_3 = project
3781 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3782 .await
3783 .unwrap();
3784
3785 // There's still only one buffer per path.
3786 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3787}
3788
// Exercises dirty-state tracking and its events: edits mark a buffer dirty,
// saving or restoring the saved text clears it, and file deletion interacts
// with dirtiness (a deleted file's buffer is only dirty if it has unsaved
// edits or non-empty content).
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Collects every non-Operation buffer event for later assertions.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save completing (without actually writing to disk).
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only the first of the two edits flips dirtiness, so only one
        // DirtyChanged is emitted between the two Edited events.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, it is not considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[language::BufferEvent::FileHandleChanged]
    );

    // Buffer becomes dirty when edited.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(2..3, "")], None, cx);
        assert_eq!(buffer.is_dirty(), true);
    });
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // Buffer becomes clean again when all of its content is removed, because
    // the file was deleted.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..2, "")], None, cx);
        assert_eq!(buffer.is_empty(), true);
        assert_eq!(buffer.is_dirty(), false);
    });
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3970
// Verifies buffer behavior when its file changes on disk: a clean buffer
// reloads (applying a diff so that anchors are preserved across the reload),
// while a dirty buffer keeps its edits and is marked as conflicted.
// The "ˇ" markers in the text give the offsets used to create anchors.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Create one anchor at each marked offset.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The diff-based reload moved each anchor to the corresponding
        // position in the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4053
4054#[gpui::test]
4055async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4056 init_test(cx);
4057
4058 let fs = FakeFs::new(cx.executor());
4059 fs.insert_tree(
4060 path!("/dir"),
4061 json!({
4062 "file1": "a\nb\nc\n",
4063 "file2": "one\r\ntwo\r\nthree\r\n",
4064 }),
4065 )
4066 .await;
4067
4068 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4069 let buffer1 = project
4070 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4071 .await
4072 .unwrap();
4073 let buffer2 = project
4074 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4075 .await
4076 .unwrap();
4077
4078 buffer1.update(cx, |buffer, _| {
4079 assert_eq!(buffer.text(), "a\nb\nc\n");
4080 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4081 });
4082 buffer2.update(cx, |buffer, _| {
4083 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4084 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4085 });
4086
4087 // Change a file's line endings on disk from unix to windows. The buffer's
4088 // state updates correctly.
4089 fs.save(
4090 path!("/dir/file1").as_ref(),
4091 &"aaa\nb\nc\n".into(),
4092 LineEnding::Windows,
4093 )
4094 .await
4095 .unwrap();
4096 cx.executor().run_until_parked();
4097 buffer1.update(cx, |buffer, _| {
4098 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4099 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4100 });
4101
4102 // Save a file with windows line endings. The file is written correctly.
4103 buffer2.update(cx, |buffer, cx| {
4104 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4105 });
4106 project
4107 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4108 .await
4109 .unwrap();
4110 assert_eq!(
4111 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4112 "one\r\ntwo\r\nthree\r\nfour\r\n",
4113 );
4114}
4115
// Verifies that LSP diagnostics connected via `related_information` are
// grouped: each group gets one primary entry plus its hint entries, sharing a
// `group_id`, and `diagnostic_group` returns a group's entries in position
// order. Group ids appear to be assigned by group start position — the
// "error 2" group (starting at row 1, col 13) gets id 0, "error 1" gets id 1.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a publish-diagnostics message containing two groups:
    // "error 1" (warning) with one hint, and "error 2" (error) with two
    // hints. Hints reference their primary via related_information, and the
    // primaries reference their hints.
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, in position order, carrying their group ids and
    // primary/hint roles.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: the "error 2" group — two hints followed by the primary.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1: the "error 1" group — primary followed by its hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
4358
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the LSP file-operation protocol around renaming a worktree entry:
    // the server's registered filters cause a `workspace/willRenameFiles` request
    // (whose returned WorkspaceEdit must be applied) followed by a
    // `workspace/didRenameFiles` notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // File-operation filters the fake server registers: all `.rs` files plus all
    // folders, both on the `file` scheme.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a buffer for a Rust file starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; it must not complete until the server has answered
    // the willRenameFiles request below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The WorkspaceEdit the server will answer willRenameFiles with; its
    // contents are arbitrary — the test only checks it is forwarded intact.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    // Answer willRenameFiles, asserting that exactly the renamed file is listed
    // with its old and new URIs, and record that the edit was produced.
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles with
    // the same old/new URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4487
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises symbol rename end-to-end: `prepare_rename` resolves the range of
    // the symbol under the cursor, and `perform_rename` applies the server's
    // WorkspaceEdit across multiple buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    // prepare_provider lets the client ask for the renameable range first.
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Ask to rename at offset 7, which is inside the identifier `ONE` (6..9).
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Now perform the rename; the fake server returns edits touching both
    // `one.rs` (the definition) and `two.rs` (two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each edited buffer to its undo transaction; both
    // buffers must contain the renamed symbol afterwards.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4627
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Project-wide text search: matches come from files on disk, and unsaved
    // edits in open buffers are searched in their edited (in-memory) state.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // "TWO" appears in two.rs (the definition) and three.rs (a reference).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so it now references two::TWO.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The same query must now also match the dirty buffer's new contents.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40]),
            (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
4702
4703#[gpui::test]
4704async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4705 init_test(cx);
4706
4707 let search_query = "file";
4708
4709 let fs = FakeFs::new(cx.executor());
4710 fs.insert_tree(
4711 path!("/dir"),
4712 json!({
4713 "one.rs": r#"// Rust file one"#,
4714 "one.ts": r#"// TypeScript file one"#,
4715 "two.rs": r#"// Rust file two"#,
4716 "two.ts": r#"// TypeScript file two"#,
4717 }),
4718 )
4719 .await;
4720 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4721
4722 assert!(
4723 search(
4724 &project,
4725 SearchQuery::text(
4726 search_query,
4727 false,
4728 true,
4729 false,
4730 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4731 Default::default(),
4732 None
4733 )
4734 .unwrap(),
4735 cx
4736 )
4737 .await
4738 .unwrap()
4739 .is_empty(),
4740 "If no inclusions match, no files should be returned"
4741 );
4742
4743 assert_eq!(
4744 search(
4745 &project,
4746 SearchQuery::text(
4747 search_query,
4748 false,
4749 true,
4750 false,
4751 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4752 Default::default(),
4753 None
4754 )
4755 .unwrap(),
4756 cx
4757 )
4758 .await
4759 .unwrap(),
4760 HashMap::from_iter([
4761 (separator!("dir/one.rs").to_string(), vec![8..12]),
4762 (separator!("dir/two.rs").to_string(), vec![8..12]),
4763 ]),
4764 "Rust only search should give only Rust files"
4765 );
4766
4767 assert_eq!(
4768 search(
4769 &project,
4770 SearchQuery::text(
4771 search_query,
4772 false,
4773 true,
4774 false,
4775
4776 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4777
4778 Default::default(),
4779 None,
4780 ).unwrap(),
4781 cx
4782 )
4783 .await
4784 .unwrap(),
4785 HashMap::from_iter([
4786 (separator!("dir/one.ts").to_string(), vec![14..18]),
4787 (separator!("dir/two.ts").to_string(), vec![14..18]),
4788 ]),
4789 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4790 );
4791
4792 assert_eq!(
4793 search(
4794 &project,
4795 SearchQuery::text(
4796 search_query,
4797 false,
4798 true,
4799 false,
4800
4801 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4802
4803 Default::default(),
4804 None,
4805 ).unwrap(),
4806 cx
4807 )
4808 .await
4809 .unwrap(),
4810 HashMap::from_iter([
4811 (separator!("dir/two.ts").to_string(), vec![14..18]),
4812 (separator!("dir/one.rs").to_string(), vec![8..12]),
4813 (separator!("dir/one.ts").to_string(), vec![14..18]),
4814 (separator!("dir/two.rs").to_string(), vec![8..12]),
4815 ]),
4816 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4817 );
4818}
4819
4820#[gpui::test]
4821async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4822 init_test(cx);
4823
4824 let search_query = "file";
4825
4826 let fs = FakeFs::new(cx.executor());
4827 fs.insert_tree(
4828 path!("/dir"),
4829 json!({
4830 "one.rs": r#"// Rust file one"#,
4831 "one.ts": r#"// TypeScript file one"#,
4832 "two.rs": r#"// Rust file two"#,
4833 "two.ts": r#"// TypeScript file two"#,
4834 }),
4835 )
4836 .await;
4837 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4838
4839 assert_eq!(
4840 search(
4841 &project,
4842 SearchQuery::text(
4843 search_query,
4844 false,
4845 true,
4846 false,
4847 Default::default(),
4848 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4849 None,
4850 )
4851 .unwrap(),
4852 cx
4853 )
4854 .await
4855 .unwrap(),
4856 HashMap::from_iter([
4857 (separator!("dir/one.rs").to_string(), vec![8..12]),
4858 (separator!("dir/one.ts").to_string(), vec![14..18]),
4859 (separator!("dir/two.rs").to_string(), vec![8..12]),
4860 (separator!("dir/two.ts").to_string(), vec![14..18]),
4861 ]),
4862 "If no exclusions match, all files should be returned"
4863 );
4864
4865 assert_eq!(
4866 search(
4867 &project,
4868 SearchQuery::text(
4869 search_query,
4870 false,
4871 true,
4872 false,
4873 Default::default(),
4874 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4875 None,
4876 )
4877 .unwrap(),
4878 cx
4879 )
4880 .await
4881 .unwrap(),
4882 HashMap::from_iter([
4883 (separator!("dir/one.ts").to_string(), vec![14..18]),
4884 (separator!("dir/two.ts").to_string(), vec![14..18]),
4885 ]),
4886 "Rust exclusion search should give only TypeScript files"
4887 );
4888
4889 assert_eq!(
4890 search(
4891 &project,
4892 SearchQuery::text(
4893 search_query,
4894 false,
4895 true,
4896 false,
4897 Default::default(),
4898 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4899 None,
4900 ).unwrap(),
4901 cx
4902 )
4903 .await
4904 .unwrap(),
4905 HashMap::from_iter([
4906 (separator!("dir/one.rs").to_string(), vec![8..12]),
4907 (separator!("dir/two.rs").to_string(), vec![8..12]),
4908 ]),
4909 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4910 );
4911
4912 assert!(
4913 search(
4914 &project,
4915 SearchQuery::text(
4916 search_query,
4917 false,
4918 true,
4919 false,
4920 Default::default(),
4921
4922 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4923 None,
4924
4925 ).unwrap(),
4926 cx
4927 )
4928 .await
4929 .unwrap().is_empty(),
4930 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4931 );
4932}
4933
4934#[gpui::test]
4935async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4936 init_test(cx);
4937
4938 let search_query = "file";
4939
4940 let fs = FakeFs::new(cx.executor());
4941 fs.insert_tree(
4942 path!("/dir"),
4943 json!({
4944 "one.rs": r#"// Rust file one"#,
4945 "one.ts": r#"// TypeScript file one"#,
4946 "two.rs": r#"// Rust file two"#,
4947 "two.ts": r#"// TypeScript file two"#,
4948 }),
4949 )
4950 .await;
4951 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4952
4953 assert!(
4954 search(
4955 &project,
4956 SearchQuery::text(
4957 search_query,
4958 false,
4959 true,
4960 false,
4961 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4962 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4963 None,
4964 )
4965 .unwrap(),
4966 cx
4967 )
4968 .await
4969 .unwrap()
4970 .is_empty(),
4971 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4972 );
4973
4974 assert!(
4975 search(
4976 &project,
4977 SearchQuery::text(
4978 search_query,
4979 false,
4980 true,
4981 false,
4982 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4983 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4984 None,
4985 ).unwrap(),
4986 cx
4987 )
4988 .await
4989 .unwrap()
4990 .is_empty(),
4991 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4992 );
4993
4994 assert!(
4995 search(
4996 &project,
4997 SearchQuery::text(
4998 search_query,
4999 false,
5000 true,
5001 false,
5002 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5003 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5004 None,
5005 )
5006 .unwrap(),
5007 cx
5008 )
5009 .await
5010 .unwrap()
5011 .is_empty(),
5012 "Non-matching inclusions and exclusions should not change that."
5013 );
5014
5015 assert_eq!(
5016 search(
5017 &project,
5018 SearchQuery::text(
5019 search_query,
5020 false,
5021 true,
5022 false,
5023 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5024 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5025 None,
5026 )
5027 .unwrap(),
5028 cx
5029 )
5030 .await
5031 .unwrap(),
5032 HashMap::from_iter([
5033 (separator!("dir/one.ts").to_string(), vec![14..18]),
5034 (separator!("dir/two.ts").to_string(), vec![14..18]),
5035 ]),
5036 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5037 );
5038}
5039
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Inclusion globs are matched against worktree-relative paths: a glob
    // prefixed with a worktree name restricts results to that worktree, while a
    // bare glob matches files across all worktrees.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    // One project spanning both worktrees.
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
5134
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Gitignored directories are skipped by default; the "include ignored" flag
    // (4th SearchQuery::text argument) opts them in, and inclusion/exclusion
    // globs still apply on top of that.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search: ignored dirs (target/, node_modules/) are not scanned.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Fresh project so previously-scanned state doesn't affect the result.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/package.json").to_string(), vec![8..11]),
            (separator!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                separator!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                separator!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                separator!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                separator!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include only the ignored prettier dir, then exclude its .ts file.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            separator!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
5254
5255#[gpui::test]
5256async fn test_create_entry(cx: &mut gpui::TestAppContext) {
5257 init_test(cx);
5258
5259 let fs = FakeFs::new(cx.executor().clone());
5260 fs.insert_tree(
5261 "/one/two",
5262 json!({
5263 "three": {
5264 "a.txt": "",
5265 "four": {}
5266 },
5267 "c.rs": ""
5268 }),
5269 )
5270 .await;
5271
5272 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
5273 project
5274 .update(cx, |project, cx| {
5275 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5276 project.create_entry((id, "b.."), true, cx)
5277 })
5278 .await
5279 .unwrap()
5280 .to_included()
5281 .unwrap();
5282
5283 // Can't create paths outside the project
5284 let result = project
5285 .update(cx, |project, cx| {
5286 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5287 project.create_entry((id, "../../boop"), true, cx)
5288 })
5289 .await;
5290 assert!(result.is_err());
5291
5292 // Can't create paths with '..'
5293 let result = project
5294 .update(cx, |project, cx| {
5295 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5296 project.create_entry((id, "four/../beep"), true, cx)
5297 })
5298 .await;
5299 assert!(result.is_err());
5300
5301 assert_eq!(
5302 fs.paths(true),
5303 vec![
5304 PathBuf::from(path!("/")),
5305 PathBuf::from(path!("/one")),
5306 PathBuf::from(path!("/one/two")),
5307 PathBuf::from(path!("/one/two/c.rs")),
5308 PathBuf::from(path!("/one/two/three")),
5309 PathBuf::from(path!("/one/two/three/a.txt")),
5310 PathBuf::from(path!("/one/two/three/b..")),
5311 PathBuf::from(path!("/one/two/three/four")),
5312 ]
5313 );
5314
5315 // And we cannot open buffers with '..'
5316 let result = project
5317 .update(cx, |project, cx| {
5318 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5319 project.open_buffer((id, "../c.rs"), cx)
5320 })
5321 .await;
5322 assert!(result.is_err())
5323}
5324
5325#[gpui::test]
5326async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
5327 init_test(cx);
5328
5329 let fs = FakeFs::new(cx.executor());
5330 fs.insert_tree(
5331 path!("/dir"),
5332 json!({
5333 "a.tsx": "a",
5334 }),
5335 )
5336 .await;
5337
5338 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5339
5340 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5341 language_registry.add(tsx_lang());
5342 let language_server_names = [
5343 "TypeScriptServer",
5344 "TailwindServer",
5345 "ESLintServer",
5346 "NoHoverCapabilitiesServer",
5347 ];
5348 let mut language_servers = [
5349 language_registry.register_fake_lsp(
5350 "tsx",
5351 FakeLspAdapter {
5352 name: language_server_names[0],
5353 capabilities: lsp::ServerCapabilities {
5354 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5355 ..lsp::ServerCapabilities::default()
5356 },
5357 ..FakeLspAdapter::default()
5358 },
5359 ),
5360 language_registry.register_fake_lsp(
5361 "tsx",
5362 FakeLspAdapter {
5363 name: language_server_names[1],
5364 capabilities: lsp::ServerCapabilities {
5365 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5366 ..lsp::ServerCapabilities::default()
5367 },
5368 ..FakeLspAdapter::default()
5369 },
5370 ),
5371 language_registry.register_fake_lsp(
5372 "tsx",
5373 FakeLspAdapter {
5374 name: language_server_names[2],
5375 capabilities: lsp::ServerCapabilities {
5376 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5377 ..lsp::ServerCapabilities::default()
5378 },
5379 ..FakeLspAdapter::default()
5380 },
5381 ),
5382 language_registry.register_fake_lsp(
5383 "tsx",
5384 FakeLspAdapter {
5385 name: language_server_names[3],
5386 capabilities: lsp::ServerCapabilities {
5387 hover_provider: None,
5388 ..lsp::ServerCapabilities::default()
5389 },
5390 ..FakeLspAdapter::default()
5391 },
5392 ),
5393 ];
5394
5395 let (buffer, _handle) = project
5396 .update(cx, |p, cx| {
5397 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5398 })
5399 .await
5400 .unwrap();
5401 cx.executor().run_until_parked();
5402
5403 let mut servers_with_hover_requests = HashMap::default();
5404 for i in 0..language_server_names.len() {
5405 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
5406 panic!(
5407 "Failed to get language server #{i} with name {}",
5408 &language_server_names[i]
5409 )
5410 });
5411 let new_server_name = new_server.server.name();
5412 assert!(
5413 !servers_with_hover_requests.contains_key(&new_server_name),
5414 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5415 );
5416 match new_server_name.as_ref() {
5417 "TailwindServer" | "TypeScriptServer" => {
5418 servers_with_hover_requests.insert(
5419 new_server_name.clone(),
5420 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
5421 move |_, _| {
5422 let name = new_server_name.clone();
5423 async move {
5424 Ok(Some(lsp::Hover {
5425 contents: lsp::HoverContents::Scalar(
5426 lsp::MarkedString::String(format!("{name} hover")),
5427 ),
5428 range: None,
5429 }))
5430 }
5431 },
5432 ),
5433 );
5434 }
5435 "ESLintServer" => {
5436 servers_with_hover_requests.insert(
5437 new_server_name,
5438 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
5439 |_, _| async move { Ok(None) },
5440 ),
5441 );
5442 }
5443 "NoHoverCapabilitiesServer" => {
5444 let _never_handled = new_server
5445 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
5446 panic!(
5447 "Should not call for hovers server with no corresponding capabilities"
5448 )
5449 });
5450 }
5451 unexpected => panic!("Unexpected server name: {unexpected}"),
5452 }
5453 }
5454
5455 let hover_task = project.update(cx, |project, cx| {
5456 project.hover(&buffer, Point::new(0, 0), cx)
5457 });
5458 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
5459 |mut hover_request| async move {
5460 hover_request
5461 .next()
5462 .await
5463 .expect("All hover requests should have been triggered")
5464 },
5465 ))
5466 .await;
5467 assert_eq!(
5468 vec!["TailwindServer hover", "TypeScriptServer hover"],
5469 hover_task
5470 .await
5471 .into_iter()
5472 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5473 .sorted()
5474 .collect::<Vec<_>>(),
5475 "Should receive hover responses from all related servers with hover capabilities"
5476 );
5477}
5478
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // A hover response whose parts are all empty or whitespace-only must be
    // filtered out entirely rather than shown as blank hover content.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server responds with only degenerate hover parts: empty, spaces, newlines.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
5551
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // When code actions are requested with an explicit kind filter, only
    // actions of the requested kinds are returned even if the server offers more.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions of different kinds; only one matches the filter.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request only SOURCE_ORGANIZE_IMPORTS actions over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // SOURCE_FIX_ALL must have been filtered out.
    let code_actions = code_actions_task.await.unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
5630
5631#[gpui::test]
5632async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5633 init_test(cx);
5634
5635 let fs = FakeFs::new(cx.executor());
5636 fs.insert_tree(
5637 path!("/dir"),
5638 json!({
5639 "a.tsx": "a",
5640 }),
5641 )
5642 .await;
5643
5644 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5645
5646 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5647 language_registry.add(tsx_lang());
5648 let language_server_names = [
5649 "TypeScriptServer",
5650 "TailwindServer",
5651 "ESLintServer",
5652 "NoActionsCapabilitiesServer",
5653 ];
5654
5655 let mut language_server_rxs = [
5656 language_registry.register_fake_lsp(
5657 "tsx",
5658 FakeLspAdapter {
5659 name: language_server_names[0],
5660 capabilities: lsp::ServerCapabilities {
5661 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5662 ..lsp::ServerCapabilities::default()
5663 },
5664 ..FakeLspAdapter::default()
5665 },
5666 ),
5667 language_registry.register_fake_lsp(
5668 "tsx",
5669 FakeLspAdapter {
5670 name: language_server_names[1],
5671 capabilities: lsp::ServerCapabilities {
5672 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5673 ..lsp::ServerCapabilities::default()
5674 },
5675 ..FakeLspAdapter::default()
5676 },
5677 ),
5678 language_registry.register_fake_lsp(
5679 "tsx",
5680 FakeLspAdapter {
5681 name: language_server_names[2],
5682 capabilities: lsp::ServerCapabilities {
5683 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5684 ..lsp::ServerCapabilities::default()
5685 },
5686 ..FakeLspAdapter::default()
5687 },
5688 ),
5689 language_registry.register_fake_lsp(
5690 "tsx",
5691 FakeLspAdapter {
5692 name: language_server_names[3],
5693 capabilities: lsp::ServerCapabilities {
5694 code_action_provider: None,
5695 ..lsp::ServerCapabilities::default()
5696 },
5697 ..FakeLspAdapter::default()
5698 },
5699 ),
5700 ];
5701
5702 let (buffer, _handle) = project
5703 .update(cx, |p, cx| {
5704 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5705 })
5706 .await
5707 .unwrap();
5708 cx.executor().run_until_parked();
5709
5710 let mut servers_with_actions_requests = HashMap::default();
5711 for i in 0..language_server_names.len() {
5712 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5713 panic!(
5714 "Failed to get language server #{i} with name {}",
5715 &language_server_names[i]
5716 )
5717 });
5718 let new_server_name = new_server.server.name();
5719
5720 assert!(
5721 !servers_with_actions_requests.contains_key(&new_server_name),
5722 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5723 );
5724 match new_server_name.0.as_ref() {
5725 "TailwindServer" | "TypeScriptServer" => {
5726 servers_with_actions_requests.insert(
5727 new_server_name.clone(),
5728 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
5729 move |_, _| {
5730 let name = new_server_name.clone();
5731 async move {
5732 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5733 lsp::CodeAction {
5734 title: format!("{name} code action"),
5735 ..lsp::CodeAction::default()
5736 },
5737 )]))
5738 }
5739 },
5740 ),
5741 );
5742 }
5743 "ESLintServer" => {
5744 servers_with_actions_requests.insert(
5745 new_server_name,
5746 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
5747 |_, _| async move { Ok(None) },
5748 ),
5749 );
5750 }
5751 "NoActionsCapabilitiesServer" => {
5752 let _never_handled = new_server
5753 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5754 panic!(
5755 "Should not call for code actions server with no corresponding capabilities"
5756 )
5757 });
5758 }
5759 unexpected => panic!("Unexpected server name: {unexpected}"),
5760 }
5761 }
5762
5763 let code_actions_task = project.update(cx, |project, cx| {
5764 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5765 });
5766
5767 // cx.run_until_parked();
5768 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5769 |mut code_actions_request| async move {
5770 code_actions_request
5771 .next()
5772 .await
5773 .expect("All code actions requests should have been triggered")
5774 },
5775 ))
5776 .await;
5777 assert_eq!(
5778 vec!["TailwindServer code action", "TypeScriptServer code action"],
5779 code_actions_task
5780 .await
5781 .unwrap()
5782 .into_iter()
5783 .map(|code_action| code_action.lsp_action.title().to_owned())
5784 .sorted()
5785 .collect::<Vec<_>>(),
5786 "Should receive code actions responses from all related servers with hover capabilities"
5787 );
5788}
5789
5790#[gpui::test]
5791async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5792 init_test(cx);
5793
5794 let fs = FakeFs::new(cx.executor());
5795 fs.insert_tree(
5796 "/dir",
5797 json!({
5798 "a.rs": "let a = 1;",
5799 "b.rs": "let b = 2;",
5800 "c.rs": "let c = 2;",
5801 }),
5802 )
5803 .await;
5804
5805 let project = Project::test(
5806 fs,
5807 [
5808 "/dir/a.rs".as_ref(),
5809 "/dir/b.rs".as_ref(),
5810 "/dir/c.rs".as_ref(),
5811 ],
5812 cx,
5813 )
5814 .await;
5815
5816 // check the initial state and get the worktrees
5817 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5818 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5819 assert_eq!(worktrees.len(), 3);
5820
5821 let worktree_a = worktrees[0].read(cx);
5822 let worktree_b = worktrees[1].read(cx);
5823 let worktree_c = worktrees[2].read(cx);
5824
5825 // check they start in the right order
5826 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5827 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5828 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5829
5830 (
5831 worktrees[0].clone(),
5832 worktrees[1].clone(),
5833 worktrees[2].clone(),
5834 )
5835 });
5836
5837 // move first worktree to after the second
5838 // [a, b, c] -> [b, a, c]
5839 project
5840 .update(cx, |project, cx| {
5841 let first = worktree_a.read(cx);
5842 let second = worktree_b.read(cx);
5843 project.move_worktree(first.id(), second.id(), cx)
5844 })
5845 .expect("moving first after second");
5846
5847 // check the state after moving
5848 project.update(cx, |project, cx| {
5849 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5850 assert_eq!(worktrees.len(), 3);
5851
5852 let first = worktrees[0].read(cx);
5853 let second = worktrees[1].read(cx);
5854 let third = worktrees[2].read(cx);
5855
5856 // check they are now in the right order
5857 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5858 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5859 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5860 });
5861
5862 // move the second worktree to before the first
5863 // [b, a, c] -> [a, b, c]
5864 project
5865 .update(cx, |project, cx| {
5866 let second = worktree_a.read(cx);
5867 let first = worktree_b.read(cx);
5868 project.move_worktree(first.id(), second.id(), cx)
5869 })
5870 .expect("moving second before first");
5871
5872 // check the state after moving
5873 project.update(cx, |project, cx| {
5874 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5875 assert_eq!(worktrees.len(), 3);
5876
5877 let first = worktrees[0].read(cx);
5878 let second = worktrees[1].read(cx);
5879 let third = worktrees[2].read(cx);
5880
5881 // check they are now in the right order
5882 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5883 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5884 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5885 });
5886
5887 // move the second worktree to after the third
5888 // [a, b, c] -> [a, c, b]
5889 project
5890 .update(cx, |project, cx| {
5891 let second = worktree_b.read(cx);
5892 let third = worktree_c.read(cx);
5893 project.move_worktree(second.id(), third.id(), cx)
5894 })
5895 .expect("moving second after third");
5896
5897 // check the state after moving
5898 project.update(cx, |project, cx| {
5899 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5900 assert_eq!(worktrees.len(), 3);
5901
5902 let first = worktrees[0].read(cx);
5903 let second = worktrees[1].read(cx);
5904 let third = worktrees[2].read(cx);
5905
5906 // check they are now in the right order
5907 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5908 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5909 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5910 });
5911
5912 // move the third worktree to before the second
5913 // [a, c, b] -> [a, b, c]
5914 project
5915 .update(cx, |project, cx| {
5916 let third = worktree_c.read(cx);
5917 let second = worktree_b.read(cx);
5918 project.move_worktree(third.id(), second.id(), cx)
5919 })
5920 .expect("moving third before second");
5921
5922 // check the state after moving
5923 project.update(cx, |project, cx| {
5924 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5925 assert_eq!(worktrees.len(), 3);
5926
5927 let first = worktrees[0].read(cx);
5928 let second = worktrees[1].read(cx);
5929 let third = worktrees[2].read(cx);
5930
5931 // check they are now in the right order
5932 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5933 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5934 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5935 });
5936
5937 // move the first worktree to after the third
5938 // [a, b, c] -> [b, c, a]
5939 project
5940 .update(cx, |project, cx| {
5941 let first = worktree_a.read(cx);
5942 let third = worktree_c.read(cx);
5943 project.move_worktree(first.id(), third.id(), cx)
5944 })
5945 .expect("moving first after third");
5946
5947 // check the state after moving
5948 project.update(cx, |project, cx| {
5949 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5950 assert_eq!(worktrees.len(), 3);
5951
5952 let first = worktrees[0].read(cx);
5953 let second = worktrees[1].read(cx);
5954 let third = worktrees[2].read(cx);
5955
5956 // check they are now in the right order
5957 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5958 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5959 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5960 });
5961
5962 // move the third worktree to before the first
5963 // [b, c, a] -> [a, b, c]
5964 project
5965 .update(cx, |project, cx| {
5966 let third = worktree_a.read(cx);
5967 let first = worktree_b.read(cx);
5968 project.move_worktree(third.id(), first.id(), cx)
5969 })
5970 .expect("moving third before first");
5971
5972 // check the state after moving
5973 project.update(cx, |project, cx| {
5974 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5975 assert_eq!(worktrees.len(), 3);
5976
5977 let first = worktrees[0].read(cx);
5978 let second = worktrees[1].read(cx);
5979 let third = worktrees[2].read(cx);
5980
5981 // check they are now in the right order
5982 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5983 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5984 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5985 });
5986}
5987
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that an unstaged diff (working copy vs. index) reports the
    // expected hunks, and that it updates when the index contents change.
    init_test(cx);

    // Index (staged) version of the file.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy version: one added line and one modified line vs. the index.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Let the diff recalculation finish, then check the hunks against the
    // index base text: one addition and one modification.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Change the index so that only the println line differs from the buffer.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    // The unstaged diff should recompute against the new index contents,
    // leaving a single added hunk.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
6085
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that an uncommitted diff (working copy vs. HEAD) tracks both
    // HEAD and index changes, including the secondary (staged/unstaged) status
    // of each hunk, and handles files deleted from the working copy.
    init_test(cx);

    // Three versions of the same file: HEAD, index, and working copy.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and index but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // vs HEAD: the comment line is added (and not yet staged — it differs from
    // the index too), the println line is modified (already staged).
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as a deletion; still present in the index, so the
    // deletion is unstaged (has a secondary hunk).
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk now reads as staged (no secondary hunk).
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6263
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    // Verifies the hunk staging lifecycle: optimistic pending state, the
    // events emitted while the index write is in flight, rollback when the
    // index write fails, and batching of multiple staging operations.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // vs HEAD: "zero" deleted, "two" and "four" modified.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index agree, so every hunk starts out unstaged.
    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The index write hasn't landed yet, so the hunk shows a pending
        // removal of its secondary (unstaged) hunk.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // Optimistic pending state again, even though the write will fail.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6603
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    // Verifies that staging stays consistent when filesystem events for index
    // writes arrive late or interleave with further staging operations. The
    // pinned seeds reproduce orderings that previously exposed races.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // vs HEAD: "zero" deleted, "two" and "four" modified.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // With the FS event suppressed, the hunk stays in the pending state.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both staged hunks are still pending; no event has been delivered.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6796
#[gpui::test]
async fn test_staging_lots_of_hunks_fast(cx: &mut gpui::TestAppContext) {
    // Stress test: stage and then unstage 100 hunks with one call per hunk,
    // verifying that the optimistic pending states and the final settled
    // states are correct for every hunk.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every 5th line of the 500-line file is replaced, producing 100
    // single-line modification hunks.
    let different_lines = (0..500)
        .step_by(5)
        .map(|i| format!("diff {}\n", i))
        .collect::<Vec<String>>();
    let committed_contents = (0..500).map(|i| format!("{}\n", i)).collect::<String>();
    let file_contents = (0..500)
        .map(|i| {
            if i % 5 == 0 {
                different_lines[i / 5].clone()
            } else {
                format!("{}\n", i)
            }
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index agree, so all hunks start out unstaged.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Expected hunk list; only the status field is mutated between phases.
    let mut expected_hunks: Vec<(Range<u32>, String, String, DiffHunkStatus)> = (0..500)
        .step_by(5)
        .map(|i| {
            (
                i as u32..i as u32 + 1,
                format!("{}\n", i),
                different_lines[i / 5].clone(),
                DiffHunkStatus::modified(HasSecondaryHunk),
            )
        })
        .collect();

    // The hunks are initially unstaged
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkRemovalPending);
    }

    // Stage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        }

        // All 100 hunks should be simultaneously pending.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(NoSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkAdditionPending);
    }

    // Unstage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(false, &[hunk], &snapshot, true, cx);
        }

        // All 100 hunks should be simultaneously pending (in the other direction).
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks, again
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(HasSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });
}
6942
6943#[gpui::test]
6944async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
6945 init_test(cx);
6946
6947 let committed_contents = r#"
6948 fn main() {
6949 println!("hello from HEAD");
6950 }
6951 "#
6952 .unindent();
6953 let file_contents = r#"
6954 fn main() {
6955 println!("hello from the working copy");
6956 }
6957 "#
6958 .unindent();
6959
6960 let fs = FakeFs::new(cx.background_executor.clone());
6961 fs.insert_tree(
6962 "/dir",
6963 json!({
6964 ".git": {},
6965 "src": {
6966 "main.rs": file_contents,
6967 }
6968 }),
6969 )
6970 .await;
6971
6972 fs.set_head_for_repo(
6973 Path::new("/dir/.git"),
6974 &[("src/main.rs".into(), committed_contents.clone())],
6975 );
6976 fs.set_index_for_repo(
6977 Path::new("/dir/.git"),
6978 &[("src/main.rs".into(), committed_contents.clone())],
6979 );
6980
6981 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
6982
6983 let buffer = project
6984 .update(cx, |project, cx| {
6985 project.open_local_buffer("/dir/src/main.rs", cx)
6986 })
6987 .await
6988 .unwrap();
6989 let uncommitted_diff = project
6990 .update(cx, |project, cx| {
6991 project.open_uncommitted_diff(buffer.clone(), cx)
6992 })
6993 .await
6994 .unwrap();
6995
6996 cx.run_until_parked();
6997 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
6998 let snapshot = buffer.read(cx).snapshot();
6999 assert_hunks(
7000 uncommitted_diff.hunks(&snapshot, cx),
7001 &snapshot,
7002 &uncommitted_diff.base_text_string().unwrap(),
7003 &[(
7004 1..2,
7005 " println!(\"hello from HEAD\");\n",
7006 " println!(\"hello from the working copy\");\n",
7007 DiffHunkStatus {
7008 kind: DiffHunkStatusKind::Modified,
7009 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7010 },
7011 )],
7012 );
7013 });
7014}
7015
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Verifies project-path -> (repository, repo-relative path) resolution
    // with nested repositories, and that the mapping disappears when a
    // repository's .git directory is removed.
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    // Two nested repos: /root/dir1 and /root/dir1/deps/dep1. "c.txt" lives
    // outside any repository.
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    // Wait for the initial scan and any pending FS events so the git store
    // has seen both repositories before we query it.
    tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (worktree-relative path, expected (repo workdir, repo-relative path)).
        // A file inside a nested repo must resolve to the innermost repo.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).to_owned(), RepoPath::from(repo_path))
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, Path::new(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((
                        repo.read(cx)
                            .repository_entry
                            .work_directory_abs_path
                            .clone(),
                        repo_path,
                    ))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Deleting the outer repo's .git should make its files unresolvable.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, Path::new("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
7110
7111#[gpui::test]
7112async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7113 init_test(cx);
7114 let fs = FakeFs::new(cx.background_executor.clone());
7115 fs.insert_tree(
7116 path!("/root"),
7117 json!({
7118 "home": {
7119 ".git": {},
7120 "project": {
7121 "a.txt": "A"
7122 },
7123 },
7124 }),
7125 )
7126 .await;
7127 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7128
7129 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7130 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7131 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7132 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7133 .await;
7134 tree.flush_fs_events(cx).await;
7135
7136 project.read_with(cx, |project, cx| {
7137 let containing = project
7138 .git_store()
7139 .read(cx)
7140 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7141 assert!(containing.is_none());
7142 });
7143
7144 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7145 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7146 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7147 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7148 .await;
7149 tree.flush_fs_events(cx).await;
7150
7151 project.read_with(cx, |project, cx| {
7152 let containing = project
7153 .git_store()
7154 .read(cx)
7155 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7156 assert_eq!(
7157 containing
7158 .unwrap()
7159 .0
7160 .read(cx)
7161 .repository_entry
7162 .work_directory_abs_path,
7163 Path::new(path!("/root/home"))
7164 );
7165 });
7166}
7167
7168async fn search(
7169 project: &Entity<Project>,
7170 query: SearchQuery,
7171 cx: &mut gpui::TestAppContext,
7172) -> Result<HashMap<String, Vec<Range<usize>>>> {
7173 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
7174 let mut results = HashMap::default();
7175 while let Ok(search_result) = search_rx.recv().await {
7176 match search_result {
7177 SearchResult::Buffer { buffer, ranges } => {
7178 results.entry(buffer).or_insert(ranges);
7179 }
7180 SearchResult::LimitReached => {}
7181 }
7182 }
7183 Ok(results
7184 .into_iter()
7185 .map(|(buffer, ranges)| {
7186 buffer.update(cx, |buffer, cx| {
7187 let path = buffer
7188 .file()
7189 .unwrap()
7190 .full_path(cx)
7191 .to_string_lossy()
7192 .to_string();
7193 let ranges = ranges
7194 .into_iter()
7195 .map(|range| range.to_offset(buffer))
7196 .collect::<Vec<_>>();
7197 (path, ranges)
7198 })
7199 })
7200 .collect())
7201}
7202
7203pub fn init_test(cx: &mut gpui::TestAppContext) {
7204 if std::env::var("RUST_LOG").is_ok() {
7205 env_logger::try_init().ok();
7206 }
7207
7208 cx.update(|cx| {
7209 let settings_store = SettingsStore::test(cx);
7210 cx.set_global(settings_store);
7211 release_channel::init(SemanticVersion::default(), cx);
7212 language::init(cx);
7213 Project::init_settings(cx);
7214 });
7215}
7216
7217fn json_lang() -> Arc<Language> {
7218 Arc::new(Language::new(
7219 LanguageConfig {
7220 name: "JSON".into(),
7221 matcher: LanguageMatcher {
7222 path_suffixes: vec!["json".to_string()],
7223 ..Default::default()
7224 },
7225 ..Default::default()
7226 },
7227 None,
7228 ))
7229}
7230
7231fn js_lang() -> Arc<Language> {
7232 Arc::new(Language::new(
7233 LanguageConfig {
7234 name: "JavaScript".into(),
7235 matcher: LanguageMatcher {
7236 path_suffixes: vec!["js".to_string()],
7237 ..Default::default()
7238 },
7239 ..Default::default()
7240 },
7241 None,
7242 ))
7243}
7244
7245fn rust_lang() -> Arc<Language> {
7246 Arc::new(Language::new(
7247 LanguageConfig {
7248 name: "Rust".into(),
7249 matcher: LanguageMatcher {
7250 path_suffixes: vec!["rs".to_string()],
7251 ..Default::default()
7252 },
7253 ..Default::default()
7254 },
7255 Some(tree_sitter_rust::LANGUAGE.into()),
7256 ))
7257}
7258
7259fn typescript_lang() -> Arc<Language> {
7260 Arc::new(Language::new(
7261 LanguageConfig {
7262 name: "TypeScript".into(),
7263 matcher: LanguageMatcher {
7264 path_suffixes: vec!["ts".to_string()],
7265 ..Default::default()
7266 },
7267 ..Default::default()
7268 },
7269 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
7270 ))
7271}
7272
7273fn tsx_lang() -> Arc<Language> {
7274 Arc::new(Language::new(
7275 LanguageConfig {
7276 name: "tsx".into(),
7277 matcher: LanguageMatcher {
7278 path_suffixes: vec!["tsx".to_string()],
7279 ..Default::default()
7280 },
7281 ..Default::default()
7282 },
7283 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
7284 ))
7285}
7286
7287fn get_all_tasks(
7288 project: &Entity<Project>,
7289 task_contexts: &TaskContexts,
7290 cx: &mut App,
7291) -> Vec<(TaskSourceKind, ResolvedTask)> {
7292 let (mut old, new) = project.update(cx, |project, cx| {
7293 project
7294 .task_store
7295 .read(cx)
7296 .task_inventory()
7297 .unwrap()
7298 .read(cx)
7299 .used_and_current_resolved_tasks(task_contexts, cx)
7300 });
7301 old.extend(new);
7302 old
7303}