1#![allow(clippy::format_collect)]
2
3use crate::{task_inventory::TaskContexts, task_store::TaskSettingsLocation, Event, *};
4use buffer_diff::{
5 assert_hunks, BufferDiffEvent, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
6};
7use fs::FakeFs;
8use futures::{future, StreamExt};
9use git::repository::RepoPath;
10use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
11use http_client::Url;
12use language::{
13 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
14 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
15 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
16 OffsetRangeExt, Point, ToPoint,
17};
18use lsp::{
19 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
20 NumberOrString, TextDocumentEdit, WillRenameFiles,
21};
22use parking_lot::Mutex;
23use paths::tasks_file;
24use pretty_assertions::{assert_eq, assert_matches};
25use serde_json::json;
26#[cfg(not(windows))]
27use std::os;
28use std::{mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
29use task::{ResolvedTask, TaskContext};
30use unindent::Unindent as _;
31use util::{
32 assert_set_eq, path,
33 paths::PathMatcher,
34 separator,
35 test::{marked_text_offsets, TempTree},
36 uri, TryFutureExt as _,
37};
38use worktree::WorktreeModelHandle as _;
39
40#[gpui::test]
41async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
42 cx.executor().allow_parking();
43
44 let (tx, mut rx) = futures::channel::mpsc::unbounded();
45 let _thread = std::thread::spawn(move || {
46 #[cfg(not(target_os = "windows"))]
47 std::fs::metadata("/tmp").unwrap();
48 #[cfg(target_os = "windows")]
49 std::fs::metadata("C:/Windows").unwrap();
50 std::thread::sleep(Duration::from_millis(1000));
51 tx.unbounded_send(1).unwrap();
52 });
53 rx.next().await.unwrap();
54}
55
56#[gpui::test]
57async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
58 cx.executor().allow_parking();
59
60 let io_task = smol::unblock(move || {
61 println!("sleeping on thread {:?}", std::thread::current().id());
62 std::thread::sleep(Duration::from_millis(10));
63 1
64 });
65
66 let task = cx.foreground_executor().spawn(async move {
67 io_task.await;
68 });
69
70 task.await;
71}
72
73#[cfg(not(windows))]
74#[gpui::test]
75async fn test_symlinks(cx: &mut gpui::TestAppContext) {
76 init_test(cx);
77 cx.executor().allow_parking();
78
79 let dir = TempTree::new(json!({
80 "root": {
81 "apple": "",
82 "banana": {
83 "carrot": {
84 "date": "",
85 "endive": "",
86 }
87 },
88 "fennel": {
89 "grape": "",
90 }
91 }
92 }));
93
94 let root_link_path = dir.path().join("root_link");
95 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
96 os::unix::fs::symlink(
97 dir.path().join("root/fennel"),
98 dir.path().join("root/finnochio"),
99 )
100 .unwrap();
101
102 let project = Project::test(
103 Arc::new(RealFs::new(None, cx.executor())),
104 [root_link_path.as_ref()],
105 cx,
106 )
107 .await;
108
109 project.update(cx, |project, cx| {
110 let tree = project.worktrees(cx).next().unwrap().read(cx);
111 assert_eq!(tree.file_count(), 5);
112 assert_eq!(
113 tree.inode_for_path("fennel/grape"),
114 tree.inode_for_path("finnochio/grape")
115 );
116 });
117}
118
// Verifies `.editorconfig` support and its precedence rules:
// `.editorconfig` values override `.zed/settings.json`, a nested
// `.editorconfig` overrides its parent, `tab_width` is used when
// `indent_size` is absent, and files not matching any glob fall back to the
// Zed settings.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
            root = true
            [*.rs]
            indent_style = tab
            indent_size = 3
            end_of_line = lf
            insert_final_newline = true
            trim_trailing_whitespace = true
            [*.js]
            tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
                [*.rs]
                indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp directory into a FakeFs so the project scans it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by .editorconfig's glob "*.rs",
        // so it keeps the tab_size from .zed/settings.json.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
208
// Verifies per-directory `.zed` configuration: settings from a nested
// `.zed/settings.json` shadow the worktree root's, tasks from both `.zed`
// directories are surfaced, a recently-scheduled task is promoted to the top
// of the list, and global file-based tasks are merged in last.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let the worktree scan and the settings observers settle.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    // Source kind for tasks declared in the worktree root's `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            // a/ inherits the root settings; b/ is shadowed by b/.zed.
            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files contribute; the nested one sorts first here.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Schedule the root task and install a global tasks file; scheduling
    // should promote the task, and the global file should add a third entry.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                    settings::TaskKind::Script,
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The just-scheduled root task now sorts first, followed by the nested
    // worktree task, and the global file-based task comes last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
414
415#[gpui::test]
416async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
417 init_test(cx);
418 TaskStore::init(None);
419
420 let fs = FakeFs::new(cx.executor());
421 fs.insert_tree(
422 path!("/dir"),
423 json!({
424 ".zed": {
425 "tasks.json": r#"[{
426 "label": "test worktree root",
427 "command": "echo $ZED_WORKTREE_ROOT"
428 }]"#,
429 },
430 "a": {
431 "a.rs": "fn a() {\n A\n}"
432 },
433 }),
434 )
435 .await;
436
437 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
438 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
439
440 cx.executor().run_until_parked();
441 let worktree_id = cx.update(|cx| {
442 project.update(cx, |project, cx| {
443 project.worktrees(cx).next().unwrap().read(cx).id()
444 })
445 });
446
447 let active_non_worktree_item_tasks = cx.update(|cx| {
448 get_all_tasks(
449 &project,
450 &TaskContexts {
451 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
452 active_worktree_context: None,
453 other_worktree_contexts: Vec::new(),
454 },
455 cx,
456 )
457 });
458 assert!(
459 active_non_worktree_item_tasks.is_empty(),
460 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
461 );
462
463 let active_worktree_tasks = cx.update(|cx| {
464 get_all_tasks(
465 &project,
466 &TaskContexts {
467 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
468 active_worktree_context: Some((worktree_id, {
469 let mut worktree_context = TaskContext::default();
470 worktree_context
471 .task_variables
472 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
473 worktree_context
474 })),
475 other_worktree_contexts: Vec::new(),
476 },
477 cx,
478 )
479 });
480 assert_eq!(
481 active_worktree_tasks
482 .into_iter()
483 .map(|(source_kind, task)| {
484 let resolved = task.resolved.unwrap();
485 (source_kind, resolved.command)
486 })
487 .collect::<Vec<_>>(),
488 vec![(
489 TaskSourceKind::Worktree {
490 id: worktree_id,
491 directory_in_worktree: PathBuf::from(separator!(".zed")),
492 id_base: if cfg!(windows) {
493 "local worktree tasks from directory \".zed\"".into()
494 } else {
495 "local worktree tasks from directory \".zed\"".into()
496 },
497 },
498 "echo /dir".to_string(),
499 )]
500 );
501}
502
// End-to-end exercise of language-server lifecycle management: servers start
// lazily per language, buffers are configured from server capabilities,
// edits/saves/renames are routed only to the matching servers, renames across
// extensions migrate a buffer between servers (clearing its diagnostics and
// resetting its document version), and restarting servers reopens documents.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server: completion triggers "." and "::", save notifications on.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server: completion trigger ":", save notifications on.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, hence no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // Only the rust buffer's change arrives at the rust server; the TOML edit
    // is never reported to it.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // The rust server sees the old document closed and the new one opened.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed one diagnostic on the renamed buffer so we can observe it being
    // cleared when the buffer's language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers should receive a shutdown request before new instances start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
904
// Verifies the `workspace/didChangeWatchedFiles` flow: gitignored directories
// are not scanned until a language server registers a watcher covering them,
// after which matching paths are loaded and FS mutations matching the watch
// globs (and only those) are forwarded to the server as FileEvents.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Snapshot the read_dir count so we can measure the extra scans triggered
    // by the watcher registration below.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: an exact file, a brace glob under src/, and a
    // recursive glob inside the ignored target/y directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            // Keep events sorted by URI for deterministic assertions.
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registration alone produces no events, but causes four extra directory reads.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
1104
// Verifies that diagnostics published for two single-file worktrees are routed
// to the correct buffer: each buffer's chunks carry only the severity that was
// published for its own file.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open each file as its own single-file worktree.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Publish an ERROR on a.rs and a WARNING on b.rs, both covering the
    // variable name at columns 4..5.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer shows only its own diagnostic, with the published severity.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1206
1207#[gpui::test]
1208async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1209 init_test(cx);
1210
1211 let fs = FakeFs::new(cx.executor());
1212 fs.insert_tree(
1213 path!("/root"),
1214 json!({
1215 "dir": {
1216 ".git": {
1217 "HEAD": "ref: refs/heads/main",
1218 },
1219 ".gitignore": "b.rs",
1220 "a.rs": "let a = 1;",
1221 "b.rs": "let b = 2;",
1222 },
1223 "other.rs": "let b = c;"
1224 }),
1225 )
1226 .await;
1227
1228 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1229 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1230 let (worktree, _) = project
1231 .update(cx, |project, cx| {
1232 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1233 })
1234 .await
1235 .unwrap();
1236 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1237
1238 let (worktree, _) = project
1239 .update(cx, |project, cx| {
1240 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1241 })
1242 .await
1243 .unwrap();
1244 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1245
1246 let server_id = LanguageServerId(0);
1247 lsp_store.update(cx, |lsp_store, cx| {
1248 lsp_store
1249 .update_diagnostics(
1250 server_id,
1251 lsp::PublishDiagnosticsParams {
1252 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1253 version: None,
1254 diagnostics: vec![lsp::Diagnostic {
1255 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1256 severity: Some(lsp::DiagnosticSeverity::ERROR),
1257 message: "unused variable 'b'".to_string(),
1258 ..Default::default()
1259 }],
1260 },
1261 &[],
1262 cx,
1263 )
1264 .unwrap();
1265 lsp_store
1266 .update_diagnostics(
1267 server_id,
1268 lsp::PublishDiagnosticsParams {
1269 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1270 version: None,
1271 diagnostics: vec![lsp::Diagnostic {
1272 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1273 severity: Some(lsp::DiagnosticSeverity::ERROR),
1274 message: "unknown variable 'c'".to_string(),
1275 ..Default::default()
1276 }],
1277 },
1278 &[],
1279 cx,
1280 )
1281 .unwrap();
1282 });
1283
1284 let main_ignored_buffer = project
1285 .update(cx, |project, cx| {
1286 project.open_buffer((main_worktree_id, "b.rs"), cx)
1287 })
1288 .await
1289 .unwrap();
1290 main_ignored_buffer.update(cx, |buffer, _| {
1291 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1292 assert_eq!(
1293 chunks
1294 .iter()
1295 .map(|(s, d)| (s.as_str(), *d))
1296 .collect::<Vec<_>>(),
1297 &[
1298 ("let ", None),
1299 ("b", Some(DiagnosticSeverity::ERROR)),
1300 (" = 2;", None),
1301 ],
1302 "Gigitnored buffers should still get in-buffer diagnostics",
1303 );
1304 });
1305 let other_buffer = project
1306 .update(cx, |project, cx| {
1307 project.open_buffer((other_worktree_id, ""), cx)
1308 })
1309 .await
1310 .unwrap();
1311 other_buffer.update(cx, |buffer, _| {
1312 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1313 assert_eq!(
1314 chunks
1315 .iter()
1316 .map(|(s, d)| (s.as_str(), *d))
1317 .collect::<Vec<_>>(),
1318 &[
1319 ("let b = ", None),
1320 ("c", Some(DiagnosticSeverity::ERROR)),
1321 (";", None),
1322 ],
1323 "Buffers from hidden projects should still get in-buffer diagnostics"
1324 );
1325 });
1326
1327 project.update(cx, |project, cx| {
1328 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1329 assert_eq!(
1330 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1331 vec![(
1332 ProjectPath {
1333 worktree_id: main_worktree_id,
1334 path: Arc::from(Path::new("b.rs")),
1335 },
1336 server_id,
1337 DiagnosticSummary {
1338 error_count: 1,
1339 warning_count: 0,
1340 }
1341 )]
1342 );
1343 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1344 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1345 });
1346}
1347
// Exercises the disk-based-diagnostics progress protocol: a server that
// reports work under the configured progress token should produce
// DiskBasedDiagnosticsStarted/Finished events, and re-publishing identical
// empty diagnostics must not emit a redundant DiagnosticsUpdated event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting progress under the configured token marks disk-based
    // diagnostics as in-flight.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the diagnosed buffer afterwards shows the stored diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // The second identical (empty) publication must be deduplicated: no
    // further event should be pending.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1483
// Restarting a language server while its disk-based diagnostics are still
// in progress must not leave the project stuck: once the replacement server's
// own progress finishes, no server is reported as running diagnostics, even
// though the old server never ended its progress.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server (id 1) is reported as running diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1570
// Restarting a language server must clear all diagnostics it previously
// published, both from the open buffer and from the project summary.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Wait for the notification to be processed, then verify both the buffer
    // and the project-level summary reflect the error.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1651
// A server reporting diagnostics against an unknown (stale/bogus) buffer
// version must not corrupt the version tracking: after restarting, the new
// server should receive the buffer freshly opened at version 0.
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The replacement server re-opens the buffer at version 0, unaffected by
    // the bogus version previously reported.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
1691
// Cancelling language-server work for a buffer should send a
// WorkDoneProgressCancel only for progress that was started as cancellable,
// skipping non-cancellable tokens.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // One non-cancellable token and one cancellable token in flight.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable progress token should be cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1756
// Toggling `enable_language_server` in per-language settings should stop and
// start only the affected server: disabling Rust exits only the Rust server;
// re-enabling Rust while disabling JavaScript restarts the former and exits
// the latter.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The new Rust server re-opens the Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1874
// Verifies that diagnostics published against an older document version are
// translated through subsequent buffer edits: ranges move with the text,
// overlapping diagnostics highlight correctly, and out-of-order publications
// resolve against the right version.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Ranges reported at rows 1-2 of the original version now land at
        // rows 3-4, two lines below, after the "\n\n" insertion.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider WARNING sorts before the contained ERROR, and the ERROR
        // takes precedence where the two overlap.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2160
// Verifies how zero-width diagnostic ranges are rendered: an empty range is
// widened so that something visible gets highlighted.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Insert two diagnostics with empty (zero-width) ranges: one mid-line
    // (row 0, col 10) and one at end-of-line (row 1, col 10).
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                ..Default::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                ..Default::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2233
// Verifies that diagnostics reported by different language servers for the
// same file are tracked independently and both counted in the summary.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Server 0 reports an error over the same range as server 1 below.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // Server 1 reports its own error; it must not replace server 0's.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' errors should be counted.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2290
// Verifies that LSP edits computed against an older document version are
// correctly rebased onto the current buffer contents: the buffer is edited
// after the server snapshots it, and the server's edits (addressed in the
// old version's coordinates) must still land in the intended places.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the edits
    // below will be tagged with this (now stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f1();
            }
            fn b() {
                // inside second function f2();
            }
            fn c() {
                f3();
            }
            "
            .unindent()
        );
    });

    // The server's edits use coordinates from the original (pre-edit) text.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits must preserve both the server's changes and
    // the user's concurrent edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f10();
            }
            fn b() {
                // inside second function f200();
            }
            fn c() {
                f4000();
            }
            "
            .unindent()
        );
    });
}
2445
// Verifies that a large, overlapping set of LSP edits (the way rust-analyzer
// expresses a small change as a big diff) is minimized into a small set of
// non-overlapping buffer edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four overlapping LSP edits should collapse to just two
        // minimal buffer edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2556
// Verifies that malformed LSP edits — out-of-order, with inverted ranges, or
// pointing past the end of the document — are normalized and clipped rather
// than rejected or applied incorrectly.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start is after end.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is beyond the end of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The invalid edits should be normalized to the same minimal pair
        // of buffer edits as in the well-formed case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2663
2664fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2665 buffer: &Buffer,
2666 range: Range<T>,
2667) -> Vec<(String, Option<DiagnosticSeverity>)> {
2668 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2669 for chunk in buffer.snapshot().chunks(range, true) {
2670 if chunks.last().map_or(false, |prev_chunk| {
2671 prev_chunk.1 == chunk.diagnostic_severity
2672 }) {
2673 chunks.last_mut().unwrap().0.push_str(chunk.text);
2674 } else {
2675 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2676 }
2677 }
2678 chunks
2679}
2680
// Verifies go-to-definition into a file outside the project: the target file
// is added as an invisible worktree, and that worktree is released once the
// definition handle is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server resolves the definition to a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // The target file was added as an invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: lists each worktree's absolute path and visibility.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2778
// Verifies that when a completion item carries an explicit `text_edit`, that
// edit's text and range win over both `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item provides all three of label, insert_text, and text_edit;
    // only the text_edit should be used.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
2856
// Verifies completion-list item defaults: when items omit `text_edit`, the
// list-level default `edit_range` supplies the replaced range, and the new
// text falls back to `insert_text` and then to `label`.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions.await.unwrap().unwrap();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // insert_text is used for the new text; edit_range for the old range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].old_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions.await.unwrap().unwrap();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no insert_text either, the label becomes the new text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].old_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
2982
// Verifies completion handling when neither the item nor the list defaults
// provide an edit range: the replaced range must be inferred from the word
// (or quoted string contents) around the cursor.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The replaced range is the word before the cursor ("fqn").
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The replaced range is the trailing word inside the string ("cmp"),
    // excluding the closing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3078
// Verifies that carriage returns in a server-provided completion's
// insert_text are normalized to bare newlines before insertion.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The insert_text contains both a lone "\r" and a "\r\n" sequence.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    assert_eq!(completions.len(), 1);
    // Both "\r" and "\r\n" should come through as "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3141
// Verifies the command-based code-action flow: when resolving an action
// yields a command instead of edits, the command is executed, and any
// `workspace/applyEdit` request the server makes during execution is
// captured into the returned project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // The server asks the editor to prepend "X" to a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3282
3283#[gpui::test(iterations = 10)]
3284async fn test_save_file(cx: &mut gpui::TestAppContext) {
3285 init_test(cx);
3286
3287 let fs = FakeFs::new(cx.executor());
3288 fs.insert_tree(
3289 path!("/dir"),
3290 json!({
3291 "file1": "the old contents",
3292 }),
3293 )
3294 .await;
3295
3296 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3297 let buffer = project
3298 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3299 .await
3300 .unwrap();
3301 buffer.update(cx, |buffer, cx| {
3302 assert_eq!(buffer.text(), "the old contents");
3303 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3304 });
3305
3306 project
3307 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3308 .await
3309 .unwrap();
3310
3311 let new_text = fs
3312 .load(Path::new(path!("/dir/file1")))
3313 .await
3314 .unwrap()
3315 .replace("\r\n", "\n");
3316 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3317}
3318
3319#[gpui::test(iterations = 30)]
3320async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3321 init_test(cx);
3322
3323 let fs = FakeFs::new(cx.executor().clone());
3324 fs.insert_tree(
3325 path!("/dir"),
3326 json!({
3327 "file1": "the original contents",
3328 }),
3329 )
3330 .await;
3331
3332 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3333 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3334 let buffer = project
3335 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3336 .await
3337 .unwrap();
3338
3339 // Simulate buffer diffs being slow, so that they don't complete before
3340 // the next file change occurs.
3341 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3342
3343 // Change the buffer's file on disk, and then wait for the file change
3344 // to be detected by the worktree, so that the buffer starts reloading.
3345 fs.save(
3346 path!("/dir/file1").as_ref(),
3347 &"the first contents".into(),
3348 Default::default(),
3349 )
3350 .await
3351 .unwrap();
3352 worktree.next_event(cx).await;
3353
3354 // Change the buffer's file again. Depending on the random seed, the
3355 // previous file change may still be in progress.
3356 fs.save(
3357 path!("/dir/file1").as_ref(),
3358 &"the second contents".into(),
3359 Default::default(),
3360 )
3361 .await
3362 .unwrap();
3363 worktree.next_event(cx).await;
3364
3365 cx.executor().run_until_parked();
3366 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3367 buffer.read_with(cx, |buffer, _| {
3368 assert_eq!(buffer.text(), on_disk_text);
3369 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3370 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3371 });
3372}
3373
3374#[gpui::test(iterations = 30)]
3375async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3376 init_test(cx);
3377
3378 let fs = FakeFs::new(cx.executor().clone());
3379 fs.insert_tree(
3380 path!("/dir"),
3381 json!({
3382 "file1": "the original contents",
3383 }),
3384 )
3385 .await;
3386
3387 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3388 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3389 let buffer = project
3390 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3391 .await
3392 .unwrap();
3393
3394 // Simulate buffer diffs being slow, so that they don't complete before
3395 // the next file change occurs.
3396 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3397
3398 // Change the buffer's file on disk, and then wait for the file change
3399 // to be detected by the worktree, so that the buffer starts reloading.
3400 fs.save(
3401 path!("/dir/file1").as_ref(),
3402 &"the first contents".into(),
3403 Default::default(),
3404 )
3405 .await
3406 .unwrap();
3407 worktree.next_event(cx).await;
3408
3409 cx.executor()
3410 .spawn(cx.executor().simulate_random_delay())
3411 .await;
3412
3413 // Perform a noop edit, causing the buffer's version to increase.
3414 buffer.update(cx, |buffer, cx| {
3415 buffer.edit([(0..0, " ")], None, cx);
3416 buffer.undo(cx);
3417 });
3418
3419 cx.executor().run_until_parked();
3420 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3421 buffer.read_with(cx, |buffer, _| {
3422 let buffer_text = buffer.text();
3423 if buffer_text == on_disk_text {
3424 assert!(
3425 !buffer.is_dirty() && !buffer.has_conflict(),
3426 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3427 );
3428 }
3429 // If the file change occurred while the buffer was processing the first
3430 // change, the buffer will be in a conflicting state.
3431 else {
3432 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3433 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3434 }
3435 });
3436}
3437
3438#[gpui::test]
3439async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3440 init_test(cx);
3441
3442 let fs = FakeFs::new(cx.executor());
3443 fs.insert_tree(
3444 path!("/dir"),
3445 json!({
3446 "file1": "the old contents",
3447 }),
3448 )
3449 .await;
3450
3451 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3452 let buffer = project
3453 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3454 .await
3455 .unwrap();
3456 buffer.update(cx, |buffer, cx| {
3457 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3458 });
3459
3460 project
3461 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3462 .await
3463 .unwrap();
3464
3465 let new_text = fs
3466 .load(Path::new(path!("/dir/file1")))
3467 .await
3468 .unwrap()
3469 .replace("\r\n", "\n");
3470 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3471}
3472
3473#[gpui::test]
3474async fn test_save_as(cx: &mut gpui::TestAppContext) {
3475 init_test(cx);
3476
3477 let fs = FakeFs::new(cx.executor());
3478 fs.insert_tree("/dir", json!({})).await;
3479
3480 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3481
3482 let languages = project.update(cx, |project, _| project.languages().clone());
3483 languages.add(rust_lang());
3484
3485 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3486 buffer.update(cx, |buffer, cx| {
3487 buffer.edit([(0..0, "abc")], None, cx);
3488 assert!(buffer.is_dirty());
3489 assert!(!buffer.has_conflict());
3490 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3491 });
3492 project
3493 .update(cx, |project, cx| {
3494 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3495 let path = ProjectPath {
3496 worktree_id,
3497 path: Arc::from(Path::new("file1.rs")),
3498 };
3499 project.save_buffer_as(buffer.clone(), path, cx)
3500 })
3501 .await
3502 .unwrap();
3503 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3504
3505 cx.executor().run_until_parked();
3506 buffer.update(cx, |buffer, cx| {
3507 assert_eq!(
3508 buffer.file().unwrap().full_path(cx),
3509 Path::new("dir/file1.rs")
3510 );
3511 assert!(!buffer.is_dirty());
3512 assert!(!buffer.has_conflict());
3513 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3514 });
3515
3516 let opened_buffer = project
3517 .update(cx, |project, cx| {
3518 project.open_local_buffer("/dir/file1.rs", cx)
3519 })
3520 .await
3521 .unwrap();
3522 assert_eq!(opened_buffer, buffer);
3523}
3524
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Verifies that renames and deletions on the real filesystem preserve
    // worktree entry ids and keep open buffers pointed at the moved paths,
    // and that a remote replica of the worktree converges to the same state
    // when it applies the recorded update stream.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    // Uses a real temp directory (not FakeFs) so actual FS events fire.
    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: resolve a relative path to its worktree entry id, panicking if
    // the entry doesn't exist.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    // Capture entry ids before any renames, to check identity afterwards.
    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits, to replay on the remote.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the new directory layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids survive renames (including renames of ancestor directories).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers track their files' new paths; the deleted file's buffer
    // keeps its old path but reports DiskState::Deleted.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    // Replay the recorded update stream on the remote replica.
    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3690
3691#[gpui::test(iterations = 10)]
3692async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3693 init_test(cx);
3694
3695 let fs = FakeFs::new(cx.executor());
3696 fs.insert_tree(
3697 path!("/dir"),
3698 json!({
3699 "a": {
3700 "file1": "",
3701 }
3702 }),
3703 )
3704 .await;
3705
3706 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3707 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3708 let tree_id = tree.update(cx, |tree, _| tree.id());
3709
3710 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3711 project.update(cx, |project, cx| {
3712 let tree = project.worktrees(cx).next().unwrap();
3713 tree.read(cx)
3714 .entry_for_path(path)
3715 .unwrap_or_else(|| panic!("no entry for path {}", path))
3716 .id
3717 })
3718 };
3719
3720 let dir_id = id_for_path("a", cx);
3721 let file_id = id_for_path("a/file1", cx);
3722 let buffer = project
3723 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3724 .await
3725 .unwrap();
3726 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3727
3728 project
3729 .update(cx, |project, cx| {
3730 project.rename_entry(dir_id, Path::new("b"), cx)
3731 })
3732 .unwrap()
3733 .await
3734 .to_included()
3735 .unwrap();
3736 cx.executor().run_until_parked();
3737
3738 assert_eq!(id_for_path("b", cx), dir_id);
3739 assert_eq!(id_for_path("b/file1", cx), file_id);
3740 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3741}
3742
3743#[gpui::test]
3744async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3745 init_test(cx);
3746
3747 let fs = FakeFs::new(cx.executor());
3748 fs.insert_tree(
3749 "/dir",
3750 json!({
3751 "a.txt": "a-contents",
3752 "b.txt": "b-contents",
3753 }),
3754 )
3755 .await;
3756
3757 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3758
3759 // Spawn multiple tasks to open paths, repeating some paths.
3760 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3761 (
3762 p.open_local_buffer("/dir/a.txt", cx),
3763 p.open_local_buffer("/dir/b.txt", cx),
3764 p.open_local_buffer("/dir/a.txt", cx),
3765 )
3766 });
3767
3768 let buffer_a_1 = buffer_a_1.await.unwrap();
3769 let buffer_a_2 = buffer_a_2.await.unwrap();
3770 let buffer_b = buffer_b.await.unwrap();
3771 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3772 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3773
3774 // There is only one buffer per path.
3775 let buffer_a_id = buffer_a_1.entity_id();
3776 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3777
3778 // Open the same path again while it is still open.
3779 drop(buffer_a_1);
3780 let buffer_a_3 = project
3781 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3782 .await
3783 .unwrap();
3784
3785 // There's still only one buffer per path.
3786 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3787}
3788
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    // Exercises the buffer dirty/clean lifecycle and the events emitted at
    // each transition: editing dirties, saving cleans, restoring the saved
    // text cleans, and file deletion interacts with dirtiness as asserted
    // in each step below.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        // Record every buffer event except Operation, which is unrelated to
        // dirty-state tracking.
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version and mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only the first edit after the save triggers DirtyChanged;
        // the second edit emits Edited alone because the buffer was already
        // dirty.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, it is not considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[language::BufferEvent::FileHandleChanged]
    );

    // Buffer becomes dirty when edited.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(2..3, "")], None, cx);
        assert_eq!(buffer.is_dirty(), true);
    });
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // Buffer becomes clean again when all of its content is removed, because
    // the file was deleted.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..2, "")], None, cx);
        assert_eq!(buffer.is_empty(), true);
        assert_eq!(buffer.is_dirty(), false);
    });
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3970
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how a buffer responds to its file changing on disk:
    // a clean buffer reloads via a diff-based edit that preserves anchor
    // positions, while a dirty buffer keeps its contents and is flagged as
    // having a conflict.
    init_test(cx);

    // The 'ˇ' markers denote the offsets at which anchors will be created.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Create an anchor at each marked offset.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors should have moved to the corresponding marked offsets
        // in the new text, since the reload was applied as a minimal diff.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4053
4054#[gpui::test]
4055async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4056 init_test(cx);
4057
4058 let fs = FakeFs::new(cx.executor());
4059 fs.insert_tree(
4060 path!("/dir"),
4061 json!({
4062 "file1": "a\nb\nc\n",
4063 "file2": "one\r\ntwo\r\nthree\r\n",
4064 }),
4065 )
4066 .await;
4067
4068 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4069 let buffer1 = project
4070 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4071 .await
4072 .unwrap();
4073 let buffer2 = project
4074 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4075 .await
4076 .unwrap();
4077
4078 buffer1.update(cx, |buffer, _| {
4079 assert_eq!(buffer.text(), "a\nb\nc\n");
4080 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4081 });
4082 buffer2.update(cx, |buffer, _| {
4083 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4084 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4085 });
4086
4087 // Change a file's line endings on disk from unix to windows. The buffer's
4088 // state updates correctly.
4089 fs.save(
4090 path!("/dir/file1").as_ref(),
4091 &"aaa\nb\nc\n".into(),
4092 LineEnding::Windows,
4093 )
4094 .await
4095 .unwrap();
4096 cx.executor().run_until_parked();
4097 buffer1.update(cx, |buffer, _| {
4098 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4099 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4100 });
4101
4102 // Save a file with windows line endings. The file is written correctly.
4103 buffer2.update(cx, |buffer, cx| {
4104 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4105 });
4106 project
4107 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4108 .await
4109 .unwrap();
4110 assert_eq!(
4111 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4112 "one\r\ntwo\r\nthree\r\nfour\r\n",
4113 );
4114}
4115
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that published LSP diagnostics are collected into groups:
    // each primary diagnostic and the HINT diagnostics that mirror its
    // related information share a group id, with the primary flagged
    // `is_primary`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Publish five diagnostics: two primaries ("error 1" WARNING and
    // "error 2" ERROR) plus HINT diagnostics that point back at their
    // primaries via related information, as rust-analyzer does.
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Feed the diagnostics through the LSP store as server id 0.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics in the buffer, ordered by position. The "error 2"
    // family is group 0; the "error 1" family is group 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: the "error 2" primary plus both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1: the "error 1" primary plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
4358
// Renaming a worktree entry should drive the LSP file-operation protocol:
// the fake server registers filters for `*.rs` files and all folders, so
// renaming `one.rs` -> `three.rs` must trigger a `workspace/willRenameFiles`
// request (whose returned WorkspaceEdit the test records) followed by a
// `workspace/didRenameFiles` notification carrying the same old/new URIs.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // File-operation filters the server registers: `*.rs` files plus any
    // folder, both on the `file` scheme.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening the buffer starts the fake language server for this worktree.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; this future resolves only after the
    // willRenameFiles round-trip below has completed.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // WorkspaceEdit the server will answer willRenameFiles with; the
    // deliberately odd range/text/version make it easy to recognize.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set exactly once inside the handler, proving the request arrived.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request must describe exactly the one rename.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must also receive the
    // didRenameFiles notification with the same URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4487
// Covers the two-step LSP symbol-rename flow: `textDocument/prepareRename`
// yields the renameable range, then `textDocument/rename` returns a
// WorkspaceEdit spanning two buffers; the returned transaction must contain
// both buffers with the edits applied.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    // Advertise prepareRename support so the client issues it.
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Issue prepareRename at offset 7 (inside `ONE`), then install the
    // handler and await the handler stream before awaiting the response.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    // The LSP range (0,6)..(0,9) maps to buffer offsets 6..9 (`ONE`).
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server's WorkspaceEdit touches both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each affected buffer to its undo transaction;
    // both `one.rs` and `two.rs` must be present with edits applied.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4627
// Project-wide text search: hits are collected from files on disk, and open
// buffers with unsaved edits are searched using their in-memory contents.
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // On-disk state: "TWO" appears in two.rs (declaration) and three.rs (use).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit `four.rs` in memory without saving; the next search must pick up
    // the dirty buffer's text rather than the file on disk.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The new hits in four.rs are at offsets reflecting the edited text.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40]),
            (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
4702
4703#[gpui::test]
4704async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4705 init_test(cx);
4706
4707 let search_query = "file";
4708
4709 let fs = FakeFs::new(cx.executor());
4710 fs.insert_tree(
4711 path!("/dir"),
4712 json!({
4713 "one.rs": r#"// Rust file one"#,
4714 "one.ts": r#"// TypeScript file one"#,
4715 "two.rs": r#"// Rust file two"#,
4716 "two.ts": r#"// TypeScript file two"#,
4717 }),
4718 )
4719 .await;
4720 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4721
4722 assert!(
4723 search(
4724 &project,
4725 SearchQuery::text(
4726 search_query,
4727 false,
4728 true,
4729 false,
4730 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4731 Default::default(),
4732 None
4733 )
4734 .unwrap(),
4735 cx
4736 )
4737 .await
4738 .unwrap()
4739 .is_empty(),
4740 "If no inclusions match, no files should be returned"
4741 );
4742
4743 assert_eq!(
4744 search(
4745 &project,
4746 SearchQuery::text(
4747 search_query,
4748 false,
4749 true,
4750 false,
4751 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4752 Default::default(),
4753 None
4754 )
4755 .unwrap(),
4756 cx
4757 )
4758 .await
4759 .unwrap(),
4760 HashMap::from_iter([
4761 (separator!("dir/one.rs").to_string(), vec![8..12]),
4762 (separator!("dir/two.rs").to_string(), vec![8..12]),
4763 ]),
4764 "Rust only search should give only Rust files"
4765 );
4766
4767 assert_eq!(
4768 search(
4769 &project,
4770 SearchQuery::text(
4771 search_query,
4772 false,
4773 true,
4774 false,
4775 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4776 Default::default(),
4777 None,
4778 )
4779 .unwrap(),
4780 cx
4781 )
4782 .await
4783 .unwrap(),
4784 HashMap::from_iter([
4785 (separator!("dir/one.ts").to_string(), vec![14..18]),
4786 (separator!("dir/two.ts").to_string(), vec![14..18]),
4787 ]),
4788 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4789 );
4790
4791 assert_eq!(
4792 search(
4793 &project,
4794 SearchQuery::text(
4795 search_query,
4796 false,
4797 true,
4798 false,
4799 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
4800 .unwrap(),
4801 Default::default(),
4802 None,
4803 )
4804 .unwrap(),
4805 cx
4806 )
4807 .await
4808 .unwrap(),
4809 HashMap::from_iter([
4810 (separator!("dir/two.ts").to_string(), vec![14..18]),
4811 (separator!("dir/one.rs").to_string(), vec![8..12]),
4812 (separator!("dir/one.ts").to_string(), vec![14..18]),
4813 (separator!("dir/two.rs").to_string(), vec![8..12]),
4814 ]),
4815 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4816 );
4817}
4818
4819#[gpui::test]
4820async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4821 init_test(cx);
4822
4823 let search_query = "file";
4824
4825 let fs = FakeFs::new(cx.executor());
4826 fs.insert_tree(
4827 path!("/dir"),
4828 json!({
4829 "one.rs": r#"// Rust file one"#,
4830 "one.ts": r#"// TypeScript file one"#,
4831 "two.rs": r#"// Rust file two"#,
4832 "two.ts": r#"// TypeScript file two"#,
4833 }),
4834 )
4835 .await;
4836 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4837
4838 assert_eq!(
4839 search(
4840 &project,
4841 SearchQuery::text(
4842 search_query,
4843 false,
4844 true,
4845 false,
4846 Default::default(),
4847 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4848 None,
4849 )
4850 .unwrap(),
4851 cx
4852 )
4853 .await
4854 .unwrap(),
4855 HashMap::from_iter([
4856 (separator!("dir/one.rs").to_string(), vec![8..12]),
4857 (separator!("dir/one.ts").to_string(), vec![14..18]),
4858 (separator!("dir/two.rs").to_string(), vec![8..12]),
4859 (separator!("dir/two.ts").to_string(), vec![14..18]),
4860 ]),
4861 "If no exclusions match, all files should be returned"
4862 );
4863
4864 assert_eq!(
4865 search(
4866 &project,
4867 SearchQuery::text(
4868 search_query,
4869 false,
4870 true,
4871 false,
4872 Default::default(),
4873 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4874 None,
4875 )
4876 .unwrap(),
4877 cx
4878 )
4879 .await
4880 .unwrap(),
4881 HashMap::from_iter([
4882 (separator!("dir/one.ts").to_string(), vec![14..18]),
4883 (separator!("dir/two.ts").to_string(), vec![14..18]),
4884 ]),
4885 "Rust exclusion search should give only TypeScript files"
4886 );
4887
4888 assert_eq!(
4889 search(
4890 &project,
4891 SearchQuery::text(
4892 search_query,
4893 false,
4894 true,
4895 false,
4896 Default::default(),
4897 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4898 None,
4899 )
4900 .unwrap(),
4901 cx
4902 )
4903 .await
4904 .unwrap(),
4905 HashMap::from_iter([
4906 (separator!("dir/one.rs").to_string(), vec![8..12]),
4907 (separator!("dir/two.rs").to_string(), vec![8..12]),
4908 ]),
4909 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4910 );
4911
4912 assert!(
4913 search(
4914 &project,
4915 SearchQuery::text(
4916 search_query,
4917 false,
4918 true,
4919 false,
4920 Default::default(),
4921 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
4922 .unwrap(),
4923 None,
4924 )
4925 .unwrap(),
4926 cx
4927 )
4928 .await
4929 .unwrap()
4930 .is_empty(),
4931 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4932 );
4933}
4934
4935#[gpui::test]
4936async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4937 init_test(cx);
4938
4939 let search_query = "file";
4940
4941 let fs = FakeFs::new(cx.executor());
4942 fs.insert_tree(
4943 path!("/dir"),
4944 json!({
4945 "one.rs": r#"// Rust file one"#,
4946 "one.ts": r#"// TypeScript file one"#,
4947 "two.rs": r#"// Rust file two"#,
4948 "two.ts": r#"// TypeScript file two"#,
4949 }),
4950 )
4951 .await;
4952 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4953
4954 assert!(
4955 search(
4956 &project,
4957 SearchQuery::text(
4958 search_query,
4959 false,
4960 true,
4961 false,
4962 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4963 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4964 None,
4965 )
4966 .unwrap(),
4967 cx
4968 )
4969 .await
4970 .unwrap()
4971 .is_empty(),
4972 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4973 );
4974
4975 assert!(
4976 search(
4977 &project,
4978 SearchQuery::text(
4979 search_query,
4980 false,
4981 true,
4982 false,
4983 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4984 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4985 None,
4986 )
4987 .unwrap(),
4988 cx
4989 )
4990 .await
4991 .unwrap()
4992 .is_empty(),
4993 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4994 );
4995
4996 assert!(
4997 search(
4998 &project,
4999 SearchQuery::text(
5000 search_query,
5001 false,
5002 true,
5003 false,
5004 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5005 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5006 None,
5007 )
5008 .unwrap(),
5009 cx
5010 )
5011 .await
5012 .unwrap()
5013 .is_empty(),
5014 "Non-matching inclusions and exclusions should not change that."
5015 );
5016
5017 assert_eq!(
5018 search(
5019 &project,
5020 SearchQuery::text(
5021 search_query,
5022 false,
5023 true,
5024 false,
5025 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5026 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5027 None,
5028 )
5029 .unwrap(),
5030 cx
5031 )
5032 .await
5033 .unwrap(),
5034 HashMap::from_iter([
5035 (separator!("dir/one.ts").to_string(), vec![14..18]),
5036 (separator!("dir/two.ts").to_string(), vec![14..18]),
5037 ]),
5038 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5039 );
5040}
5041
5042#[gpui::test]
5043async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
5044 init_test(cx);
5045
5046 let fs = FakeFs::new(cx.executor());
5047 fs.insert_tree(
5048 path!("/worktree-a"),
5049 json!({
5050 "haystack.rs": r#"// NEEDLE"#,
5051 "haystack.ts": r#"// NEEDLE"#,
5052 }),
5053 )
5054 .await;
5055 fs.insert_tree(
5056 path!("/worktree-b"),
5057 json!({
5058 "haystack.rs": r#"// NEEDLE"#,
5059 "haystack.ts": r#"// NEEDLE"#,
5060 }),
5061 )
5062 .await;
5063
5064 let project = Project::test(
5065 fs.clone(),
5066 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
5067 cx,
5068 )
5069 .await;
5070
5071 assert_eq!(
5072 search(
5073 &project,
5074 SearchQuery::text(
5075 "NEEDLE",
5076 false,
5077 true,
5078 false,
5079 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
5080 Default::default(),
5081 None,
5082 )
5083 .unwrap(),
5084 cx
5085 )
5086 .await
5087 .unwrap(),
5088 HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
5089 "should only return results from included worktree"
5090 );
5091 assert_eq!(
5092 search(
5093 &project,
5094 SearchQuery::text(
5095 "NEEDLE",
5096 false,
5097 true,
5098 false,
5099 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
5100 Default::default(),
5101 None,
5102 )
5103 .unwrap(),
5104 cx
5105 )
5106 .await
5107 .unwrap(),
5108 HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
5109 "should only return results from included worktree"
5110 );
5111
5112 assert_eq!(
5113 search(
5114 &project,
5115 SearchQuery::text(
5116 "NEEDLE",
5117 false,
5118 true,
5119 false,
5120 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5121 Default::default(),
5122 None,
5123 )
5124 .unwrap(),
5125 cx
5126 )
5127 .await
5128 .unwrap(),
5129 HashMap::from_iter([
5130 (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
5131 (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
5132 ]),
5133 "should return results from both worktrees"
5134 );
5135}
5136
// Gitignored directories (`target`, `node_modules`) are skipped by default;
// flipping the include-ignored flag searches them too, and include/exclude
// matchers still apply on top of that.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search: the fourth boolean (include-ignored) is false, so only
    // the non-ignored root package.json is searched.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): a fresh project is created per query — presumably so each
    // search starts from a clean worktree scan; confirm before relying on it.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // With include-ignored set to true, every file containing the query is
    // found, including those under `target` and `node_modules`.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/package.json").to_string(), vec![8..11]),
            (separator!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                separator!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                separator!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                separator!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                separator!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include/exclude matchers compose with include-ignored: restrict to the
    // ignored prettier directory, then drop its TypeScript file.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            separator!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
5256
// Entry creation is confined to the worktree: `b..` is an ordinary file name
// and succeeds, but any path containing a `..` component must be rejected —
// both for `create_entry` and for `open_buffer`.
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The worktree root is /one/two/three; /one/two/c.rs sits outside it.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // `b..` merely ends with dots — it is not a parent-dir reference, so
    // creating it must succeed.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Verify the filesystem: only `b..` was created; no `boop` or `beep`.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from(path!("/")),
            PathBuf::from(path!("/one")),
            PathBuf::from(path!("/one/two")),
            PathBuf::from(path!("/one/two/c.rs")),
            PathBuf::from(path!("/one/two/three")),
            PathBuf::from(path!("/one/two/three/a.txt")),
            PathBuf::from(path!("/one/two/three/b..")),
            PathBuf::from(path!("/one/two/three/four")),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
5326
// Four fake servers attach to the same tsx buffer. A hover request must be
// sent only to the servers advertising hover capability; the server that
// answers `None` contributes nothing, and the one without the capability must
// never be asked. The aggregated result contains one hover per responder.
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // First three servers advertise hover support; the last one does not.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Await each server's startup and install a per-server hover handler
    // appropriate for its declared capabilities. This must happen before the
    // hover below is issued.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two answer with a hover string naming the server.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            // ESLint is queried but returns no hover.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // Must never be queried: it declared no hover capability.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue the hover, then drain every handler stream to prove each capable
    // server received exactly one request before inspecting the results.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5480
// A hover whose parts are all empty or whitespace-only must be filtered out
// entirely rather than surfaced as a blank hover.
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server responds with three hover parts, all blank in some way:
    // empty, single space, and newlines only.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    // Issue the hover, confirm the request was handled, then assert the
    // blank parts produced no hover entries at all.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
5553
// Requesting code actions with an explicit kinds filter must return only
// actions of those kinds, even when the server offers more.
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions of different kinds; only one matches the
    // kinds filter used below.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Ask for SOURCE_ORGANIZE_IMPORTS actions only, over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the organize-imports action survives the kinds filter.
    let code_actions = code_actions_task.await.unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
5632
5633#[gpui::test]
5634async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5635 init_test(cx);
5636
5637 let fs = FakeFs::new(cx.executor());
5638 fs.insert_tree(
5639 path!("/dir"),
5640 json!({
5641 "a.tsx": "a",
5642 }),
5643 )
5644 .await;
5645
5646 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5647
5648 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5649 language_registry.add(tsx_lang());
5650 let language_server_names = [
5651 "TypeScriptServer",
5652 "TailwindServer",
5653 "ESLintServer",
5654 "NoActionsCapabilitiesServer",
5655 ];
5656
5657 let mut language_server_rxs = [
5658 language_registry.register_fake_lsp(
5659 "tsx",
5660 FakeLspAdapter {
5661 name: language_server_names[0],
5662 capabilities: lsp::ServerCapabilities {
5663 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5664 ..lsp::ServerCapabilities::default()
5665 },
5666 ..FakeLspAdapter::default()
5667 },
5668 ),
5669 language_registry.register_fake_lsp(
5670 "tsx",
5671 FakeLspAdapter {
5672 name: language_server_names[1],
5673 capabilities: lsp::ServerCapabilities {
5674 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5675 ..lsp::ServerCapabilities::default()
5676 },
5677 ..FakeLspAdapter::default()
5678 },
5679 ),
5680 language_registry.register_fake_lsp(
5681 "tsx",
5682 FakeLspAdapter {
5683 name: language_server_names[2],
5684 capabilities: lsp::ServerCapabilities {
5685 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5686 ..lsp::ServerCapabilities::default()
5687 },
5688 ..FakeLspAdapter::default()
5689 },
5690 ),
5691 language_registry.register_fake_lsp(
5692 "tsx",
5693 FakeLspAdapter {
5694 name: language_server_names[3],
5695 capabilities: lsp::ServerCapabilities {
5696 code_action_provider: None,
5697 ..lsp::ServerCapabilities::default()
5698 },
5699 ..FakeLspAdapter::default()
5700 },
5701 ),
5702 ];
5703
5704 let (buffer, _handle) = project
5705 .update(cx, |p, cx| {
5706 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5707 })
5708 .await
5709 .unwrap();
5710 cx.executor().run_until_parked();
5711
5712 let mut servers_with_actions_requests = HashMap::default();
5713 for i in 0..language_server_names.len() {
5714 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5715 panic!(
5716 "Failed to get language server #{i} with name {}",
5717 &language_server_names[i]
5718 )
5719 });
5720 let new_server_name = new_server.server.name();
5721
5722 assert!(
5723 !servers_with_actions_requests.contains_key(&new_server_name),
5724 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5725 );
5726 match new_server_name.0.as_ref() {
5727 "TailwindServer" | "TypeScriptServer" => {
5728 servers_with_actions_requests.insert(
5729 new_server_name.clone(),
5730 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
5731 move |_, _| {
5732 let name = new_server_name.clone();
5733 async move {
5734 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5735 lsp::CodeAction {
5736 title: format!("{name} code action"),
5737 ..lsp::CodeAction::default()
5738 },
5739 )]))
5740 }
5741 },
5742 ),
5743 );
5744 }
5745 "ESLintServer" => {
5746 servers_with_actions_requests.insert(
5747 new_server_name,
5748 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
5749 |_, _| async move { Ok(None) },
5750 ),
5751 );
5752 }
5753 "NoActionsCapabilitiesServer" => {
5754 let _never_handled = new_server
5755 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5756 panic!(
5757 "Should not call for code actions server with no corresponding capabilities"
5758 )
5759 });
5760 }
5761 unexpected => panic!("Unexpected server name: {unexpected}"),
5762 }
5763 }
5764
5765 let code_actions_task = project.update(cx, |project, cx| {
5766 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5767 });
5768
5769 // cx.run_until_parked();
5770 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5771 |mut code_actions_request| async move {
5772 code_actions_request
5773 .next()
5774 .await
5775 .expect("All code actions requests should have been triggered")
5776 },
5777 ))
5778 .await;
5779 assert_eq!(
5780 vec!["TailwindServer code action", "TypeScriptServer code action"],
5781 code_actions_task
5782 .await
5783 .unwrap()
5784 .into_iter()
5785 .map(|code_action| code_action.lsp_action.title().to_owned())
5786 .sorted()
5787 .collect::<Vec<_>>(),
5788 "Should receive code actions responses from all related servers with hover capabilities"
5789 );
5790}
5791
5792#[gpui::test]
5793async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5794 init_test(cx);
5795
5796 let fs = FakeFs::new(cx.executor());
5797 fs.insert_tree(
5798 "/dir",
5799 json!({
5800 "a.rs": "let a = 1;",
5801 "b.rs": "let b = 2;",
5802 "c.rs": "let c = 2;",
5803 }),
5804 )
5805 .await;
5806
5807 let project = Project::test(
5808 fs,
5809 [
5810 "/dir/a.rs".as_ref(),
5811 "/dir/b.rs".as_ref(),
5812 "/dir/c.rs".as_ref(),
5813 ],
5814 cx,
5815 )
5816 .await;
5817
5818 // check the initial state and get the worktrees
5819 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5820 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5821 assert_eq!(worktrees.len(), 3);
5822
5823 let worktree_a = worktrees[0].read(cx);
5824 let worktree_b = worktrees[1].read(cx);
5825 let worktree_c = worktrees[2].read(cx);
5826
5827 // check they start in the right order
5828 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5829 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5830 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5831
5832 (
5833 worktrees[0].clone(),
5834 worktrees[1].clone(),
5835 worktrees[2].clone(),
5836 )
5837 });
5838
5839 // move first worktree to after the second
5840 // [a, b, c] -> [b, a, c]
5841 project
5842 .update(cx, |project, cx| {
5843 let first = worktree_a.read(cx);
5844 let second = worktree_b.read(cx);
5845 project.move_worktree(first.id(), second.id(), cx)
5846 })
5847 .expect("moving first after second");
5848
5849 // check the state after moving
5850 project.update(cx, |project, cx| {
5851 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5852 assert_eq!(worktrees.len(), 3);
5853
5854 let first = worktrees[0].read(cx);
5855 let second = worktrees[1].read(cx);
5856 let third = worktrees[2].read(cx);
5857
5858 // check they are now in the right order
5859 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5860 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5861 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5862 });
5863
5864 // move the second worktree to before the first
5865 // [b, a, c] -> [a, b, c]
5866 project
5867 .update(cx, |project, cx| {
5868 let second = worktree_a.read(cx);
5869 let first = worktree_b.read(cx);
5870 project.move_worktree(first.id(), second.id(), cx)
5871 })
5872 .expect("moving second before first");
5873
5874 // check the state after moving
5875 project.update(cx, |project, cx| {
5876 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5877 assert_eq!(worktrees.len(), 3);
5878
5879 let first = worktrees[0].read(cx);
5880 let second = worktrees[1].read(cx);
5881 let third = worktrees[2].read(cx);
5882
5883 // check they are now in the right order
5884 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5885 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5886 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5887 });
5888
5889 // move the second worktree to after the third
5890 // [a, b, c] -> [a, c, b]
5891 project
5892 .update(cx, |project, cx| {
5893 let second = worktree_b.read(cx);
5894 let third = worktree_c.read(cx);
5895 project.move_worktree(second.id(), third.id(), cx)
5896 })
5897 .expect("moving second after third");
5898
5899 // check the state after moving
5900 project.update(cx, |project, cx| {
5901 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5902 assert_eq!(worktrees.len(), 3);
5903
5904 let first = worktrees[0].read(cx);
5905 let second = worktrees[1].read(cx);
5906 let third = worktrees[2].read(cx);
5907
5908 // check they are now in the right order
5909 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5910 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5911 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5912 });
5913
5914 // move the third worktree to before the second
5915 // [a, c, b] -> [a, b, c]
5916 project
5917 .update(cx, |project, cx| {
5918 let third = worktree_c.read(cx);
5919 let second = worktree_b.read(cx);
5920 project.move_worktree(third.id(), second.id(), cx)
5921 })
5922 .expect("moving third before second");
5923
5924 // check the state after moving
5925 project.update(cx, |project, cx| {
5926 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5927 assert_eq!(worktrees.len(), 3);
5928
5929 let first = worktrees[0].read(cx);
5930 let second = worktrees[1].read(cx);
5931 let third = worktrees[2].read(cx);
5932
5933 // check they are now in the right order
5934 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5935 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5936 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5937 });
5938
5939 // move the first worktree to after the third
5940 // [a, b, c] -> [b, c, a]
5941 project
5942 .update(cx, |project, cx| {
5943 let first = worktree_a.read(cx);
5944 let third = worktree_c.read(cx);
5945 project.move_worktree(first.id(), third.id(), cx)
5946 })
5947 .expect("moving first after third");
5948
5949 // check the state after moving
5950 project.update(cx, |project, cx| {
5951 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5952 assert_eq!(worktrees.len(), 3);
5953
5954 let first = worktrees[0].read(cx);
5955 let second = worktrees[1].read(cx);
5956 let third = worktrees[2].read(cx);
5957
5958 // check they are now in the right order
5959 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5960 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5961 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5962 });
5963
5964 // move the third worktree to before the first
5965 // [b, c, a] -> [a, b, c]
5966 project
5967 .update(cx, |project, cx| {
5968 let third = worktree_a.read(cx);
5969 let first = worktree_b.read(cx);
5970 project.move_worktree(third.id(), first.id(), cx)
5971 })
5972 .expect("moving third before first");
5973
5974 // check the state after moving
5975 project.update(cx, |project, cx| {
5976 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5977 assert_eq!(worktrees.len(), 3);
5978
5979 let first = worktrees[0].read(cx);
5980 let second = worktrees[1].read(cx);
5981 let third = worktrees[2].read(cx);
5982
5983 // check they are now in the right order
5984 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5985 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5986 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5987 });
5988}
5989
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that an unstaged diff (working copy vs. git index) is computed
    // when a buffer is opened, and is recomputed when the index changes.
    init_test(cx);

    // Index (staged) version: no comment line, "hello world".
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy version: adds a comment line and changes the message.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Initial diff against the index: one added line, one modified line.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Rewrite the index: the comment line is now staged, while the index's
    // main() body no longer contains the println! at all.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    // The diff is recomputed against the new index text: only the println!
    // line remains, now as an addition.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
6087
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that an uncommitted diff (working copy vs. HEAD) tracks HEAD
    // changes, annotates hunks with their staged/unstaged secondary status,
    // and handles files deleted from the working copy.
    init_test(cx);

    // Three versions of the same file: HEAD, index, and working copy.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and the index also contain a file absent from the working copy,
    // i.e. an uncommitted, unstaged deletion.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should adopt the buffer's language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // Diff against HEAD: the comment line is an unstaged addition (the index
    // lacks it), and the println! change is already staged (index matches the
    // buffer there).
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as a single deleted hunk, not yet staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file by writing an index that omits it.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk is now staged (no secondary hunk).
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6265
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    // Verifies the staging lifecycle of uncommitted hunks: optimistic pending
    // states, the event sequence emitted by the diff, and rollback when the
    // index write fails.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD has lines zero..five; the working copy deletes "zero" and
    // uppercases "two" and "four", producing three hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to the diff's events so the exact event sequence can be
    // asserted below.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged
    // (`SecondaryHunkRemovalPending`) before the index write completes.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It also goes optimistically pending.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The rollback is announced as a whole-file diff change.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6605
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    // Exercises staging when FS events for index writes arrive late: hunks
    // staged while earlier events are still buffered must not lose their
    // state once the buffered events are delivered. The pinned seeds
    // reproduce a previously-failing interleaving.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Same fixture as `test_staging_hunks`: deleting "zero" and uppercasing
    // "two" and "four" yields three hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events so that index writes complete but their FS
    // notifications are buffered.
    fs.pause_events();

    // Stage the first hunk. It goes optimistically pending.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO, then release every buffered FS event.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6798
/// Issues one `stage_or_unstage_hunks` call per hunk, back to back without
/// yielding to the executor, and verifies that the secondary (index) hunk
/// status transitions stay consistent: pending immediately after the calls,
/// settled once the executor runs.
#[gpui::test]
async fn test_staging_lots_of_hunks_fast(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // The working copy replaces every 5th line of a 500-line file, which
    // produces 100 single-line modified hunks.
    let different_lines = (0..500)
        .step_by(5)
        .map(|i| format!("diff {}\n", i))
        .collect::<Vec<String>>();
    let committed_contents = (0..500).map(|i| format!("{}\n", i)).collect::<String>();
    let file_contents = (0..500)
        .map(|i| {
            if i % 5 == 0 {
                different_lines[i / 5].clone()
            } else {
                format!("{}\n", i)
            }
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and the index hold identical contents, so every hunk starts out
    // unstaged.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // One expected hunk per modified line:
    // (row range, text at HEAD, text in working copy, status).
    let mut expected_hunks: Vec<(Range<u32>, String, String, DiffHunkStatus)> = (0..500)
        .step_by(5)
        .map(|i| {
            (
                i as u32..i as u32 + 1,
                format!("{}\n", i),
                different_lines[i / 5].clone(),
                DiffHunkStatus::modified(HasSecondaryHunk),
            )
        })
        .collect();

    // The hunks are initially unstaged
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // Staging completes asynchronously, so right after the calls each hunk
    // should report its secondary hunk as pending removal.
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkRemovalPending);
    }

    // Stage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        }

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(NoSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // Unstaging mirrors staging: immediately after the calls, each secondary
    // hunk is pending re-addition.
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkAdditionPending);
    }

    // Unstage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(false, &[hunk], &snapshot, true, cx);
        }

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks, again
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(HasSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });
}
6944
6945#[gpui::test]
6946async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
6947 init_test(cx);
6948
6949 let committed_contents = r#"
6950 fn main() {
6951 println!("hello from HEAD");
6952 }
6953 "#
6954 .unindent();
6955 let file_contents = r#"
6956 fn main() {
6957 println!("hello from the working copy");
6958 }
6959 "#
6960 .unindent();
6961
6962 let fs = FakeFs::new(cx.background_executor.clone());
6963 fs.insert_tree(
6964 "/dir",
6965 json!({
6966 ".git": {},
6967 "src": {
6968 "main.rs": file_contents,
6969 }
6970 }),
6971 )
6972 .await;
6973
6974 fs.set_head_for_repo(
6975 Path::new("/dir/.git"),
6976 &[("src/main.rs".into(), committed_contents.clone())],
6977 );
6978 fs.set_index_for_repo(
6979 Path::new("/dir/.git"),
6980 &[("src/main.rs".into(), committed_contents.clone())],
6981 );
6982
6983 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
6984
6985 let buffer = project
6986 .update(cx, |project, cx| {
6987 project.open_local_buffer("/dir/src/main.rs", cx)
6988 })
6989 .await
6990 .unwrap();
6991 let uncommitted_diff = project
6992 .update(cx, |project, cx| {
6993 project.open_uncommitted_diff(buffer.clone(), cx)
6994 })
6995 .await
6996 .unwrap();
6997
6998 cx.run_until_parked();
6999 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7000 let snapshot = buffer.read(cx).snapshot();
7001 assert_hunks(
7002 uncommitted_diff.hunks(&snapshot, cx),
7003 &snapshot,
7004 &uncommitted_diff.base_text_string().unwrap(),
7005 &[(
7006 1..2,
7007 " println!(\"hello from HEAD\");\n",
7008 " println!(\"hello from the working copy\");\n",
7009 DiffHunkStatus {
7010 kind: DiffHunkStatusKind::Modified,
7011 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7012 },
7013 )],
7014 );
7015 });
7016}
7017
7018#[gpui::test]
7019async fn test_repository_and_path_for_project_path(
7020 background_executor: BackgroundExecutor,
7021 cx: &mut gpui::TestAppContext,
7022) {
7023 init_test(cx);
7024 let fs = FakeFs::new(background_executor);
7025 fs.insert_tree(
7026 path!("/root"),
7027 json!({
7028 "c.txt": "",
7029 "dir1": {
7030 ".git": {},
7031 "deps": {
7032 "dep1": {
7033 ".git": {},
7034 "src": {
7035 "a.txt": ""
7036 }
7037 }
7038 },
7039 "src": {
7040 "b.txt": ""
7041 }
7042 },
7043 }),
7044 )
7045 .await;
7046
7047 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
7048 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7049 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7050 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7051 .await;
7052 tree.flush_fs_events(cx).await;
7053
7054 project.read_with(cx, |project, cx| {
7055 let git_store = project.git_store().read(cx);
7056 let pairs = [
7057 ("c.txt", None),
7058 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
7059 (
7060 "dir1/deps/dep1/src/a.txt",
7061 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
7062 ),
7063 ];
7064 let expected = pairs
7065 .iter()
7066 .map(|(path, result)| {
7067 (
7068 path,
7069 result.map(|(repo, repo_path)| {
7070 (Path::new(repo).to_owned(), RepoPath::from(repo_path))
7071 }),
7072 )
7073 })
7074 .collect::<Vec<_>>();
7075 let actual = pairs
7076 .iter()
7077 .map(|(path, _)| {
7078 let project_path = (tree_id, Path::new(path)).into();
7079 let result = maybe!({
7080 let (repo, repo_path) =
7081 git_store.repository_and_path_for_project_path(&project_path, cx)?;
7082 Some((
7083 repo.read(cx)
7084 .repository_entry
7085 .work_directory_abs_path
7086 .clone(),
7087 repo_path,
7088 ))
7089 });
7090 (path, result)
7091 })
7092 .collect::<Vec<_>>();
7093 pretty_assertions::assert_eq!(expected, actual);
7094 });
7095
7096 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
7097 .await
7098 .unwrap();
7099 tree.flush_fs_events(cx).await;
7100
7101 project.read_with(cx, |project, cx| {
7102 let git_store = project.git_store().read(cx);
7103 assert_eq!(
7104 git_store.repository_and_path_for_project_path(
7105 &(tree_id, Path::new("dir1/src/b.txt")).into(),
7106 cx
7107 ),
7108 None
7109 );
7110 });
7111}
7112
7113#[gpui::test]
7114async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7115 init_test(cx);
7116 let fs = FakeFs::new(cx.background_executor.clone());
7117 fs.insert_tree(
7118 path!("/root"),
7119 json!({
7120 "home": {
7121 ".git": {},
7122 "project": {
7123 "a.txt": "A"
7124 },
7125 },
7126 }),
7127 )
7128 .await;
7129 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7130
7131 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7132 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7133 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7134 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7135 .await;
7136 tree.flush_fs_events(cx).await;
7137
7138 project.read_with(cx, |project, cx| {
7139 let containing = project
7140 .git_store()
7141 .read(cx)
7142 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7143 assert!(containing.is_none());
7144 });
7145
7146 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7147 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7148 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7149 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7150 .await;
7151 tree.flush_fs_events(cx).await;
7152
7153 project.read_with(cx, |project, cx| {
7154 let containing = project
7155 .git_store()
7156 .read(cx)
7157 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7158 assert_eq!(
7159 containing
7160 .unwrap()
7161 .0
7162 .read(cx)
7163 .repository_entry
7164 .work_directory_abs_path,
7165 Path::new(path!("/root/home"))
7166 );
7167 });
7168}
7169
7170async fn search(
7171 project: &Entity<Project>,
7172 query: SearchQuery,
7173 cx: &mut gpui::TestAppContext,
7174) -> Result<HashMap<String, Vec<Range<usize>>>> {
7175 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
7176 let mut results = HashMap::default();
7177 while let Ok(search_result) = search_rx.recv().await {
7178 match search_result {
7179 SearchResult::Buffer { buffer, ranges } => {
7180 results.entry(buffer).or_insert(ranges);
7181 }
7182 SearchResult::LimitReached => {}
7183 }
7184 }
7185 Ok(results
7186 .into_iter()
7187 .map(|(buffer, ranges)| {
7188 buffer.update(cx, |buffer, cx| {
7189 let path = buffer
7190 .file()
7191 .unwrap()
7192 .full_path(cx)
7193 .to_string_lossy()
7194 .to_string();
7195 let ranges = ranges
7196 .into_iter()
7197 .map(|range| range.to_offset(buffer))
7198 .collect::<Vec<_>>();
7199 (path, ranges)
7200 })
7201 })
7202 .collect())
7203}
7204
7205pub fn init_test(cx: &mut gpui::TestAppContext) {
7206 if std::env::var("RUST_LOG").is_ok() {
7207 env_logger::try_init().ok();
7208 }
7209
7210 cx.update(|cx| {
7211 let settings_store = SettingsStore::test(cx);
7212 cx.set_global(settings_store);
7213 release_channel::init(SemanticVersion::default(), cx);
7214 language::init(cx);
7215 Project::init_settings(cx);
7216 });
7217}
7218
7219fn json_lang() -> Arc<Language> {
7220 Arc::new(Language::new(
7221 LanguageConfig {
7222 name: "JSON".into(),
7223 matcher: LanguageMatcher {
7224 path_suffixes: vec!["json".to_string()],
7225 ..Default::default()
7226 },
7227 ..Default::default()
7228 },
7229 None,
7230 ))
7231}
7232
7233fn js_lang() -> Arc<Language> {
7234 Arc::new(Language::new(
7235 LanguageConfig {
7236 name: "JavaScript".into(),
7237 matcher: LanguageMatcher {
7238 path_suffixes: vec!["js".to_string()],
7239 ..Default::default()
7240 },
7241 ..Default::default()
7242 },
7243 None,
7244 ))
7245}
7246
7247fn rust_lang() -> Arc<Language> {
7248 Arc::new(Language::new(
7249 LanguageConfig {
7250 name: "Rust".into(),
7251 matcher: LanguageMatcher {
7252 path_suffixes: vec!["rs".to_string()],
7253 ..Default::default()
7254 },
7255 ..Default::default()
7256 },
7257 Some(tree_sitter_rust::LANGUAGE.into()),
7258 ))
7259}
7260
7261fn typescript_lang() -> Arc<Language> {
7262 Arc::new(Language::new(
7263 LanguageConfig {
7264 name: "TypeScript".into(),
7265 matcher: LanguageMatcher {
7266 path_suffixes: vec!["ts".to_string()],
7267 ..Default::default()
7268 },
7269 ..Default::default()
7270 },
7271 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
7272 ))
7273}
7274
7275fn tsx_lang() -> Arc<Language> {
7276 Arc::new(Language::new(
7277 LanguageConfig {
7278 name: "tsx".into(),
7279 matcher: LanguageMatcher {
7280 path_suffixes: vec!["tsx".to_string()],
7281 ..Default::default()
7282 },
7283 ..Default::default()
7284 },
7285 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
7286 ))
7287}
7288
7289fn get_all_tasks(
7290 project: &Entity<Project>,
7291 task_contexts: &TaskContexts,
7292 cx: &mut App,
7293) -> Vec<(TaskSourceKind, ResolvedTask)> {
7294 let (mut old, new) = project.update(cx, |project, cx| {
7295 project
7296 .task_store
7297 .read(cx)
7298 .task_inventory()
7299 .unwrap()
7300 .read(cx)
7301 .used_and_current_resolved_tasks(task_contexts, cx)
7302 });
7303 old.extend(new);
7304 old
7305}