1#![allow(clippy::format_collect)]
2
3use crate::{task_inventory::TaskContexts, Event, *};
4use buffer_diff::{
5 assert_hunks, BufferDiffEvent, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
6};
7use fs::FakeFs;
8use futures::{future, StreamExt};
9use gpui::{App, SemanticVersion, UpdateGlobal};
10use http_client::Url;
11use language::{
12 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
13 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
14 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
15 OffsetRangeExt, Point, ToPoint,
16};
17use lsp::{
18 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
19 NumberOrString, TextDocumentEdit, WillRenameFiles,
20};
21use parking_lot::Mutex;
22use pretty_assertions::{assert_eq, assert_matches};
23use serde_json::json;
24#[cfg(not(windows))]
25use std::os;
26use std::{str::FromStr, sync::OnceLock};
27
28use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
29use task::{ResolvedTask, TaskContext};
30use unindent::Unindent as _;
31use util::{
32 assert_set_eq, path,
33 paths::PathMatcher,
34 separator,
35 test::{marked_text_offsets, TempTree},
36 uri, TryFutureExt as _,
37};
38
39#[gpui::test]
40async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
41 cx.executor().allow_parking();
42
43 let (tx, mut rx) = futures::channel::mpsc::unbounded();
44 let _thread = std::thread::spawn(move || {
45 #[cfg(not(target_os = "windows"))]
46 std::fs::metadata("/tmp").unwrap();
47 #[cfg(target_os = "windows")]
48 std::fs::metadata("C:/Windows").unwrap();
49 std::thread::sleep(Duration::from_millis(1000));
50 tx.unbounded_send(1).unwrap();
51 });
52 rx.next().await.unwrap();
53}
54
55#[gpui::test]
56async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
57 cx.executor().allow_parking();
58
59 let io_task = smol::unblock(move || {
60 println!("sleeping on thread {:?}", std::thread::current().id());
61 std::thread::sleep(Duration::from_millis(10));
62 1
63 });
64
65 let task = cx.foreground_executor().spawn(async move {
66 io_task.await;
67 });
68
69 task.await;
70}
71
72#[cfg(not(windows))]
73#[gpui::test]
74async fn test_symlinks(cx: &mut gpui::TestAppContext) {
75 init_test(cx);
76 cx.executor().allow_parking();
77
78 let dir = TempTree::new(json!({
79 "root": {
80 "apple": "",
81 "banana": {
82 "carrot": {
83 "date": "",
84 "endive": "",
85 }
86 },
87 "fennel": {
88 "grape": "",
89 }
90 }
91 }));
92
93 let root_link_path = dir.path().join("root_link");
94 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
95 os::unix::fs::symlink(
96 dir.path().join("root/fennel"),
97 dir.path().join("root/finnochio"),
98 )
99 .unwrap();
100
101 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
102
103 project.update(cx, |project, cx| {
104 let tree = project.worktrees(cx).next().unwrap().read(cx);
105 assert_eq!(tree.file_count(), 5);
106 assert_eq!(
107 tree.inode_for_path("fennel/grape"),
108 tree.inode_for_path("finnochio/grape")
109 );
110 });
111}
112
113#[gpui::test]
114async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
115 init_test(cx);
116
117 let dir = TempTree::new(json!({
118 ".editorconfig": r#"
119 root = true
120 [*.rs]
121 indent_style = tab
122 indent_size = 3
123 end_of_line = lf
124 insert_final_newline = true
125 trim_trailing_whitespace = true
126 [*.js]
127 tab_width = 10
128 "#,
129 ".zed": {
130 "settings.json": r#"{
131 "tab_size": 8,
132 "hard_tabs": false,
133 "ensure_final_newline_on_save": false,
134 "remove_trailing_whitespace_on_save": false,
135 "soft_wrap": "editor_width"
136 }"#,
137 },
138 "a.rs": "fn a() {\n A\n}",
139 "b": {
140 ".editorconfig": r#"
141 [*.rs]
142 indent_size = 2
143 "#,
144 "b.rs": "fn b() {\n B\n}",
145 },
146 "c.js": "def c\n C\nend",
147 "README.json": "tabs are better\n",
148 }));
149
150 let path = dir.path();
151 let fs = FakeFs::new(cx.executor());
152 fs.insert_tree_from_real_fs(path, path).await;
153 let project = Project::test(fs, [path], cx).await;
154
155 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
156 language_registry.add(js_lang());
157 language_registry.add(json_lang());
158 language_registry.add(rust_lang());
159
160 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
161
162 cx.executor().run_until_parked();
163
164 cx.update(|cx| {
165 let tree = worktree.read(cx);
166 let settings_for = |path: &str| {
167 let file_entry = tree.entry_for_path(path).unwrap().clone();
168 let file = File::for_entry(file_entry, worktree.clone());
169 let file_language = project
170 .read(cx)
171 .languages()
172 .language_for_file_path(file.path.as_ref());
173 let file_language = cx
174 .background_executor()
175 .block(file_language)
176 .expect("Failed to get file language");
177 let file = file as _;
178 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
179 };
180
181 let settings_a = settings_for("a.rs");
182 let settings_b = settings_for("b/b.rs");
183 let settings_c = settings_for("c.js");
184 let settings_readme = settings_for("README.json");
185
186 // .editorconfig overrides .zed/settings
187 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
188 assert_eq!(settings_a.hard_tabs, true);
189 assert_eq!(settings_a.ensure_final_newline_on_save, true);
190 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
191
192 // .editorconfig in b/ overrides .editorconfig in root
193 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
194
195 // "indent_size" is not set, so "tab_width" is used
196 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
197
198 // README.md should not be affected by .editorconfig's globe "*.rs"
199 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
200 });
201}
202
// Verifies that .zed/settings.json and .zed/tasks.json are discovered per
// directory: a nested b/.zed overrides the root .zed for files under b/, both
// worktree task files are resolved, and recently-scheduled plus global
// (file-based) tasks are ordered as expected.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Worktree with root-level .zed settings/tasks plus a nested b/.zed that
    // defines its own settings and tasks.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let the settings/tasks files be scanned before querying.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against the active worktree with an empty task context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    // Source kind of the task defined in the root-level .zed directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Sanity-check the settings: nested b/.zed overrides the root.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files are resolved; the nested one sorts first.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the topmost task as scheduled, and register a global file-based
    // tasks.json with one more task.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                    settings::TaskKind::Script,
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // The recently-scheduled task now sorts first; the global task sorts last,
    // with its args and env resolved from the file-based definition.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
408
409#[gpui::test]
410async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
411 init_test(cx);
412 TaskStore::init(None);
413
414 let fs = FakeFs::new(cx.executor());
415 fs.insert_tree(
416 path!("/dir"),
417 json!({
418 ".zed": {
419 "tasks.json": r#"[{
420 "label": "test worktree root",
421 "command": "echo $ZED_WORKTREE_ROOT"
422 }]"#,
423 },
424 "a": {
425 "a.rs": "fn a() {\n A\n}"
426 },
427 }),
428 )
429 .await;
430
431 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
432 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
433
434 cx.executor().run_until_parked();
435 let worktree_id = cx.update(|cx| {
436 project.update(cx, |project, cx| {
437 project.worktrees(cx).next().unwrap().read(cx).id()
438 })
439 });
440
441 let active_non_worktree_item_tasks = cx.update(|cx| {
442 get_all_tasks(
443 &project,
444 &TaskContexts {
445 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
446 active_worktree_context: None,
447 other_worktree_contexts: Vec::new(),
448 },
449 cx,
450 )
451 });
452 assert!(
453 active_non_worktree_item_tasks.is_empty(),
454 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
455 );
456
457 let active_worktree_tasks = cx.update(|cx| {
458 get_all_tasks(
459 &project,
460 &TaskContexts {
461 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
462 active_worktree_context: Some((worktree_id, {
463 let mut worktree_context = TaskContext::default();
464 worktree_context
465 .task_variables
466 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
467 worktree_context
468 })),
469 other_worktree_contexts: Vec::new(),
470 },
471 cx,
472 )
473 });
474 assert_eq!(
475 active_worktree_tasks
476 .into_iter()
477 .map(|(source_kind, task)| {
478 let resolved = task.resolved.unwrap();
479 (source_kind, resolved.command)
480 })
481 .collect::<Vec<_>>(),
482 vec![(
483 TaskSourceKind::Worktree {
484 id: worktree_id,
485 directory_in_worktree: PathBuf::from(separator!(".zed")),
486 id_base: if cfg!(windows) {
487 "local worktree tasks from directory \".zed\"".into()
488 } else {
489 "local worktree tasks from directory \".zed\"".into()
490 },
491 },
492 "echo /dir".to_string(),
493 )]
494 );
495}
496
// End-to-end check of language-server lifecycle management: servers start
// lazily when a matching buffer opens, buffers are configured from server
// capabilities, edits/saves/renames are routed only to matching servers,
// changing a file's extension migrates the buffer between servers, restarts
// reopen documents, and closing a buffer notifies only its server.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server: completion triggers "." and "::", save notifications on.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server: completion trigger ":", save notifications on.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        // No language assigned yet: Rust hasn't been registered.
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        // No server for TOML, so no completion triggers.
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // The Rust server sees only the Rust edit; the TOML edit is not reported.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // Same-language rename: close under the old URI, reopen under the new one.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared when the buffer's
    // language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before the new ones start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is unspecified, hence the set
    // comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
898
// Verifies workspace/didChangeWatchedFiles handling: registering watchers
// causes ignored directories matching a watch pattern to be loaded, and only
// FS mutations matching the registered glob patterns are reported to the
// language server.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline, so we can count the extra directory reads triggered by the
    // watcher registration below.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: an exact file, a glob in src/, and a recursive
    // glob inside the gitignored target/y directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Collect the DidChangeWatchedFiles notifications, sorted by URI for
    // deterministic assertions.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registration alone produces no change events, but does scan directories.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
1098
// Verifies that diagnostics published by a language server are routed to the
// correct buffer when the project consists of single-file worktrees: each file
// is opened as its own worktree, and each receives a diagnostic with a
// distinct severity.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open the project with each file as its own (single-file) worktree.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Publish one ERROR for a.rs and one WARNING for b.rs, both attributed to
    // the same server id, so we can check each lands only in its own buffer.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer should highlight exactly its own diagnostic range with its
    // own severity.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1200
1201#[gpui::test]
1202async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1203 init_test(cx);
1204
1205 let fs = FakeFs::new(cx.executor());
1206 fs.insert_tree(
1207 path!("/root"),
1208 json!({
1209 "dir": {
1210 ".git": {
1211 "HEAD": "ref: refs/heads/main",
1212 },
1213 ".gitignore": "b.rs",
1214 "a.rs": "let a = 1;",
1215 "b.rs": "let b = 2;",
1216 },
1217 "other.rs": "let b = c;"
1218 }),
1219 )
1220 .await;
1221
1222 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1223 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1224 let (worktree, _) = project
1225 .update(cx, |project, cx| {
1226 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1227 })
1228 .await
1229 .unwrap();
1230 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1231
1232 let (worktree, _) = project
1233 .update(cx, |project, cx| {
1234 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1235 })
1236 .await
1237 .unwrap();
1238 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1239
1240 let server_id = LanguageServerId(0);
1241 lsp_store.update(cx, |lsp_store, cx| {
1242 lsp_store
1243 .update_diagnostics(
1244 server_id,
1245 lsp::PublishDiagnosticsParams {
1246 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1247 version: None,
1248 diagnostics: vec![lsp::Diagnostic {
1249 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1250 severity: Some(lsp::DiagnosticSeverity::ERROR),
1251 message: "unused variable 'b'".to_string(),
1252 ..Default::default()
1253 }],
1254 },
1255 &[],
1256 cx,
1257 )
1258 .unwrap();
1259 lsp_store
1260 .update_diagnostics(
1261 server_id,
1262 lsp::PublishDiagnosticsParams {
1263 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1264 version: None,
1265 diagnostics: vec![lsp::Diagnostic {
1266 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1267 severity: Some(lsp::DiagnosticSeverity::ERROR),
1268 message: "unknown variable 'c'".to_string(),
1269 ..Default::default()
1270 }],
1271 },
1272 &[],
1273 cx,
1274 )
1275 .unwrap();
1276 });
1277
1278 let main_ignored_buffer = project
1279 .update(cx, |project, cx| {
1280 project.open_buffer((main_worktree_id, "b.rs"), cx)
1281 })
1282 .await
1283 .unwrap();
1284 main_ignored_buffer.update(cx, |buffer, _| {
1285 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1286 assert_eq!(
1287 chunks
1288 .iter()
1289 .map(|(s, d)| (s.as_str(), *d))
1290 .collect::<Vec<_>>(),
1291 &[
1292 ("let ", None),
1293 ("b", Some(DiagnosticSeverity::ERROR)),
1294 (" = 2;", None),
1295 ],
1296 "Gigitnored buffers should still get in-buffer diagnostics",
1297 );
1298 });
1299 let other_buffer = project
1300 .update(cx, |project, cx| {
1301 project.open_buffer((other_worktree_id, ""), cx)
1302 })
1303 .await
1304 .unwrap();
1305 other_buffer.update(cx, |buffer, _| {
1306 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1307 assert_eq!(
1308 chunks
1309 .iter()
1310 .map(|(s, d)| (s.as_str(), *d))
1311 .collect::<Vec<_>>(),
1312 &[
1313 ("let b = ", None),
1314 ("c", Some(DiagnosticSeverity::ERROR)),
1315 (";", None),
1316 ],
1317 "Buffers from hidden projects should still get in-buffer diagnostics"
1318 );
1319 });
1320
1321 project.update(cx, |project, cx| {
1322 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1323 assert_eq!(
1324 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1325 vec![(
1326 ProjectPath {
1327 worktree_id: main_worktree_id,
1328 path: Arc::from(Path::new("b.rs")),
1329 },
1330 server_id,
1331 DiagnosticSummary {
1332 error_count: 1,
1333 warning_count: 0,
1334 }
1335 )]
1336 );
1337 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1338 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1339 });
1340}
1341
// Verifies the project-level event sequence for disk-based diagnostics: a
// progress token matching `disk_based_diagnostics_progress_token` triggers
// DiskBasedDiagnosticsStarted/Finished events, publish notifications produce
// DiagnosticsUpdated, and publishing the same empty diagnostics twice only
// emits a single update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress with a token derived from the disk-based progress
    // token marks disk-based diagnostics as started.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publish a diagnostic while the progress is running; this emits a
    // DiagnosticsUpdated event for the affected path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending progress on the token marks disk-based diagnostics as finished.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The previously-published diagnostic is visible in the newly-opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // No further event is emitted: the second identical (empty) publish is a no-op.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1477
// Verifies that restarting a language server while its disk-based diagnostics
// are still in progress doesn't leave the project stuck in a "diagnosing"
// state: the new server instance's progress lifecycle fully supersedes the
// old instance's never-completed one.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // Note the new server gets a fresh id (1).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server (id 1) is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1564
// Verifies that restarting a language server clears the diagnostics the
// previous instance had published, both from the buffer itself and from the
// project-wide diagnostic summary.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Confirm the diagnostic is present in the buffer and counted in the
    // project summary before the restart.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1645
1646#[gpui::test]
1647async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1648 init_test(cx);
1649
1650 let fs = FakeFs::new(cx.executor());
1651 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1652
1653 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1654 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1655
1656 language_registry.add(rust_lang());
1657 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1658
1659 let (buffer, _handle) = project
1660 .update(cx, |project, cx| {
1661 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1662 })
1663 .await
1664 .unwrap();
1665
1666 // Before restarting the server, report diagnostics with an unknown buffer version.
1667 let fake_server = fake_servers.next().await.unwrap();
1668 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1669 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1670 version: Some(10000),
1671 diagnostics: Vec::new(),
1672 });
1673 cx.executor().run_until_parked();
1674 project.update(cx, |project, cx| {
1675 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1676 });
1677
1678 let mut fake_server = fake_servers.next().await.unwrap();
1679 let notification = fake_server
1680 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1681 .await
1682 .text_document;
1683 assert_eq!(notification.version, 0);
1684}
1685
// Verifies that cancelling language-server work for a buffer sends a
// WorkDoneProgressCancel notification only for work that was begun as
// cancellable, not for non-cancellable work.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // This token's work is explicitly non-cancellable: it should NOT receive
    // a cancel notification below.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // This token's work is cancellable and should be the one cancelled.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Exactly the cancellable token is cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1750
// Verifies that toggling the `enable_language_server` setting per language
// starts and stops only the corresponding server: disabling Rust stops the
// rust server but leaves the JS server running, and flipping both settings
// restarts Rust while shutting down JS.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The restarted Rust server re-opens the buffer from scratch.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1868
// Verifies that diagnostics published against older buffer versions are
// translated through subsequent edits: ranges move with the text, overlapping
// diagnostics are chunked correctly, and diagnostics arriving for a stale
// version are mapped onto the current content.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (The two inserted newlines shift every range down by two rows.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    // The resulting entries reflect both the newest published ranges and the
    // edits applied above (ranges are sorted by buffer position).
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2154
// Diagnostics with empty (zero-width) ranges must still be visible when the
// buffer is rendered: the asserted chunks below show the range being extended
// forward to cover the following character, or backward at end-of-line.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish two zero-width diagnostics: one in the middle of line 0 (just
    // before the `;`) and one at the very end of line 1.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                ..Default::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                ..Default::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2227
// Two language servers can each attach diagnostics to the same file; the
// project-wide summary must count entries from every server rather than
// letting one server's set replace the other's.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Server 0 reports an error over the first word.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // Server 1 reports a different error over the same range.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' errors must be reflected in the summary.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2284
// `edits_from_lsp` must transform edits that a language server computed
// against an OLDER document version: the buffer is edited after the server
// snapshots it, and the server's edits (tagged with the stale version number)
// still have to land at the intended locations in the newer text.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw when the buffer was opened;
    // the edits below are sent against this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The edit positions below are expressed in the ORIGINAL (pre-edit) text's
    // coordinates; `edits_from_lsp` must remap them through the local edits.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the remapped edits must preserve the local edits while applying
    // the server's intended changes (f10, f200, f4000).
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2439
// When a server expresses a small change as a huge delete-and-reinsert diff
// (as rust-analyzer does for merge-imports), `edits_from_lsp` should reduce
// it to a minimal set of edits so unchanged text is not churned.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The enormous diff should collapse to just two minimal edits:
        // rewrite the import path, and delete the now-redundant second `use`.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2550
// Servers sometimes send edits out of order, with inverted ranges, or with
// positions beyond the end of the document. `edits_from_lsp` must normalize
// all of these into a valid, minimal edit set instead of failing.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end precedes start.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is past the end of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same minimal pair of
        // edits as the well-formed case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2657
2658fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2659 buffer: &Buffer,
2660 range: Range<T>,
2661) -> Vec<(String, Option<DiagnosticSeverity>)> {
2662 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2663 for chunk in buffer.snapshot().chunks(range, true) {
2664 if chunks.last().map_or(false, |prev_chunk| {
2665 prev_chunk.1 == chunk.diagnostic_severity
2666 }) {
2667 chunks.last_mut().unwrap().0.push_str(chunk.text);
2668 } else {
2669 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2670 }
2671 }
2672 chunks
2673}
2674
// Go-to-definition into a file outside the visible worktree should open the
// target in a new, *invisible* worktree — without starting another language
// server — and that worktree should be released once the last reference to
// the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server resolves the definition to a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs is now present, but as an invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: lists each worktree's absolute path and visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2772
// When completion items carry no explicit text edit range, the replaced
// range must be inferred from the word (or partial string) surrounding the
// cursor position.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing after a word — the replaced range should be the
    // partial word "fqn" preceding the cursor.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal — the replaced range should
    // be the trailing path segment "cmp", not the whole string.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2866
// Completion text containing carriage returns (both bare `\r` and `\r\n`)
// should be normalized to `\n` before being inserted into the buffer.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert text mixes a bare `\r` with a `\r\n` sequence.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    assert_eq!(completions.len(), 1);
    // Both line-ending forms collapse to a single `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2929
// A code action that resolves to a *command* (no edits) must be executed via
// `workspace/executeCommand`; any `workspace/applyEdit` requests the server
// issues while the command runs are captured into the returned project
// transaction so the whole action is undoable.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The captured transaction makes the server-driven edit undoable.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3070
3071#[gpui::test(iterations = 10)]
3072async fn test_save_file(cx: &mut gpui::TestAppContext) {
3073 init_test(cx);
3074
3075 let fs = FakeFs::new(cx.executor());
3076 fs.insert_tree(
3077 path!("/dir"),
3078 json!({
3079 "file1": "the old contents",
3080 }),
3081 )
3082 .await;
3083
3084 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3085 let buffer = project
3086 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3087 .await
3088 .unwrap();
3089 buffer.update(cx, |buffer, cx| {
3090 assert_eq!(buffer.text(), "the old contents");
3091 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3092 });
3093
3094 project
3095 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3096 .await
3097 .unwrap();
3098
3099 let new_text = fs
3100 .load(Path::new(path!("/dir/file1")))
3101 .await
3102 .unwrap()
3103 .replace("\r\n", "\n");
3104 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3105}
3106
3107#[gpui::test(iterations = 30)]
3108async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3109 init_test(cx);
3110
3111 let fs = FakeFs::new(cx.executor().clone());
3112 fs.insert_tree(
3113 path!("/dir"),
3114 json!({
3115 "file1": "the original contents",
3116 }),
3117 )
3118 .await;
3119
3120 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3121 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3122 let buffer = project
3123 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3124 .await
3125 .unwrap();
3126
3127 // Simulate buffer diffs being slow, so that they don't complete before
3128 // the next file change occurs.
3129 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3130
3131 // Change the buffer's file on disk, and then wait for the file change
3132 // to be detected by the worktree, so that the buffer starts reloading.
3133 fs.save(
3134 path!("/dir/file1").as_ref(),
3135 &"the first contents".into(),
3136 Default::default(),
3137 )
3138 .await
3139 .unwrap();
3140 worktree.next_event(cx).await;
3141
3142 // Change the buffer's file again. Depending on the random seed, the
3143 // previous file change may still be in progress.
3144 fs.save(
3145 path!("/dir/file1").as_ref(),
3146 &"the second contents".into(),
3147 Default::default(),
3148 )
3149 .await
3150 .unwrap();
3151 worktree.next_event(cx).await;
3152
3153 cx.executor().run_until_parked();
3154 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3155 buffer.read_with(cx, |buffer, _| {
3156 assert_eq!(buffer.text(), on_disk_text);
3157 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3158 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3159 });
3160}
3161
3162#[gpui::test(iterations = 30)]
3163async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3164 init_test(cx);
3165
3166 let fs = FakeFs::new(cx.executor().clone());
3167 fs.insert_tree(
3168 path!("/dir"),
3169 json!({
3170 "file1": "the original contents",
3171 }),
3172 )
3173 .await;
3174
3175 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3176 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3177 let buffer = project
3178 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3179 .await
3180 .unwrap();
3181
3182 // Simulate buffer diffs being slow, so that they don't complete before
3183 // the next file change occurs.
3184 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3185
3186 // Change the buffer's file on disk, and then wait for the file change
3187 // to be detected by the worktree, so that the buffer starts reloading.
3188 fs.save(
3189 path!("/dir/file1").as_ref(),
3190 &"the first contents".into(),
3191 Default::default(),
3192 )
3193 .await
3194 .unwrap();
3195 worktree.next_event(cx).await;
3196
3197 cx.executor()
3198 .spawn(cx.executor().simulate_random_delay())
3199 .await;
3200
3201 // Perform a noop edit, causing the buffer's version to increase.
3202 buffer.update(cx, |buffer, cx| {
3203 buffer.edit([(0..0, " ")], None, cx);
3204 buffer.undo(cx);
3205 });
3206
3207 cx.executor().run_until_parked();
3208 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3209 buffer.read_with(cx, |buffer, _| {
3210 let buffer_text = buffer.text();
3211 if buffer_text == on_disk_text {
3212 assert!(
3213 !buffer.is_dirty() && !buffer.has_conflict(),
3214 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3215 );
3216 }
3217 // If the file change occurred while the buffer was processing the first
3218 // change, the buffer will be in a conflicting state.
3219 else {
3220 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3221 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3222 }
3223 });
3224}
3225
3226#[gpui::test]
3227async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3228 init_test(cx);
3229
3230 let fs = FakeFs::new(cx.executor());
3231 fs.insert_tree(
3232 path!("/dir"),
3233 json!({
3234 "file1": "the old contents",
3235 }),
3236 )
3237 .await;
3238
3239 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3240 let buffer = project
3241 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3242 .await
3243 .unwrap();
3244 buffer.update(cx, |buffer, cx| {
3245 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3246 });
3247
3248 project
3249 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3250 .await
3251 .unwrap();
3252
3253 let new_text = fs
3254 .load(Path::new(path!("/dir/file1")))
3255 .await
3256 .unwrap()
3257 .replace("\r\n", "\n");
3258 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3259}
3260
// "Save as" on an untitled buffer should write it to disk, associate the
// buffer with the new file (clearing dirty state), re-detect the language
// from the new extension, and make later opens of that path return the very
// same buffer entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts as dirty Plain Text once edited.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    // After the save, the buffer is clean and its language follows the new
    // `.rs` extension.
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the saved path must yield the same buffer entity, not a copy.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3312
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Exercises worktree rescanning against a real filesystem: files and
    // directories are renamed/removed on disk, then both the local worktree
    // and a remote replica (fed by the serialized update stream) must
    // converge on the new state, while open buffers track their files
    // across the renames.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Opens a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Returns the stable entry id for a worktree-relative path; panics if
    // the entry does not exist.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree produces so it can be
    // replayed into the remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the new on-disk layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids are stable across renames (including renames of a parent
    // directory).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths; the deleted file's
    // buffer keeps its old path but reports a deleted disk state.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3478
3479#[gpui::test(iterations = 10)]
3480async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3481 init_test(cx);
3482
3483 let fs = FakeFs::new(cx.executor());
3484 fs.insert_tree(
3485 path!("/dir"),
3486 json!({
3487 "a": {
3488 "file1": "",
3489 }
3490 }),
3491 )
3492 .await;
3493
3494 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3495 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3496 let tree_id = tree.update(cx, |tree, _| tree.id());
3497
3498 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3499 project.update(cx, |project, cx| {
3500 let tree = project.worktrees(cx).next().unwrap();
3501 tree.read(cx)
3502 .entry_for_path(path)
3503 .unwrap_or_else(|| panic!("no entry for path {}", path))
3504 .id
3505 })
3506 };
3507
3508 let dir_id = id_for_path("a", cx);
3509 let file_id = id_for_path("a/file1", cx);
3510 let buffer = project
3511 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3512 .await
3513 .unwrap();
3514 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3515
3516 project
3517 .update(cx, |project, cx| {
3518 project.rename_entry(dir_id, Path::new("b"), cx)
3519 })
3520 .unwrap()
3521 .await
3522 .to_included()
3523 .unwrap();
3524 cx.executor().run_until_parked();
3525
3526 assert_eq!(id_for_path("b", cx), dir_id);
3527 assert_eq!(id_for_path("b/file1", cx), file_id);
3528 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3529}
3530
3531#[gpui::test]
3532async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3533 init_test(cx);
3534
3535 let fs = FakeFs::new(cx.executor());
3536 fs.insert_tree(
3537 "/dir",
3538 json!({
3539 "a.txt": "a-contents",
3540 "b.txt": "b-contents",
3541 }),
3542 )
3543 .await;
3544
3545 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3546
3547 // Spawn multiple tasks to open paths, repeating some paths.
3548 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3549 (
3550 p.open_local_buffer("/dir/a.txt", cx),
3551 p.open_local_buffer("/dir/b.txt", cx),
3552 p.open_local_buffer("/dir/a.txt", cx),
3553 )
3554 });
3555
3556 let buffer_a_1 = buffer_a_1.await.unwrap();
3557 let buffer_a_2 = buffer_a_2.await.unwrap();
3558 let buffer_b = buffer_b.await.unwrap();
3559 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3560 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3561
3562 // There is only one buffer per path.
3563 let buffer_a_id = buffer_a_1.entity_id();
3564 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3565
3566 // Open the same path again while it is still open.
3567 drop(buffer_a_1);
3568 let buffer_a_3 = project
3569 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3570 .await
3571 .unwrap();
3572
3573 // There's still only one buffer per path.
3574 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3575}
3576
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    // Verifies the buffer dirty-state lifecycle and the exact event
    // sequences emitted: editing marks the buffer dirty, saving clears it,
    // editing back to the saved text clears it, and deleting the file on
    // disk dirties a clean buffer (but emits no extra DirtyChanged for an
    // already-dirty one).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Collects every non-operation event buffer1 emits.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                // Operation events are noise for this test; record the rest.
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version and mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Only the first edit after a save flips the dirty bit, so only one
        // DirtyChanged appears between the two Edited events.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer first, then delete the file underneath it.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3727
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how an open buffer reacts to its file changing on disk:
    // a clean buffer is reloaded (with anchors preserved through the diff),
    // while a dirty buffer keeps its contents and is flagged as conflicted.
    init_test(cx);

    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Capture anchors at the marked offsets so we can check that they
    // survive the reload.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // The buffer starts out clean and conflict-free.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk, modifying some of the words around the
    // marked offsets.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the diff and now sit at the new marked
        // offsets.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
3810
3811#[gpui::test]
3812async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3813 init_test(cx);
3814
3815 let fs = FakeFs::new(cx.executor());
3816 fs.insert_tree(
3817 path!("/dir"),
3818 json!({
3819 "file1": "a\nb\nc\n",
3820 "file2": "one\r\ntwo\r\nthree\r\n",
3821 }),
3822 )
3823 .await;
3824
3825 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3826 let buffer1 = project
3827 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3828 .await
3829 .unwrap();
3830 let buffer2 = project
3831 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
3832 .await
3833 .unwrap();
3834
3835 buffer1.update(cx, |buffer, _| {
3836 assert_eq!(buffer.text(), "a\nb\nc\n");
3837 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3838 });
3839 buffer2.update(cx, |buffer, _| {
3840 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3841 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3842 });
3843
3844 // Change a file's line endings on disk from unix to windows. The buffer's
3845 // state updates correctly.
3846 fs.save(
3847 path!("/dir/file1").as_ref(),
3848 &"aaa\nb\nc\n".into(),
3849 LineEnding::Windows,
3850 )
3851 .await
3852 .unwrap();
3853 cx.executor().run_until_parked();
3854 buffer1.update(cx, |buffer, _| {
3855 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3856 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3857 });
3858
3859 // Save a file with windows line endings. The file is written correctly.
3860 buffer2.update(cx, |buffer, cx| {
3861 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3862 });
3863 project
3864 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3865 .await
3866 .unwrap();
3867 assert_eq!(
3868 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
3869 "one\r\ntwo\r\nthree\r\nfour\r\n",
3870 );
3871}
3872
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Publishes LSP diagnostics whose `related_information` links primary
    // diagnostics to their hints, and verifies that the buffer groups them:
    // each primary diagnostic and its hints share a `group_id`, and
    // `diagnostic_group` returns each group's entries in position order.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Group "error 1": a warning with one related hint.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // The hint for "error 1", pointing back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Group "error 2": an error with two related hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First hint for "error 2", pointing back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second hint for "error 2".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, in position order, with group ids assigned: the
    // "error 2" group is 0 and the "error 1" group is 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 contains "error 2" and both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 contains "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
4115
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the LSP file-operation protocol around an entry rename:
    // when the server registers `willRename`/`didRename` filters, renaming
    // an entry sends a `workspace/willRenameFiles` request (whose returned
    // WorkspaceEdit is applied) followed by a `workspace/didRenameFiles`
    // notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server opts into rename notifications for Rust files and all
    // folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer so the fake language server starts up.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename: one.rs -> three.rs.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the server will return from willRenameFiles; the project is
    // expected to apply it.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set once the willRenameFiles handler runs, to prove it was called.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .handle_request::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request carries the old and new URIs of the
                    // renamed file.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server receives didRenameFiles with
    // the same URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4244
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Verifies symbol rename via LSP: `prepare_rename` resolves the
    // renameable range, and `perform_rename` applies the server's
    // WorkspaceEdit across multiple buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // Advertise prepare-rename support so prepare_rename goes
                // through the server.
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the server reports the
    // renameable range 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename ONE -> THREE; the server returns edits spanning
    // both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction touches both buffers with the edits
    // applied.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4384
4385#[gpui::test]
4386async fn test_search(cx: &mut gpui::TestAppContext) {
4387 init_test(cx);
4388
4389 let fs = FakeFs::new(cx.executor());
4390 fs.insert_tree(
4391 path!("/dir"),
4392 json!({
4393 "one.rs": "const ONE: usize = 1;",
4394 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4395 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4396 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4397 }),
4398 )
4399 .await;
4400 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4401 assert_eq!(
4402 search(
4403 &project,
4404 SearchQuery::text(
4405 "TWO",
4406 false,
4407 true,
4408 false,
4409 Default::default(),
4410 Default::default(),
4411 None
4412 )
4413 .unwrap(),
4414 cx
4415 )
4416 .await
4417 .unwrap(),
4418 HashMap::from_iter([
4419 (separator!("dir/two.rs").to_string(), vec![6..9]),
4420 (separator!("dir/three.rs").to_string(), vec![37..40])
4421 ])
4422 );
4423
4424 let buffer_4 = project
4425 .update(cx, |project, cx| {
4426 project.open_local_buffer(path!("/dir/four.rs"), cx)
4427 })
4428 .await
4429 .unwrap();
4430 buffer_4.update(cx, |buffer, cx| {
4431 let text = "two::TWO";
4432 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4433 });
4434
4435 assert_eq!(
4436 search(
4437 &project,
4438 SearchQuery::text(
4439 "TWO",
4440 false,
4441 true,
4442 false,
4443 Default::default(),
4444 Default::default(),
4445 None,
4446 )
4447 .unwrap(),
4448 cx
4449 )
4450 .await
4451 .unwrap(),
4452 HashMap::from_iter([
4453 (separator!("dir/two.rs").to_string(), vec![6..9]),
4454 (separator!("dir/three.rs").to_string(), vec![37..40]),
4455 (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
4456 ])
4457 );
4458}
4459
4460#[gpui::test]
4461async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4462 init_test(cx);
4463
4464 let search_query = "file";
4465
4466 let fs = FakeFs::new(cx.executor());
4467 fs.insert_tree(
4468 path!("/dir"),
4469 json!({
4470 "one.rs": r#"// Rust file one"#,
4471 "one.ts": r#"// TypeScript file one"#,
4472 "two.rs": r#"// Rust file two"#,
4473 "two.ts": r#"// TypeScript file two"#,
4474 }),
4475 )
4476 .await;
4477 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4478
4479 assert!(
4480 search(
4481 &project,
4482 SearchQuery::text(
4483 search_query,
4484 false,
4485 true,
4486 false,
4487 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4488 Default::default(),
4489 None
4490 )
4491 .unwrap(),
4492 cx
4493 )
4494 .await
4495 .unwrap()
4496 .is_empty(),
4497 "If no inclusions match, no files should be returned"
4498 );
4499
4500 assert_eq!(
4501 search(
4502 &project,
4503 SearchQuery::text(
4504 search_query,
4505 false,
4506 true,
4507 false,
4508 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4509 Default::default(),
4510 None
4511 )
4512 .unwrap(),
4513 cx
4514 )
4515 .await
4516 .unwrap(),
4517 HashMap::from_iter([
4518 (separator!("dir/one.rs").to_string(), vec![8..12]),
4519 (separator!("dir/two.rs").to_string(), vec![8..12]),
4520 ]),
4521 "Rust only search should give only Rust files"
4522 );
4523
4524 assert_eq!(
4525 search(
4526 &project,
4527 SearchQuery::text(
4528 search_query,
4529 false,
4530 true,
4531 false,
4532
4533 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4534
4535 Default::default(),
4536 None,
4537 ).unwrap(),
4538 cx
4539 )
4540 .await
4541 .unwrap(),
4542 HashMap::from_iter([
4543 (separator!("dir/one.ts").to_string(), vec![14..18]),
4544 (separator!("dir/two.ts").to_string(), vec![14..18]),
4545 ]),
4546 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4547 );
4548
4549 assert_eq!(
4550 search(
4551 &project,
4552 SearchQuery::text(
4553 search_query,
4554 false,
4555 true,
4556 false,
4557
4558 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4559
4560 Default::default(),
4561 None,
4562 ).unwrap(),
4563 cx
4564 )
4565 .await
4566 .unwrap(),
4567 HashMap::from_iter([
4568 (separator!("dir/two.ts").to_string(), vec![14..18]),
4569 (separator!("dir/one.rs").to_string(), vec![8..12]),
4570 (separator!("dir/one.ts").to_string(), vec![14..18]),
4571 (separator!("dir/two.rs").to_string(), vec![8..12]),
4572 ]),
4573 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4574 );
4575}
4576
4577#[gpui::test]
4578async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4579 init_test(cx);
4580
4581 let search_query = "file";
4582
4583 let fs = FakeFs::new(cx.executor());
4584 fs.insert_tree(
4585 path!("/dir"),
4586 json!({
4587 "one.rs": r#"// Rust file one"#,
4588 "one.ts": r#"// TypeScript file one"#,
4589 "two.rs": r#"// Rust file two"#,
4590 "two.ts": r#"// TypeScript file two"#,
4591 }),
4592 )
4593 .await;
4594 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4595
4596 assert_eq!(
4597 search(
4598 &project,
4599 SearchQuery::text(
4600 search_query,
4601 false,
4602 true,
4603 false,
4604 Default::default(),
4605 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4606 None,
4607 )
4608 .unwrap(),
4609 cx
4610 )
4611 .await
4612 .unwrap(),
4613 HashMap::from_iter([
4614 (separator!("dir/one.rs").to_string(), vec![8..12]),
4615 (separator!("dir/one.ts").to_string(), vec![14..18]),
4616 (separator!("dir/two.rs").to_string(), vec![8..12]),
4617 (separator!("dir/two.ts").to_string(), vec![14..18]),
4618 ]),
4619 "If no exclusions match, all files should be returned"
4620 );
4621
4622 assert_eq!(
4623 search(
4624 &project,
4625 SearchQuery::text(
4626 search_query,
4627 false,
4628 true,
4629 false,
4630 Default::default(),
4631 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4632 None,
4633 )
4634 .unwrap(),
4635 cx
4636 )
4637 .await
4638 .unwrap(),
4639 HashMap::from_iter([
4640 (separator!("dir/one.ts").to_string(), vec![14..18]),
4641 (separator!("dir/two.ts").to_string(), vec![14..18]),
4642 ]),
4643 "Rust exclusion search should give only TypeScript files"
4644 );
4645
4646 assert_eq!(
4647 search(
4648 &project,
4649 SearchQuery::text(
4650 search_query,
4651 false,
4652 true,
4653 false,
4654 Default::default(),
4655 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4656 None,
4657 ).unwrap(),
4658 cx
4659 )
4660 .await
4661 .unwrap(),
4662 HashMap::from_iter([
4663 (separator!("dir/one.rs").to_string(), vec![8..12]),
4664 (separator!("dir/two.rs").to_string(), vec![8..12]),
4665 ]),
4666 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4667 );
4668
4669 assert!(
4670 search(
4671 &project,
4672 SearchQuery::text(
4673 search_query,
4674 false,
4675 true,
4676 false,
4677 Default::default(),
4678
4679 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4680 None,
4681
4682 ).unwrap(),
4683 cx
4684 )
4685 .await
4686 .unwrap().is_empty(),
4687 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4688 );
4689}
4690
4691#[gpui::test]
4692async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4693 init_test(cx);
4694
4695 let search_query = "file";
4696
4697 let fs = FakeFs::new(cx.executor());
4698 fs.insert_tree(
4699 path!("/dir"),
4700 json!({
4701 "one.rs": r#"// Rust file one"#,
4702 "one.ts": r#"// TypeScript file one"#,
4703 "two.rs": r#"// Rust file two"#,
4704 "two.ts": r#"// TypeScript file two"#,
4705 }),
4706 )
4707 .await;
4708 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4709
4710 assert!(
4711 search(
4712 &project,
4713 SearchQuery::text(
4714 search_query,
4715 false,
4716 true,
4717 false,
4718 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4719 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4720 None,
4721 )
4722 .unwrap(),
4723 cx
4724 )
4725 .await
4726 .unwrap()
4727 .is_empty(),
4728 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4729 );
4730
4731 assert!(
4732 search(
4733 &project,
4734 SearchQuery::text(
4735 search_query,
4736 false,
4737 true,
4738 false,
4739 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4740 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4741 None,
4742 ).unwrap(),
4743 cx
4744 )
4745 .await
4746 .unwrap()
4747 .is_empty(),
4748 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4749 );
4750
4751 assert!(
4752 search(
4753 &project,
4754 SearchQuery::text(
4755 search_query,
4756 false,
4757 true,
4758 false,
4759 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4760 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4761 None,
4762 )
4763 .unwrap(),
4764 cx
4765 )
4766 .await
4767 .unwrap()
4768 .is_empty(),
4769 "Non-matching inclusions and exclusions should not change that."
4770 );
4771
4772 assert_eq!(
4773 search(
4774 &project,
4775 SearchQuery::text(
4776 search_query,
4777 false,
4778 true,
4779 false,
4780 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4781 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4782 None,
4783 )
4784 .unwrap(),
4785 cx
4786 )
4787 .await
4788 .unwrap(),
4789 HashMap::from_iter([
4790 (separator!("dir/one.ts").to_string(), vec![14..18]),
4791 (separator!("dir/two.ts").to_string(), vec![14..18]),
4792 ]),
4793 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4794 );
4795}
4796
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two identical worktrees; inclusion globs prefixed with a worktree name
    // should scope results to that worktree, while bare globs span both.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // Worktree-qualified inclusion: only worktree-a's .rs file matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Same, scoped to worktree-b.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // Unqualified glob applies across all worktrees.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
4891
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Repo layout: "target" and "node_modules" are gitignored; only the
    // top-level package.json is tracked and contains the query.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Default search (include-ignored flag false) must skip ignored dirs.
    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Fresh project so previously-scanned state doesn't leak into this case;
    // with include-ignored set, every file (ignored or not) is searched.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/package.json").to_string(), vec![8..11]),
            (separator!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                separator!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                separator!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                separator!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                separator!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusions/exclusions still apply on top of include-ignored: include
    // only the prettier dir, then exclude its .ts file.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            separator!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
5011
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // NOTE(review): this tree is inserted with a literal path while the
    // assertions below use the path!() macro — verify this behaves as
    // intended on Windows, where path!() rewrites separators/roots.
    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The worktree root is /one/two/three; its parent dirs are outside the
    // project. "b.." is a legal file name (a trailing "..", not a traversal).
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only "b.." was created; the rejected entries left the fs untouched.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from(path!("/")),
            PathBuf::from(path!("/one")),
            PathBuf::from(path!("/one/two")),
            PathBuf::from(path!("/one/two/c.rs")),
            PathBuf::from(path!("/one/two/three")),
            PathBuf::from(path!("/one/two/three/a.txt")),
            PathBuf::from(path!("/one/two/three/b..")),
            PathBuf::from(path!("/one/two/three/four")),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
5081
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Four fake servers for the same language: three advertise hover support
    // (one of which will answer with None), one has no hover capability.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all four servers for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install per-server hover handlers keyed by server name; servers without
    // hover capability install a panicking handler that must never fire.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two return real hover content labeled with their name.
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            "ESLintServer" => {
                // Has hover capability but answers with no content.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // No hover capability: the project must never query it.
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Kick off one hover; all capable servers should receive a request.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned content contribute to the result.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5234
5235#[gpui::test]
5236async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5237 init_test(cx);
5238
5239 let fs = FakeFs::new(cx.executor());
5240 fs.insert_tree(
5241 path!("/dir"),
5242 json!({
5243 "a.ts": "a",
5244 }),
5245 )
5246 .await;
5247
5248 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5249
5250 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5251 language_registry.add(typescript_lang());
5252 let mut fake_language_servers = language_registry.register_fake_lsp(
5253 "TypeScript",
5254 FakeLspAdapter {
5255 capabilities: lsp::ServerCapabilities {
5256 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5257 ..lsp::ServerCapabilities::default()
5258 },
5259 ..FakeLspAdapter::default()
5260 },
5261 );
5262
5263 let (buffer, _handle) = project
5264 .update(cx, |p, cx| {
5265 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5266 })
5267 .await
5268 .unwrap();
5269 cx.executor().run_until_parked();
5270
5271 let fake_server = fake_language_servers
5272 .next()
5273 .await
5274 .expect("failed to get the language server");
5275
5276 let mut request_handled =
5277 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
5278 Ok(Some(lsp::Hover {
5279 contents: lsp::HoverContents::Array(vec![
5280 lsp::MarkedString::String("".to_string()),
5281 lsp::MarkedString::String(" ".to_string()),
5282 lsp::MarkedString::String("\n\n\n".to_string()),
5283 ]),
5284 range: None,
5285 }))
5286 });
5287
5288 let hover_task = project.update(cx, |project, cx| {
5289 project.hover(&buffer, Point::new(0, 0), cx)
5290 });
5291 let () = request_handled
5292 .next()
5293 .await
5294 .expect("All hover requests should have been triggered");
5295 assert_eq!(
5296 Vec::<String>::new(),
5297 hover_task
5298 .await
5299 .into_iter()
5300 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5301 .sorted()
5302 .collect::<Vec<_>>(),
5303 "Empty hover parts should be ignored"
5304 );
5305}
5306
5307#[gpui::test]
5308async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
5309 init_test(cx);
5310
5311 let fs = FakeFs::new(cx.executor());
5312 fs.insert_tree(
5313 path!("/dir"),
5314 json!({
5315 "a.ts": "a",
5316 }),
5317 )
5318 .await;
5319
5320 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5321
5322 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5323 language_registry.add(typescript_lang());
5324 let mut fake_language_servers = language_registry.register_fake_lsp(
5325 "TypeScript",
5326 FakeLspAdapter {
5327 capabilities: lsp::ServerCapabilities {
5328 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5329 ..lsp::ServerCapabilities::default()
5330 },
5331 ..FakeLspAdapter::default()
5332 },
5333 );
5334
5335 let (buffer, _handle) = project
5336 .update(cx, |p, cx| {
5337 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5338 })
5339 .await
5340 .unwrap();
5341 cx.executor().run_until_parked();
5342
5343 let fake_server = fake_language_servers
5344 .next()
5345 .await
5346 .expect("failed to get the language server");
5347
5348 let mut request_handled = fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5349 move |_, _| async move {
5350 Ok(Some(vec![
5351 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5352 title: "organize imports".to_string(),
5353 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
5354 ..lsp::CodeAction::default()
5355 }),
5356 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5357 title: "fix code".to_string(),
5358 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
5359 ..lsp::CodeAction::default()
5360 }),
5361 ]))
5362 },
5363 );
5364
5365 let code_actions_task = project.update(cx, |project, cx| {
5366 project.code_actions(
5367 &buffer,
5368 0..buffer.read(cx).len(),
5369 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
5370 cx,
5371 )
5372 });
5373
5374 let () = request_handled
5375 .next()
5376 .await
5377 .expect("The code action request should have been triggered");
5378
5379 let code_actions = code_actions_task.await.unwrap();
5380 assert_eq!(code_actions.len(), 1);
5381 assert_eq!(
5382 code_actions[0].lsp_action.action_kind(),
5383 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
5384 );
5385}
5386
5387#[gpui::test]
5388async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5389 init_test(cx);
5390
5391 let fs = FakeFs::new(cx.executor());
5392 fs.insert_tree(
5393 path!("/dir"),
5394 json!({
5395 "a.tsx": "a",
5396 }),
5397 )
5398 .await;
5399
5400 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5401
5402 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5403 language_registry.add(tsx_lang());
5404 let language_server_names = [
5405 "TypeScriptServer",
5406 "TailwindServer",
5407 "ESLintServer",
5408 "NoActionsCapabilitiesServer",
5409 ];
5410
5411 let mut language_server_rxs = [
5412 language_registry.register_fake_lsp(
5413 "tsx",
5414 FakeLspAdapter {
5415 name: language_server_names[0],
5416 capabilities: lsp::ServerCapabilities {
5417 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5418 ..lsp::ServerCapabilities::default()
5419 },
5420 ..FakeLspAdapter::default()
5421 },
5422 ),
5423 language_registry.register_fake_lsp(
5424 "tsx",
5425 FakeLspAdapter {
5426 name: language_server_names[1],
5427 capabilities: lsp::ServerCapabilities {
5428 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5429 ..lsp::ServerCapabilities::default()
5430 },
5431 ..FakeLspAdapter::default()
5432 },
5433 ),
5434 language_registry.register_fake_lsp(
5435 "tsx",
5436 FakeLspAdapter {
5437 name: language_server_names[2],
5438 capabilities: lsp::ServerCapabilities {
5439 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5440 ..lsp::ServerCapabilities::default()
5441 },
5442 ..FakeLspAdapter::default()
5443 },
5444 ),
5445 language_registry.register_fake_lsp(
5446 "tsx",
5447 FakeLspAdapter {
5448 name: language_server_names[3],
5449 capabilities: lsp::ServerCapabilities {
5450 code_action_provider: None,
5451 ..lsp::ServerCapabilities::default()
5452 },
5453 ..FakeLspAdapter::default()
5454 },
5455 ),
5456 ];
5457
5458 let (buffer, _handle) = project
5459 .update(cx, |p, cx| {
5460 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5461 })
5462 .await
5463 .unwrap();
5464 cx.executor().run_until_parked();
5465
5466 let mut servers_with_actions_requests = HashMap::default();
5467 for i in 0..language_server_names.len() {
5468 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5469 panic!(
5470 "Failed to get language server #{i} with name {}",
5471 &language_server_names[i]
5472 )
5473 });
5474 let new_server_name = new_server.server.name();
5475
5476 assert!(
5477 !servers_with_actions_requests.contains_key(&new_server_name),
5478 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5479 );
5480 match new_server_name.0.as_ref() {
5481 "TailwindServer" | "TypeScriptServer" => {
5482 servers_with_actions_requests.insert(
5483 new_server_name.clone(),
5484 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5485 move |_, _| {
5486 let name = new_server_name.clone();
5487 async move {
5488 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5489 lsp::CodeAction {
5490 title: format!("{name} code action"),
5491 ..lsp::CodeAction::default()
5492 },
5493 )]))
5494 }
5495 },
5496 ),
5497 );
5498 }
5499 "ESLintServer" => {
5500 servers_with_actions_requests.insert(
5501 new_server_name,
5502 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5503 |_, _| async move { Ok(None) },
5504 ),
5505 );
5506 }
5507 "NoActionsCapabilitiesServer" => {
5508 let _never_handled = new_server
5509 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5510 panic!(
5511 "Should not call for code actions server with no corresponding capabilities"
5512 )
5513 });
5514 }
5515 unexpected => panic!("Unexpected server name: {unexpected}"),
5516 }
5517 }
5518
5519 let code_actions_task = project.update(cx, |project, cx| {
5520 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5521 });
5522
5523 // cx.run_until_parked();
5524 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5525 |mut code_actions_request| async move {
5526 code_actions_request
5527 .next()
5528 .await
5529 .expect("All code actions requests should have been triggered")
5530 },
5531 ))
5532 .await;
5533 assert_eq!(
5534 vec!["TailwindServer code action", "TypeScriptServer code action"],
5535 code_actions_task
5536 .await
5537 .unwrap()
5538 .into_iter()
5539 .map(|code_action| code_action.lsp_action.title().to_owned())
5540 .sorted()
5541 .collect::<Vec<_>>(),
5542 "Should receive code actions responses from all related servers with hover capabilities"
5543 );
5544}
5545
5546#[gpui::test]
5547async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5548 init_test(cx);
5549
5550 let fs = FakeFs::new(cx.executor());
5551 fs.insert_tree(
5552 "/dir",
5553 json!({
5554 "a.rs": "let a = 1;",
5555 "b.rs": "let b = 2;",
5556 "c.rs": "let c = 2;",
5557 }),
5558 )
5559 .await;
5560
5561 let project = Project::test(
5562 fs,
5563 [
5564 "/dir/a.rs".as_ref(),
5565 "/dir/b.rs".as_ref(),
5566 "/dir/c.rs".as_ref(),
5567 ],
5568 cx,
5569 )
5570 .await;
5571
5572 // check the initial state and get the worktrees
5573 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5574 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5575 assert_eq!(worktrees.len(), 3);
5576
5577 let worktree_a = worktrees[0].read(cx);
5578 let worktree_b = worktrees[1].read(cx);
5579 let worktree_c = worktrees[2].read(cx);
5580
5581 // check they start in the right order
5582 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5583 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5584 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5585
5586 (
5587 worktrees[0].clone(),
5588 worktrees[1].clone(),
5589 worktrees[2].clone(),
5590 )
5591 });
5592
5593 // move first worktree to after the second
5594 // [a, b, c] -> [b, a, c]
5595 project
5596 .update(cx, |project, cx| {
5597 let first = worktree_a.read(cx);
5598 let second = worktree_b.read(cx);
5599 project.move_worktree(first.id(), second.id(), cx)
5600 })
5601 .expect("moving first after second");
5602
5603 // check the state after moving
5604 project.update(cx, |project, cx| {
5605 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5606 assert_eq!(worktrees.len(), 3);
5607
5608 let first = worktrees[0].read(cx);
5609 let second = worktrees[1].read(cx);
5610 let third = worktrees[2].read(cx);
5611
5612 // check they are now in the right order
5613 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5614 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5615 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5616 });
5617
5618 // move the second worktree to before the first
5619 // [b, a, c] -> [a, b, c]
5620 project
5621 .update(cx, |project, cx| {
5622 let second = worktree_a.read(cx);
5623 let first = worktree_b.read(cx);
5624 project.move_worktree(first.id(), second.id(), cx)
5625 })
5626 .expect("moving second before first");
5627
5628 // check the state after moving
5629 project.update(cx, |project, cx| {
5630 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5631 assert_eq!(worktrees.len(), 3);
5632
5633 let first = worktrees[0].read(cx);
5634 let second = worktrees[1].read(cx);
5635 let third = worktrees[2].read(cx);
5636
5637 // check they are now in the right order
5638 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5639 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5640 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5641 });
5642
5643 // move the second worktree to after the third
5644 // [a, b, c] -> [a, c, b]
5645 project
5646 .update(cx, |project, cx| {
5647 let second = worktree_b.read(cx);
5648 let third = worktree_c.read(cx);
5649 project.move_worktree(second.id(), third.id(), cx)
5650 })
5651 .expect("moving second after third");
5652
5653 // check the state after moving
5654 project.update(cx, |project, cx| {
5655 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5656 assert_eq!(worktrees.len(), 3);
5657
5658 let first = worktrees[0].read(cx);
5659 let second = worktrees[1].read(cx);
5660 let third = worktrees[2].read(cx);
5661
5662 // check they are now in the right order
5663 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5664 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5665 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5666 });
5667
5668 // move the third worktree to before the second
5669 // [a, c, b] -> [a, b, c]
5670 project
5671 .update(cx, |project, cx| {
5672 let third = worktree_c.read(cx);
5673 let second = worktree_b.read(cx);
5674 project.move_worktree(third.id(), second.id(), cx)
5675 })
5676 .expect("moving third before second");
5677
5678 // check the state after moving
5679 project.update(cx, |project, cx| {
5680 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5681 assert_eq!(worktrees.len(), 3);
5682
5683 let first = worktrees[0].read(cx);
5684 let second = worktrees[1].read(cx);
5685 let third = worktrees[2].read(cx);
5686
5687 // check they are now in the right order
5688 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5689 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5690 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5691 });
5692
5693 // move the first worktree to after the third
5694 // [a, b, c] -> [b, c, a]
5695 project
5696 .update(cx, |project, cx| {
5697 let first = worktree_a.read(cx);
5698 let third = worktree_c.read(cx);
5699 project.move_worktree(first.id(), third.id(), cx)
5700 })
5701 .expect("moving first after third");
5702
5703 // check the state after moving
5704 project.update(cx, |project, cx| {
5705 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5706 assert_eq!(worktrees.len(), 3);
5707
5708 let first = worktrees[0].read(cx);
5709 let second = worktrees[1].read(cx);
5710 let third = worktrees[2].read(cx);
5711
5712 // check they are now in the right order
5713 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5714 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5715 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5716 });
5717
5718 // move the third worktree to before the first
5719 // [b, c, a] -> [a, b, c]
5720 project
5721 .update(cx, |project, cx| {
5722 let third = worktree_a.read(cx);
5723 let first = worktree_b.read(cx);
5724 project.move_worktree(third.id(), first.id(), cx)
5725 })
5726 .expect("moving third before first");
5727
5728 // check the state after moving
5729 project.update(cx, |project, cx| {
5730 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5731 assert_eq!(worktrees.len(), 3);
5732
5733 let first = worktrees[0].read(cx);
5734 let second = worktrees[1].read(cx);
5735 let third = worktrees[2].read(cx);
5736
5737 // check they are now in the right order
5738 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5739 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5740 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5741 });
5742}
5743
/// Verifies that a buffer's *unstaged* diff (working copy vs. Git index)
/// reflects the index contents, and is recomputed when the index changes.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index (staged) version of the file.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy version: one added line, one modified line.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // Seed the fake repo's index with the staged contents.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Let the diff recalculation settle, then check the buffer-vs-index hunks:
    // the comment line is an addition, the println! line a modification.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Rewrite the index so the buffer's only remaining difference from it is
    // the println! line.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    // After the index change is picked up, only one added hunk remains.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
5841
/// Verifies that a buffer's *uncommitted* diff (working copy vs. HEAD, with
/// secondary staged/unstaged status derived from the index) tracks changes to
/// HEAD and the index, including a file that exists in HEAD but was deleted
/// from the working copy.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD, index, and working-copy versions of src/modification.rs.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // deletion.rs is present in HEAD and the index, but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text picks up the buffer's language (Rust, by suffix).
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The comment addition is unstaged (HasSecondaryHunk); the println!
    // modification is already in the index, so it has no secondary hunk.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as one deleted hunk; the deletion is not yet staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once the file is gone from the index, the deletion has no secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6019
/// Exercises staging of individual diff hunks:
/// - a staged hunk first appears optimistically as `SecondaryHunkRemovalPending`,
///   then settles to `NoSecondaryHunk` once the index write completes;
/// - the diff emits `HunksStagedOrUnstaged` followed by `DiffChanged` events,
///   first for the staged hunk's range and later for the re-read index text;
/// - a failed index write (simulated via `set_error_message_for_index_write`)
///   rolls the hunk back to `HasSecondaryHunk`;
/// - two staging operations issued back-to-back both take effect.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical, so every hunk is initially unstaged.
    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It is optimistically shown as pending even though
    // the index write is doomed to fail.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6359
/// Like `test_staging_hunks`, but with FS events paused (`fs.pause_events`)
/// and flushed manually, to verify that staging operations issued while
/// earlier index-write events are still in flight are neither lost nor
/// rolled back once all events are delivered.
#[gpui::test(iterations = 10, seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. It is shown as pending immediately, even though
    // no FS event has been delivered yet.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6552
/// Stages and then unstages 100 hunks, each with its own
/// `stage_or_unstage_hunks` call, verifying the optimistic pending state of
/// every hunk right after the calls and the settled state after all index
/// writes complete.
#[gpui::test]
async fn test_staging_lots_of_hunks_fast(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every fifth line of the 500-line file is replaced, producing 100 hunks.
    let different_lines = (0..500)
        .step_by(5)
        .map(|i| format!("diff {}\n", i))
        .collect::<Vec<String>>();
    let committed_contents = (0..500).map(|i| format!("{}\n", i)).collect::<String>();
    let file_contents = (0..500)
        .map(|i| {
            if i % 5 == 0 {
                different_lines[i / 5].clone()
            } else {
                format!("{}\n", i)
            }
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // One single-line modified hunk per multiple-of-five line; the status
    // field is mutated below as the expected state changes.
    let mut expected_hunks: Vec<(Range<u32>, String, String, DiffHunkStatus)> = (0..500)
        .step_by(5)
        .map(|i| {
            (
                i as u32..i as u32 + 1,
                format!("{}\n", i),
                different_lines[i / 5].clone(),
                DiffHunkStatus::modified(HasSecondaryHunk),
            )
        })
        .collect();

    // The hunks are initially unstaged
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkRemovalPending);
    }

    // Stage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        }

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(NoSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkAdditionPending);
    }

    // Unstage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(false, &[hunk], &snapshot, true, cx);
        }

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks, again
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(HasSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });
}
6698
/// Verifies that an uncommitted diff still works when the project's root is a
/// single file (`/dir/src/main.rs`) rather than a directory containing the
/// `.git` folder.
#[gpui::test]
async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello from HEAD");
        }
    "#
    .unindent();
    let file_contents = r#"
        fn main() {
            println!("hello from the working copy");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and index agree, so the working-copy change is fully unstaged.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents.clone())],
    );

    // Note: the project is opened on the single file, not on "/dir".
    let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.hunks(&snapshot, cx),
            &snapshot,
            &uncommitted_diff.base_text_string().unwrap(),
            &[(
                1..2,
                "    println!(\"hello from HEAD\");\n",
                "    println!(\"hello from the working copy\");\n",
                DiffHunkStatus {
                    kind: DiffHunkStatusKind::Modified,
                    secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
                },
            )],
        );
    });
}
6771
6772async fn search(
6773 project: &Entity<Project>,
6774 query: SearchQuery,
6775 cx: &mut gpui::TestAppContext,
6776) -> Result<HashMap<String, Vec<Range<usize>>>> {
6777 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
6778 let mut results = HashMap::default();
6779 while let Ok(search_result) = search_rx.recv().await {
6780 match search_result {
6781 SearchResult::Buffer { buffer, ranges } => {
6782 results.entry(buffer).or_insert(ranges);
6783 }
6784 SearchResult::LimitReached => {}
6785 }
6786 }
6787 Ok(results
6788 .into_iter()
6789 .map(|(buffer, ranges)| {
6790 buffer.update(cx, |buffer, cx| {
6791 let path = buffer
6792 .file()
6793 .unwrap()
6794 .full_path(cx)
6795 .to_string_lossy()
6796 .to_string();
6797 let ranges = ranges
6798 .into_iter()
6799 .map(|range| range.to_offset(buffer))
6800 .collect::<Vec<_>>();
6801 (path, ranges)
6802 })
6803 })
6804 .collect())
6805}
6806
6807pub fn init_test(cx: &mut gpui::TestAppContext) {
6808 if std::env::var("RUST_LOG").is_ok() {
6809 env_logger::try_init().ok();
6810 }
6811
6812 cx.update(|cx| {
6813 let settings_store = SettingsStore::test(cx);
6814 cx.set_global(settings_store);
6815 release_channel::init(SemanticVersion::default(), cx);
6816 language::init(cx);
6817 Project::init_settings(cx);
6818 });
6819}
6820
6821fn json_lang() -> Arc<Language> {
6822 Arc::new(Language::new(
6823 LanguageConfig {
6824 name: "JSON".into(),
6825 matcher: LanguageMatcher {
6826 path_suffixes: vec!["json".to_string()],
6827 ..Default::default()
6828 },
6829 ..Default::default()
6830 },
6831 None,
6832 ))
6833}
6834
6835fn js_lang() -> Arc<Language> {
6836 Arc::new(Language::new(
6837 LanguageConfig {
6838 name: "JavaScript".into(),
6839 matcher: LanguageMatcher {
6840 path_suffixes: vec!["js".to_string()],
6841 ..Default::default()
6842 },
6843 ..Default::default()
6844 },
6845 None,
6846 ))
6847}
6848
6849fn rust_lang() -> Arc<Language> {
6850 Arc::new(Language::new(
6851 LanguageConfig {
6852 name: "Rust".into(),
6853 matcher: LanguageMatcher {
6854 path_suffixes: vec!["rs".to_string()],
6855 ..Default::default()
6856 },
6857 ..Default::default()
6858 },
6859 Some(tree_sitter_rust::LANGUAGE.into()),
6860 ))
6861}
6862
6863fn typescript_lang() -> Arc<Language> {
6864 Arc::new(Language::new(
6865 LanguageConfig {
6866 name: "TypeScript".into(),
6867 matcher: LanguageMatcher {
6868 path_suffixes: vec!["ts".to_string()],
6869 ..Default::default()
6870 },
6871 ..Default::default()
6872 },
6873 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
6874 ))
6875}
6876
6877fn tsx_lang() -> Arc<Language> {
6878 Arc::new(Language::new(
6879 LanguageConfig {
6880 name: "tsx".into(),
6881 matcher: LanguageMatcher {
6882 path_suffixes: vec!["tsx".to_string()],
6883 ..Default::default()
6884 },
6885 ..Default::default()
6886 },
6887 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
6888 ))
6889}
6890
6891fn get_all_tasks(
6892 project: &Entity<Project>,
6893 task_contexts: &TaskContexts,
6894 cx: &mut App,
6895) -> Vec<(TaskSourceKind, ResolvedTask)> {
6896 let (mut old, new) = project.update(cx, |project, cx| {
6897 project
6898 .task_store
6899 .read(cx)
6900 .task_inventory()
6901 .unwrap()
6902 .read(cx)
6903 .used_and_current_resolved_tasks(task_contexts, cx)
6904 });
6905 old.extend(new);
6906 old
6907}