1#![allow(clippy::format_collect)]
2
3use crate::{task_inventory::TaskContexts, task_store::TaskSettingsLocation, Event, *};
4use buffer_diff::{
5 assert_hunks, BufferDiffEvent, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
6};
7use fs::FakeFs;
8use futures::{future, StreamExt};
9use gpui::{App, SemanticVersion, UpdateGlobal};
10use http_client::Url;
11use language::{
12 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
13 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
14 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
15 OffsetRangeExt, Point, ToPoint,
16};
17use lsp::{
18 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
19 NumberOrString, TextDocumentEdit, WillRenameFiles,
20};
21use parking_lot::Mutex;
22use paths::tasks_file;
23use pretty_assertions::{assert_eq, assert_matches};
24use serde_json::json;
25#[cfg(not(windows))]
26use std::os;
27use std::{mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
28use task::{ResolvedTask, TaskContext};
29use unindent::Unindent as _;
30use util::{
31 assert_set_eq, path,
32 paths::PathMatcher,
33 separator,
34 test::{marked_text_offsets, TempTree},
35 uri, TryFutureExt as _,
36};
37
38#[gpui::test]
39async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
40 cx.executor().allow_parking();
41
42 let (tx, mut rx) = futures::channel::mpsc::unbounded();
43 let _thread = std::thread::spawn(move || {
44 #[cfg(not(target_os = "windows"))]
45 std::fs::metadata("/tmp").unwrap();
46 #[cfg(target_os = "windows")]
47 std::fs::metadata("C:/Windows").unwrap();
48 std::thread::sleep(Duration::from_millis(1000));
49 tx.unbounded_send(1).unwrap();
50 });
51 rx.next().await.unwrap();
52}
53
54#[gpui::test]
55async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
56 cx.executor().allow_parking();
57
58 let io_task = smol::unblock(move || {
59 println!("sleeping on thread {:?}", std::thread::current().id());
60 std::thread::sleep(Duration::from_millis(10));
61 1
62 });
63
64 let task = cx.foreground_executor().spawn(async move {
65 io_task.await;
66 });
67
68 task.await;
69}
70
71#[cfg(not(windows))]
72#[gpui::test]
73async fn test_symlinks(cx: &mut gpui::TestAppContext) {
74 init_test(cx);
75 cx.executor().allow_parking();
76
77 let dir = TempTree::new(json!({
78 "root": {
79 "apple": "",
80 "banana": {
81 "carrot": {
82 "date": "",
83 "endive": "",
84 }
85 },
86 "fennel": {
87 "grape": "",
88 }
89 }
90 }));
91
92 let root_link_path = dir.path().join("root_link");
93 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
94 os::unix::fs::symlink(
95 dir.path().join("root/fennel"),
96 dir.path().join("root/finnochio"),
97 )
98 .unwrap();
99
100 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
101
102 project.update(cx, |project, cx| {
103 let tree = project.worktrees(cx).next().unwrap().read(cx);
104 assert_eq!(tree.file_count(), 5);
105 assert_eq!(
106 tree.inode_for_path("fennel/grape"),
107 tree.inode_for_path("finnochio/grape")
108 );
109 });
110}
111
/// Verifies that `.editorconfig` files are discovered per-directory and that
/// their settings take precedence over `.zed/settings.json`, with an
/// `.editorconfig` deeper in the tree overriding one at the worktree root.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Root .editorconfig configures *.rs and *.js; .zed/settings.json sets
    // conflicting defaults that the editorconfig values should override.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            // Nested .editorconfig: overrides indent_size for *.rs below "b/".
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into the FakeFs so the project can watch it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set for *.js, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by the .editorconfig glob "*.rs",
        // so the .zed/settings.json tab_size of 8 applies.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
201
/// Exercises per-directory `.zed` settings and tasks: nested
/// `.zed/settings.json` overrides the root one, worktree task files from
/// different directories are merged, previously-scheduled tasks are surfaced
/// first, and a global tasks file contributes additional entries.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Root .zed provides tab_size=8 and a "cargo check all" task; the nested
    // b/.zed provides tab_size=2 and a "cargo check" task.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    // Identity of the task source backed by the root-level ".zed" directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolve per-file: a/a.rs sees the root settings,
            // b/b.rs sees the nested override.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files contribute; the nested directory's task sorts
    // first. The id_base embeds the OS-specific path separator.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as recently used, then install a global tasks file.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                    settings::TaskKind::Script,
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // The scheduled task now sorts first, followed by the other worktree task
    // and finally the newly-added global task (with its env preserved).
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
407
408#[gpui::test]
409async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
410 init_test(cx);
411 TaskStore::init(None);
412
413 let fs = FakeFs::new(cx.executor());
414 fs.insert_tree(
415 path!("/dir"),
416 json!({
417 ".zed": {
418 "tasks.json": r#"[{
419 "label": "test worktree root",
420 "command": "echo $ZED_WORKTREE_ROOT"
421 }]"#,
422 },
423 "a": {
424 "a.rs": "fn a() {\n A\n}"
425 },
426 }),
427 )
428 .await;
429
430 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
431 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
432
433 cx.executor().run_until_parked();
434 let worktree_id = cx.update(|cx| {
435 project.update(cx, |project, cx| {
436 project.worktrees(cx).next().unwrap().read(cx).id()
437 })
438 });
439
440 let active_non_worktree_item_tasks = cx.update(|cx| {
441 get_all_tasks(
442 &project,
443 &TaskContexts {
444 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
445 active_worktree_context: None,
446 other_worktree_contexts: Vec::new(),
447 },
448 cx,
449 )
450 });
451 assert!(
452 active_non_worktree_item_tasks.is_empty(),
453 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
454 );
455
456 let active_worktree_tasks = cx.update(|cx| {
457 get_all_tasks(
458 &project,
459 &TaskContexts {
460 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
461 active_worktree_context: Some((worktree_id, {
462 let mut worktree_context = TaskContext::default();
463 worktree_context
464 .task_variables
465 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
466 worktree_context
467 })),
468 other_worktree_contexts: Vec::new(),
469 },
470 cx,
471 )
472 });
473 assert_eq!(
474 active_worktree_tasks
475 .into_iter()
476 .map(|(source_kind, task)| {
477 let resolved = task.resolved.unwrap();
478 (source_kind, resolved.command)
479 })
480 .collect::<Vec<_>>(),
481 vec![(
482 TaskSourceKind::Worktree {
483 id: worktree_id,
484 directory_in_worktree: PathBuf::from(separator!(".zed")),
485 id_base: if cfg!(windows) {
486 "local worktree tasks from directory \".zed\"".into()
487 } else {
488 "local worktree tasks from directory \".zed\"".into()
489 },
490 },
491 "echo /dir".to_string(),
492 )]
493 );
494}
495
/// End-to-end coverage of language-server lifecycle management: servers start
/// lazily when a matching buffer opens, open/change/save/close notifications
/// are routed only to servers for the buffer's language, renames can move a
/// buffer between servers (resetting its document version and diagnostics),
/// and restarting servers reopens all relevant documents.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Two fake servers with distinct completion triggers so we can tell which
    // server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can check it is cleared when the buffer later
    // changes language.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers must receive a shutdown request before the replacements
    // come up.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two DidOpen notifications is not guaranteed).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
897
/// Verifies `workspace/didChangeWatchedFiles` support: gitignored directories
/// are not scanned until a language server registers a watcher inside them,
/// and subsequent FS mutations are reported to the server only when they
/// match its registered glob patterns.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // "target" is gitignored; only target/y is later covered by a watcher.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            // Sort by URI so the assertions below are deterministic.
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    // Registering the watchers alone should not produce change events, but it
    // does trigger scanning of the newly-watched ignored directory.
    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
1097
/// Verifies that diagnostics published for two single-file worktrees are
/// routed to the correct buffers and surfaced with the right severities in
/// each buffer's highlighted chunks.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open each file as its own single-file worktree.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Publish one diagnostic per file from the same (fake) server: an ERROR
    // on the "a" identifier and a WARNING on the "b" identifier.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer's chunks carry only its own diagnostic, spanning exactly
    // the one-character identifier.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1199
1200#[gpui::test]
1201async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1202 init_test(cx);
1203
1204 let fs = FakeFs::new(cx.executor());
1205 fs.insert_tree(
1206 path!("/root"),
1207 json!({
1208 "dir": {
1209 ".git": {
1210 "HEAD": "ref: refs/heads/main",
1211 },
1212 ".gitignore": "b.rs",
1213 "a.rs": "let a = 1;",
1214 "b.rs": "let b = 2;",
1215 },
1216 "other.rs": "let b = c;"
1217 }),
1218 )
1219 .await;
1220
1221 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1222 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1223 let (worktree, _) = project
1224 .update(cx, |project, cx| {
1225 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1226 })
1227 .await
1228 .unwrap();
1229 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1230
1231 let (worktree, _) = project
1232 .update(cx, |project, cx| {
1233 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1234 })
1235 .await
1236 .unwrap();
1237 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1238
1239 let server_id = LanguageServerId(0);
1240 lsp_store.update(cx, |lsp_store, cx| {
1241 lsp_store
1242 .update_diagnostics(
1243 server_id,
1244 lsp::PublishDiagnosticsParams {
1245 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1246 version: None,
1247 diagnostics: vec![lsp::Diagnostic {
1248 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1249 severity: Some(lsp::DiagnosticSeverity::ERROR),
1250 message: "unused variable 'b'".to_string(),
1251 ..Default::default()
1252 }],
1253 },
1254 &[],
1255 cx,
1256 )
1257 .unwrap();
1258 lsp_store
1259 .update_diagnostics(
1260 server_id,
1261 lsp::PublishDiagnosticsParams {
1262 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1263 version: None,
1264 diagnostics: vec![lsp::Diagnostic {
1265 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1266 severity: Some(lsp::DiagnosticSeverity::ERROR),
1267 message: "unknown variable 'c'".to_string(),
1268 ..Default::default()
1269 }],
1270 },
1271 &[],
1272 cx,
1273 )
1274 .unwrap();
1275 });
1276
1277 let main_ignored_buffer = project
1278 .update(cx, |project, cx| {
1279 project.open_buffer((main_worktree_id, "b.rs"), cx)
1280 })
1281 .await
1282 .unwrap();
1283 main_ignored_buffer.update(cx, |buffer, _| {
1284 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1285 assert_eq!(
1286 chunks
1287 .iter()
1288 .map(|(s, d)| (s.as_str(), *d))
1289 .collect::<Vec<_>>(),
1290 &[
1291 ("let ", None),
1292 ("b", Some(DiagnosticSeverity::ERROR)),
1293 (" = 2;", None),
1294 ],
1295 "Gigitnored buffers should still get in-buffer diagnostics",
1296 );
1297 });
1298 let other_buffer = project
1299 .update(cx, |project, cx| {
1300 project.open_buffer((other_worktree_id, ""), cx)
1301 })
1302 .await
1303 .unwrap();
1304 other_buffer.update(cx, |buffer, _| {
1305 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1306 assert_eq!(
1307 chunks
1308 .iter()
1309 .map(|(s, d)| (s.as_str(), *d))
1310 .collect::<Vec<_>>(),
1311 &[
1312 ("let b = ", None),
1313 ("c", Some(DiagnosticSeverity::ERROR)),
1314 (";", None),
1315 ],
1316 "Buffers from hidden projects should still get in-buffer diagnostics"
1317 );
1318 });
1319
1320 project.update(cx, |project, cx| {
1321 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1322 assert_eq!(
1323 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1324 vec![(
1325 ProjectPath {
1326 worktree_id: main_worktree_id,
1327 path: Arc::from(Path::new("b.rs")),
1328 },
1329 server_id,
1330 DiagnosticSummary {
1331 error_count: 1,
1332 warning_count: 0,
1333 }
1334 )]
1335 );
1336 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1337 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1338 });
1339}
1340
// Verifies the exact sequence of project events emitted around a disk-based
// diagnostics pass: LanguageServerAdded -> RefreshInlayHints ->
// DiskBasedDiagnosticsStarted -> DiagnosticsUpdated -> DiskBasedDiagnosticsFinished,
// and that re-publishing identical (empty) diagnostics produces no extra event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The fake adapter treats `progress_token` work as a disk-based
    // diagnostics pass.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting progress under the disk-based token triggers the
    // DiskBasedDiagnosticsStarted event (after the RefreshInlayHints that
    // follows server startup).
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic for a.rs yields a DiagnosticsUpdated event for
    // that path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is visible in the buffer snapshot.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second identical (empty) publish must not produce another event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1476
// Verifies that restarting a language server while its disk-based diagnostics
// progress is still open does not leave the project stuck in a
// "diagnostics running" state: the replacement server (id 1) drives the
// started/finished events, and the abandoned progress of the old server (id 0)
// is discarded.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The replacement server gets a fresh id (1, not 0).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1563
// Verifies that diagnostics previously published by a language server are
// cleared — both from the buffer and from the project summary — when that
// server is restarted.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic reaches both the buffer and the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1644
1645#[gpui::test]
1646async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1647 init_test(cx);
1648
1649 let fs = FakeFs::new(cx.executor());
1650 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1651
1652 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1653 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1654
1655 language_registry.add(rust_lang());
1656 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1657
1658 let (buffer, _handle) = project
1659 .update(cx, |project, cx| {
1660 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1661 })
1662 .await
1663 .unwrap();
1664
1665 // Before restarting the server, report diagnostics with an unknown buffer version.
1666 let fake_server = fake_servers.next().await.unwrap();
1667 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1668 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1669 version: Some(10000),
1670 diagnostics: Vec::new(),
1671 });
1672 cx.executor().run_until_parked();
1673 project.update(cx, |project, cx| {
1674 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1675 });
1676
1677 let mut fake_server = fake_servers.next().await.unwrap();
1678 let notification = fake_server
1679 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1680 .await
1681 .text_document;
1682 assert_eq!(notification.version, 0);
1683}
1684
// Verifies that cancelling language-server work for a buffer sends a
// WorkDoneProgressCancel notification only for progress that was started as
// cancellable — the non-cancellable "another-token" work is left alone.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // Two in-flight progress tokens: one non-cancellable, one cancellable.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token receives a cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1749
// Verifies that toggling `enable_language_server` per-language in the user
// settings stops and restarts exactly the affected server: disabling Rust
// exits only the Rust server; re-enabling Rust while disabling JavaScript
// starts a fresh Rust server and exits the JavaScript one.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A second, fresh Rust server instance re-opens the Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1867
// Verifies that diagnostics published against an older buffer version are
// transformed through the edits made since that version: positions shift with
// insertions, overlapping diagnostics highlight correctly, and out-of-order
// (older-version) publishes are reconciled against later edits.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (The two inserted newlines shift every row by 2.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider warning sorts before the narrower error it overlaps.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Within the overlap, the higher-severity (error) highlight wins.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2153
// Verifies how zero-width diagnostic ranges are rendered: an empty range is
// widened to cover one adjacent character so the diagnostic remains visible.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Insert two zero-width diagnostics: one mid-line (before ";") and one at
    // the end of a line.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                ..Default::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                ..Default::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2226
// Diagnostics reported by distinct language servers for the same file must be
// tracked independently: one server's entries must not overwrite the other's,
// and the project-wide summary counts both.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Server 0 reports an error over the same range...
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // ...and server 1 reports its own error over that range too.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' errors are counted, not merged or replaced.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2283
// Edits that a language server computed against an older document version
// must be transformed through the buffer edits made since that version, so
// that they still land in the locations the server intended.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server last saw; the edits below are
    // expressed relative to this (now stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Convert the server's stale edits into buffer edits; passing the old
    // document version tells the store to rebase them across the edits above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits must preserve the buffer edits made after
    // the server's snapshot while still performing the intended changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2438
// A language server may express a small change as a very large diff (e.g.
// rust-analyzer's merge-imports action rewrites most of the file).
// `edits_from_lsp` must minimize such edits down to the actual changes so
// anchors elsewhere in the buffer are not needlessly invalidated.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The big delete/reinsert pair above must collapse to just two small
        // edits: the import rewrite and the removal of the duplicate line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2549
// `edits_from_lsp` must tolerate malformed server edits: out-of-order edits,
// inverted ranges (end before start), and ranges pointing past the end of
// the file are sorted/clipped rather than causing a panic or bad edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0,8) comes after end (0,4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same minimal pair of
        // edits as in the well-formed adjacent-lines test above.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2656
2657fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2658 buffer: &Buffer,
2659 range: Range<T>,
2660) -> Vec<(String, Option<DiagnosticSeverity>)> {
2661 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2662 for chunk in buffer.snapshot().chunks(range, true) {
2663 if chunks.last().map_or(false, |prev_chunk| {
2664 prev_chunk.1 == chunk.diagnostic_severity
2665 }) {
2666 chunks.last_mut().unwrap().0.push_str(chunk.text);
2667 } else {
2668 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2669 }
2670 }
2671 chunks
2672}
2673
// Go-to-definition into a file outside the project: the target is opened in
// a new *invisible* worktree, which stays alive only while the definition
// result is held and is removed once it is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // The project is rooted at b.rs only, so a.rs is outside the project.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Make the fake server answer go-to-definition with a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs now appears as an additional, invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Returns each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2771
// When completion items carry no explicit text-edit range, the range to
// replace must be inferred from the buffer text around the cursor (the word
// suffix being typed, or the partial token inside a string literal).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing after a word prefix (`fqn`). The completion request
    // is issued first; the handler is installed afterwards and awaited so the
    // in-flight request is answered.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers the 3-character `fqn` suffix.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal (`"atoms/cmp"`), with the
    // cursor just before the closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers `cmp` up to (but not including) the quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2865
// Completion insert text containing carriage returns (`\r` or `\r\n`) must
// have its line endings normalized to `\n` before being applied.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server responds with insert text containing a bare `\r` and a
    // `\r\n`; both must come out as `\n` in the resulting completion.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2928
// A code action that carries a command instead of edits: applying it must
// resolve the action, execute the command on the server, and capture the
// workspace edits the server pushes back as a single undoable transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The fake server advertises both code-action resolution and a single
    // executable command, "_the/command".
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated applyEdit: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a single unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3069
3070#[gpui::test(iterations = 10)]
3071async fn test_save_file(cx: &mut gpui::TestAppContext) {
3072 init_test(cx);
3073
3074 let fs = FakeFs::new(cx.executor());
3075 fs.insert_tree(
3076 path!("/dir"),
3077 json!({
3078 "file1": "the old contents",
3079 }),
3080 )
3081 .await;
3082
3083 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3084 let buffer = project
3085 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3086 .await
3087 .unwrap();
3088 buffer.update(cx, |buffer, cx| {
3089 assert_eq!(buffer.text(), "the old contents");
3090 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3091 });
3092
3093 project
3094 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3095 .await
3096 .unwrap();
3097
3098 let new_text = fs
3099 .load(Path::new(path!("/dir/file1")))
3100 .await
3101 .unwrap()
3102 .replace("\r\n", "\n");
3103 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3104}
3105
3106#[gpui::test(iterations = 30)]
3107async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3108 init_test(cx);
3109
3110 let fs = FakeFs::new(cx.executor().clone());
3111 fs.insert_tree(
3112 path!("/dir"),
3113 json!({
3114 "file1": "the original contents",
3115 }),
3116 )
3117 .await;
3118
3119 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3120 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3121 let buffer = project
3122 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3123 .await
3124 .unwrap();
3125
3126 // Simulate buffer diffs being slow, so that they don't complete before
3127 // the next file change occurs.
3128 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3129
3130 // Change the buffer's file on disk, and then wait for the file change
3131 // to be detected by the worktree, so that the buffer starts reloading.
3132 fs.save(
3133 path!("/dir/file1").as_ref(),
3134 &"the first contents".into(),
3135 Default::default(),
3136 )
3137 .await
3138 .unwrap();
3139 worktree.next_event(cx).await;
3140
3141 // Change the buffer's file again. Depending on the random seed, the
3142 // previous file change may still be in progress.
3143 fs.save(
3144 path!("/dir/file1").as_ref(),
3145 &"the second contents".into(),
3146 Default::default(),
3147 )
3148 .await
3149 .unwrap();
3150 worktree.next_event(cx).await;
3151
3152 cx.executor().run_until_parked();
3153 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3154 buffer.read_with(cx, |buffer, _| {
3155 assert_eq!(buffer.text(), on_disk_text);
3156 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3157 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3158 });
3159}
3160
3161#[gpui::test(iterations = 30)]
3162async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3163 init_test(cx);
3164
3165 let fs = FakeFs::new(cx.executor().clone());
3166 fs.insert_tree(
3167 path!("/dir"),
3168 json!({
3169 "file1": "the original contents",
3170 }),
3171 )
3172 .await;
3173
3174 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3175 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3176 let buffer = project
3177 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3178 .await
3179 .unwrap();
3180
3181 // Simulate buffer diffs being slow, so that they don't complete before
3182 // the next file change occurs.
3183 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3184
3185 // Change the buffer's file on disk, and then wait for the file change
3186 // to be detected by the worktree, so that the buffer starts reloading.
3187 fs.save(
3188 path!("/dir/file1").as_ref(),
3189 &"the first contents".into(),
3190 Default::default(),
3191 )
3192 .await
3193 .unwrap();
3194 worktree.next_event(cx).await;
3195
3196 cx.executor()
3197 .spawn(cx.executor().simulate_random_delay())
3198 .await;
3199
3200 // Perform a noop edit, causing the buffer's version to increase.
3201 buffer.update(cx, |buffer, cx| {
3202 buffer.edit([(0..0, " ")], None, cx);
3203 buffer.undo(cx);
3204 });
3205
3206 cx.executor().run_until_parked();
3207 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3208 buffer.read_with(cx, |buffer, _| {
3209 let buffer_text = buffer.text();
3210 if buffer_text == on_disk_text {
3211 assert!(
3212 !buffer.is_dirty() && !buffer.has_conflict(),
3213 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3214 );
3215 }
3216 // If the file change occurred while the buffer was processing the first
3217 // change, the buffer will be in a conflicting state.
3218 else {
3219 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3220 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3221 }
3222 });
3223}
3224
3225#[gpui::test]
3226async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3227 init_test(cx);
3228
3229 let fs = FakeFs::new(cx.executor());
3230 fs.insert_tree(
3231 path!("/dir"),
3232 json!({
3233 "file1": "the old contents",
3234 }),
3235 )
3236 .await;
3237
3238 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3239 let buffer = project
3240 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3241 .await
3242 .unwrap();
3243 buffer.update(cx, |buffer, cx| {
3244 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3245 });
3246
3247 project
3248 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3249 .await
3250 .unwrap();
3251
3252 let new_text = fs
3253 .load(Path::new(path!("/dir/file1")))
3254 .await
3255 .unwrap()
3256 .replace("\r\n", "\n");
3257 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3258}
3259
// `save_buffer_as` on an untitled buffer: the buffer gains a file, is marked
// clean, its language is re-detected from the new extension, and it is
// registered so that reopening the same path yields the same buffer entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts out as dirty Plain Text once edited.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    // After the save settles, the buffer is clean and its language has been
    // re-detected from the `.rs` extension.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the saved path must return the very same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3311
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Verifies that worktree entry ids and open-buffer paths survive renames and
    // deletions on disk, and that a remote (replica) worktree converges to the
    // same state after applying the streamed updates. Uses the real filesystem,
    // so fs events arrive asynchronously; retries absorb timing flakiness.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Opens a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Looks up the stable entry id for a worktree-relative path, panicking if
    // the entry does not exist.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    // Capture entry ids before any fs mutations so we can assert they are
    // preserved afterwards.
    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits; they are replayed into the
    // remote worktree at the end of the test.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the post-rename layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids are stable across renames (including the parent-dir move of
    // b/c -> d).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers follow their files to the new paths; the deleted file's
        // buffer keeps its last-known path.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        // Surviving files remain Present; the removed file is marked Deleted.
        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3477
3478#[gpui::test(iterations = 10)]
3479async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3480 init_test(cx);
3481
3482 let fs = FakeFs::new(cx.executor());
3483 fs.insert_tree(
3484 path!("/dir"),
3485 json!({
3486 "a": {
3487 "file1": "",
3488 }
3489 }),
3490 )
3491 .await;
3492
3493 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3494 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3495 let tree_id = tree.update(cx, |tree, _| tree.id());
3496
3497 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3498 project.update(cx, |project, cx| {
3499 let tree = project.worktrees(cx).next().unwrap();
3500 tree.read(cx)
3501 .entry_for_path(path)
3502 .unwrap_or_else(|| panic!("no entry for path {}", path))
3503 .id
3504 })
3505 };
3506
3507 let dir_id = id_for_path("a", cx);
3508 let file_id = id_for_path("a/file1", cx);
3509 let buffer = project
3510 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3511 .await
3512 .unwrap();
3513 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3514
3515 project
3516 .update(cx, |project, cx| {
3517 project.rename_entry(dir_id, Path::new("b"), cx)
3518 })
3519 .unwrap()
3520 .await
3521 .to_included()
3522 .unwrap();
3523 cx.executor().run_until_parked();
3524
3525 assert_eq!(id_for_path("b", cx), dir_id);
3526 assert_eq!(id_for_path("b/file1", cx), file_id);
3527 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3528}
3529
3530#[gpui::test]
3531async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3532 init_test(cx);
3533
3534 let fs = FakeFs::new(cx.executor());
3535 fs.insert_tree(
3536 "/dir",
3537 json!({
3538 "a.txt": "a-contents",
3539 "b.txt": "b-contents",
3540 }),
3541 )
3542 .await;
3543
3544 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3545
3546 // Spawn multiple tasks to open paths, repeating some paths.
3547 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3548 (
3549 p.open_local_buffer("/dir/a.txt", cx),
3550 p.open_local_buffer("/dir/b.txt", cx),
3551 p.open_local_buffer("/dir/a.txt", cx),
3552 )
3553 });
3554
3555 let buffer_a_1 = buffer_a_1.await.unwrap();
3556 let buffer_a_2 = buffer_a_2.await.unwrap();
3557 let buffer_b = buffer_b.await.unwrap();
3558 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3559 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3560
3561 // There is only one buffer per path.
3562 let buffer_a_id = buffer_a_1.entity_id();
3563 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3564
3565 // Open the same path again while it is still open.
3566 drop(buffer_a_1);
3567 let buffer_a_3 = project
3568 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3569 .await
3570 .unwrap();
3571
3572 // There's still only one buffer per path.
3573 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3574}
3575
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    // Exercises the buffer dirty-tracking state machine: edit -> dirty, save ->
    // clean, undoing edits back to the saved text -> clean again, and on-disk
    // deletion -> dirty. Also asserts the exact sequence of emitted events
    // (Edited / DirtyChanged / Saved / FileHandleChanged).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Collects every non-Operation buffer event so the exact sequence can be
    // asserted below.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version and mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only the first of the two consecutive edits toggles the dirty
        // flag, so only one DirtyChanged appears between the two Edited events.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer first, then delete its backing file.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3726
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how a buffer reacts to its file changing on disk: a clean buffer
    // is reloaded via a diff (so anchors stay attached to the right text), while
    // a dirty buffer keeps its edits and is flagged as conflicted instead.
    init_test(cx);

    // The ˇ markers give byte offsets at which anchors are created; the second
    // marked string gives the offsets those anchors should land on after reload.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The diff-based reload keeps anchors attached to the surviving text,
        // so they resolve to the expected new offsets.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
3809
3810#[gpui::test]
3811async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3812 init_test(cx);
3813
3814 let fs = FakeFs::new(cx.executor());
3815 fs.insert_tree(
3816 path!("/dir"),
3817 json!({
3818 "file1": "a\nb\nc\n",
3819 "file2": "one\r\ntwo\r\nthree\r\n",
3820 }),
3821 )
3822 .await;
3823
3824 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3825 let buffer1 = project
3826 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3827 .await
3828 .unwrap();
3829 let buffer2 = project
3830 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
3831 .await
3832 .unwrap();
3833
3834 buffer1.update(cx, |buffer, _| {
3835 assert_eq!(buffer.text(), "a\nb\nc\n");
3836 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3837 });
3838 buffer2.update(cx, |buffer, _| {
3839 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3840 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3841 });
3842
3843 // Change a file's line endings on disk from unix to windows. The buffer's
3844 // state updates correctly.
3845 fs.save(
3846 path!("/dir/file1").as_ref(),
3847 &"aaa\nb\nc\n".into(),
3848 LineEnding::Windows,
3849 )
3850 .await
3851 .unwrap();
3852 cx.executor().run_until_parked();
3853 buffer1.update(cx, |buffer, _| {
3854 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3855 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3856 });
3857
3858 // Save a file with windows line endings. The file is written correctly.
3859 buffer2.update(cx, |buffer, cx| {
3860 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3861 });
3862 project
3863 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3864 .await
3865 .unwrap();
3866 assert_eq!(
3867 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
3868 "one\r\ntwo\r\nthree\r\nfour\r\n",
3869 );
3870}
3871
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP diagnostics whose messages match entries in another
    // diagnostic's `related_information` are merged into groups: each group has
    // one primary entry plus its supporting hints, all sharing a group_id.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Two logical groups: "error 1" (warning + 1 hint) and "error 2"
    // (error + 2 hints). Hints reference their primary via related_information.
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // Entries are returned in buffer-position order; each entry carries the
    // group_id assigned during grouping and whether it is the group's primary.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 is the "error 2" family: both hints plus the primary error.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 is the "error 1" family: the primary warning plus its hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
4114
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the file-operation protocol around renames: when a server
    // registers willRename/didRename filters, renaming a matching entry sends
    // workspace/willRenameFiles (whose returned WorkspaceEdit is applied) and
    // then workspace/didRenameFiles after the rename completes.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server watches *.rs files and all folders for rename operations.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; it blocks on the willRenameFiles response below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the server returns from willRenameFiles; the project is expected
    // to resolve and apply it before completing the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // willRenameFiles arrives before the rename happens, with
                    // both the old and the new URI.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // didRenameFiles is notified after the rename completes on disk.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4243
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises symbol renaming end-to-end: textDocument/prepareRename to find
    // the renameable range, then textDocument/rename, applying the returned
    // multi-file WorkspaceEdit to the affected buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // prepareRename at offset 7 (inside "ONE") should yield the symbol's range.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the fake server answers with edits in both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction covers both edited buffers; two.rs was opened
    // implicitly in order to apply its edits.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4383
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Project-wide text search: results come back as worktree-relative path ->
    // match ranges. Also verifies that unsaved buffer edits are searched in
    // place of the on-disk contents.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive whole-word search for "TWO" hits the definition in two.rs
    // and the reference in three.rs, but not four.rs (which has no "TWO" yet).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so that it now contains "TWO".
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The same search now also matches the dirty, unsaved buffer contents.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40]),
            (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
4458
4459#[gpui::test]
4460async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4461 init_test(cx);
4462
4463 let search_query = "file";
4464
4465 let fs = FakeFs::new(cx.executor());
4466 fs.insert_tree(
4467 path!("/dir"),
4468 json!({
4469 "one.rs": r#"// Rust file one"#,
4470 "one.ts": r#"// TypeScript file one"#,
4471 "two.rs": r#"// Rust file two"#,
4472 "two.ts": r#"// TypeScript file two"#,
4473 }),
4474 )
4475 .await;
4476 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4477
4478 assert!(
4479 search(
4480 &project,
4481 SearchQuery::text(
4482 search_query,
4483 false,
4484 true,
4485 false,
4486 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4487 Default::default(),
4488 None
4489 )
4490 .unwrap(),
4491 cx
4492 )
4493 .await
4494 .unwrap()
4495 .is_empty(),
4496 "If no inclusions match, no files should be returned"
4497 );
4498
4499 assert_eq!(
4500 search(
4501 &project,
4502 SearchQuery::text(
4503 search_query,
4504 false,
4505 true,
4506 false,
4507 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4508 Default::default(),
4509 None
4510 )
4511 .unwrap(),
4512 cx
4513 )
4514 .await
4515 .unwrap(),
4516 HashMap::from_iter([
4517 (separator!("dir/one.rs").to_string(), vec![8..12]),
4518 (separator!("dir/two.rs").to_string(), vec![8..12]),
4519 ]),
4520 "Rust only search should give only Rust files"
4521 );
4522
4523 assert_eq!(
4524 search(
4525 &project,
4526 SearchQuery::text(
4527 search_query,
4528 false,
4529 true,
4530 false,
4531
4532 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4533
4534 Default::default(),
4535 None,
4536 ).unwrap(),
4537 cx
4538 )
4539 .await
4540 .unwrap(),
4541 HashMap::from_iter([
4542 (separator!("dir/one.ts").to_string(), vec![14..18]),
4543 (separator!("dir/two.ts").to_string(), vec![14..18]),
4544 ]),
4545 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4546 );
4547
4548 assert_eq!(
4549 search(
4550 &project,
4551 SearchQuery::text(
4552 search_query,
4553 false,
4554 true,
4555 false,
4556
4557 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4558
4559 Default::default(),
4560 None,
4561 ).unwrap(),
4562 cx
4563 )
4564 .await
4565 .unwrap(),
4566 HashMap::from_iter([
4567 (separator!("dir/two.ts").to_string(), vec![14..18]),
4568 (separator!("dir/one.rs").to_string(), vec![8..12]),
4569 (separator!("dir/one.ts").to_string(), vec![14..18]),
4570 (separator!("dir/two.rs").to_string(), vec![8..12]),
4571 ]),
4572 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4573 );
4574}
4575
4576#[gpui::test]
4577async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4578 init_test(cx);
4579
4580 let search_query = "file";
4581
4582 let fs = FakeFs::new(cx.executor());
4583 fs.insert_tree(
4584 path!("/dir"),
4585 json!({
4586 "one.rs": r#"// Rust file one"#,
4587 "one.ts": r#"// TypeScript file one"#,
4588 "two.rs": r#"// Rust file two"#,
4589 "two.ts": r#"// TypeScript file two"#,
4590 }),
4591 )
4592 .await;
4593 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4594
4595 assert_eq!(
4596 search(
4597 &project,
4598 SearchQuery::text(
4599 search_query,
4600 false,
4601 true,
4602 false,
4603 Default::default(),
4604 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4605 None,
4606 )
4607 .unwrap(),
4608 cx
4609 )
4610 .await
4611 .unwrap(),
4612 HashMap::from_iter([
4613 (separator!("dir/one.rs").to_string(), vec![8..12]),
4614 (separator!("dir/one.ts").to_string(), vec![14..18]),
4615 (separator!("dir/two.rs").to_string(), vec![8..12]),
4616 (separator!("dir/two.ts").to_string(), vec![14..18]),
4617 ]),
4618 "If no exclusions match, all files should be returned"
4619 );
4620
4621 assert_eq!(
4622 search(
4623 &project,
4624 SearchQuery::text(
4625 search_query,
4626 false,
4627 true,
4628 false,
4629 Default::default(),
4630 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4631 None,
4632 )
4633 .unwrap(),
4634 cx
4635 )
4636 .await
4637 .unwrap(),
4638 HashMap::from_iter([
4639 (separator!("dir/one.ts").to_string(), vec![14..18]),
4640 (separator!("dir/two.ts").to_string(), vec![14..18]),
4641 ]),
4642 "Rust exclusion search should give only TypeScript files"
4643 );
4644
4645 assert_eq!(
4646 search(
4647 &project,
4648 SearchQuery::text(
4649 search_query,
4650 false,
4651 true,
4652 false,
4653 Default::default(),
4654 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4655 None,
4656 ).unwrap(),
4657 cx
4658 )
4659 .await
4660 .unwrap(),
4661 HashMap::from_iter([
4662 (separator!("dir/one.rs").to_string(), vec![8..12]),
4663 (separator!("dir/two.rs").to_string(), vec![8..12]),
4664 ]),
4665 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4666 );
4667
4668 assert!(
4669 search(
4670 &project,
4671 SearchQuery::text(
4672 search_query,
4673 false,
4674 true,
4675 false,
4676 Default::default(),
4677
4678 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4679 None,
4680
4681 ).unwrap(),
4682 cx
4683 )
4684 .await
4685 .unwrap().is_empty(),
4686 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4687 );
4688}
4689
4690#[gpui::test]
4691async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4692 init_test(cx);
4693
4694 let search_query = "file";
4695
4696 let fs = FakeFs::new(cx.executor());
4697 fs.insert_tree(
4698 path!("/dir"),
4699 json!({
4700 "one.rs": r#"// Rust file one"#,
4701 "one.ts": r#"// TypeScript file one"#,
4702 "two.rs": r#"// Rust file two"#,
4703 "two.ts": r#"// TypeScript file two"#,
4704 }),
4705 )
4706 .await;
4707 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4708
4709 assert!(
4710 search(
4711 &project,
4712 SearchQuery::text(
4713 search_query,
4714 false,
4715 true,
4716 false,
4717 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4718 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4719 None,
4720 )
4721 .unwrap(),
4722 cx
4723 )
4724 .await
4725 .unwrap()
4726 .is_empty(),
4727 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4728 );
4729
4730 assert!(
4731 search(
4732 &project,
4733 SearchQuery::text(
4734 search_query,
4735 false,
4736 true,
4737 false,
4738 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4739 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4740 None,
4741 ).unwrap(),
4742 cx
4743 )
4744 .await
4745 .unwrap()
4746 .is_empty(),
4747 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4748 );
4749
4750 assert!(
4751 search(
4752 &project,
4753 SearchQuery::text(
4754 search_query,
4755 false,
4756 true,
4757 false,
4758 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4759 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4760 None,
4761 )
4762 .unwrap(),
4763 cx
4764 )
4765 .await
4766 .unwrap()
4767 .is_empty(),
4768 "Non-matching inclusions and exclusions should not change that."
4769 );
4770
4771 assert_eq!(
4772 search(
4773 &project,
4774 SearchQuery::text(
4775 search_query,
4776 false,
4777 true,
4778 false,
4779 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4780 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4781 None,
4782 )
4783 .unwrap(),
4784 cx
4785 )
4786 .await
4787 .unwrap(),
4788 HashMap::from_iter([
4789 (separator!("dir/one.ts").to_string(), vec![14..18]),
4790 (separator!("dir/two.ts").to_string(), vec![14..18]),
4791 ]),
4792 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4793 );
4794}
4795
4796#[gpui::test]
4797async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4798 init_test(cx);
4799
4800 let fs = FakeFs::new(cx.executor());
4801 fs.insert_tree(
4802 path!("/worktree-a"),
4803 json!({
4804 "haystack.rs": r#"// NEEDLE"#,
4805 "haystack.ts": r#"// NEEDLE"#,
4806 }),
4807 )
4808 .await;
4809 fs.insert_tree(
4810 path!("/worktree-b"),
4811 json!({
4812 "haystack.rs": r#"// NEEDLE"#,
4813 "haystack.ts": r#"// NEEDLE"#,
4814 }),
4815 )
4816 .await;
4817
4818 let project = Project::test(
4819 fs.clone(),
4820 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
4821 cx,
4822 )
4823 .await;
4824
4825 assert_eq!(
4826 search(
4827 &project,
4828 SearchQuery::text(
4829 "NEEDLE",
4830 false,
4831 true,
4832 false,
4833 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4834 Default::default(),
4835 None,
4836 )
4837 .unwrap(),
4838 cx
4839 )
4840 .await
4841 .unwrap(),
4842 HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
4843 "should only return results from included worktree"
4844 );
4845 assert_eq!(
4846 search(
4847 &project,
4848 SearchQuery::text(
4849 "NEEDLE",
4850 false,
4851 true,
4852 false,
4853 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4854 Default::default(),
4855 None,
4856 )
4857 .unwrap(),
4858 cx
4859 )
4860 .await
4861 .unwrap(),
4862 HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
4863 "should only return results from included worktree"
4864 );
4865
4866 assert_eq!(
4867 search(
4868 &project,
4869 SearchQuery::text(
4870 "NEEDLE",
4871 false,
4872 true,
4873 false,
4874 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4875 Default::default(),
4876 None,
4877 )
4878 .unwrap(),
4879 cx
4880 )
4881 .await
4882 .unwrap(),
4883 HashMap::from_iter([
4884 (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
4885 (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
4886 ]),
4887 "should return results from both worktrees"
4888 );
4889}
4890
4891#[gpui::test]
4892async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
4893 init_test(cx);
4894
4895 let fs = FakeFs::new(cx.background_executor.clone());
4896 fs.insert_tree(
4897 path!("/dir"),
4898 json!({
4899 ".git": {},
4900 ".gitignore": "**/target\n/node_modules\n",
4901 "target": {
4902 "index.txt": "index_key:index_value"
4903 },
4904 "node_modules": {
4905 "eslint": {
4906 "index.ts": "const eslint_key = 'eslint value'",
4907 "package.json": r#"{ "some_key": "some value" }"#,
4908 },
4909 "prettier": {
4910 "index.ts": "const prettier_key = 'prettier value'",
4911 "package.json": r#"{ "other_key": "other value" }"#,
4912 },
4913 },
4914 "package.json": r#"{ "main_key": "main value" }"#,
4915 }),
4916 )
4917 .await;
4918 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4919
4920 let query = "key";
4921 assert_eq!(
4922 search(
4923 &project,
4924 SearchQuery::text(
4925 query,
4926 false,
4927 false,
4928 false,
4929 Default::default(),
4930 Default::default(),
4931 None,
4932 )
4933 .unwrap(),
4934 cx
4935 )
4936 .await
4937 .unwrap(),
4938 HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
4939 "Only one non-ignored file should have the query"
4940 );
4941
4942 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4943 assert_eq!(
4944 search(
4945 &project,
4946 SearchQuery::text(
4947 query,
4948 false,
4949 false,
4950 true,
4951 Default::default(),
4952 Default::default(),
4953 None,
4954 )
4955 .unwrap(),
4956 cx
4957 )
4958 .await
4959 .unwrap(),
4960 HashMap::from_iter([
4961 (separator!("dir/package.json").to_string(), vec![8..11]),
4962 (separator!("dir/target/index.txt").to_string(), vec![6..9]),
4963 (
4964 separator!("dir/node_modules/prettier/package.json").to_string(),
4965 vec![9..12]
4966 ),
4967 (
4968 separator!("dir/node_modules/prettier/index.ts").to_string(),
4969 vec![15..18]
4970 ),
4971 (
4972 separator!("dir/node_modules/eslint/index.ts").to_string(),
4973 vec![13..16]
4974 ),
4975 (
4976 separator!("dir/node_modules/eslint/package.json").to_string(),
4977 vec![8..11]
4978 ),
4979 ]),
4980 "Unrestricted search with ignored directories should find every file with the query"
4981 );
4982
4983 let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
4984 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
4985 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4986 assert_eq!(
4987 search(
4988 &project,
4989 SearchQuery::text(
4990 query,
4991 false,
4992 false,
4993 true,
4994 files_to_include,
4995 files_to_exclude,
4996 None,
4997 )
4998 .unwrap(),
4999 cx
5000 )
5001 .await
5002 .unwrap(),
5003 HashMap::from_iter([(
5004 separator!("dir/node_modules/prettier/package.json").to_string(),
5005 vec![9..12]
5006 )]),
5007 "With search including ignored prettier directory and excluding TS files, only one file should be found"
5008 );
5009}
5010
5011#[gpui::test]
5012async fn test_create_entry(cx: &mut gpui::TestAppContext) {
5013 init_test(cx);
5014
5015 let fs = FakeFs::new(cx.executor().clone());
5016 fs.insert_tree(
5017 "/one/two",
5018 json!({
5019 "three": {
5020 "a.txt": "",
5021 "four": {}
5022 },
5023 "c.rs": ""
5024 }),
5025 )
5026 .await;
5027
5028 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
5029 project
5030 .update(cx, |project, cx| {
5031 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5032 project.create_entry((id, "b.."), true, cx)
5033 })
5034 .await
5035 .unwrap()
5036 .to_included()
5037 .unwrap();
5038
5039 // Can't create paths outside the project
5040 let result = project
5041 .update(cx, |project, cx| {
5042 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5043 project.create_entry((id, "../../boop"), true, cx)
5044 })
5045 .await;
5046 assert!(result.is_err());
5047
5048 // Can't create paths with '..'
5049 let result = project
5050 .update(cx, |project, cx| {
5051 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5052 project.create_entry((id, "four/../beep"), true, cx)
5053 })
5054 .await;
5055 assert!(result.is_err());
5056
5057 assert_eq!(
5058 fs.paths(true),
5059 vec![
5060 PathBuf::from(path!("/")),
5061 PathBuf::from(path!("/one")),
5062 PathBuf::from(path!("/one/two")),
5063 PathBuf::from(path!("/one/two/c.rs")),
5064 PathBuf::from(path!("/one/two/three")),
5065 PathBuf::from(path!("/one/two/three/a.txt")),
5066 PathBuf::from(path!("/one/two/three/b..")),
5067 PathBuf::from(path!("/one/two/three/four")),
5068 ]
5069 );
5070
5071 // And we cannot open buffers with '..'
5072 let result = project
5073 .update(cx, |project, cx| {
5074 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5075 project.open_buffer((id, "../c.rs"), cx)
5076 })
5077 .await;
5078 assert!(result.is_err())
5079}
5080
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // Four fake language servers are registered for the same "tsx" language:
    // two that answer hover requests with content, one that answers `None`,
    // and one that advertises no hover capability at all. Only the two real
    // responses should appear in the merged hover result.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Register the fake servers; each stream yields the corresponding server
    // instance once it has been started for the opened buffer.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    // No hover capability: this server must never be queried.
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    // Let all four servers finish starting before installing handlers.
    cx.executor().run_until_parked();

    // Install a hover handler on every server with hover capabilities, keyed
    // by server name so each handler can be awaited exactly once below.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two respond with a hover containing their own name.
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                // Queried (it has the capability) but contributes no hover.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // Capability-less server: its handler must never fire.
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Start the hover, then drive each installed handler to completion before
    // awaiting the merged result — the order here is essential.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5234
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // A hover response consisting solely of empty/whitespace-only marked
    // strings should be dropped entirely rather than shown as blank blocks.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    // Let the server finish starting before installing the request handler.
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Respond with three hover parts that are all effectively empty.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    // Start the hover, confirm the request actually reached the server, then
    // check that the merged result contains no hover blocks at all.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
5307
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // When code actions are requested with an explicit kinds filter, actions
    // of other kinds returned by the server must be filtered out.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    // Let the server finish starting before installing the request handler.
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions of different kinds; the request below
    // asks only for `source.organizeImports`.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    // Drive the handler before awaiting the task — the order is essential.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the organize-imports action should survive the kinds filter.
    let code_actions = code_actions_task.await.unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
5386
#[gpui::test]
async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
    // Mirrors the multi-server hover test for code actions: two servers that
    // return actions, one that returns `None`, and one with no code-action
    // capability at all. Only the two real responses should be merged.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoActionsCapabilitiesServer",
    ];

    // Register the fake servers; each stream yields the corresponding server
    // instance once it has been started for the opened buffer.
    let mut language_server_rxs = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    // No code-action capability: must never be queried.
                    code_action_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    // Let all four servers finish starting before installing handlers.
    cx.executor().run_until_parked();

    // Install a code-action handler on every server with the capability,
    // keyed by server name so each handler can be awaited exactly once below.
    let mut servers_with_actions_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();

        assert!(
            !servers_with_actions_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.0.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two respond with an action titled after themselves.
                servers_with_actions_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
                                    lsp::CodeAction {
                                        title: format!("{name} code action"),
                                        ..lsp::CodeAction::default()
                                    },
                                )]))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                // Queried (it has the capability) but contributes no actions.
                servers_with_actions_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoActionsCapabilitiesServer" => {
                // Capability-less server: its handler must never fire.
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for code actions server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
    });

    // Drive each installed handler to completion before awaiting the merged
    // result — the order here is essential.
    let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
        |mut code_actions_request| async move {
            code_actions_request
                .next()
                .await
                .expect("All code actions requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer code action", "TypeScriptServer code action"],
        code_actions_task
            .await
            .unwrap()
            .into_iter()
            .map(|code_action| code_action.lsp_action.title().to_owned())
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive code actions responses from all related servers with hover capabilities"
    );
}
5545
5546#[gpui::test]
5547async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5548 init_test(cx);
5549
5550 let fs = FakeFs::new(cx.executor());
5551 fs.insert_tree(
5552 "/dir",
5553 json!({
5554 "a.rs": "let a = 1;",
5555 "b.rs": "let b = 2;",
5556 "c.rs": "let c = 2;",
5557 }),
5558 )
5559 .await;
5560
5561 let project = Project::test(
5562 fs,
5563 [
5564 "/dir/a.rs".as_ref(),
5565 "/dir/b.rs".as_ref(),
5566 "/dir/c.rs".as_ref(),
5567 ],
5568 cx,
5569 )
5570 .await;
5571
5572 // check the initial state and get the worktrees
5573 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5574 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5575 assert_eq!(worktrees.len(), 3);
5576
5577 let worktree_a = worktrees[0].read(cx);
5578 let worktree_b = worktrees[1].read(cx);
5579 let worktree_c = worktrees[2].read(cx);
5580
5581 // check they start in the right order
5582 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5583 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5584 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5585
5586 (
5587 worktrees[0].clone(),
5588 worktrees[1].clone(),
5589 worktrees[2].clone(),
5590 )
5591 });
5592
5593 // move first worktree to after the second
5594 // [a, b, c] -> [b, a, c]
5595 project
5596 .update(cx, |project, cx| {
5597 let first = worktree_a.read(cx);
5598 let second = worktree_b.read(cx);
5599 project.move_worktree(first.id(), second.id(), cx)
5600 })
5601 .expect("moving first after second");
5602
5603 // check the state after moving
5604 project.update(cx, |project, cx| {
5605 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5606 assert_eq!(worktrees.len(), 3);
5607
5608 let first = worktrees[0].read(cx);
5609 let second = worktrees[1].read(cx);
5610 let third = worktrees[2].read(cx);
5611
5612 // check they are now in the right order
5613 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5614 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5615 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5616 });
5617
5618 // move the second worktree to before the first
5619 // [b, a, c] -> [a, b, c]
5620 project
5621 .update(cx, |project, cx| {
5622 let second = worktree_a.read(cx);
5623 let first = worktree_b.read(cx);
5624 project.move_worktree(first.id(), second.id(), cx)
5625 })
5626 .expect("moving second before first");
5627
5628 // check the state after moving
5629 project.update(cx, |project, cx| {
5630 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5631 assert_eq!(worktrees.len(), 3);
5632
5633 let first = worktrees[0].read(cx);
5634 let second = worktrees[1].read(cx);
5635 let third = worktrees[2].read(cx);
5636
5637 // check they are now in the right order
5638 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5639 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5640 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5641 });
5642
5643 // move the second worktree to after the third
5644 // [a, b, c] -> [a, c, b]
5645 project
5646 .update(cx, |project, cx| {
5647 let second = worktree_b.read(cx);
5648 let third = worktree_c.read(cx);
5649 project.move_worktree(second.id(), third.id(), cx)
5650 })
5651 .expect("moving second after third");
5652
5653 // check the state after moving
5654 project.update(cx, |project, cx| {
5655 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5656 assert_eq!(worktrees.len(), 3);
5657
5658 let first = worktrees[0].read(cx);
5659 let second = worktrees[1].read(cx);
5660 let third = worktrees[2].read(cx);
5661
5662 // check they are now in the right order
5663 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5664 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5665 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5666 });
5667
5668 // move the third worktree to before the second
5669 // [a, c, b] -> [a, b, c]
5670 project
5671 .update(cx, |project, cx| {
5672 let third = worktree_c.read(cx);
5673 let second = worktree_b.read(cx);
5674 project.move_worktree(third.id(), second.id(), cx)
5675 })
5676 .expect("moving third before second");
5677
5678 // check the state after moving
5679 project.update(cx, |project, cx| {
5680 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5681 assert_eq!(worktrees.len(), 3);
5682
5683 let first = worktrees[0].read(cx);
5684 let second = worktrees[1].read(cx);
5685 let third = worktrees[2].read(cx);
5686
5687 // check they are now in the right order
5688 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5689 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5690 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5691 });
5692
5693 // move the first worktree to after the third
5694 // [a, b, c] -> [b, c, a]
5695 project
5696 .update(cx, |project, cx| {
5697 let first = worktree_a.read(cx);
5698 let third = worktree_c.read(cx);
5699 project.move_worktree(first.id(), third.id(), cx)
5700 })
5701 .expect("moving first after third");
5702
5703 // check the state after moving
5704 project.update(cx, |project, cx| {
5705 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5706 assert_eq!(worktrees.len(), 3);
5707
5708 let first = worktrees[0].read(cx);
5709 let second = worktrees[1].read(cx);
5710 let third = worktrees[2].read(cx);
5711
5712 // check they are now in the right order
5713 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5714 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5715 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5716 });
5717
5718 // move the third worktree to before the first
5719 // [b, c, a] -> [a, b, c]
5720 project
5721 .update(cx, |project, cx| {
5722 let third = worktree_a.read(cx);
5723 let first = worktree_b.read(cx);
5724 project.move_worktree(third.id(), first.id(), cx)
5725 })
5726 .expect("moving third before first");
5727
5728 // check the state after moving
5729 project.update(cx, |project, cx| {
5730 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5731 assert_eq!(worktrees.len(), 3);
5732
5733 let first = worktrees[0].read(cx);
5734 let second = worktrees[1].read(cx);
5735 let third = worktrees[2].read(cx);
5736
5737 // check they are now in the right order
5738 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5739 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5740 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5741 });
5742}
5743
// Verifies the unstaged diff (index vs. working copy) for a single buffer:
// the initial hunks are computed from the fake repo's index, and the diff is
// recomputed when the index contents change.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index version: no comment line, prints "hello world".
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working copy: adds a comment line and changes the printed string.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Let the diff recalculation settle before asserting.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Update the index so it now contains the comment but not the println,
    // leaving a single added hunk relative to the new base.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
5841
// Verifies the uncommitted diff (HEAD vs. working copy) for buffers,
// including each hunk's secondary (staged/unstaged) status, recomputation
// when HEAD changes, and the hunks shown for a file deleted from disk.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD version of modification.rs.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index version: the println change is staged.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working copy: additionally adds a comment line (unstaged).
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and index also track `deletion.rs`, which is absent from the
    // working tree above, so that file appears deleted.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the registered Rust language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                // The added comment is not in the index: has a secondary hunk.
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                // The println change is already staged: no secondary hunk.
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                // Deletion of the whole file: one deleted hunk at row 0,
                // still present in the index (unstaged deletion).
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                // Once the deletion is staged, the secondary hunk disappears.
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6019
// End-to-end test of staging/unstaging individual hunks: the optimistic
// "pending" secondary statuses, the events emitted while the index write is
// in flight, and rollback of the optimistic state when the write fails.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // Working copy: "zero" deleted, "two" and "four" modified.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to diff events so we can assert on what is emitted below.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    // Index write not yet complete: removal is pending.
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    // Optimistic state shows even though the write will fail.
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6359
// Stages hunks while FS events are paused, so index-write notifications
// arrive out of sync with the staging calls. Verifies the optimistic
// pending states survive interleaved/delayed events and eventually settle
// with all hunks staged. The explicit seeds reproduce past failures.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // Working copy: "zero" deleted, "two" and "four" modified.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6552
// Stress test: stage (then unstage) 100 hunks, one call per hunk, without
// waiting in between. All hunks must show the pending status immediately and
// settle to the final status once the index writes complete.
#[gpui::test]
async fn test_staging_lots_of_hunks_fast(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every 5th line (0, 5, 10, ..., 495) is replaced in the working copy,
    // producing 100 one-line modified hunks.
    let different_lines = (0..500)
        .step_by(5)
        .map(|i| format!("diff {}\n", i))
        .collect::<Vec<String>>();
    let committed_contents = (0..500).map(|i| format!("{}\n", i)).collect::<String>();
    let file_contents = (0..500)
        .map(|i| {
            if i % 5 == 0 {
                different_lines[i / 5].clone()
            } else {
                format!("{}\n", i)
            }
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Expected hunk list; only the status field changes between phases below.
    let mut expected_hunks: Vec<(Range<u32>, String, String, DiffHunkStatus)> = (0..500)
        .step_by(5)
        .map(|i| {
            (
                i as u32..i as u32 + 1,
                format!("{}\n", i),
                different_lines[i / 5].clone(),
                DiffHunkStatus::modified(HasSecondaryHunk),
            )
        })
        .collect();

    // The hunks are initially unstaged
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkRemovalPending);
    }

    // Stage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        }

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(NoSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkAdditionPending);
    }

    // Unstage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(false, &[hunk], &snapshot, true, cx);
        }

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks, again
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(HasSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });
}
6698
// Verifies that uncommitted diffs work when the worktree root is a single
// file (the .git directory lives *above* the worktree root).
#[gpui::test]
async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello from HEAD");
        }
    "#
    .unindent();
    let file_contents = r#"
        fn main() {
            println!("hello from the working copy");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and index agree, so the only hunk is unstaged.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents.clone())],
    );

    // Note: the project root is the file itself, not the repo directory.
    let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.hunks(&snapshot, cx),
            &snapshot,
            &uncommitted_diff.base_text_string().unwrap(),
            &[(
                1..2,
                "    println!(\"hello from HEAD\");\n",
                "    println!(\"hello from the working copy\");\n",
                DiffHunkStatus {
                    kind: DiffHunkStatusKind::Modified,
                    secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
                },
            )],
        );
    });
}
6771
6772async fn search(
6773 project: &Entity<Project>,
6774 query: SearchQuery,
6775 cx: &mut gpui::TestAppContext,
6776) -> Result<HashMap<String, Vec<Range<usize>>>> {
6777 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
6778 let mut results = HashMap::default();
6779 while let Ok(search_result) = search_rx.recv().await {
6780 match search_result {
6781 SearchResult::Buffer { buffer, ranges } => {
6782 results.entry(buffer).or_insert(ranges);
6783 }
6784 SearchResult::LimitReached => {}
6785 }
6786 }
6787 Ok(results
6788 .into_iter()
6789 .map(|(buffer, ranges)| {
6790 buffer.update(cx, |buffer, cx| {
6791 let path = buffer
6792 .file()
6793 .unwrap()
6794 .full_path(cx)
6795 .to_string_lossy()
6796 .to_string();
6797 let ranges = ranges
6798 .into_iter()
6799 .map(|range| range.to_offset(buffer))
6800 .collect::<Vec<_>>();
6801 (path, ranges)
6802 })
6803 })
6804 .collect())
6805}
6806
/// Shared setup for every test in this module: optional logging plus the
/// global state a `Project` needs.
///
/// The calls inside `cx.update` are order-sensitive: the `SettingsStore`
/// global must be installed before `language::init` and
/// `Project::init_settings` register their settings against it.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    // Only initialize logging when the developer opted in via RUST_LOG;
    // `try_init` tolerates being called once per process across tests.
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::try_init().ok();
    }

    cx.update(|cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
6820
6821fn json_lang() -> Arc<Language> {
6822 Arc::new(Language::new(
6823 LanguageConfig {
6824 name: "JSON".into(),
6825 matcher: LanguageMatcher {
6826 path_suffixes: vec!["json".to_string()],
6827 ..Default::default()
6828 },
6829 ..Default::default()
6830 },
6831 None,
6832 ))
6833}
6834
6835fn js_lang() -> Arc<Language> {
6836 Arc::new(Language::new(
6837 LanguageConfig {
6838 name: "JavaScript".into(),
6839 matcher: LanguageMatcher {
6840 path_suffixes: vec!["js".to_string()],
6841 ..Default::default()
6842 },
6843 ..Default::default()
6844 },
6845 None,
6846 ))
6847}
6848
6849fn rust_lang() -> Arc<Language> {
6850 Arc::new(Language::new(
6851 LanguageConfig {
6852 name: "Rust".into(),
6853 matcher: LanguageMatcher {
6854 path_suffixes: vec!["rs".to_string()],
6855 ..Default::default()
6856 },
6857 ..Default::default()
6858 },
6859 Some(tree_sitter_rust::LANGUAGE.into()),
6860 ))
6861}
6862
6863fn typescript_lang() -> Arc<Language> {
6864 Arc::new(Language::new(
6865 LanguageConfig {
6866 name: "TypeScript".into(),
6867 matcher: LanguageMatcher {
6868 path_suffixes: vec!["ts".to_string()],
6869 ..Default::default()
6870 },
6871 ..Default::default()
6872 },
6873 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
6874 ))
6875}
6876
6877fn tsx_lang() -> Arc<Language> {
6878 Arc::new(Language::new(
6879 LanguageConfig {
6880 name: "tsx".into(),
6881 matcher: LanguageMatcher {
6882 path_suffixes: vec!["tsx".to_string()],
6883 ..Default::default()
6884 },
6885 ..Default::default()
6886 },
6887 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
6888 ))
6889}
6890
6891fn get_all_tasks(
6892 project: &Entity<Project>,
6893 task_contexts: &TaskContexts,
6894 cx: &mut App,
6895) -> Vec<(TaskSourceKind, ResolvedTask)> {
6896 let (mut old, new) = project.update(cx, |project, cx| {
6897 project
6898 .task_store
6899 .read(cx)
6900 .task_inventory()
6901 .unwrap()
6902 .read(cx)
6903 .used_and_current_resolved_tasks(task_contexts, cx)
6904 });
6905 old.extend(new);
6906 old
6907}