1use crate::{task_inventory::TaskContexts, Event, *};
2use buffer_diff::{
3 assert_hunks, BufferDiffEvent, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
4};
5use fs::FakeFs;
6use futures::{future, StreamExt};
7use gpui::{App, SemanticVersion, UpdateGlobal};
8use http_client::Url;
9use language::{
10 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
11 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
12 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
13 OffsetRangeExt, Point, ToPoint,
14};
15use lsp::{
16 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
17 NumberOrString, TextDocumentEdit, WillRenameFiles,
18};
19use parking_lot::Mutex;
20use pretty_assertions::{assert_eq, assert_matches};
21use serde_json::json;
22#[cfg(not(windows))]
23use std::os;
24use std::{str::FromStr, sync::OnceLock};
25
26use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
27use task::{ResolvedTask, TaskContext};
28use unindent::Unindent as _;
29use util::{
30 assert_set_eq, path,
31 paths::PathMatcher,
32 separator,
33 test::{marked_text_offsets, TempTree},
34 uri, TryFutureExt as _,
35};
36
37#[gpui::test]
38async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
39 cx.executor().allow_parking();
40
41 let (tx, mut rx) = futures::channel::mpsc::unbounded();
42 let _thread = std::thread::spawn(move || {
43 #[cfg(not(target_os = "windows"))]
44 std::fs::metadata("/tmp").unwrap();
45 #[cfg(target_os = "windows")]
46 std::fs::metadata("C:/Windows").unwrap();
47 std::thread::sleep(Duration::from_millis(1000));
48 tx.unbounded_send(1).unwrap();
49 });
50 rx.next().await.unwrap();
51}
52
53#[gpui::test]
54async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
55 cx.executor().allow_parking();
56
57 let io_task = smol::unblock(move || {
58 println!("sleeping on thread {:?}", std::thread::current().id());
59 std::thread::sleep(Duration::from_millis(10));
60 1
61 });
62
63 let task = cx.foreground_executor().spawn(async move {
64 io_task.await;
65 });
66
67 task.await;
68}
69
70#[cfg(not(windows))]
71#[gpui::test]
72async fn test_symlinks(cx: &mut gpui::TestAppContext) {
73 init_test(cx);
74 cx.executor().allow_parking();
75
76 let dir = TempTree::new(json!({
77 "root": {
78 "apple": "",
79 "banana": {
80 "carrot": {
81 "date": "",
82 "endive": "",
83 }
84 },
85 "fennel": {
86 "grape": "",
87 }
88 }
89 }));
90
91 let root_link_path = dir.path().join("root_link");
92 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
93 os::unix::fs::symlink(
94 dir.path().join("root/fennel"),
95 dir.path().join("root/finnochio"),
96 )
97 .unwrap();
98
99 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
100
101 project.update(cx, |project, cx| {
102 let tree = project.worktrees(cx).next().unwrap().read(cx);
103 assert_eq!(tree.file_count(), 5);
104 assert_eq!(
105 tree.inode_for_path("fennel/grape"),
106 tree.inode_for_path("finnochio/grape")
107 );
108 });
109}
110
111#[gpui::test]
112async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
113 init_test(cx);
114
115 let dir = TempTree::new(json!({
116 ".editorconfig": r#"
117 root = true
118 [*.rs]
119 indent_style = tab
120 indent_size = 3
121 end_of_line = lf
122 insert_final_newline = true
123 trim_trailing_whitespace = true
124 [*.js]
125 tab_width = 10
126 "#,
127 ".zed": {
128 "settings.json": r#"{
129 "tab_size": 8,
130 "hard_tabs": false,
131 "ensure_final_newline_on_save": false,
132 "remove_trailing_whitespace_on_save": false,
133 "soft_wrap": "editor_width"
134 }"#,
135 },
136 "a.rs": "fn a() {\n A\n}",
137 "b": {
138 ".editorconfig": r#"
139 [*.rs]
140 indent_size = 2
141 "#,
142 "b.rs": "fn b() {\n B\n}",
143 },
144 "c.js": "def c\n C\nend",
145 "README.json": "tabs are better\n",
146 }));
147
148 let path = dir.path();
149 let fs = FakeFs::new(cx.executor());
150 fs.insert_tree_from_real_fs(path, path).await;
151 let project = Project::test(fs, [path], cx).await;
152
153 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
154 language_registry.add(js_lang());
155 language_registry.add(json_lang());
156 language_registry.add(rust_lang());
157
158 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
159
160 cx.executor().run_until_parked();
161
162 cx.update(|cx| {
163 let tree = worktree.read(cx);
164 let settings_for = |path: &str| {
165 let file_entry = tree.entry_for_path(path).unwrap().clone();
166 let file = File::for_entry(file_entry, worktree.clone());
167 let file_language = project
168 .read(cx)
169 .languages()
170 .language_for_file_path(file.path.as_ref());
171 let file_language = cx
172 .background_executor()
173 .block(file_language)
174 .expect("Failed to get file language");
175 let file = file as _;
176 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
177 };
178
179 let settings_a = settings_for("a.rs");
180 let settings_b = settings_for("b/b.rs");
181 let settings_c = settings_for("c.js");
182 let settings_readme = settings_for("README.json");
183
184 // .editorconfig overrides .zed/settings
185 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
186 assert_eq!(settings_a.hard_tabs, true);
187 assert_eq!(settings_a.ensure_final_newline_on_save, true);
188 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
189
190 // .editorconfig in b/ overrides .editorconfig in root
191 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
192
193 // "indent_size" is not set, so "tab_width" is used
194 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
195
196 // README.md should not be affected by .editorconfig's globe "*.rs"
197 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
198 });
199}
200
// Checks local `.zed` configuration handling: settings from a nested
// `.zed/settings.json` override the worktree-root ones, tasks from every
// `.zed/tasks.json` are surfaced, and a recently-scheduled task (plus newly
// added global tasks) re-sorts the resolved task list.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n    A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n    B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    // The root `.zed` directory acts as the "topmost" local task source.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            // `b/.zed/settings.json` overrides the root `.zed/settings.json`.
            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Tasks from the nested `.zed` directory come before the root ones.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as recently scheduled and add a global tasks file.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The just-scheduled task now sorts first; the new global task comes last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
405
406#[gpui::test]
407async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
408 init_test(cx);
409 TaskStore::init(None);
410
411 let fs = FakeFs::new(cx.executor());
412 fs.insert_tree(
413 path!("/dir"),
414 json!({
415 ".zed": {
416 "tasks.json": r#"[{
417 "label": "test worktree root",
418 "command": "echo $ZED_WORKTREE_ROOT"
419 }]"#,
420 },
421 "a": {
422 "a.rs": "fn a() {\n A\n}"
423 },
424 }),
425 )
426 .await;
427
428 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
429 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
430
431 cx.executor().run_until_parked();
432 let worktree_id = cx.update(|cx| {
433 project.update(cx, |project, cx| {
434 project.worktrees(cx).next().unwrap().read(cx).id()
435 })
436 });
437
438 let active_non_worktree_item_tasks = cx.update(|cx| {
439 get_all_tasks(
440 &project,
441 &TaskContexts {
442 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
443 active_worktree_context: None,
444 other_worktree_contexts: Vec::new(),
445 },
446 cx,
447 )
448 });
449 assert!(
450 active_non_worktree_item_tasks.is_empty(),
451 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
452 );
453
454 let active_worktree_tasks = cx.update(|cx| {
455 get_all_tasks(
456 &project,
457 &TaskContexts {
458 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
459 active_worktree_context: Some((worktree_id, {
460 let mut worktree_context = TaskContext::default();
461 worktree_context
462 .task_variables
463 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
464 worktree_context
465 })),
466 other_worktree_contexts: Vec::new(),
467 },
468 cx,
469 )
470 });
471 assert_eq!(
472 active_worktree_tasks
473 .into_iter()
474 .map(|(source_kind, task)| {
475 let resolved = task.resolved.unwrap();
476 (source_kind, resolved.command)
477 })
478 .collect::<Vec<_>>(),
479 vec![(
480 TaskSourceKind::Worktree {
481 id: worktree_id,
482 directory_in_worktree: PathBuf::from(separator!(".zed")),
483 id_base: if cfg!(windows) {
484 "local worktree tasks from directory \".zed\"".into()
485 } else {
486 "local worktree tasks from directory \".zed\"".into()
487 },
488 },
489 "echo /dir".to_string(),
490 )]
491 );
492}
493
// End-to-end coverage of language server lifecycle management: servers start
// lazily when a matching buffer opens, buffers are configured from server
// capabilities, edits/saves/renames/closes are routed only to the matching
// servers, and restarting servers reopens the relevant documents.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake Rust and JSON servers with distinct completion triggers so
    // we can tell which server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, hence no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Attach a diagnostic so we can verify it is dropped when the buffer's
    // language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before the new ones start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
895
// Verifies `workspace/didChangeWatchedFiles` support: git-ignored directories
// are scanned only after a language server registers a watcher covering them,
// and FS mutations are forwarded to the server only when they match one of its
// registered glob patterns.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            // This watcher covers a git-ignored subtree, which
                            // forces the worktree to load it.
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            // Accumulate events sorted by URI so assertions are order-independent.
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, only some of which (c.rs created,
    // b.rs deleted, y2.rs created) match the watched patterns.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
1095
// Verifies that diagnostics published for single-file worktrees are routed to
// the correct buffer: each of the two one-file worktrees receives only its own
// diagnostic, with the severity the server reported.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Each file is opened as its own single-file worktree.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Publish one diagnostic per file: an ERROR in a.rs and a WARNING in b.rs,
    // both covering the variable name (columns 4..5 on line 0).
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer renders only its own diagnostic, over the variable name.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1197
1198#[gpui::test]
1199async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1200 init_test(cx);
1201
1202 let fs = FakeFs::new(cx.executor());
1203 fs.insert_tree(
1204 path!("/root"),
1205 json!({
1206 "dir": {
1207 ".git": {
1208 "HEAD": "ref: refs/heads/main",
1209 },
1210 ".gitignore": "b.rs",
1211 "a.rs": "let a = 1;",
1212 "b.rs": "let b = 2;",
1213 },
1214 "other.rs": "let b = c;"
1215 }),
1216 )
1217 .await;
1218
1219 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1220 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1221 let (worktree, _) = project
1222 .update(cx, |project, cx| {
1223 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1224 })
1225 .await
1226 .unwrap();
1227 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1228
1229 let (worktree, _) = project
1230 .update(cx, |project, cx| {
1231 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1232 })
1233 .await
1234 .unwrap();
1235 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1236
1237 let server_id = LanguageServerId(0);
1238 lsp_store.update(cx, |lsp_store, cx| {
1239 lsp_store
1240 .update_diagnostics(
1241 server_id,
1242 lsp::PublishDiagnosticsParams {
1243 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1244 version: None,
1245 diagnostics: vec![lsp::Diagnostic {
1246 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1247 severity: Some(lsp::DiagnosticSeverity::ERROR),
1248 message: "unused variable 'b'".to_string(),
1249 ..Default::default()
1250 }],
1251 },
1252 &[],
1253 cx,
1254 )
1255 .unwrap();
1256 lsp_store
1257 .update_diagnostics(
1258 server_id,
1259 lsp::PublishDiagnosticsParams {
1260 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1261 version: None,
1262 diagnostics: vec![lsp::Diagnostic {
1263 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1264 severity: Some(lsp::DiagnosticSeverity::ERROR),
1265 message: "unknown variable 'c'".to_string(),
1266 ..Default::default()
1267 }],
1268 },
1269 &[],
1270 cx,
1271 )
1272 .unwrap();
1273 });
1274
1275 let main_ignored_buffer = project
1276 .update(cx, |project, cx| {
1277 project.open_buffer((main_worktree_id, "b.rs"), cx)
1278 })
1279 .await
1280 .unwrap();
1281 main_ignored_buffer.update(cx, |buffer, _| {
1282 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1283 assert_eq!(
1284 chunks
1285 .iter()
1286 .map(|(s, d)| (s.as_str(), *d))
1287 .collect::<Vec<_>>(),
1288 &[
1289 ("let ", None),
1290 ("b", Some(DiagnosticSeverity::ERROR)),
1291 (" = 2;", None),
1292 ],
1293 "Gigitnored buffers should still get in-buffer diagnostics",
1294 );
1295 });
1296 let other_buffer = project
1297 .update(cx, |project, cx| {
1298 project.open_buffer((other_worktree_id, ""), cx)
1299 })
1300 .await
1301 .unwrap();
1302 other_buffer.update(cx, |buffer, _| {
1303 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1304 assert_eq!(
1305 chunks
1306 .iter()
1307 .map(|(s, d)| (s.as_str(), *d))
1308 .collect::<Vec<_>>(),
1309 &[
1310 ("let b = ", None),
1311 ("c", Some(DiagnosticSeverity::ERROR)),
1312 (";", None),
1313 ],
1314 "Buffers from hidden projects should still get in-buffer diagnostics"
1315 );
1316 });
1317
1318 project.update(cx, |project, cx| {
1319 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1320 assert_eq!(
1321 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1322 vec![(
1323 ProjectPath {
1324 worktree_id: main_worktree_id,
1325 path: Arc::from(Path::new("b.rs")),
1326 },
1327 server_id,
1328 DiagnosticSummary {
1329 error_count: 1,
1330 warning_count: 0,
1331 }
1332 )]
1333 );
1334 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1335 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1336 });
1337}
1338
// Verifies the project event sequence emitted around a disk-based diagnostics
// pass: server added -> progress started -> per-file diagnostics updates ->
// progress finished; and that re-publishing empty diagnostics does not emit
// a redundant update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register a fake server whose progress token marks disk-based diagnostics work.
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Subscribe to project events; the assertions below check their exact order.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting progress with the disk-based token triggers DiskBasedDiagnosticsStarted.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Diagnostics published mid-progress surface as a DiagnosticsUpdated event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending progress for the token completes the disk-based diagnostics pass.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is reflected in the newly-opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second identical (empty) publish produces no further event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1474
// Verifies that restarting a language server while its disk-based diagnostics
// progress is still open does not leave the project stuck in a "diagnostics
// running" state: the replacement server's progress lifecycle fully supersedes
// the old server's never-completed one.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The replacement server gets a new id (1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1561
1562#[gpui::test]
1563async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1564 init_test(cx);
1565
1566 let fs = FakeFs::new(cx.executor());
1567 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
1568
1569 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1570
1571 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1572 language_registry.add(rust_lang());
1573 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1574
1575 let (buffer, _) = project
1576 .update(cx, |project, cx| {
1577 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1578 })
1579 .await
1580 .unwrap();
1581
1582 // Publish diagnostics
1583 let fake_server = fake_servers.next().await.unwrap();
1584 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1585 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1586 version: None,
1587 diagnostics: vec![lsp::Diagnostic {
1588 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1589 severity: Some(lsp::DiagnosticSeverity::ERROR),
1590 message: "the message".to_string(),
1591 ..Default::default()
1592 }],
1593 });
1594
1595 cx.executor().run_until_parked();
1596 buffer.update(cx, |buffer, _| {
1597 assert_eq!(
1598 buffer
1599 .snapshot()
1600 .diagnostics_in_range::<_, usize>(0..1, false)
1601 .map(|entry| entry.diagnostic.message.clone())
1602 .collect::<Vec<_>>(),
1603 ["the message".to_string()]
1604 );
1605 });
1606 project.update(cx, |project, cx| {
1607 assert_eq!(
1608 project.diagnostic_summary(false, cx),
1609 DiagnosticSummary {
1610 error_count: 1,
1611 warning_count: 0,
1612 }
1613 );
1614 });
1615
1616 project.update(cx, |project, cx| {
1617 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1618 });
1619
1620 // The diagnostics are cleared.
1621 cx.executor().run_until_parked();
1622 buffer.update(cx, |buffer, _| {
1623 assert_eq!(
1624 buffer
1625 .snapshot()
1626 .diagnostics_in_range::<_, usize>(0..1, false)
1627 .map(|entry| entry.diagnostic.message.clone())
1628 .collect::<Vec<_>>(),
1629 Vec::<String>::new(),
1630 );
1631 });
1632 project.update(cx, |project, cx| {
1633 assert_eq!(
1634 project.diagnostic_summary(false, cx),
1635 DiagnosticSummary {
1636 error_count: 0,
1637 warning_count: 0,
1638 }
1639 );
1640 });
1641}
1642
1643#[gpui::test]
1644async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1645 init_test(cx);
1646
1647 let fs = FakeFs::new(cx.executor());
1648 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1649
1650 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1651 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1652
1653 language_registry.add(rust_lang());
1654 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1655
1656 let (buffer, _handle) = project
1657 .update(cx, |project, cx| {
1658 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1659 })
1660 .await
1661 .unwrap();
1662
1663 // Before restarting the server, report diagnostics with an unknown buffer version.
1664 let fake_server = fake_servers.next().await.unwrap();
1665 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1666 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1667 version: Some(10000),
1668 diagnostics: Vec::new(),
1669 });
1670 cx.executor().run_until_parked();
1671 project.update(cx, |project, cx| {
1672 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1673 });
1674
1675 let mut fake_server = fake_servers.next().await.unwrap();
1676 let notification = fake_server
1677 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1678 .await
1679 .text_document;
1680 assert_eq!(notification.version, 0);
1681}
1682
1683#[gpui::test]
1684async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
1685 init_test(cx);
1686
1687 let progress_token = "the-progress-token";
1688
1689 let fs = FakeFs::new(cx.executor());
1690 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1691
1692 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1693
1694 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1695 language_registry.add(rust_lang());
1696 let mut fake_servers = language_registry.register_fake_lsp(
1697 "Rust",
1698 FakeLspAdapter {
1699 name: "the-language-server",
1700 disk_based_diagnostics_sources: vec!["disk".into()],
1701 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1702 ..Default::default()
1703 },
1704 );
1705
1706 let (buffer, _handle) = project
1707 .update(cx, |project, cx| {
1708 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1709 })
1710 .await
1711 .unwrap();
1712
1713 // Simulate diagnostics starting to update.
1714 let mut fake_server = fake_servers.next().await.unwrap();
1715 fake_server
1716 .start_progress_with(
1717 "another-token",
1718 lsp::WorkDoneProgressBegin {
1719 cancellable: Some(false),
1720 ..Default::default()
1721 },
1722 )
1723 .await;
1724 fake_server
1725 .start_progress_with(
1726 progress_token,
1727 lsp::WorkDoneProgressBegin {
1728 cancellable: Some(true),
1729 ..Default::default()
1730 },
1731 )
1732 .await;
1733 cx.executor().run_until_parked();
1734
1735 project.update(cx, |project, cx| {
1736 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
1737 });
1738
1739 let cancel_notification = fake_server
1740 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
1741 .await;
1742 assert_eq!(
1743 cancel_notification.token,
1744 NumberOrString::String(progress_token.into())
1745 );
1746}
1747
// Verifies that flipping `enable_language_server` per language in the user
// settings starts/stops only the matching server: disabling Rust stops the
// rust server (JS unaffected), then re-enabling Rust and disabling JavaScript
// restarts the rust server and stops the JS one.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // One fake server per language so start/stop can be observed independently.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The rust server receives Exit; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh rust server instance starts and re-opens the buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // The JS server is shut down.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1865
// Verifies that diagnostics published against an older buffer version are
// translated through subsequent edits: ranges move with the text, overlapping
// diagnostics are layered correctly, and out-of-order/lower-severity updates
// still land on the right (edited) positions. Group ids increase across
// successive publishes.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (Rows shifted by 2 to account for the "\n\n" inserted above.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        // Chunked text carries the severity of the diagnostic covering it.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A sub-range query clips chunks to the requested span.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider, lower-severity warning is returned before the nested error.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the error severity wins.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Diagnostic positions reflect the three edits applied above.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2151
2152#[gpui::test]
2153async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2154 init_test(cx);
2155
2156 let text = concat!(
2157 "let one = ;\n", //
2158 "let two = \n",
2159 "let three = 3;\n",
2160 );
2161
2162 let fs = FakeFs::new(cx.executor());
2163 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2164
2165 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2166 let buffer = project
2167 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2168 .await
2169 .unwrap();
2170
2171 project.update(cx, |project, cx| {
2172 project.lsp_store.update(cx, |lsp_store, cx| {
2173 lsp_store
2174 .update_diagnostic_entries(
2175 LanguageServerId(0),
2176 PathBuf::from("/dir/a.rs"),
2177 None,
2178 vec![
2179 DiagnosticEntry {
2180 range: Unclipped(PointUtf16::new(0, 10))
2181 ..Unclipped(PointUtf16::new(0, 10)),
2182 diagnostic: Diagnostic {
2183 severity: DiagnosticSeverity::ERROR,
2184 message: "syntax error 1".to_string(),
2185 ..Default::default()
2186 },
2187 },
2188 DiagnosticEntry {
2189 range: Unclipped(PointUtf16::new(1, 10))
2190 ..Unclipped(PointUtf16::new(1, 10)),
2191 diagnostic: Diagnostic {
2192 severity: DiagnosticSeverity::ERROR,
2193 message: "syntax error 2".to_string(),
2194 ..Default::default()
2195 },
2196 },
2197 ],
2198 cx,
2199 )
2200 .unwrap();
2201 })
2202 });
2203
2204 // An empty range is extended forward to include the following character.
2205 // At the end of a line, an empty range is extended backward to include
2206 // the preceding character.
2207 buffer.update(cx, |buffer, _| {
2208 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2209 assert_eq!(
2210 chunks
2211 .iter()
2212 .map(|(s, d)| (s.as_str(), *d))
2213 .collect::<Vec<_>>(),
2214 &[
2215 ("let one = ", None),
2216 (";", Some(DiagnosticSeverity::ERROR)),
2217 ("\nlet two =", None),
2218 (" ", Some(DiagnosticSeverity::ERROR)),
2219 ("\nlet three = 3;\n", None)
2220 ]
2221 );
2222 });
2223}
2224
2225#[gpui::test]
2226async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2227 init_test(cx);
2228
2229 let fs = FakeFs::new(cx.executor());
2230 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2231 .await;
2232
2233 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2234 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2235
2236 lsp_store.update(cx, |lsp_store, cx| {
2237 lsp_store
2238 .update_diagnostic_entries(
2239 LanguageServerId(0),
2240 Path::new("/dir/a.rs").to_owned(),
2241 None,
2242 vec![DiagnosticEntry {
2243 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2244 diagnostic: Diagnostic {
2245 severity: DiagnosticSeverity::ERROR,
2246 is_primary: true,
2247 message: "syntax error a1".to_string(),
2248 ..Default::default()
2249 },
2250 }],
2251 cx,
2252 )
2253 .unwrap();
2254 lsp_store
2255 .update_diagnostic_entries(
2256 LanguageServerId(1),
2257 Path::new("/dir/a.rs").to_owned(),
2258 None,
2259 vec![DiagnosticEntry {
2260 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2261 diagnostic: Diagnostic {
2262 severity: DiagnosticSeverity::ERROR,
2263 is_primary: true,
2264 message: "syntax error b1".to_string(),
2265 ..Default::default()
2266 },
2267 }],
2268 cx,
2269 )
2270 .unwrap();
2271
2272 assert_eq!(
2273 lsp_store.diagnostic_summary(false, cx),
2274 DiagnosticSummary {
2275 error_count: 2,
2276 warning_count: 0,
2277 }
2278 );
2279 });
2280}
2281
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    // Tests that `edits_from_lsp` can apply edits a language server computed
    // against an *older* version of the buffer: the edits are interpreted
    // relative to the snapshot identified by the LSP document version, then
    // re-mapped onto the buffer's current contents so that edits made by the
    // user in the meantime are preserved.
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the edits
    // below will be tagged with this (now stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The coordinates in these edits refer to the *old* document version,
    // i.e. the buffer text before the simulated user edits above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must merge the server's changes with the
    // user's concurrent edits: both sets of changes appear in the result.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2436
2437#[gpui::test]
2438async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
2439 init_test(cx);
2440
2441 let text = "
2442 use a::b;
2443 use a::c;
2444
2445 fn f() {
2446 b();
2447 c();
2448 }
2449 "
2450 .unindent();
2451
2452 let fs = FakeFs::new(cx.executor());
2453 fs.insert_tree(
2454 path!("/dir"),
2455 json!({
2456 "a.rs": text.clone(),
2457 }),
2458 )
2459 .await;
2460
2461 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2462 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2463 let buffer = project
2464 .update(cx, |project, cx| {
2465 project.open_local_buffer(path!("/dir/a.rs"), cx)
2466 })
2467 .await
2468 .unwrap();
2469
2470 // Simulate the language server sending us a small edit in the form of a very large diff.
2471 // Rust-analyzer does this when performing a merge-imports code action.
2472 let edits = lsp_store
2473 .update(cx, |lsp_store, cx| {
2474 lsp_store.as_local_mut().unwrap().edits_from_lsp(
2475 &buffer,
2476 [
2477 // Replace the first use statement without editing the semicolon.
2478 lsp::TextEdit {
2479 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
2480 new_text: "a::{b, c}".into(),
2481 },
2482 // Reinsert the remainder of the file between the semicolon and the final
2483 // newline of the file.
2484 lsp::TextEdit {
2485 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2486 new_text: "\n\n".into(),
2487 },
2488 lsp::TextEdit {
2489 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2490 new_text: "
2491 fn f() {
2492 b();
2493 c();
2494 }"
2495 .unindent(),
2496 },
2497 // Delete everything after the first newline of the file.
2498 lsp::TextEdit {
2499 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
2500 new_text: "".into(),
2501 },
2502 ],
2503 LanguageServerId(0),
2504 None,
2505 cx,
2506 )
2507 })
2508 .await
2509 .unwrap();
2510
2511 buffer.update(cx, |buffer, cx| {
2512 let edits = edits
2513 .into_iter()
2514 .map(|(range, text)| {
2515 (
2516 range.start.to_point(buffer)..range.end.to_point(buffer),
2517 text,
2518 )
2519 })
2520 .collect::<Vec<_>>();
2521
2522 assert_eq!(
2523 edits,
2524 [
2525 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2526 (Point::new(1, 0)..Point::new(2, 0), "".into())
2527 ]
2528 );
2529
2530 for (range, new_text) in edits {
2531 buffer.edit([(range, new_text)], None, cx);
2532 }
2533 assert_eq!(
2534 buffer.text(),
2535 "
2536 use a::{b, c};
2537
2538 fn f() {
2539 b();
2540 c();
2541 }
2542 "
2543 .unindent()
2544 );
2545 });
2546}
2547
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    // Tests that `edits_from_lsp` tolerates malformed server edits:
    // unordered edits, inverted ranges (end before start), and ranges that
    // point past the end of the document are normalized rather than
    // producing an error.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: the start (0, 8) comes after the end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Out-of-bounds range: line 99 does not exist in the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchored edits into point ranges for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the edits come back normalized and
        // minimized to two small, ordered edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2654
2655fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2656 buffer: &Buffer,
2657 range: Range<T>,
2658) -> Vec<(String, Option<DiagnosticSeverity>)> {
2659 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2660 for chunk in buffer.snapshot().chunks(range, true) {
2661 if chunks.last().map_or(false, |prev_chunk| {
2662 prev_chunk.1 == chunk.diagnostic_severity
2663 }) {
2664 chunks.last_mut().unwrap().0.push_str(chunk.text);
2665 } else {
2666 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2667 }
2668 }
2669 chunks
2670}
2671
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    // Tests go-to-definition into a file outside the project: the target
    // file is opened via a new invisible worktree, which is released when
    // the last reference to the definition is dropped.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project; `a.rs` exists on disk but is not
    // in any worktree yet.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server answers the definition request by pointing at `a.rs`,
    // which lies outside the project's worktree.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // `a.rs` appears as a new, invisible (`false`) worktree alongside
        // the visible single-file worktree for `b.rs`.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // After dropping the definition, the invisible worktree that was created
    // for it goes away.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: collects each worktree's absolute path and visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2769
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Tests completions whose items carry no `text_edit`: the client must
    // derive the range to replace from the text surrounding the cursor.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: cursor at the end of the word "fqn". The request future is
    // created first; it stays pending until the handler below responds.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item supplies `insert_text` but no `text_edit` range.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The replaced range covers the 3-character word "fqn" before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: cursor inside a string literal, just before the closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The replaced range covers "cmp" — the word fragment before the cursor,
    // excluding the closing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2863
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    // Tests that carriage returns in a completion item's `insert_text`
    // (both bare "\r" and "\r\n") are normalized to "\n" in the resulting
    // completion text.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Kick off the completion request; it stays pending until the handler
    // registered below produces a response.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The insertion text mixes a bare "\r" and a Windows-style "\r\n".
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    assert_eq!(completions.len(), 1);
    // Both carriage-return forms were converted to plain "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2926
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // Tests the command-based code-action flow end to end: the action has no
    // edits, so applying it resolves the action, executes its command, and
    // collects the edits the server pushes back via `workspace/applyEdit`
    // into a single project transaction.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The server advertises both lazy code-action resolution and a single
    // executable command.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying `data`, which marks it as
    // needing resolution).
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: prepend "X" to `a.ts`.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a single unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3067
3068#[gpui::test(iterations = 10)]
3069async fn test_save_file(cx: &mut gpui::TestAppContext) {
3070 init_test(cx);
3071
3072 let fs = FakeFs::new(cx.executor());
3073 fs.insert_tree(
3074 path!("/dir"),
3075 json!({
3076 "file1": "the old contents",
3077 }),
3078 )
3079 .await;
3080
3081 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3082 let buffer = project
3083 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3084 .await
3085 .unwrap();
3086 buffer.update(cx, |buffer, cx| {
3087 assert_eq!(buffer.text(), "the old contents");
3088 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3089 });
3090
3091 project
3092 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3093 .await
3094 .unwrap();
3095
3096 let new_text = fs
3097 .load(Path::new(path!("/dir/file1")))
3098 .await
3099 .unwrap()
3100 .replace("\r\n", "\n");
3101 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3102}
3103
3104#[gpui::test(iterations = 30)]
3105async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3106 init_test(cx);
3107
3108 let fs = FakeFs::new(cx.executor().clone());
3109 fs.insert_tree(
3110 path!("/dir"),
3111 json!({
3112 "file1": "the original contents",
3113 }),
3114 )
3115 .await;
3116
3117 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3118 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3119 let buffer = project
3120 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3121 .await
3122 .unwrap();
3123
3124 // Simulate buffer diffs being slow, so that they don't complete before
3125 // the next file change occurs.
3126 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3127
3128 // Change the buffer's file on disk, and then wait for the file change
3129 // to be detected by the worktree, so that the buffer starts reloading.
3130 fs.save(
3131 path!("/dir/file1").as_ref(),
3132 &"the first contents".into(),
3133 Default::default(),
3134 )
3135 .await
3136 .unwrap();
3137 worktree.next_event(cx).await;
3138
3139 // Change the buffer's file again. Depending on the random seed, the
3140 // previous file change may still be in progress.
3141 fs.save(
3142 path!("/dir/file1").as_ref(),
3143 &"the second contents".into(),
3144 Default::default(),
3145 )
3146 .await
3147 .unwrap();
3148 worktree.next_event(cx).await;
3149
3150 cx.executor().run_until_parked();
3151 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3152 buffer.read_with(cx, |buffer, _| {
3153 assert_eq!(buffer.text(), on_disk_text);
3154 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3155 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3156 });
3157}
3158
3159#[gpui::test(iterations = 30)]
3160async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3161 init_test(cx);
3162
3163 let fs = FakeFs::new(cx.executor().clone());
3164 fs.insert_tree(
3165 path!("/dir"),
3166 json!({
3167 "file1": "the original contents",
3168 }),
3169 )
3170 .await;
3171
3172 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3173 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3174 let buffer = project
3175 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3176 .await
3177 .unwrap();
3178
3179 // Simulate buffer diffs being slow, so that they don't complete before
3180 // the next file change occurs.
3181 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3182
3183 // Change the buffer's file on disk, and then wait for the file change
3184 // to be detected by the worktree, so that the buffer starts reloading.
3185 fs.save(
3186 path!("/dir/file1").as_ref(),
3187 &"the first contents".into(),
3188 Default::default(),
3189 )
3190 .await
3191 .unwrap();
3192 worktree.next_event(cx).await;
3193
3194 cx.executor()
3195 .spawn(cx.executor().simulate_random_delay())
3196 .await;
3197
3198 // Perform a noop edit, causing the buffer's version to increase.
3199 buffer.update(cx, |buffer, cx| {
3200 buffer.edit([(0..0, " ")], None, cx);
3201 buffer.undo(cx);
3202 });
3203
3204 cx.executor().run_until_parked();
3205 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3206 buffer.read_with(cx, |buffer, _| {
3207 let buffer_text = buffer.text();
3208 if buffer_text == on_disk_text {
3209 assert!(
3210 !buffer.is_dirty() && !buffer.has_conflict(),
3211 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3212 );
3213 }
3214 // If the file change occurred while the buffer was processing the first
3215 // change, the buffer will be in a conflicting state.
3216 else {
3217 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3218 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3219 }
3220 });
3221}
3222
3223#[gpui::test]
3224async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3225 init_test(cx);
3226
3227 let fs = FakeFs::new(cx.executor());
3228 fs.insert_tree(
3229 path!("/dir"),
3230 json!({
3231 "file1": "the old contents",
3232 }),
3233 )
3234 .await;
3235
3236 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3237 let buffer = project
3238 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3239 .await
3240 .unwrap();
3241 buffer.update(cx, |buffer, cx| {
3242 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3243 });
3244
3245 project
3246 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3247 .await
3248 .unwrap();
3249
3250 let new_text = fs
3251 .load(Path::new(path!("/dir/file1")))
3252 .await
3253 .unwrap()
3254 .replace("\r\n", "\n");
3255 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3256}
3257
3258#[gpui::test]
3259async fn test_save_as(cx: &mut gpui::TestAppContext) {
3260 init_test(cx);
3261
3262 let fs = FakeFs::new(cx.executor());
3263 fs.insert_tree("/dir", json!({})).await;
3264
3265 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3266
3267 let languages = project.update(cx, |project, _| project.languages().clone());
3268 languages.add(rust_lang());
3269
3270 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3271 buffer.update(cx, |buffer, cx| {
3272 buffer.edit([(0..0, "abc")], None, cx);
3273 assert!(buffer.is_dirty());
3274 assert!(!buffer.has_conflict());
3275 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3276 });
3277 project
3278 .update(cx, |project, cx| {
3279 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3280 let path = ProjectPath {
3281 worktree_id,
3282 path: Arc::from(Path::new("file1.rs")),
3283 };
3284 project.save_buffer_as(buffer.clone(), path, cx)
3285 })
3286 .await
3287 .unwrap();
3288 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3289
3290 cx.executor().run_until_parked();
3291 buffer.update(cx, |buffer, cx| {
3292 assert_eq!(
3293 buffer.file().unwrap().full_path(cx),
3294 Path::new("dir/file1.rs")
3295 );
3296 assert!(!buffer.is_dirty());
3297 assert!(!buffer.has_conflict());
3298 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3299 });
3300
3301 let opened_buffer = project
3302 .update(cx, |project, cx| {
3303 project.open_local_buffer("/dir/file1.rs", cx)
3304 })
3305 .await
3306 .unwrap();
3307 assert_eq!(opened_buffer, buffer);
3308}
3309
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // This test uses the real filesystem (TempTree + RealFs), whose watcher
    // runs on background threads, so thread parking must be allowed.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Opens a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Returns the worktree entry id for a relative path, panicking if the
    // entry does not exist.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    // Capture the entry ids before any renames, so we can later assert that
    // ids are stable across moves on disk.
    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so they can be replayed
    // into the remote worktree below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the renames/deletions after the FS events
    // have been flushed.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids survive renames and directory moves.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths; the deleted file's
    // buffer keeps its old path but reports DiskState::Deleted.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3475
3476#[gpui::test(iterations = 10)]
3477async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3478 init_test(cx);
3479
3480 let fs = FakeFs::new(cx.executor());
3481 fs.insert_tree(
3482 path!("/dir"),
3483 json!({
3484 "a": {
3485 "file1": "",
3486 }
3487 }),
3488 )
3489 .await;
3490
3491 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3492 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3493 let tree_id = tree.update(cx, |tree, _| tree.id());
3494
3495 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3496 project.update(cx, |project, cx| {
3497 let tree = project.worktrees(cx).next().unwrap();
3498 tree.read(cx)
3499 .entry_for_path(path)
3500 .unwrap_or_else(|| panic!("no entry for path {}", path))
3501 .id
3502 })
3503 };
3504
3505 let dir_id = id_for_path("a", cx);
3506 let file_id = id_for_path("a/file1", cx);
3507 let buffer = project
3508 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3509 .await
3510 .unwrap();
3511 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3512
3513 project
3514 .update(cx, |project, cx| {
3515 project.rename_entry(dir_id, Path::new("b"), cx)
3516 })
3517 .unwrap()
3518 .await
3519 .to_included()
3520 .unwrap();
3521 cx.executor().run_until_parked();
3522
3523 assert_eq!(id_for_path("b", cx), dir_id);
3524 assert_eq!(id_for_path("b/file1", cx), file_id);
3525 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3526}
3527
3528#[gpui::test]
3529async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3530 init_test(cx);
3531
3532 let fs = FakeFs::new(cx.executor());
3533 fs.insert_tree(
3534 "/dir",
3535 json!({
3536 "a.txt": "a-contents",
3537 "b.txt": "b-contents",
3538 }),
3539 )
3540 .await;
3541
3542 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3543
3544 // Spawn multiple tasks to open paths, repeating some paths.
3545 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3546 (
3547 p.open_local_buffer("/dir/a.txt", cx),
3548 p.open_local_buffer("/dir/b.txt", cx),
3549 p.open_local_buffer("/dir/a.txt", cx),
3550 )
3551 });
3552
3553 let buffer_a_1 = buffer_a_1.await.unwrap();
3554 let buffer_a_2 = buffer_a_2.await.unwrap();
3555 let buffer_b = buffer_b.await.unwrap();
3556 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3557 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3558
3559 // There is only one buffer per path.
3560 let buffer_a_id = buffer_a_1.entity_id();
3561 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3562
3563 // Open the same path again while it is still open.
3564 drop(buffer_a_1);
3565 let buffer_a_3 = project
3566 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3567 .await
3568 .unwrap();
3569
3570 // There's still only one buffer per path.
3571 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3572}
3573
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    // Verifies dirty-state tracking and the exact event sequences emitted as
    // buffers are edited, saved, restored, and deleted on disk.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Collects every non-Operation event emitted by buffer1.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    // Note that only the first edit after a save flips the dirty bit, so only
    // one DirtyChanged appears between the two Edited events.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer first, then delete the underlying file.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3724
3725#[gpui::test]
3726async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3727 init_test(cx);
3728
3729 let (initial_contents, initial_offsets) =
3730 marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
3731 let fs = FakeFs::new(cx.executor());
3732 fs.insert_tree(
3733 path!("/dir"),
3734 json!({
3735 "the-file": initial_contents,
3736 }),
3737 )
3738 .await;
3739 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3740 let buffer = project
3741 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
3742 .await
3743 .unwrap();
3744
3745 let anchors = initial_offsets
3746 .iter()
3747 .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
3748 .collect::<Vec<_>>();
3749
3750 // Change the file on disk, adding two new lines of text, and removing
3751 // one line.
3752 buffer.update(cx, |buffer, _| {
3753 assert!(!buffer.is_dirty());
3754 assert!(!buffer.has_conflict());
3755 });
3756
3757 let (new_contents, new_offsets) =
3758 marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
3759 fs.save(
3760 path!("/dir/the-file").as_ref(),
3761 &new_contents.as_str().into(),
3762 LineEnding::Unix,
3763 )
3764 .await
3765 .unwrap();
3766
3767 // Because the buffer was not modified, it is reloaded from disk. Its
3768 // contents are edited according to the diff between the old and new
3769 // file contents.
3770 cx.executor().run_until_parked();
3771 buffer.update(cx, |buffer, _| {
3772 assert_eq!(buffer.text(), new_contents);
3773 assert!(!buffer.is_dirty());
3774 assert!(!buffer.has_conflict());
3775
3776 let anchor_offsets = anchors
3777 .iter()
3778 .map(|anchor| anchor.to_offset(&*buffer))
3779 .collect::<Vec<_>>();
3780 assert_eq!(anchor_offsets, new_offsets);
3781 });
3782
3783 // Modify the buffer
3784 buffer.update(cx, |buffer, cx| {
3785 buffer.edit([(0..0, " ")], None, cx);
3786 assert!(buffer.is_dirty());
3787 assert!(!buffer.has_conflict());
3788 });
3789
3790 // Change the file on disk again, adding blank lines to the beginning.
3791 fs.save(
3792 path!("/dir/the-file").as_ref(),
3793 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3794 LineEnding::Unix,
3795 )
3796 .await
3797 .unwrap();
3798
3799 // Because the buffer is modified, it doesn't reload from disk, but is
3800 // marked as having a conflict.
3801 cx.executor().run_until_parked();
3802 buffer.update(cx, |buffer, _| {
3803 assert_eq!(buffer.text(), " ".to_string() + &new_contents);
3804 assert!(buffer.has_conflict());
3805 });
3806}
3807
3808#[gpui::test]
3809async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3810 init_test(cx);
3811
3812 let fs = FakeFs::new(cx.executor());
3813 fs.insert_tree(
3814 path!("/dir"),
3815 json!({
3816 "file1": "a\nb\nc\n",
3817 "file2": "one\r\ntwo\r\nthree\r\n",
3818 }),
3819 )
3820 .await;
3821
3822 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3823 let buffer1 = project
3824 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3825 .await
3826 .unwrap();
3827 let buffer2 = project
3828 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
3829 .await
3830 .unwrap();
3831
3832 buffer1.update(cx, |buffer, _| {
3833 assert_eq!(buffer.text(), "a\nb\nc\n");
3834 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3835 });
3836 buffer2.update(cx, |buffer, _| {
3837 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3838 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3839 });
3840
3841 // Change a file's line endings on disk from unix to windows. The buffer's
3842 // state updates correctly.
3843 fs.save(
3844 path!("/dir/file1").as_ref(),
3845 &"aaa\nb\nc\n".into(),
3846 LineEnding::Windows,
3847 )
3848 .await
3849 .unwrap();
3850 cx.executor().run_until_parked();
3851 buffer1.update(cx, |buffer, _| {
3852 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3853 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3854 });
3855
3856 // Save a file with windows line endings. The file is written correctly.
3857 buffer2.update(cx, |buffer, cx| {
3858 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3859 });
3860 project
3861 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3862 .await
3863 .unwrap();
3864 assert_eq!(
3865 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
3866 "one\r\ntwo\r\nthree\r\nfour\r\n",
3867 );
3868}
3869
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP diagnostics connected via relatedInformation are
    // grouped: hints that point back at a primary diagnostic share its
    // group_id, and diagnostic_group() returns each group's members.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Five diagnostics: a warning ("error 1") with one hint, and an error
    // ("error 2") with two hints. Primaries and their hints reference each
    // other through related_information.
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics in buffer order: "error 2" and its hints share
    // group_id 0; "error 1" and its hint share group_id 1. Only the
    // warning/error entries are primary.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: the error plus its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1: the warning plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
4112
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies that renaming a worktree entry sends workspace/willRenameFiles
    // to a server that registered matching file-operation filters, applies the
    // returned WorkspaceEdit, and then sends workspace/didRenameFiles.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server registers for rename operations on *.rs files and on
    // all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; it should block on the willRenameFiles request
    // handled below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the server returns from willRenameFiles; the project is
    // expected to apply it to two.rs.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .handle_request::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server is notified via didRenameFiles
    // with the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    // The willRenameFiles handler ran and produced the expected edit.
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4241
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Verifies the two-phase LSP symbol rename: prepare_rename resolves the
    // renameable range, then perform_rename applies the server's
    // WorkspaceEdit across multiple buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Phase 1: prepare_rename at offset 7 (inside "ONE") resolves to the
    // symbol's range as reported by the server.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Phase 2: perform_rename; the fake server returns edits touching both
    // one.rs (the definition) and two.rs (the two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction covers both edited buffers.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4381
4382#[gpui::test]
4383async fn test_search(cx: &mut gpui::TestAppContext) {
4384 init_test(cx);
4385
4386 let fs = FakeFs::new(cx.executor());
4387 fs.insert_tree(
4388 path!("/dir"),
4389 json!({
4390 "one.rs": "const ONE: usize = 1;",
4391 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4392 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4393 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4394 }),
4395 )
4396 .await;
4397 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4398 assert_eq!(
4399 search(
4400 &project,
4401 SearchQuery::text(
4402 "TWO",
4403 false,
4404 true,
4405 false,
4406 Default::default(),
4407 Default::default(),
4408 None
4409 )
4410 .unwrap(),
4411 cx
4412 )
4413 .await
4414 .unwrap(),
4415 HashMap::from_iter([
4416 (separator!("dir/two.rs").to_string(), vec![6..9]),
4417 (separator!("dir/three.rs").to_string(), vec![37..40])
4418 ])
4419 );
4420
4421 let buffer_4 = project
4422 .update(cx, |project, cx| {
4423 project.open_local_buffer(path!("/dir/four.rs"), cx)
4424 })
4425 .await
4426 .unwrap();
4427 buffer_4.update(cx, |buffer, cx| {
4428 let text = "two::TWO";
4429 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4430 });
4431
4432 assert_eq!(
4433 search(
4434 &project,
4435 SearchQuery::text(
4436 "TWO",
4437 false,
4438 true,
4439 false,
4440 Default::default(),
4441 Default::default(),
4442 None,
4443 )
4444 .unwrap(),
4445 cx
4446 )
4447 .await
4448 .unwrap(),
4449 HashMap::from_iter([
4450 (separator!("dir/two.rs").to_string(), vec![6..9]),
4451 (separator!("dir/three.rs").to_string(), vec![37..40]),
4452 (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
4453 ])
4454 );
4455}
4456
4457#[gpui::test]
4458async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4459 init_test(cx);
4460
4461 let search_query = "file";
4462
4463 let fs = FakeFs::new(cx.executor());
4464 fs.insert_tree(
4465 path!("/dir"),
4466 json!({
4467 "one.rs": r#"// Rust file one"#,
4468 "one.ts": r#"// TypeScript file one"#,
4469 "two.rs": r#"// Rust file two"#,
4470 "two.ts": r#"// TypeScript file two"#,
4471 }),
4472 )
4473 .await;
4474 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4475
4476 assert!(
4477 search(
4478 &project,
4479 SearchQuery::text(
4480 search_query,
4481 false,
4482 true,
4483 false,
4484 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4485 Default::default(),
4486 None
4487 )
4488 .unwrap(),
4489 cx
4490 )
4491 .await
4492 .unwrap()
4493 .is_empty(),
4494 "If no inclusions match, no files should be returned"
4495 );
4496
4497 assert_eq!(
4498 search(
4499 &project,
4500 SearchQuery::text(
4501 search_query,
4502 false,
4503 true,
4504 false,
4505 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4506 Default::default(),
4507 None
4508 )
4509 .unwrap(),
4510 cx
4511 )
4512 .await
4513 .unwrap(),
4514 HashMap::from_iter([
4515 (separator!("dir/one.rs").to_string(), vec![8..12]),
4516 (separator!("dir/two.rs").to_string(), vec![8..12]),
4517 ]),
4518 "Rust only search should give only Rust files"
4519 );
4520
4521 assert_eq!(
4522 search(
4523 &project,
4524 SearchQuery::text(
4525 search_query,
4526 false,
4527 true,
4528 false,
4529
4530 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4531
4532 Default::default(),
4533 None,
4534 ).unwrap(),
4535 cx
4536 )
4537 .await
4538 .unwrap(),
4539 HashMap::from_iter([
4540 (separator!("dir/one.ts").to_string(), vec![14..18]),
4541 (separator!("dir/two.ts").to_string(), vec![14..18]),
4542 ]),
4543 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4544 );
4545
4546 assert_eq!(
4547 search(
4548 &project,
4549 SearchQuery::text(
4550 search_query,
4551 false,
4552 true,
4553 false,
4554
4555 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4556
4557 Default::default(),
4558 None,
4559 ).unwrap(),
4560 cx
4561 )
4562 .await
4563 .unwrap(),
4564 HashMap::from_iter([
4565 (separator!("dir/two.ts").to_string(), vec![14..18]),
4566 (separator!("dir/one.rs").to_string(), vec![8..12]),
4567 (separator!("dir/one.ts").to_string(), vec![14..18]),
4568 (separator!("dir/two.rs").to_string(), vec![8..12]),
4569 ]),
4570 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4571 );
4572}
4573
4574#[gpui::test]
4575async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4576 init_test(cx);
4577
4578 let search_query = "file";
4579
4580 let fs = FakeFs::new(cx.executor());
4581 fs.insert_tree(
4582 path!("/dir"),
4583 json!({
4584 "one.rs": r#"// Rust file one"#,
4585 "one.ts": r#"// TypeScript file one"#,
4586 "two.rs": r#"// Rust file two"#,
4587 "two.ts": r#"// TypeScript file two"#,
4588 }),
4589 )
4590 .await;
4591 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4592
4593 assert_eq!(
4594 search(
4595 &project,
4596 SearchQuery::text(
4597 search_query,
4598 false,
4599 true,
4600 false,
4601 Default::default(),
4602 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4603 None,
4604 )
4605 .unwrap(),
4606 cx
4607 )
4608 .await
4609 .unwrap(),
4610 HashMap::from_iter([
4611 (separator!("dir/one.rs").to_string(), vec![8..12]),
4612 (separator!("dir/one.ts").to_string(), vec![14..18]),
4613 (separator!("dir/two.rs").to_string(), vec![8..12]),
4614 (separator!("dir/two.ts").to_string(), vec![14..18]),
4615 ]),
4616 "If no exclusions match, all files should be returned"
4617 );
4618
4619 assert_eq!(
4620 search(
4621 &project,
4622 SearchQuery::text(
4623 search_query,
4624 false,
4625 true,
4626 false,
4627 Default::default(),
4628 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4629 None,
4630 )
4631 .unwrap(),
4632 cx
4633 )
4634 .await
4635 .unwrap(),
4636 HashMap::from_iter([
4637 (separator!("dir/one.ts").to_string(), vec![14..18]),
4638 (separator!("dir/two.ts").to_string(), vec![14..18]),
4639 ]),
4640 "Rust exclusion search should give only TypeScript files"
4641 );
4642
4643 assert_eq!(
4644 search(
4645 &project,
4646 SearchQuery::text(
4647 search_query,
4648 false,
4649 true,
4650 false,
4651 Default::default(),
4652 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4653 None,
4654 ).unwrap(),
4655 cx
4656 )
4657 .await
4658 .unwrap(),
4659 HashMap::from_iter([
4660 (separator!("dir/one.rs").to_string(), vec![8..12]),
4661 (separator!("dir/two.rs").to_string(), vec![8..12]),
4662 ]),
4663 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4664 );
4665
4666 assert!(
4667 search(
4668 &project,
4669 SearchQuery::text(
4670 search_query,
4671 false,
4672 true,
4673 false,
4674 Default::default(),
4675
4676 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4677 None,
4678
4679 ).unwrap(),
4680 cx
4681 )
4682 .await
4683 .unwrap().is_empty(),
4684 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4685 );
4686}
4687
4688#[gpui::test]
4689async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4690 init_test(cx);
4691
4692 let search_query = "file";
4693
4694 let fs = FakeFs::new(cx.executor());
4695 fs.insert_tree(
4696 path!("/dir"),
4697 json!({
4698 "one.rs": r#"// Rust file one"#,
4699 "one.ts": r#"// TypeScript file one"#,
4700 "two.rs": r#"// Rust file two"#,
4701 "two.ts": r#"// TypeScript file two"#,
4702 }),
4703 )
4704 .await;
4705 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4706
4707 assert!(
4708 search(
4709 &project,
4710 SearchQuery::text(
4711 search_query,
4712 false,
4713 true,
4714 false,
4715 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4716 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4717 None,
4718 )
4719 .unwrap(),
4720 cx
4721 )
4722 .await
4723 .unwrap()
4724 .is_empty(),
4725 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4726 );
4727
4728 assert!(
4729 search(
4730 &project,
4731 SearchQuery::text(
4732 search_query,
4733 false,
4734 true,
4735 false,
4736 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4737 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4738 None,
4739 ).unwrap(),
4740 cx
4741 )
4742 .await
4743 .unwrap()
4744 .is_empty(),
4745 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4746 );
4747
4748 assert!(
4749 search(
4750 &project,
4751 SearchQuery::text(
4752 search_query,
4753 false,
4754 true,
4755 false,
4756 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4757 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4758 None,
4759 )
4760 .unwrap(),
4761 cx
4762 )
4763 .await
4764 .unwrap()
4765 .is_empty(),
4766 "Non-matching inclusions and exclusions should not change that."
4767 );
4768
4769 assert_eq!(
4770 search(
4771 &project,
4772 SearchQuery::text(
4773 search_query,
4774 false,
4775 true,
4776 false,
4777 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4778 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4779 None,
4780 )
4781 .unwrap(),
4782 cx
4783 )
4784 .await
4785 .unwrap(),
4786 HashMap::from_iter([
4787 (separator!("dir/one.ts").to_string(), vec![14..18]),
4788 (separator!("dir/two.ts").to_string(), vec![14..18]),
4789 ]),
4790 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4791 );
4792}
4793
4794#[gpui::test]
4795async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4796 init_test(cx);
4797
4798 let fs = FakeFs::new(cx.executor());
4799 fs.insert_tree(
4800 path!("/worktree-a"),
4801 json!({
4802 "haystack.rs": r#"// NEEDLE"#,
4803 "haystack.ts": r#"// NEEDLE"#,
4804 }),
4805 )
4806 .await;
4807 fs.insert_tree(
4808 path!("/worktree-b"),
4809 json!({
4810 "haystack.rs": r#"// NEEDLE"#,
4811 "haystack.ts": r#"// NEEDLE"#,
4812 }),
4813 )
4814 .await;
4815
4816 let project = Project::test(
4817 fs.clone(),
4818 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
4819 cx,
4820 )
4821 .await;
4822
4823 assert_eq!(
4824 search(
4825 &project,
4826 SearchQuery::text(
4827 "NEEDLE",
4828 false,
4829 true,
4830 false,
4831 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4832 Default::default(),
4833 None,
4834 )
4835 .unwrap(),
4836 cx
4837 )
4838 .await
4839 .unwrap(),
4840 HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
4841 "should only return results from included worktree"
4842 );
4843 assert_eq!(
4844 search(
4845 &project,
4846 SearchQuery::text(
4847 "NEEDLE",
4848 false,
4849 true,
4850 false,
4851 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4852 Default::default(),
4853 None,
4854 )
4855 .unwrap(),
4856 cx
4857 )
4858 .await
4859 .unwrap(),
4860 HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
4861 "should only return results from included worktree"
4862 );
4863
4864 assert_eq!(
4865 search(
4866 &project,
4867 SearchQuery::text(
4868 "NEEDLE",
4869 false,
4870 true,
4871 false,
4872 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4873 Default::default(),
4874 None,
4875 )
4876 .unwrap(),
4877 cx
4878 )
4879 .await
4880 .unwrap(),
4881 HashMap::from_iter([
4882 (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
4883 (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
4884 ]),
4885 "should return results from both worktrees"
4886 );
4887}
4888
// Search should skip gitignored entries by default, include them when the
// query asks for ignored files, and still honor inclusion/exclusion filters
// on ignored paths.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    // `target/` and `node_modules/` are gitignored; only the root
    // `package.json` is tracked content containing the query.
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default query: the fourth flag differs from the next query below —
    // presumably it toggles searching ignored files; verify against
    // SearchQuery::text's signature.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): a fresh Project is created for each query, presumably so
    // results don't depend on state left by the previous search — confirm.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/package.json").to_string(), vec![8..11]),
            (separator!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                separator!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                separator!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                separator!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                separator!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusion/exclusion filters still apply to ignored files: include the
    // prettier directory but exclude TypeScript, leaving one JSON match.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            separator!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
5008
5009#[gpui::test]
5010async fn test_create_entry(cx: &mut gpui::TestAppContext) {
5011 init_test(cx);
5012
5013 let fs = FakeFs::new(cx.executor().clone());
5014 fs.insert_tree(
5015 "/one/two",
5016 json!({
5017 "three": {
5018 "a.txt": "",
5019 "four": {}
5020 },
5021 "c.rs": ""
5022 }),
5023 )
5024 .await;
5025
5026 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
5027 project
5028 .update(cx, |project, cx| {
5029 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5030 project.create_entry((id, "b.."), true, cx)
5031 })
5032 .await
5033 .unwrap()
5034 .to_included()
5035 .unwrap();
5036
5037 // Can't create paths outside the project
5038 let result = project
5039 .update(cx, |project, cx| {
5040 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5041 project.create_entry((id, "../../boop"), true, cx)
5042 })
5043 .await;
5044 assert!(result.is_err());
5045
5046 // Can't create paths with '..'
5047 let result = project
5048 .update(cx, |project, cx| {
5049 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5050 project.create_entry((id, "four/../beep"), true, cx)
5051 })
5052 .await;
5053 assert!(result.is_err());
5054
5055 assert_eq!(
5056 fs.paths(true),
5057 vec![
5058 PathBuf::from(path!("/")),
5059 PathBuf::from(path!("/one")),
5060 PathBuf::from(path!("/one/two")),
5061 PathBuf::from(path!("/one/two/c.rs")),
5062 PathBuf::from(path!("/one/two/three")),
5063 PathBuf::from(path!("/one/two/three/a.txt")),
5064 PathBuf::from(path!("/one/two/three/b..")),
5065 PathBuf::from(path!("/one/two/three/four")),
5066 ]
5067 );
5068
5069 // And we cannot open buffers with '..'
5070 let result = project
5071 .update(cx, |project, cx| {
5072 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5073 project.open_buffer((id, "../c.rs"), cx)
5074 })
5075 .await;
5076 assert!(result.is_err())
5077}
5078
// A buffer can be served by several language servers at once. Hover should
// query every server that advertises hover support, never touch servers
// without it, and surface all non-empty responses.
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four fake servers for the same language: the first three advertise hover
    // support, the last one explicitly does not.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts the fake servers for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Register a hover handler on each initialized server, keyed by name.
    // Handlers must be in place before the hover request below is issued.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two reply with a real hover payload.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            // Gets queried (it has hover capability) but returns no content.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // Declared no hover capability, so this handler must never fire.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Kick off the hover request, then wait until every capable server has
    // actually received it before collecting the merged results.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5231
// Hover responses made up solely of empty or whitespace-only strings should
// be discarded, leaving no hover entries at all.
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server answers every hover with only empty/whitespace parts. The
    // handler must be registered before the hover request below is issued.
    let mut request_handled =
        fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        });

    // Issue the hover, wait until the server has seen the request, then
    // verify the whitespace-only response was filtered out entirely.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
5303
// Requesting code actions with an explicit kinds filter should drop any
// actions of other kinds that the server returns.
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server returns two actions of different kinds; only the requested
    // kind should survive filtering. Register the handler before requesting.
    let mut request_handled = fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
        move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        },
    );

    // Ask only for SOURCE_ORGANIZE_IMPORTS actions over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    // Wait until the server has actually received the request.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Of the two returned actions, only the requested kind remains.
    let code_actions = code_actions_task.await.unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
5383
5384#[gpui::test]
5385async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5386 init_test(cx);
5387
5388 let fs = FakeFs::new(cx.executor());
5389 fs.insert_tree(
5390 path!("/dir"),
5391 json!({
5392 "a.tsx": "a",
5393 }),
5394 )
5395 .await;
5396
5397 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5398
5399 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5400 language_registry.add(tsx_lang());
5401 let language_server_names = [
5402 "TypeScriptServer",
5403 "TailwindServer",
5404 "ESLintServer",
5405 "NoActionsCapabilitiesServer",
5406 ];
5407
5408 let mut language_server_rxs = [
5409 language_registry.register_fake_lsp(
5410 "tsx",
5411 FakeLspAdapter {
5412 name: language_server_names[0],
5413 capabilities: lsp::ServerCapabilities {
5414 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5415 ..lsp::ServerCapabilities::default()
5416 },
5417 ..FakeLspAdapter::default()
5418 },
5419 ),
5420 language_registry.register_fake_lsp(
5421 "tsx",
5422 FakeLspAdapter {
5423 name: language_server_names[1],
5424 capabilities: lsp::ServerCapabilities {
5425 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5426 ..lsp::ServerCapabilities::default()
5427 },
5428 ..FakeLspAdapter::default()
5429 },
5430 ),
5431 language_registry.register_fake_lsp(
5432 "tsx",
5433 FakeLspAdapter {
5434 name: language_server_names[2],
5435 capabilities: lsp::ServerCapabilities {
5436 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5437 ..lsp::ServerCapabilities::default()
5438 },
5439 ..FakeLspAdapter::default()
5440 },
5441 ),
5442 language_registry.register_fake_lsp(
5443 "tsx",
5444 FakeLspAdapter {
5445 name: language_server_names[3],
5446 capabilities: lsp::ServerCapabilities {
5447 code_action_provider: None,
5448 ..lsp::ServerCapabilities::default()
5449 },
5450 ..FakeLspAdapter::default()
5451 },
5452 ),
5453 ];
5454
5455 let (buffer, _handle) = project
5456 .update(cx, |p, cx| {
5457 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5458 })
5459 .await
5460 .unwrap();
5461 cx.executor().run_until_parked();
5462
5463 let mut servers_with_actions_requests = HashMap::default();
5464 for i in 0..language_server_names.len() {
5465 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5466 panic!(
5467 "Failed to get language server #{i} with name {}",
5468 &language_server_names[i]
5469 )
5470 });
5471 let new_server_name = new_server.server.name();
5472
5473 assert!(
5474 !servers_with_actions_requests.contains_key(&new_server_name),
5475 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5476 );
5477 match new_server_name.0.as_ref() {
5478 "TailwindServer" | "TypeScriptServer" => {
5479 servers_with_actions_requests.insert(
5480 new_server_name.clone(),
5481 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5482 move |_, _| {
5483 let name = new_server_name.clone();
5484 async move {
5485 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5486 lsp::CodeAction {
5487 title: format!("{name} code action"),
5488 ..lsp::CodeAction::default()
5489 },
5490 )]))
5491 }
5492 },
5493 ),
5494 );
5495 }
5496 "ESLintServer" => {
5497 servers_with_actions_requests.insert(
5498 new_server_name,
5499 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5500 |_, _| async move { Ok(None) },
5501 ),
5502 );
5503 }
5504 "NoActionsCapabilitiesServer" => {
5505 let _never_handled = new_server
5506 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5507 panic!(
5508 "Should not call for code actions server with no corresponding capabilities"
5509 )
5510 });
5511 }
5512 unexpected => panic!("Unexpected server name: {unexpected}"),
5513 }
5514 }
5515
5516 let code_actions_task = project.update(cx, |project, cx| {
5517 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5518 });
5519
5520 // cx.run_until_parked();
5521 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5522 |mut code_actions_request| async move {
5523 code_actions_request
5524 .next()
5525 .await
5526 .expect("All code actions requests should have been triggered")
5527 },
5528 ))
5529 .await;
5530 assert_eq!(
5531 vec!["TailwindServer code action", "TypeScriptServer code action"],
5532 code_actions_task
5533 .await
5534 .unwrap()
5535 .into_iter()
5536 .map(|code_action| code_action.lsp_action.title().to_owned())
5537 .sorted()
5538 .collect::<Vec<_>>(),
5539 "Should receive code actions responses from all related servers with hover capabilities"
5540 );
5541}
5542
5543#[gpui::test]
5544async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5545 init_test(cx);
5546
5547 let fs = FakeFs::new(cx.executor());
5548 fs.insert_tree(
5549 "/dir",
5550 json!({
5551 "a.rs": "let a = 1;",
5552 "b.rs": "let b = 2;",
5553 "c.rs": "let c = 2;",
5554 }),
5555 )
5556 .await;
5557
5558 let project = Project::test(
5559 fs,
5560 [
5561 "/dir/a.rs".as_ref(),
5562 "/dir/b.rs".as_ref(),
5563 "/dir/c.rs".as_ref(),
5564 ],
5565 cx,
5566 )
5567 .await;
5568
5569 // check the initial state and get the worktrees
5570 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5571 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5572 assert_eq!(worktrees.len(), 3);
5573
5574 let worktree_a = worktrees[0].read(cx);
5575 let worktree_b = worktrees[1].read(cx);
5576 let worktree_c = worktrees[2].read(cx);
5577
5578 // check they start in the right order
5579 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5580 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5581 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5582
5583 (
5584 worktrees[0].clone(),
5585 worktrees[1].clone(),
5586 worktrees[2].clone(),
5587 )
5588 });
5589
5590 // move first worktree to after the second
5591 // [a, b, c] -> [b, a, c]
5592 project
5593 .update(cx, |project, cx| {
5594 let first = worktree_a.read(cx);
5595 let second = worktree_b.read(cx);
5596 project.move_worktree(first.id(), second.id(), cx)
5597 })
5598 .expect("moving first after second");
5599
5600 // check the state after moving
5601 project.update(cx, |project, cx| {
5602 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5603 assert_eq!(worktrees.len(), 3);
5604
5605 let first = worktrees[0].read(cx);
5606 let second = worktrees[1].read(cx);
5607 let third = worktrees[2].read(cx);
5608
5609 // check they are now in the right order
5610 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5611 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5612 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5613 });
5614
5615 // move the second worktree to before the first
5616 // [b, a, c] -> [a, b, c]
5617 project
5618 .update(cx, |project, cx| {
5619 let second = worktree_a.read(cx);
5620 let first = worktree_b.read(cx);
5621 project.move_worktree(first.id(), second.id(), cx)
5622 })
5623 .expect("moving second before first");
5624
5625 // check the state after moving
5626 project.update(cx, |project, cx| {
5627 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5628 assert_eq!(worktrees.len(), 3);
5629
5630 let first = worktrees[0].read(cx);
5631 let second = worktrees[1].read(cx);
5632 let third = worktrees[2].read(cx);
5633
5634 // check they are now in the right order
5635 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5636 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5637 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5638 });
5639
5640 // move the second worktree to after the third
5641 // [a, b, c] -> [a, c, b]
5642 project
5643 .update(cx, |project, cx| {
5644 let second = worktree_b.read(cx);
5645 let third = worktree_c.read(cx);
5646 project.move_worktree(second.id(), third.id(), cx)
5647 })
5648 .expect("moving second after third");
5649
5650 // check the state after moving
5651 project.update(cx, |project, cx| {
5652 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5653 assert_eq!(worktrees.len(), 3);
5654
5655 let first = worktrees[0].read(cx);
5656 let second = worktrees[1].read(cx);
5657 let third = worktrees[2].read(cx);
5658
5659 // check they are now in the right order
5660 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5661 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5662 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5663 });
5664
5665 // move the third worktree to before the second
5666 // [a, c, b] -> [a, b, c]
5667 project
5668 .update(cx, |project, cx| {
5669 let third = worktree_c.read(cx);
5670 let second = worktree_b.read(cx);
5671 project.move_worktree(third.id(), second.id(), cx)
5672 })
5673 .expect("moving third before second");
5674
5675 // check the state after moving
5676 project.update(cx, |project, cx| {
5677 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5678 assert_eq!(worktrees.len(), 3);
5679
5680 let first = worktrees[0].read(cx);
5681 let second = worktrees[1].read(cx);
5682 let third = worktrees[2].read(cx);
5683
5684 // check they are now in the right order
5685 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5686 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5687 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5688 });
5689
5690 // move the first worktree to after the third
5691 // [a, b, c] -> [b, c, a]
5692 project
5693 .update(cx, |project, cx| {
5694 let first = worktree_a.read(cx);
5695 let third = worktree_c.read(cx);
5696 project.move_worktree(first.id(), third.id(), cx)
5697 })
5698 .expect("moving first after third");
5699
5700 // check the state after moving
5701 project.update(cx, |project, cx| {
5702 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5703 assert_eq!(worktrees.len(), 3);
5704
5705 let first = worktrees[0].read(cx);
5706 let second = worktrees[1].read(cx);
5707 let third = worktrees[2].read(cx);
5708
5709 // check they are now in the right order
5710 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5711 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5712 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5713 });
5714
5715 // move the third worktree to before the first
5716 // [b, c, a] -> [a, b, c]
5717 project
5718 .update(cx, |project, cx| {
5719 let third = worktree_a.read(cx);
5720 let first = worktree_b.read(cx);
5721 project.move_worktree(third.id(), first.id(), cx)
5722 })
5723 .expect("moving third before first");
5724
5725 // check the state after moving
5726 project.update(cx, |project, cx| {
5727 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5728 assert_eq!(worktrees.len(), 3);
5729
5730 let first = worktrees[0].read(cx);
5731 let second = worktrees[1].read(cx);
5732 let third = worktrees[2].read(cx);
5733
5734 // check they are now in the right order
5735 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5736 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5737 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5738 });
5739}
5740
// Covers `Project::open_unstaged_diff`: the diff between the git index and the
// working-copy buffer. Verifies the initial hunks and that the diff is
// recomputed when the index contents are rewritten.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index contents: this becomes the diff's base text.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy contents of the file on disk (what the buffer will hold).
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Let background diff computation settle before asserting.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        // Expect one added line (the comment) and one modified line.
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Rewrite the index so that only the println line differs from the buffer.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        // Relative to the new index, only the println line is an addition.
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
5838
// Covers `Project::open_uncommitted_diff`: the diff between HEAD and the
// working-copy buffer, including the secondary (index) status of each hunk,
// reaction to HEAD changing, and diffs for files deleted from the work tree.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD contents: the uncommitted diff's base text.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index contents: already has the "goodbye" edit staged, but not the comment.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working-copy contents of the file on disk.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index but not on disk, so it will
    // show up as a working-copy deletion when opened below.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the language registered above.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        // The comment is unstaged (HasSecondaryHunk); the println change is
        // already in the index, so its secondary status is "none".
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The entire file shows as a single deleted hunk, still present in the
    // index (HasSecondaryHunk).
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file (remove it from the index entirely).
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk remains, but it no longer has a secondary (index) hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6016
// Exercises `stage_or_unstage_hunks` on an uncommitted diff: the optimistic
// pending states shown before the index write lands, the events emitted, the
// settled states after the write, and rollback when the index write fails.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD text; the working copy below deletes "zero" and upcases two lines,
    // producing three hunks: one deletion and two modifications.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical, so every hunk begins unstaged.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to diff events so each staging step's notifications can be
    // asserted in order.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The staged hunk immediately shows the pending-removal state, before
        // the index write has completed.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // Optimistic pending state appears even though the write will fail.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6360
// Stages and then unstages 100 hunks via rapid-fire individual
// `stage_or_unstage_hunks` calls, checking that the optimistic pending state is
// shown immediately for every hunk and that all index writes eventually settle.
#[allow(clippy::format_collect)]
#[gpui::test]
async fn test_staging_lots_of_hunks_fast(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Replace every 5th line of a 500-line file, yielding 100 single-line
    // modified hunks.
    let different_lines = (0..500)
        .step_by(5)
        .map(|i| format!("diff {}\n", i))
        .collect::<Vec<String>>();
    let committed_contents = (0..500).map(|i| format!("{}\n", i)).collect::<String>();
    let file_contents = (0..500)
        .map(|i| {
            if i % 5 == 0 {
                different_lines[i / 5].clone()
            } else {
                format!("{}\n", i)
            }
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical, so every hunk begins unstaged.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // One expected hunk per replaced line; the status field is mutated below
    // as the test walks through the staging life cycle.
    let mut expected_hunks: Vec<(Range<u32>, String, String, DiffHunkStatus)> = (0..500)
        .step_by(5)
        .map(|i| {
            (
                i as u32..i as u32 + 1,
                format!("{}\n", i),
                different_lines[i / 5].clone(),
                DiffHunkStatus::modified(HasSecondaryHunk),
            )
        })
        .collect();

    // The hunks are initially unstaged
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkRemovalPending);
    }

    // Stage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        }

        // Before the index writes land, every hunk is optimistically pending.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(NoSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkAdditionPending);
    }

    // Unstage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(false, &[hunk], &snapshot, true, cx);
        }

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks, again
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(HasSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });
}
6507
6508#[gpui::test]
6509async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
6510 init_test(cx);
6511
6512 let committed_contents = r#"
6513 fn main() {
6514 println!("hello from HEAD");
6515 }
6516 "#
6517 .unindent();
6518 let file_contents = r#"
6519 fn main() {
6520 println!("hello from the working copy");
6521 }
6522 "#
6523 .unindent();
6524
6525 let fs = FakeFs::new(cx.background_executor.clone());
6526 fs.insert_tree(
6527 "/dir",
6528 json!({
6529 ".git": {},
6530 "src": {
6531 "main.rs": file_contents,
6532 }
6533 }),
6534 )
6535 .await;
6536
6537 fs.set_head_for_repo(
6538 Path::new("/dir/.git"),
6539 &[("src/main.rs".into(), committed_contents.clone())],
6540 );
6541 fs.set_index_for_repo(
6542 Path::new("/dir/.git"),
6543 &[("src/main.rs".into(), committed_contents.clone())],
6544 );
6545
6546 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
6547
6548 let buffer = project
6549 .update(cx, |project, cx| {
6550 project.open_local_buffer("/dir/src/main.rs", cx)
6551 })
6552 .await
6553 .unwrap();
6554 let uncommitted_diff = project
6555 .update(cx, |project, cx| {
6556 project.open_uncommitted_diff(buffer.clone(), cx)
6557 })
6558 .await
6559 .unwrap();
6560
6561 cx.run_until_parked();
6562 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
6563 let snapshot = buffer.read(cx).snapshot();
6564 assert_hunks(
6565 uncommitted_diff.hunks(&snapshot, cx),
6566 &snapshot,
6567 &uncommitted_diff.base_text_string().unwrap(),
6568 &[(
6569 1..2,
6570 " println!(\"hello from HEAD\");\n",
6571 " println!(\"hello from the working copy\");\n",
6572 DiffHunkStatus {
6573 kind: DiffHunkStatusKind::Modified,
6574 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
6575 },
6576 )],
6577 );
6578 });
6579}
6580
6581async fn search(
6582 project: &Entity<Project>,
6583 query: SearchQuery,
6584 cx: &mut gpui::TestAppContext,
6585) -> Result<HashMap<String, Vec<Range<usize>>>> {
6586 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
6587 let mut results = HashMap::default();
6588 while let Ok(search_result) = search_rx.recv().await {
6589 match search_result {
6590 SearchResult::Buffer { buffer, ranges } => {
6591 results.entry(buffer).or_insert(ranges);
6592 }
6593 SearchResult::LimitReached => {}
6594 }
6595 }
6596 Ok(results
6597 .into_iter()
6598 .map(|(buffer, ranges)| {
6599 buffer.update(cx, |buffer, cx| {
6600 let path = buffer
6601 .file()
6602 .unwrap()
6603 .full_path(cx)
6604 .to_string_lossy()
6605 .to_string();
6606 let ranges = ranges
6607 .into_iter()
6608 .map(|range| range.to_offset(buffer))
6609 .collect::<Vec<_>>();
6610 (path, ranges)
6611 })
6612 })
6613 .collect())
6614}
6615
6616pub fn init_test(cx: &mut gpui::TestAppContext) {
6617 if std::env::var("RUST_LOG").is_ok() {
6618 env_logger::try_init().ok();
6619 }
6620
6621 cx.update(|cx| {
6622 let settings_store = SettingsStore::test(cx);
6623 cx.set_global(settings_store);
6624 release_channel::init(SemanticVersion::default(), cx);
6625 language::init(cx);
6626 Project::init_settings(cx);
6627 });
6628}
6629
6630fn json_lang() -> Arc<Language> {
6631 Arc::new(Language::new(
6632 LanguageConfig {
6633 name: "JSON".into(),
6634 matcher: LanguageMatcher {
6635 path_suffixes: vec!["json".to_string()],
6636 ..Default::default()
6637 },
6638 ..Default::default()
6639 },
6640 None,
6641 ))
6642}
6643
6644fn js_lang() -> Arc<Language> {
6645 Arc::new(Language::new(
6646 LanguageConfig {
6647 name: "JavaScript".into(),
6648 matcher: LanguageMatcher {
6649 path_suffixes: vec!["js".to_string()],
6650 ..Default::default()
6651 },
6652 ..Default::default()
6653 },
6654 None,
6655 ))
6656}
6657
6658fn rust_lang() -> Arc<Language> {
6659 Arc::new(Language::new(
6660 LanguageConfig {
6661 name: "Rust".into(),
6662 matcher: LanguageMatcher {
6663 path_suffixes: vec!["rs".to_string()],
6664 ..Default::default()
6665 },
6666 ..Default::default()
6667 },
6668 Some(tree_sitter_rust::LANGUAGE.into()),
6669 ))
6670}
6671
6672fn typescript_lang() -> Arc<Language> {
6673 Arc::new(Language::new(
6674 LanguageConfig {
6675 name: "TypeScript".into(),
6676 matcher: LanguageMatcher {
6677 path_suffixes: vec!["ts".to_string()],
6678 ..Default::default()
6679 },
6680 ..Default::default()
6681 },
6682 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
6683 ))
6684}
6685
6686fn tsx_lang() -> Arc<Language> {
6687 Arc::new(Language::new(
6688 LanguageConfig {
6689 name: "tsx".into(),
6690 matcher: LanguageMatcher {
6691 path_suffixes: vec!["tsx".to_string()],
6692 ..Default::default()
6693 },
6694 ..Default::default()
6695 },
6696 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
6697 ))
6698}
6699
6700fn get_all_tasks(
6701 project: &Entity<Project>,
6702 task_contexts: &TaskContexts,
6703 cx: &mut App,
6704) -> Vec<(TaskSourceKind, ResolvedTask)> {
6705 let (mut old, new) = project.update(cx, |project, cx| {
6706 project
6707 .task_store
6708 .read(cx)
6709 .task_inventory()
6710 .unwrap()
6711 .read(cx)
6712 .used_and_current_resolved_tasks(task_contexts, cx)
6713 });
6714 old.extend(new);
6715 old
6716}