1use crate::{task_inventory::TaskContexts, Event, *};
2use buffer_diff::{
3 assert_hunks, BufferDiffEvent, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
4};
5use fs::FakeFs;
6use futures::{future, StreamExt};
7use gpui::{App, SemanticVersion, UpdateGlobal};
8use http_client::Url;
9use language::{
10 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
11 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
12 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
13 OffsetRangeExt, Point, ToPoint,
14};
15use lsp::{
16 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
17 NumberOrString, TextDocumentEdit, WillRenameFiles,
18};
19use parking_lot::Mutex;
20use pretty_assertions::{assert_eq, assert_matches};
21use serde_json::json;
22#[cfg(not(windows))]
23use std::os;
24use std::{str::FromStr, sync::OnceLock};
25
26use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
27use task::{ResolvedTask, TaskContext};
28use unindent::Unindent as _;
29use util::{
30 assert_set_eq, path,
31 paths::PathMatcher,
32 separator,
33 test::{marked_text_offsets, TempTree},
34 uri, TryFutureExt as _,
35};
36
37#[gpui::test]
38async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
39 cx.executor().allow_parking();
40
41 let (tx, mut rx) = futures::channel::mpsc::unbounded();
42 let _thread = std::thread::spawn(move || {
43 #[cfg(not(target_os = "windows"))]
44 std::fs::metadata("/tmp").unwrap();
45 #[cfg(target_os = "windows")]
46 std::fs::metadata("C:/Windows").unwrap();
47 std::thread::sleep(Duration::from_millis(1000));
48 tx.unbounded_send(1).unwrap();
49 });
50 rx.next().await.unwrap();
51}
52
53#[gpui::test]
54async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
55 cx.executor().allow_parking();
56
57 let io_task = smol::unblock(move || {
58 println!("sleeping on thread {:?}", std::thread::current().id());
59 std::thread::sleep(Duration::from_millis(10));
60 1
61 });
62
63 let task = cx.foreground_executor().spawn(async move {
64 io_task.await;
65 });
66
67 task.await;
68}
69
70#[cfg(not(windows))]
71#[gpui::test]
72async fn test_symlinks(cx: &mut gpui::TestAppContext) {
73 init_test(cx);
74 cx.executor().allow_parking();
75
76 let dir = TempTree::new(json!({
77 "root": {
78 "apple": "",
79 "banana": {
80 "carrot": {
81 "date": "",
82 "endive": "",
83 }
84 },
85 "fennel": {
86 "grape": "",
87 }
88 }
89 }));
90
91 let root_link_path = dir.path().join("root_link");
92 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
93 os::unix::fs::symlink(
94 dir.path().join("root/fennel"),
95 dir.path().join("root/finnochio"),
96 )
97 .unwrap();
98
99 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
100
101 project.update(cx, |project, cx| {
102 let tree = project.worktrees(cx).next().unwrap().read(cx);
103 assert_eq!(tree.file_count(), 5);
104 assert_eq!(
105 tree.inode_for_path("fennel/grape"),
106 tree.inode_for_path("finnochio/grape")
107 );
108 });
109}
110
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A root .editorconfig, a .zed/settings.json with conflicting values, and a
    // nested b/.editorconfig that overrides the root one for files under b/.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into the fake FS so settings files get observed.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let settings/editorconfig observers process the tree before querying.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json is not matched by the .editorconfig glob "*.rs",
        // so the .zed/settings.json tab_size applies.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
200
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Root .zed with settings + tasks, and a nested b/.zed overriding both.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Wait for settings/tasks files to be scanned before reading state.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    // Identifies tasks coming from the worktree-root .zed directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Per-directory settings: a/ inherits the root tab_size, b/ overrides it.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree tasks resolve; the nested one sorts first here.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Schedule the root task (making it "recently used") and add a global
    // tasks.json via the file-based task source.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled root task now sorts first, then the nested
    // worktree task, then the newly added global task (with its env applied).
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
405
406#[gpui::test]
407async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
408 init_test(cx);
409 TaskStore::init(None);
410
411 let fs = FakeFs::new(cx.executor());
412 fs.insert_tree(
413 path!("/dir"),
414 json!({
415 ".zed": {
416 "tasks.json": r#"[{
417 "label": "test worktree root",
418 "command": "echo $ZED_WORKTREE_ROOT"
419 }]"#,
420 },
421 "a": {
422 "a.rs": "fn a() {\n A\n}"
423 },
424 }),
425 )
426 .await;
427
428 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
429 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
430
431 cx.executor().run_until_parked();
432 let worktree_id = cx.update(|cx| {
433 project.update(cx, |project, cx| {
434 project.worktrees(cx).next().unwrap().read(cx).id()
435 })
436 });
437
438 let active_non_worktree_item_tasks = cx.update(|cx| {
439 get_all_tasks(
440 &project,
441 &TaskContexts {
442 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
443 active_worktree_context: None,
444 other_worktree_contexts: Vec::new(),
445 },
446 cx,
447 )
448 });
449 assert!(
450 active_non_worktree_item_tasks.is_empty(),
451 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
452 );
453
454 let active_worktree_tasks = cx.update(|cx| {
455 get_all_tasks(
456 &project,
457 &TaskContexts {
458 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
459 active_worktree_context: Some((worktree_id, {
460 let mut worktree_context = TaskContext::default();
461 worktree_context
462 .task_variables
463 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
464 worktree_context
465 })),
466 other_worktree_contexts: Vec::new(),
467 },
468 cx,
469 )
470 });
471 assert_eq!(
472 active_worktree_tasks
473 .into_iter()
474 .map(|(source_kind, task)| {
475 let resolved = task.resolved.unwrap();
476 (source_kind, resolved.command)
477 })
478 .collect::<Vec<_>>(),
479 vec![(
480 TaskSourceKind::Worktree {
481 id: worktree_id,
482 directory_in_worktree: PathBuf::from(separator!(".zed")),
483 id_base: if cfg!(windows) {
484 "local worktree tasks from directory \".zed\"".into()
485 } else {
486 "local worktree tasks from directory \".zed\"".into()
487 },
488 },
489 "echo /dir".to_string(),
490 )]
491 );
492}
493
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two Rust files, a TOML file with no language server, and a JSON file,
    // so that two distinct fake language servers get exercised.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server: completion triggers "." and "::", save notifications on.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server: completion trigger ":", save notifications on.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, hence no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // Same-extension rename: the Rust server sees a close for the old path...
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    // ...and an open (version reset to 0) for the new path.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared when the language changes.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers should receive a shutdown request before replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
895
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A source tree with a gitignored "target" directory; only target/y is
    // later covered by a language-server watch pattern.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Snapshot the read_dir count so we can measure the cost of the watch below.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: an exact file, a glob over src, and a recursive
    // glob inside the ignored target/y directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Accumulate DidChangeWatchedFiles events, sorted by URI for stable asserts.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering the watch alone produces no change events, and scanning
    // target/y required exactly 4 extra directory reads.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
1095
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open each file as its own single-file worktree.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Publish one diagnostic per file from the same (fake) server: an error
    // for a.rs and a warning for b.rs, each spanning the variable name.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer should surface only its own diagnostic, routed to the
    // correct single-file worktree.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1197
1198#[gpui::test]
1199async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1200 init_test(cx);
1201
1202 let fs = FakeFs::new(cx.executor());
1203 fs.insert_tree(
1204 path!("/root"),
1205 json!({
1206 "dir": {
1207 ".git": {
1208 "HEAD": "ref: refs/heads/main",
1209 },
1210 ".gitignore": "b.rs",
1211 "a.rs": "let a = 1;",
1212 "b.rs": "let b = 2;",
1213 },
1214 "other.rs": "let b = c;"
1215 }),
1216 )
1217 .await;
1218
1219 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1220 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1221 let (worktree, _) = project
1222 .update(cx, |project, cx| {
1223 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1224 })
1225 .await
1226 .unwrap();
1227 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1228
1229 let (worktree, _) = project
1230 .update(cx, |project, cx| {
1231 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1232 })
1233 .await
1234 .unwrap();
1235 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1236
1237 let server_id = LanguageServerId(0);
1238 lsp_store.update(cx, |lsp_store, cx| {
1239 lsp_store
1240 .update_diagnostics(
1241 server_id,
1242 lsp::PublishDiagnosticsParams {
1243 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1244 version: None,
1245 diagnostics: vec![lsp::Diagnostic {
1246 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1247 severity: Some(lsp::DiagnosticSeverity::ERROR),
1248 message: "unused variable 'b'".to_string(),
1249 ..Default::default()
1250 }],
1251 },
1252 &[],
1253 cx,
1254 )
1255 .unwrap();
1256 lsp_store
1257 .update_diagnostics(
1258 server_id,
1259 lsp::PublishDiagnosticsParams {
1260 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1261 version: None,
1262 diagnostics: vec![lsp::Diagnostic {
1263 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1264 severity: Some(lsp::DiagnosticSeverity::ERROR),
1265 message: "unknown variable 'c'".to_string(),
1266 ..Default::default()
1267 }],
1268 },
1269 &[],
1270 cx,
1271 )
1272 .unwrap();
1273 });
1274
1275 let main_ignored_buffer = project
1276 .update(cx, |project, cx| {
1277 project.open_buffer((main_worktree_id, "b.rs"), cx)
1278 })
1279 .await
1280 .unwrap();
1281 main_ignored_buffer.update(cx, |buffer, _| {
1282 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1283 assert_eq!(
1284 chunks
1285 .iter()
1286 .map(|(s, d)| (s.as_str(), *d))
1287 .collect::<Vec<_>>(),
1288 &[
1289 ("let ", None),
1290 ("b", Some(DiagnosticSeverity::ERROR)),
1291 (" = 2;", None),
1292 ],
1293 "Gigitnored buffers should still get in-buffer diagnostics",
1294 );
1295 });
1296 let other_buffer = project
1297 .update(cx, |project, cx| {
1298 project.open_buffer((other_worktree_id, ""), cx)
1299 })
1300 .await
1301 .unwrap();
1302 other_buffer.update(cx, |buffer, _| {
1303 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1304 assert_eq!(
1305 chunks
1306 .iter()
1307 .map(|(s, d)| (s.as_str(), *d))
1308 .collect::<Vec<_>>(),
1309 &[
1310 ("let b = ", None),
1311 ("c", Some(DiagnosticSeverity::ERROR)),
1312 (";", None),
1313 ],
1314 "Buffers from hidden projects should still get in-buffer diagnostics"
1315 );
1316 });
1317
1318 project.update(cx, |project, cx| {
1319 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1320 assert_eq!(
1321 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1322 vec![(
1323 ProjectPath {
1324 worktree_id: main_worktree_id,
1325 path: Arc::from(Path::new("b.rs")),
1326 },
1327 server_id,
1328 DiagnosticSummary {
1329 error_count: 1,
1330 warning_count: 0,
1331 }
1332 )]
1333 );
1334 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1335 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1336 });
1337}
1338
// Verifies the event stream emitted while a language server runs disk-based
// diagnostics: started/finished events bracket the work-done progress token,
// a DiagnosticsUpdated event fires per publish, and re-publishing an already
// empty diagnostic set produces no duplicate event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // Progress reported under `progress_token` is treated as disk-based
    // diagnostic work (e.g. cargo check) rather than ad-hoc LSP progress.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the disk-based token emits a
    // DiskBasedDiagnosticsStarted event (after the inlay-hint refresh that
    // follows server startup).
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the progress token marks the disk-based pass as finished.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is visible in the buffer snapshot.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // No further event: clearing an already-clear set is a no-op.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1474
// Verifies that restarting a language server while its disk-based diagnostics
// are still in progress does not leave the project stuck in a
// "diagnostics running" state: the new server's progress lifecycle fully
// supersedes the old server's unfinished one.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // The replacement server gets a new id (1).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is counted as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1561
// Verifies that restarting a language server clears the diagnostics it had
// previously published, both from the buffer and from the project's summary.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic is present in the buffer and counted in the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1642
// Verifies that a stale diagnostic report carrying an unknown (too-new)
// buffer version does not corrupt version tracking: after a restart, the new
// server is sent the document at version 0, not the bogus version.
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The freshly started server re-opens the document with version 0.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
1682
// Verifies that cancelling language-server work for a buffer sends a
// WorkDoneProgressCancel notification only for progress tokens the server
// marked as cancellable.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    // Start two progress tasks: one NOT cancellable, one cancellable.
    let mut fake_server = fake_servers.next().await.unwrap();
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token should receive a cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1747
// Verifies that the per-language `enable_language_server` setting starts and
// stops only the matching server: disabling Rust stops the Rust server while
// JavaScript keeps running, and flipping both settings swaps the states.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening a buffer of each language starts the corresponding server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Exit notification confirms the Rust server was shut down.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A second, fresh Rust server instance re-opens the buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // ...while the JavaScript server exits.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1865
// Verifies that diagnostics published against an OLD buffer version are
// transformed through the edits made since: ranges move with the text,
// overlapping diagnostics highlight correctly, and out-of-order publishes
// against a newer version resolve to the right positions.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (The "\n\n" prefix shifted every line down by two.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider WARNING sorts before the narrower ERROR it contains, and
        // the ERROR's severity wins where the two ranges overlap.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Diagnostics are returned in buffer order, reflecting the edits made
        // above (indent + signature change on line 2, "xxx" insert on line 3).
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2151
// Verifies how zero-width diagnostic ranges are rendered: an empty range is
// extended forward to cover the next character, and at end-of-line it is
// extended backward to cover the preceding character instead.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Inject two zero-width diagnostics directly into the LSP store: one in
    // the middle of line 0 and one at the very end of line 1.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                ..Default::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                ..Default::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2224
2225#[gpui::test]
2226async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2227 init_test(cx);
2228
2229 let fs = FakeFs::new(cx.executor());
2230 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2231 .await;
2232
2233 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2234 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2235
2236 lsp_store.update(cx, |lsp_store, cx| {
2237 lsp_store
2238 .update_diagnostic_entries(
2239 LanguageServerId(0),
2240 Path::new("/dir/a.rs").to_owned(),
2241 None,
2242 vec![DiagnosticEntry {
2243 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2244 diagnostic: Diagnostic {
2245 severity: DiagnosticSeverity::ERROR,
2246 is_primary: true,
2247 message: "syntax error a1".to_string(),
2248 ..Default::default()
2249 },
2250 }],
2251 cx,
2252 )
2253 .unwrap();
2254 lsp_store
2255 .update_diagnostic_entries(
2256 LanguageServerId(1),
2257 Path::new("/dir/a.rs").to_owned(),
2258 None,
2259 vec![DiagnosticEntry {
2260 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2261 diagnostic: Diagnostic {
2262 severity: DiagnosticSeverity::ERROR,
2263 is_primary: true,
2264 message: "syntax error b1".to_string(),
2265 ..Default::default()
2266 },
2267 }],
2268 cx,
2269 )
2270 .unwrap();
2271
2272 assert_eq!(
2273 lsp_store.diagnostic_summary(false, cx),
2274 DiagnosticSummary {
2275 error_count: 2,
2276 warning_count: 0,
2277 }
2278 );
2279 });
2280}
2281
// Verifies that `edits_from_lsp` rebases edits a language server computed
// against an older snapshot of the document: the buffer is edited after the
// server opened it, and the server's edits (tagged with the stale document
// version) must still land in their originally-intended locations.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server received when the file was
    // opened; the edits below will be submitted against this (soon stale)
    // version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Convert the server's edits. Their positions are expressed in terms of
    // the document at `lsp_document_version`, not the buffer's current
    // contents, so the store must rebase them over the user's edits above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits to the *current* buffer must keep the user's
    // intervening comments while applying every server change.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2436
// Verifies that a small edit expressed as a sprawling diff collapses to
// minimal buffer edits: `edits_from_lsp` compares the replacement text with
// what is already in the buffer, so only the genuinely-changed regions become
// edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Only the two genuinely-changed regions survive as buffer edits:
        // the rewritten import and the removal of the now-duplicated line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2547
// Verifies that `edits_from_lsp` tolerates malformed server edits: the edit
// list may be unordered, ranges may be inverted (end before start), and
// positions may point past the end of the buffer. The resulting buffer edits
// are normalized and clipped.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: the end position precedes the start.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // The end position (row 99) lies far past the end of the
                    // buffer and must be clipped.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The inverted range comes back normalized (start before end), and the
        // out-of-bounds deletion is clipped to the actual buffer contents.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2654
2655fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2656 buffer: &Buffer,
2657 range: Range<T>,
2658) -> Vec<(String, Option<DiagnosticSeverity>)> {
2659 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2660 for chunk in buffer.snapshot().chunks(range, true) {
2661 if chunks.last().map_or(false, |prev_chunk| {
2662 prev_chunk.1 == chunk.diagnostic_severity
2663 }) {
2664 chunks.last_mut().unwrap().0.push_str(chunk.text);
2665 } else {
2666 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2667 }
2668 }
2669 chunks
2670}
2671
// Verifies cross-file go-to-definition: resolving a definition in a file that
// is not part of the project adds that file as an invisible worktree, which is
// removed again once the definition (and with it the target buffer) is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is opened as the project root; a.rs is outside the project.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server answers the go-to-definition request at position (0, 22)
    // in b.rs with a location spanning offsets 9..10 of a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs now appears in the worktree list, but as an invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        // Drop the only handle keeping the definition's target buffer alive.
        drop(definition);
    });
    // With the definition gone, the invisible worktree for a.rs is released.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path and whether it is visible.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2769
// Verifies completions whose items carry no explicit text edit: the editor
// must derive the range to replace from the text surrounding the cursor.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Request completions at the end of "let a = b.fqn"; the item has an
    // insert_text but no text edit range.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    // The replacement range covers the partial word "fqn" before the cursor.
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Same request, but with the cursor inside a string literal just before
    // the closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    // The replacement range covers the partial word "cmp" but excludes the
    // closing quote.
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2863
2864#[gpui::test]
2865async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2866 init_test(cx);
2867
2868 let fs = FakeFs::new(cx.executor());
2869 fs.insert_tree(
2870 path!("/dir"),
2871 json!({
2872 "a.ts": "",
2873 }),
2874 )
2875 .await;
2876
2877 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2878
2879 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2880 language_registry.add(typescript_lang());
2881 let mut fake_language_servers = language_registry.register_fake_lsp(
2882 "TypeScript",
2883 FakeLspAdapter {
2884 capabilities: lsp::ServerCapabilities {
2885 completion_provider: Some(lsp::CompletionOptions {
2886 trigger_characters: Some(vec![":".to_string()]),
2887 ..Default::default()
2888 }),
2889 ..Default::default()
2890 },
2891 ..Default::default()
2892 },
2893 );
2894
2895 let (buffer, _handle) = project
2896 .update(cx, |p, cx| {
2897 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
2898 })
2899 .await
2900 .unwrap();
2901
2902 let fake_server = fake_language_servers.next().await.unwrap();
2903
2904 let text = "let a = b.fqn";
2905 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2906 let completions = project.update(cx, |project, cx| {
2907 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
2908 });
2909
2910 fake_server
2911 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2912 Ok(Some(lsp::CompletionResponse::Array(vec![
2913 lsp::CompletionItem {
2914 label: "fullyQualifiedName?".into(),
2915 insert_text: Some("fully\rQualified\r\nName".into()),
2916 ..Default::default()
2917 },
2918 ])))
2919 })
2920 .next()
2921 .await;
2922 let completions = completions.await.unwrap();
2923 assert_eq!(completions.len(), 1);
2924 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2925}
2926
// Verifies the command-based code-action flow: the action carries no edits,
// so applying it resolves the action, executes its command, and collects the
// buffer changes from the `workspace/applyEdit` request the server sends
// while the command runs.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action ("The code action"), the one carrying resolve data.
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3063
3064#[gpui::test(iterations = 10)]
3065async fn test_save_file(cx: &mut gpui::TestAppContext) {
3066 init_test(cx);
3067
3068 let fs = FakeFs::new(cx.executor());
3069 fs.insert_tree(
3070 path!("/dir"),
3071 json!({
3072 "file1": "the old contents",
3073 }),
3074 )
3075 .await;
3076
3077 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3078 let buffer = project
3079 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3080 .await
3081 .unwrap();
3082 buffer.update(cx, |buffer, cx| {
3083 assert_eq!(buffer.text(), "the old contents");
3084 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3085 });
3086
3087 project
3088 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3089 .await
3090 .unwrap();
3091
3092 let new_text = fs
3093 .load(Path::new(path!("/dir/file1")))
3094 .await
3095 .unwrap()
3096 .replace("\r\n", "\n");
3097 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3098}
3099
3100#[gpui::test(iterations = 30)]
3101async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3102 init_test(cx);
3103
3104 let fs = FakeFs::new(cx.executor().clone());
3105 fs.insert_tree(
3106 path!("/dir"),
3107 json!({
3108 "file1": "the original contents",
3109 }),
3110 )
3111 .await;
3112
3113 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3114 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3115 let buffer = project
3116 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3117 .await
3118 .unwrap();
3119
3120 // Simulate buffer diffs being slow, so that they don't complete before
3121 // the next file change occurs.
3122 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3123
3124 // Change the buffer's file on disk, and then wait for the file change
3125 // to be detected by the worktree, so that the buffer starts reloading.
3126 fs.save(
3127 path!("/dir/file1").as_ref(),
3128 &"the first contents".into(),
3129 Default::default(),
3130 )
3131 .await
3132 .unwrap();
3133 worktree.next_event(cx).await;
3134
3135 // Change the buffer's file again. Depending on the random seed, the
3136 // previous file change may still be in progress.
3137 fs.save(
3138 path!("/dir/file1").as_ref(),
3139 &"the second contents".into(),
3140 Default::default(),
3141 )
3142 .await
3143 .unwrap();
3144 worktree.next_event(cx).await;
3145
3146 cx.executor().run_until_parked();
3147 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3148 buffer.read_with(cx, |buffer, _| {
3149 assert_eq!(buffer.text(), on_disk_text);
3150 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3151 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3152 });
3153}
3154
3155#[gpui::test(iterations = 30)]
3156async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3157 init_test(cx);
3158
3159 let fs = FakeFs::new(cx.executor().clone());
3160 fs.insert_tree(
3161 path!("/dir"),
3162 json!({
3163 "file1": "the original contents",
3164 }),
3165 )
3166 .await;
3167
3168 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3169 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3170 let buffer = project
3171 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3172 .await
3173 .unwrap();
3174
3175 // Simulate buffer diffs being slow, so that they don't complete before
3176 // the next file change occurs.
3177 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3178
3179 // Change the buffer's file on disk, and then wait for the file change
3180 // to be detected by the worktree, so that the buffer starts reloading.
3181 fs.save(
3182 path!("/dir/file1").as_ref(),
3183 &"the first contents".into(),
3184 Default::default(),
3185 )
3186 .await
3187 .unwrap();
3188 worktree.next_event(cx).await;
3189
3190 cx.executor()
3191 .spawn(cx.executor().simulate_random_delay())
3192 .await;
3193
3194 // Perform a noop edit, causing the buffer's version to increase.
3195 buffer.update(cx, |buffer, cx| {
3196 buffer.edit([(0..0, " ")], None, cx);
3197 buffer.undo(cx);
3198 });
3199
3200 cx.executor().run_until_parked();
3201 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3202 buffer.read_with(cx, |buffer, _| {
3203 let buffer_text = buffer.text();
3204 if buffer_text == on_disk_text {
3205 assert!(
3206 !buffer.is_dirty() && !buffer.has_conflict(),
3207 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3208 );
3209 }
3210 // If the file change occurred while the buffer was processing the first
3211 // change, the buffer will be in a conflicting state.
3212 else {
3213 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3214 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3215 }
3216 });
3217}
3218
3219#[gpui::test]
3220async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3221 init_test(cx);
3222
3223 let fs = FakeFs::new(cx.executor());
3224 fs.insert_tree(
3225 path!("/dir"),
3226 json!({
3227 "file1": "the old contents",
3228 }),
3229 )
3230 .await;
3231
3232 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3233 let buffer = project
3234 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3235 .await
3236 .unwrap();
3237 buffer.update(cx, |buffer, cx| {
3238 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3239 });
3240
3241 project
3242 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3243 .await
3244 .unwrap();
3245
3246 let new_text = fs
3247 .load(Path::new(path!("/dir/file1")))
3248 .await
3249 .unwrap()
3250 .replace("\r\n", "\n");
3251 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3252}
3253
3254#[gpui::test]
3255async fn test_save_as(cx: &mut gpui::TestAppContext) {
3256 init_test(cx);
3257
3258 let fs = FakeFs::new(cx.executor());
3259 fs.insert_tree("/dir", json!({})).await;
3260
3261 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3262
3263 let languages = project.update(cx, |project, _| project.languages().clone());
3264 languages.add(rust_lang());
3265
3266 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3267 buffer.update(cx, |buffer, cx| {
3268 buffer.edit([(0..0, "abc")], None, cx);
3269 assert!(buffer.is_dirty());
3270 assert!(!buffer.has_conflict());
3271 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3272 });
3273 project
3274 .update(cx, |project, cx| {
3275 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3276 let path = ProjectPath {
3277 worktree_id,
3278 path: Arc::from(Path::new("file1.rs")),
3279 };
3280 project.save_buffer_as(buffer.clone(), path, cx)
3281 })
3282 .await
3283 .unwrap();
3284 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3285
3286 cx.executor().run_until_parked();
3287 buffer.update(cx, |buffer, cx| {
3288 assert_eq!(
3289 buffer.file().unwrap().full_path(cx),
3290 Path::new("dir/file1.rs")
3291 );
3292 assert!(!buffer.is_dirty());
3293 assert!(!buffer.has_conflict());
3294 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3295 });
3296
3297 let opened_buffer = project
3298 .update(cx, |project, cx| {
3299 project.open_local_buffer("/dir/file1.rs", cx)
3300 })
3301 .await
3302 .unwrap();
3303 assert_eq!(opened_buffer, buffer);
3304}
3305
/// Renames and deletions performed directly on disk must be reflected in the
/// local worktree (preserving entry ids and the paths of open buffers), and
/// the streamed worktree updates must bring a remote replica into a
/// consistent state with the local one.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // This test uses the real file system and FS watcher, which needs to park.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Look up the stable worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture the update stream so it can be replayed into the replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // After the rescan, the local worktree reflects the new layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids survive renames, including renames of ancestor directories.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers track their files' new paths...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // ...while a deleted file's buffer keeps its last-known path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3471
3472#[gpui::test(iterations = 10)]
3473async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3474 init_test(cx);
3475
3476 let fs = FakeFs::new(cx.executor());
3477 fs.insert_tree(
3478 path!("/dir"),
3479 json!({
3480 "a": {
3481 "file1": "",
3482 }
3483 }),
3484 )
3485 .await;
3486
3487 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3488 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3489 let tree_id = tree.update(cx, |tree, _| tree.id());
3490
3491 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3492 project.update(cx, |project, cx| {
3493 let tree = project.worktrees(cx).next().unwrap();
3494 tree.read(cx)
3495 .entry_for_path(path)
3496 .unwrap_or_else(|| panic!("no entry for path {}", path))
3497 .id
3498 })
3499 };
3500
3501 let dir_id = id_for_path("a", cx);
3502 let file_id = id_for_path("a/file1", cx);
3503 let buffer = project
3504 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3505 .await
3506 .unwrap();
3507 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3508
3509 project
3510 .update(cx, |project, cx| {
3511 project.rename_entry(dir_id, Path::new("b"), cx)
3512 })
3513 .unwrap()
3514 .await
3515 .to_included()
3516 .unwrap();
3517 cx.executor().run_until_parked();
3518
3519 assert_eq!(id_for_path("b", cx), dir_id);
3520 assert_eq!(id_for_path("b/file1", cx), file_id);
3521 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3522}
3523
3524#[gpui::test]
3525async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3526 init_test(cx);
3527
3528 let fs = FakeFs::new(cx.executor());
3529 fs.insert_tree(
3530 "/dir",
3531 json!({
3532 "a.txt": "a-contents",
3533 "b.txt": "b-contents",
3534 }),
3535 )
3536 .await;
3537
3538 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3539
3540 // Spawn multiple tasks to open paths, repeating some paths.
3541 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3542 (
3543 p.open_local_buffer("/dir/a.txt", cx),
3544 p.open_local_buffer("/dir/b.txt", cx),
3545 p.open_local_buffer("/dir/a.txt", cx),
3546 )
3547 });
3548
3549 let buffer_a_1 = buffer_a_1.await.unwrap();
3550 let buffer_a_2 = buffer_a_2.await.unwrap();
3551 let buffer_b = buffer_b.await.unwrap();
3552 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3553 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3554
3555 // There is only one buffer per path.
3556 let buffer_a_id = buffer_a_1.entity_id();
3557 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3558
3559 // Open the same path again while it is still open.
3560 drop(buffer_a_1);
3561 let buffer_a_3 = project
3562 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3563 .await
3564 .unwrap();
3565
3566 // There's still only one buffer per path.
3567 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3568}
3569
/// Exercises the buffer dirty-state machine: editing marks a buffer dirty,
/// saving clears it, reverting the text to the saved state clears it, and
/// deleting the backing file dirties a clean buffer. Also pins down exactly
/// which `BufferEvent`s are emitted at each transition.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Collects every non-Operation event emitted by buffer1.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version and mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    // Note: only the first of the two edits flips the dirty flag, so there is
    // a single DirtyChanged between the two Edited events.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer first, then delete its file on disk.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3720
/// A clean buffer silently reloads when its file changes on disk, preserving
/// anchor positions through the textual diff; a dirty buffer instead keeps
/// its contents and is flagged as conflicted.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The 'ˇ' markers denote offsets whose anchors we track across the reload.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the diff and land on the marked offsets of
        // the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
3803
3804#[gpui::test]
3805async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3806 init_test(cx);
3807
3808 let fs = FakeFs::new(cx.executor());
3809 fs.insert_tree(
3810 path!("/dir"),
3811 json!({
3812 "file1": "a\nb\nc\n",
3813 "file2": "one\r\ntwo\r\nthree\r\n",
3814 }),
3815 )
3816 .await;
3817
3818 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3819 let buffer1 = project
3820 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3821 .await
3822 .unwrap();
3823 let buffer2 = project
3824 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
3825 .await
3826 .unwrap();
3827
3828 buffer1.update(cx, |buffer, _| {
3829 assert_eq!(buffer.text(), "a\nb\nc\n");
3830 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3831 });
3832 buffer2.update(cx, |buffer, _| {
3833 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3834 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3835 });
3836
3837 // Change a file's line endings on disk from unix to windows. The buffer's
3838 // state updates correctly.
3839 fs.save(
3840 path!("/dir/file1").as_ref(),
3841 &"aaa\nb\nc\n".into(),
3842 LineEnding::Windows,
3843 )
3844 .await
3845 .unwrap();
3846 cx.executor().run_until_parked();
3847 buffer1.update(cx, |buffer, _| {
3848 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3849 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3850 });
3851
3852 // Save a file with windows line endings. The file is written correctly.
3853 buffer2.update(cx, |buffer, cx| {
3854 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3855 });
3856 project
3857 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3858 .await
3859 .unwrap();
3860 assert_eq!(
3861 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
3862 "one\r\ntwo\r\nthree\r\nfour\r\n",
3863 );
3864}
3865
/// Diagnostics published with `relatedInformation` are grouped: each primary
/// diagnostic and the hints derived from its related locations share a
/// `group_id`, and `diagnostic_group` returns each group's entries in order.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Two primaries ("error 1", "error 2") plus HINT-severity diagnostics
    // that mirror their related-information entries, as rust-analyzer does.
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries, ordered by position: "error 2" and its hints form group 0,
    // "error 1" and its hint form group 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: "error 2" with its two hints, hints first (by position).
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1: "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
4108
/// When a file rename matches a server's registered file-operation filters,
/// the project must send `workspace/willRenameFiles` before the rename
/// (applying the returned `WorkspaceEdit`) and `workspace/didRenameFiles`
/// after it.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters: all *.rs files and all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening the buffer starts the language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; it blocks on the willRenameFiles round-trip below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // Edit the server returns from willRenameFiles; the project must apply it.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit the server actually handed back, for the final check.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .handle_request::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server is notified via didRenameFiles.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4237
/// Symbol rename via LSP: `prepare_rename` resolves the renameable range,
/// and `perform_rename` applies the server's multi-file `WorkspaceEdit`,
/// returning the affected buffers.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the server reports the
    // renameable range 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server responds with edits in both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each edited buffer to its undo transaction; both
    // files were rewritten.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4377
4378#[gpui::test]
4379async fn test_search(cx: &mut gpui::TestAppContext) {
4380 init_test(cx);
4381
4382 let fs = FakeFs::new(cx.executor());
4383 fs.insert_tree(
4384 path!("/dir"),
4385 json!({
4386 "one.rs": "const ONE: usize = 1;",
4387 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4388 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4389 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4390 }),
4391 )
4392 .await;
4393 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4394 assert_eq!(
4395 search(
4396 &project,
4397 SearchQuery::text(
4398 "TWO",
4399 false,
4400 true,
4401 false,
4402 Default::default(),
4403 Default::default(),
4404 None
4405 )
4406 .unwrap(),
4407 cx
4408 )
4409 .await
4410 .unwrap(),
4411 HashMap::from_iter([
4412 (separator!("dir/two.rs").to_string(), vec![6..9]),
4413 (separator!("dir/three.rs").to_string(), vec![37..40])
4414 ])
4415 );
4416
4417 let buffer_4 = project
4418 .update(cx, |project, cx| {
4419 project.open_local_buffer(path!("/dir/four.rs"), cx)
4420 })
4421 .await
4422 .unwrap();
4423 buffer_4.update(cx, |buffer, cx| {
4424 let text = "two::TWO";
4425 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4426 });
4427
4428 assert_eq!(
4429 search(
4430 &project,
4431 SearchQuery::text(
4432 "TWO",
4433 false,
4434 true,
4435 false,
4436 Default::default(),
4437 Default::default(),
4438 None,
4439 )
4440 .unwrap(),
4441 cx
4442 )
4443 .await
4444 .unwrap(),
4445 HashMap::from_iter([
4446 (separator!("dir/two.rs").to_string(), vec![6..9]),
4447 (separator!("dir/three.rs").to_string(), vec![37..40]),
4448 (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
4449 ])
4450 );
4451}
4452
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies the `files_to_include` PathMatcher argument of a text search:
    // non-matching inclusions return nothing, and matching inclusions restrict
    // results to the matched extensions only.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    // Two Rust and two TypeScript files, all containing the query word "file".
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Inclusion pattern "*.odd" matches no file at all.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    // Inclusion pattern "*.rs" restricts results to the Rust files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    // A matching pattern ("*.ts") combined with a non-matching one ("*.odd")
    // behaves like the matching pattern alone.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,

                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),

                Default::default(),
                None,
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    // Multiple matching patterns union their results.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,

                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),

                Default::default(),
                None,
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.ts").to_string(), vec![14..18]),
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
4569
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    // Verifies the `files_to_exclude` PathMatcher argument of a text search:
    // non-matching exclusions change nothing, and matching exclusions remove
    // the matched files from the results.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    // Two Rust and two TypeScript files, all containing the query word "file".
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Exclusion pattern "*.odd" matches nothing, so all four files match.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
            (separator!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // Excluding "*.rs" leaves only the TypeScript files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // A matching exclusion combined with a non-matching one behaves like the
    // matching exclusion alone.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                None,
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Excluding every present extension removes everything.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),

                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                None,

            ).unwrap(),
            cx
        )
        .await
        .unwrap().is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
4683
4684#[gpui::test]
4685async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4686 init_test(cx);
4687
4688 let search_query = "file";
4689
4690 let fs = FakeFs::new(cx.executor());
4691 fs.insert_tree(
4692 path!("/dir"),
4693 json!({
4694 "one.rs": r#"// Rust file one"#,
4695 "one.ts": r#"// TypeScript file one"#,
4696 "two.rs": r#"// Rust file two"#,
4697 "two.ts": r#"// TypeScript file two"#,
4698 }),
4699 )
4700 .await;
4701 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4702
4703 assert!(
4704 search(
4705 &project,
4706 SearchQuery::text(
4707 search_query,
4708 false,
4709 true,
4710 false,
4711 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4712 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4713 None,
4714 )
4715 .unwrap(),
4716 cx
4717 )
4718 .await
4719 .unwrap()
4720 .is_empty(),
4721 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4722 );
4723
4724 assert!(
4725 search(
4726 &project,
4727 SearchQuery::text(
4728 search_query,
4729 false,
4730 true,
4731 false,
4732 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4733 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4734 None,
4735 ).unwrap(),
4736 cx
4737 )
4738 .await
4739 .unwrap()
4740 .is_empty(),
4741 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4742 );
4743
4744 assert!(
4745 search(
4746 &project,
4747 SearchQuery::text(
4748 search_query,
4749 false,
4750 true,
4751 false,
4752 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4753 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4754 None,
4755 )
4756 .unwrap(),
4757 cx
4758 )
4759 .await
4760 .unwrap()
4761 .is_empty(),
4762 "Non-matching inclusions and exclusions should not change that."
4763 );
4764
4765 assert_eq!(
4766 search(
4767 &project,
4768 SearchQuery::text(
4769 search_query,
4770 false,
4771 true,
4772 false,
4773 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4774 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4775 None,
4776 )
4777 .unwrap(),
4778 cx
4779 )
4780 .await
4781 .unwrap(),
4782 HashMap::from_iter([
4783 (separator!("dir/one.ts").to_string(), vec![14..18]),
4784 (separator!("dir/two.ts").to_string(), vec![14..18]),
4785 ]),
4786 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4787 );
4788}
4789
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies that inclusion patterns are resolved per worktree: a pattern
    // prefixed with a worktree name restricts results to that worktree, while
    // a bare glob applies across all worktrees.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // Two worktrees with identically-named files, all containing "NEEDLE".
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // Worktree-qualified inclusion: only worktree-a's Rust file matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Same pattern targeting the other worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // Un-prefixed glob: matches the TypeScript file in both worktrees.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
4884
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Verifies the include-ignored flag of a text search: by default,
    // gitignored directories are skipped; with the flag set they are searched,
    // and inclusion/exclusion matchers still apply to ignored files.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    // "target" and "node_modules" are gitignored; only the top-level
    // package.json is tracked content containing the query.
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search: ignored directories are not scanned.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Fresh project to avoid state from the previous search; this time the
    // include-ignored flag (4th argument) is set.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/package.json").to_string(), vec![8..11]),
            (separator!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                separator!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                separator!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                separator!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                separator!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include-ignored combined with path filters: include only the ignored
    // prettier directory and exclude its TypeScript file.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            separator!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
5004
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    // Verifies entry creation inside a worktree: names like "b.." are legal,
    // but any path that traverses outside the worktree via ".." components is
    // rejected, both for creating entries and for opening buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The project root is /one/two/three — c.rs lives OUTSIDE it.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." is a valid file name (the ".." is part of the name, not a path
    // component), so creation should succeed.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only "b.." was actually created on disk; the rejected paths left no
    // trace in the filesystem.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from(path!("/")),
            PathBuf::from(path!("/one")),
            PathBuf::from(path!("/one/two")),
            PathBuf::from(path!("/one/two/c.rs")),
            PathBuf::from(path!("/one/two/three")),
            PathBuf::from(path!("/one/two/three/a.txt")),
            PathBuf::from(path!("/one/two/three/b..")),
            PathBuf::from(path!("/one/two/three/four")),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
5074
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // Verifies hover aggregation across several language servers attached to
    // one buffer: servers with hover capability are all queried, empty (None)
    // responses are dropped, and servers without the capability are never
    // asked at all.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register four fake servers for tsx: two that answer hovers, one that
    // answers with None, and one that advertises no hover capability.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    // Deliberately no hover capability for this server.
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts the registered servers for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install per-server hover handlers, keeping the request streams so we
    // can later wait until each expected server was actually queried.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two respond with a hover naming themselves, so the final
            // assertion can attribute each response.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            // Gets queried but returns no hover content.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // Must never be queried, since it advertised no hover capability.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Kick off the hover, then wait until every capable server has been
    // asked before checking the aggregated result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5227
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // Verifies that hover content made up entirely of empty / whitespace-only
    // strings is filtered out, producing no hover blocks at all.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // One fake TypeScript server that advertises hover support.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Respond with hover parts that are all effectively empty: "", spaces,
    // and bare newlines.
    let mut request_handled =
        fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String("   ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        });

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Make sure the request actually reached the server before asserting.
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
5299
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // Verifies that requesting code actions with an explicit kind filter
    // returns only actions of the requested kind, dropping the rest of the
    // server's response.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // One fake TypeScript server that advertises code-action support.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions with different kinds; only one matches
    // the filter below.
    let mut request_handled = fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
        move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        },
    );

    // Request only SOURCE_ORGANIZE_IMPORTS actions over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    // Make sure the request actually reached the server before asserting.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    let code_actions = code_actions_task.await.unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.kind,
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
5379
5380#[gpui::test]
5381async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5382 init_test(cx);
5383
5384 let fs = FakeFs::new(cx.executor());
5385 fs.insert_tree(
5386 path!("/dir"),
5387 json!({
5388 "a.tsx": "a",
5389 }),
5390 )
5391 .await;
5392
5393 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5394
5395 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5396 language_registry.add(tsx_lang());
5397 let language_server_names = [
5398 "TypeScriptServer",
5399 "TailwindServer",
5400 "ESLintServer",
5401 "NoActionsCapabilitiesServer",
5402 ];
5403
5404 let mut language_server_rxs = [
5405 language_registry.register_fake_lsp(
5406 "tsx",
5407 FakeLspAdapter {
5408 name: language_server_names[0],
5409 capabilities: lsp::ServerCapabilities {
5410 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5411 ..lsp::ServerCapabilities::default()
5412 },
5413 ..FakeLspAdapter::default()
5414 },
5415 ),
5416 language_registry.register_fake_lsp(
5417 "tsx",
5418 FakeLspAdapter {
5419 name: language_server_names[1],
5420 capabilities: lsp::ServerCapabilities {
5421 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5422 ..lsp::ServerCapabilities::default()
5423 },
5424 ..FakeLspAdapter::default()
5425 },
5426 ),
5427 language_registry.register_fake_lsp(
5428 "tsx",
5429 FakeLspAdapter {
5430 name: language_server_names[2],
5431 capabilities: lsp::ServerCapabilities {
5432 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5433 ..lsp::ServerCapabilities::default()
5434 },
5435 ..FakeLspAdapter::default()
5436 },
5437 ),
5438 language_registry.register_fake_lsp(
5439 "tsx",
5440 FakeLspAdapter {
5441 name: language_server_names[3],
5442 capabilities: lsp::ServerCapabilities {
5443 code_action_provider: None,
5444 ..lsp::ServerCapabilities::default()
5445 },
5446 ..FakeLspAdapter::default()
5447 },
5448 ),
5449 ];
5450
5451 let (buffer, _handle) = project
5452 .update(cx, |p, cx| {
5453 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5454 })
5455 .await
5456 .unwrap();
5457 cx.executor().run_until_parked();
5458
5459 let mut servers_with_actions_requests = HashMap::default();
5460 for i in 0..language_server_names.len() {
5461 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5462 panic!(
5463 "Failed to get language server #{i} with name {}",
5464 &language_server_names[i]
5465 )
5466 });
5467 let new_server_name = new_server.server.name();
5468
5469 assert!(
5470 !servers_with_actions_requests.contains_key(&new_server_name),
5471 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5472 );
5473 match new_server_name.0.as_ref() {
5474 "TailwindServer" | "TypeScriptServer" => {
5475 servers_with_actions_requests.insert(
5476 new_server_name.clone(),
5477 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5478 move |_, _| {
5479 let name = new_server_name.clone();
5480 async move {
5481 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5482 lsp::CodeAction {
5483 title: format!("{name} code action"),
5484 ..lsp::CodeAction::default()
5485 },
5486 )]))
5487 }
5488 },
5489 ),
5490 );
5491 }
5492 "ESLintServer" => {
5493 servers_with_actions_requests.insert(
5494 new_server_name,
5495 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5496 |_, _| async move { Ok(None) },
5497 ),
5498 );
5499 }
5500 "NoActionsCapabilitiesServer" => {
5501 let _never_handled = new_server
5502 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5503 panic!(
5504 "Should not call for code actions server with no corresponding capabilities"
5505 )
5506 });
5507 }
5508 unexpected => panic!("Unexpected server name: {unexpected}"),
5509 }
5510 }
5511
5512 let code_actions_task = project.update(cx, |project, cx| {
5513 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5514 });
5515
5516 // cx.run_until_parked();
5517 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5518 |mut code_actions_request| async move {
5519 code_actions_request
5520 .next()
5521 .await
5522 .expect("All code actions requests should have been triggered")
5523 },
5524 ))
5525 .await;
5526 assert_eq!(
5527 vec!["TailwindServer code action", "TypeScriptServer code action"],
5528 code_actions_task
5529 .await
5530 .unwrap()
5531 .into_iter()
5532 .map(|code_action| code_action.lsp_action.title)
5533 .sorted()
5534 .collect::<Vec<_>>(),
5535 "Should receive code actions responses from all related servers with hover capabilities"
5536 );
5537}
5538
5539#[gpui::test]
5540async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5541 init_test(cx);
5542
5543 let fs = FakeFs::new(cx.executor());
5544 fs.insert_tree(
5545 "/dir",
5546 json!({
5547 "a.rs": "let a = 1;",
5548 "b.rs": "let b = 2;",
5549 "c.rs": "let c = 2;",
5550 }),
5551 )
5552 .await;
5553
5554 let project = Project::test(
5555 fs,
5556 [
5557 "/dir/a.rs".as_ref(),
5558 "/dir/b.rs".as_ref(),
5559 "/dir/c.rs".as_ref(),
5560 ],
5561 cx,
5562 )
5563 .await;
5564
5565 // check the initial state and get the worktrees
5566 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5567 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5568 assert_eq!(worktrees.len(), 3);
5569
5570 let worktree_a = worktrees[0].read(cx);
5571 let worktree_b = worktrees[1].read(cx);
5572 let worktree_c = worktrees[2].read(cx);
5573
5574 // check they start in the right order
5575 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5576 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5577 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5578
5579 (
5580 worktrees[0].clone(),
5581 worktrees[1].clone(),
5582 worktrees[2].clone(),
5583 )
5584 });
5585
5586 // move first worktree to after the second
5587 // [a, b, c] -> [b, a, c]
5588 project
5589 .update(cx, |project, cx| {
5590 let first = worktree_a.read(cx);
5591 let second = worktree_b.read(cx);
5592 project.move_worktree(first.id(), second.id(), cx)
5593 })
5594 .expect("moving first after second");
5595
5596 // check the state after moving
5597 project.update(cx, |project, cx| {
5598 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5599 assert_eq!(worktrees.len(), 3);
5600
5601 let first = worktrees[0].read(cx);
5602 let second = worktrees[1].read(cx);
5603 let third = worktrees[2].read(cx);
5604
5605 // check they are now in the right order
5606 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5607 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5608 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5609 });
5610
5611 // move the second worktree to before the first
5612 // [b, a, c] -> [a, b, c]
5613 project
5614 .update(cx, |project, cx| {
5615 let second = worktree_a.read(cx);
5616 let first = worktree_b.read(cx);
5617 project.move_worktree(first.id(), second.id(), cx)
5618 })
5619 .expect("moving second before first");
5620
5621 // check the state after moving
5622 project.update(cx, |project, cx| {
5623 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5624 assert_eq!(worktrees.len(), 3);
5625
5626 let first = worktrees[0].read(cx);
5627 let second = worktrees[1].read(cx);
5628 let third = worktrees[2].read(cx);
5629
5630 // check they are now in the right order
5631 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5632 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5633 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5634 });
5635
5636 // move the second worktree to after the third
5637 // [a, b, c] -> [a, c, b]
5638 project
5639 .update(cx, |project, cx| {
5640 let second = worktree_b.read(cx);
5641 let third = worktree_c.read(cx);
5642 project.move_worktree(second.id(), third.id(), cx)
5643 })
5644 .expect("moving second after third");
5645
5646 // check the state after moving
5647 project.update(cx, |project, cx| {
5648 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5649 assert_eq!(worktrees.len(), 3);
5650
5651 let first = worktrees[0].read(cx);
5652 let second = worktrees[1].read(cx);
5653 let third = worktrees[2].read(cx);
5654
5655 // check they are now in the right order
5656 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5657 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5658 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5659 });
5660
5661 // move the third worktree to before the second
5662 // [a, c, b] -> [a, b, c]
5663 project
5664 .update(cx, |project, cx| {
5665 let third = worktree_c.read(cx);
5666 let second = worktree_b.read(cx);
5667 project.move_worktree(third.id(), second.id(), cx)
5668 })
5669 .expect("moving third before second");
5670
5671 // check the state after moving
5672 project.update(cx, |project, cx| {
5673 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5674 assert_eq!(worktrees.len(), 3);
5675
5676 let first = worktrees[0].read(cx);
5677 let second = worktrees[1].read(cx);
5678 let third = worktrees[2].read(cx);
5679
5680 // check they are now in the right order
5681 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5682 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5683 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5684 });
5685
5686 // move the first worktree to after the third
5687 // [a, b, c] -> [b, c, a]
5688 project
5689 .update(cx, |project, cx| {
5690 let first = worktree_a.read(cx);
5691 let third = worktree_c.read(cx);
5692 project.move_worktree(first.id(), third.id(), cx)
5693 })
5694 .expect("moving first after third");
5695
5696 // check the state after moving
5697 project.update(cx, |project, cx| {
5698 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5699 assert_eq!(worktrees.len(), 3);
5700
5701 let first = worktrees[0].read(cx);
5702 let second = worktrees[1].read(cx);
5703 let third = worktrees[2].read(cx);
5704
5705 // check they are now in the right order
5706 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5707 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5708 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5709 });
5710
5711 // move the third worktree to before the first
5712 // [b, c, a] -> [a, b, c]
5713 project
5714 .update(cx, |project, cx| {
5715 let third = worktree_a.read(cx);
5716 let first = worktree_b.read(cx);
5717 project.move_worktree(third.id(), first.id(), cx)
5718 })
5719 .expect("moving third before first");
5720
5721 // check the state after moving
5722 project.update(cx, |project, cx| {
5723 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5724 assert_eq!(worktrees.len(), 3);
5725
5726 let first = worktrees[0].read(cx);
5727 let second = worktrees[1].read(cx);
5728 let third = worktrees[2].read(cx);
5729
5730 // check they are now in the right order
5731 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5732 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5733 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5734 });
5735}
5736
5737#[gpui::test]
5738async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
5739 init_test(cx);
5740
5741 let staged_contents = r#"
5742 fn main() {
5743 println!("hello world");
5744 }
5745 "#
5746 .unindent();
5747 let file_contents = r#"
5748 // print goodbye
5749 fn main() {
5750 println!("goodbye world");
5751 }
5752 "#
5753 .unindent();
5754
5755 let fs = FakeFs::new(cx.background_executor.clone());
5756 fs.insert_tree(
5757 "/dir",
5758 json!({
5759 ".git": {},
5760 "src": {
5761 "main.rs": file_contents,
5762 }
5763 }),
5764 )
5765 .await;
5766
5767 fs.set_index_for_repo(
5768 Path::new("/dir/.git"),
5769 &[("src/main.rs".into(), staged_contents)],
5770 );
5771
5772 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5773
5774 let buffer = project
5775 .update(cx, |project, cx| {
5776 project.open_local_buffer("/dir/src/main.rs", cx)
5777 })
5778 .await
5779 .unwrap();
5780 let unstaged_diff = project
5781 .update(cx, |project, cx| {
5782 project.open_unstaged_diff(buffer.clone(), cx)
5783 })
5784 .await
5785 .unwrap();
5786
5787 cx.run_until_parked();
5788 unstaged_diff.update(cx, |unstaged_diff, cx| {
5789 let snapshot = buffer.read(cx).snapshot();
5790 assert_hunks(
5791 unstaged_diff.hunks(&snapshot, cx),
5792 &snapshot,
5793 &unstaged_diff.base_text_string().unwrap(),
5794 &[
5795 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
5796 (
5797 2..3,
5798 " println!(\"hello world\");\n",
5799 " println!(\"goodbye world\");\n",
5800 DiffHunkStatus::modified_none(),
5801 ),
5802 ],
5803 );
5804 });
5805
5806 let staged_contents = r#"
5807 // print goodbye
5808 fn main() {
5809 }
5810 "#
5811 .unindent();
5812
5813 fs.set_index_for_repo(
5814 Path::new("/dir/.git"),
5815 &[("src/main.rs".into(), staged_contents)],
5816 );
5817
5818 cx.run_until_parked();
5819 unstaged_diff.update(cx, |unstaged_diff, cx| {
5820 let snapshot = buffer.read(cx).snapshot();
5821 assert_hunks(
5822 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
5823 &snapshot,
5824 &unstaged_diff.base_text().text(),
5825 &[(
5826 2..3,
5827 "",
5828 " println!(\"goodbye world\");\n",
5829 DiffHunkStatus::added_none(),
5830 )],
5831 );
5832 });
5833}
5834
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of the same file — HEAD (committed), index (staged), and
    // the working copy — each differing so that both staged and unstaged
    // changes are visible in the uncommitted diff.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index but not on disk, simulating a
    // file deleted in the working copy.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the language registered for `.rs`
    // files, so the deleted/old side gets syntax highlighting too.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // Against HEAD: the comment line is only in the working copy (unstaged, so
    // it still has a secondary hunk), while the println change is already in
    // the index (fully staged, no secondary hunk).
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is not yet staged (the index still has the file), so the
    // single deleted hunk carries a secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    // (the new index no longer lists `deletion.rs` at all).
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // With the deletion staged, the secondary hunk disappears.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::None),
            )],
        );
    });
}
6012
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    // NOTE: the glob import means bare `None` below is
    // `DiffHunkSecondaryStatus::None`, not `Option::None`.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD == index initially; the working copy deletes "zero" and uppercases
    // "two" and "four", producing three uncommitted (and unstaged) hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Capture the diff's event stream so the test can assert on the exact
    // sequence of events emitted while staging.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged
    // (`SecondaryHunkRemovalPending`) before the index write completes.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (1..2, "two\n", "TWO\n", DiffHunkStatus::modified(None)),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text
    // (covering the whole file, not just the staged hunk).
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It is still optimistically marked as pending, since
    // the failure only surfaces when the index write resolves.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (1..2, "two\n", "TWO\n", DiffHunkStatus::modified(None)),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (1..2, "two\n", "TWO\n", DiffHunkStatus::modified(None)),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The rollback is announced as a whole-file diff change.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }
}
6277
6278#[gpui::test]
6279async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
6280 init_test(cx);
6281
6282 let committed_contents = r#"
6283 fn main() {
6284 println!("hello from HEAD");
6285 }
6286 "#
6287 .unindent();
6288 let file_contents = r#"
6289 fn main() {
6290 println!("hello from the working copy");
6291 }
6292 "#
6293 .unindent();
6294
6295 let fs = FakeFs::new(cx.background_executor.clone());
6296 fs.insert_tree(
6297 "/dir",
6298 json!({
6299 ".git": {},
6300 "src": {
6301 "main.rs": file_contents,
6302 }
6303 }),
6304 )
6305 .await;
6306
6307 fs.set_head_for_repo(
6308 Path::new("/dir/.git"),
6309 &[("src/main.rs".into(), committed_contents.clone())],
6310 );
6311 fs.set_index_for_repo(
6312 Path::new("/dir/.git"),
6313 &[("src/main.rs".into(), committed_contents.clone())],
6314 );
6315
6316 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
6317
6318 let buffer = project
6319 .update(cx, |project, cx| {
6320 project.open_local_buffer("/dir/src/main.rs", cx)
6321 })
6322 .await
6323 .unwrap();
6324 let uncommitted_diff = project
6325 .update(cx, |project, cx| {
6326 project.open_uncommitted_diff(buffer.clone(), cx)
6327 })
6328 .await
6329 .unwrap();
6330
6331 cx.run_until_parked();
6332 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
6333 let snapshot = buffer.read(cx).snapshot();
6334 assert_hunks(
6335 uncommitted_diff.hunks(&snapshot, cx),
6336 &snapshot,
6337 &uncommitted_diff.base_text_string().unwrap(),
6338 &[(
6339 1..2,
6340 " println!(\"hello from HEAD\");\n",
6341 " println!(\"hello from the working copy\");\n",
6342 DiffHunkStatus {
6343 kind: DiffHunkStatusKind::Modified,
6344 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
6345 },
6346 )],
6347 );
6348 });
6349}
6350
6351async fn search(
6352 project: &Entity<Project>,
6353 query: SearchQuery,
6354 cx: &mut gpui::TestAppContext,
6355) -> Result<HashMap<String, Vec<Range<usize>>>> {
6356 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
6357 let mut results = HashMap::default();
6358 while let Ok(search_result) = search_rx.recv().await {
6359 match search_result {
6360 SearchResult::Buffer { buffer, ranges } => {
6361 results.entry(buffer).or_insert(ranges);
6362 }
6363 SearchResult::LimitReached => {}
6364 }
6365 }
6366 Ok(results
6367 .into_iter()
6368 .map(|(buffer, ranges)| {
6369 buffer.update(cx, |buffer, cx| {
6370 let path = buffer
6371 .file()
6372 .unwrap()
6373 .full_path(cx)
6374 .to_string_lossy()
6375 .to_string();
6376 let ranges = ranges
6377 .into_iter()
6378 .map(|range| range.to_offset(buffer))
6379 .collect::<Vec<_>>();
6380 (path, ranges)
6381 })
6382 })
6383 .collect())
6384}
6385
6386pub fn init_test(cx: &mut gpui::TestAppContext) {
6387 if std::env::var("RUST_LOG").is_ok() {
6388 env_logger::try_init().ok();
6389 }
6390
6391 cx.update(|cx| {
6392 let settings_store = SettingsStore::test(cx);
6393 cx.set_global(settings_store);
6394 release_channel::init(SemanticVersion::default(), cx);
6395 language::init(cx);
6396 Project::init_settings(cx);
6397 });
6398}
6399
6400fn json_lang() -> Arc<Language> {
6401 Arc::new(Language::new(
6402 LanguageConfig {
6403 name: "JSON".into(),
6404 matcher: LanguageMatcher {
6405 path_suffixes: vec!["json".to_string()],
6406 ..Default::default()
6407 },
6408 ..Default::default()
6409 },
6410 None,
6411 ))
6412}
6413
6414fn js_lang() -> Arc<Language> {
6415 Arc::new(Language::new(
6416 LanguageConfig {
6417 name: "JavaScript".into(),
6418 matcher: LanguageMatcher {
6419 path_suffixes: vec!["js".to_string()],
6420 ..Default::default()
6421 },
6422 ..Default::default()
6423 },
6424 None,
6425 ))
6426}
6427
6428fn rust_lang() -> Arc<Language> {
6429 Arc::new(Language::new(
6430 LanguageConfig {
6431 name: "Rust".into(),
6432 matcher: LanguageMatcher {
6433 path_suffixes: vec!["rs".to_string()],
6434 ..Default::default()
6435 },
6436 ..Default::default()
6437 },
6438 Some(tree_sitter_rust::LANGUAGE.into()),
6439 ))
6440}
6441
6442fn typescript_lang() -> Arc<Language> {
6443 Arc::new(Language::new(
6444 LanguageConfig {
6445 name: "TypeScript".into(),
6446 matcher: LanguageMatcher {
6447 path_suffixes: vec!["ts".to_string()],
6448 ..Default::default()
6449 },
6450 ..Default::default()
6451 },
6452 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
6453 ))
6454}
6455
6456fn tsx_lang() -> Arc<Language> {
6457 Arc::new(Language::new(
6458 LanguageConfig {
6459 name: "tsx".into(),
6460 matcher: LanguageMatcher {
6461 path_suffixes: vec!["tsx".to_string()],
6462 ..Default::default()
6463 },
6464 ..Default::default()
6465 },
6466 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
6467 ))
6468}
6469
6470fn get_all_tasks(
6471 project: &Entity<Project>,
6472 task_contexts: &TaskContexts,
6473 cx: &mut App,
6474) -> Vec<(TaskSourceKind, ResolvedTask)> {
6475 let (mut old, new) = project.update(cx, |project, cx| {
6476 project
6477 .task_store
6478 .read(cx)
6479 .task_inventory()
6480 .unwrap()
6481 .read(cx)
6482 .used_and_current_resolved_tasks(task_contexts, cx)
6483 });
6484 old.extend(new);
6485 old
6486}