1use crate::{task_inventory::TaskContexts, Event, *};
2use buffer_diff::{assert_hunks, DiffHunkSecondaryStatus, DiffHunkStatus};
3use fs::FakeFs;
4use futures::{future, StreamExt};
5use gpui::{App, SemanticVersion, UpdateGlobal};
6use http_client::Url;
7use language::{
8 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
9 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
10 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
11 OffsetRangeExt, Point, ToPoint,
12};
13use lsp::{
14 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
15 NumberOrString, TextDocumentEdit, WillRenameFiles,
16};
17use parking_lot::Mutex;
18use pretty_assertions::{assert_eq, assert_matches};
19use serde_json::json;
20#[cfg(not(windows))]
21use std::os;
22use std::{str::FromStr, sync::OnceLock};
23
24use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
25use task::{ResolvedTask, TaskContext};
26use unindent::Unindent as _;
27use util::{
28 assert_set_eq, path,
29 paths::PathMatcher,
30 separator,
31 test::{marked_text_offsets, TempTree},
32 uri, TryFutureExt as _,
33};
34
35#[gpui::test]
36async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
37 cx.executor().allow_parking();
38
39 let (tx, mut rx) = futures::channel::mpsc::unbounded();
40 let _thread = std::thread::spawn(move || {
41 #[cfg(not(target_os = "windows"))]
42 std::fs::metadata("/tmp").unwrap();
43 #[cfg(target_os = "windows")]
44 std::fs::metadata("C:/Windows").unwrap();
45 std::thread::sleep(Duration::from_millis(1000));
46 tx.unbounded_send(1).unwrap();
47 });
48 rx.next().await.unwrap();
49}
50
51#[gpui::test]
52async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
53 cx.executor().allow_parking();
54
55 let io_task = smol::unblock(move || {
56 println!("sleeping on thread {:?}", std::thread::current().id());
57 std::thread::sleep(Duration::from_millis(10));
58 1
59 });
60
61 let task = cx.foreground_executor().spawn(async move {
62 io_task.await;
63 });
64
65 task.await;
66}
67
68#[cfg(not(windows))]
69#[gpui::test]
70async fn test_symlinks(cx: &mut gpui::TestAppContext) {
71 init_test(cx);
72 cx.executor().allow_parking();
73
74 let dir = TempTree::new(json!({
75 "root": {
76 "apple": "",
77 "banana": {
78 "carrot": {
79 "date": "",
80 "endive": "",
81 }
82 },
83 "fennel": {
84 "grape": "",
85 }
86 }
87 }));
88
89 let root_link_path = dir.path().join("root_link");
90 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
91 os::unix::fs::symlink(
92 dir.path().join("root/fennel"),
93 dir.path().join("root/finnochio"),
94 )
95 .unwrap();
96
97 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
98
99 project.update(cx, |project, cx| {
100 let tree = project.worktrees(cx).next().unwrap().read(cx);
101 assert_eq!(tree.file_count(), 5);
102 assert_eq!(
103 tree.inode_for_path("fennel/grape"),
104 tree.inode_for_path("finnochio/grape")
105 );
106 });
107}
108
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // Verifies `.editorconfig` handling: it overrides `.zed/settings.json`,
    // nested `.editorconfig` files override outer ones, `tab_width` is used
    // when `indent_size` is absent, and non-matching globs leave files on the
    // `.zed` settings.
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    // Mirror the real temp directory into the fake FS used by the project.
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by .editorconfig's glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
198
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Verifies per-directory `.zed/settings.json` and `.zed/tasks.json`
    // handling: nested settings override outer ones, worktree tasks from
    // nested `.zed` directories are listed, and scheduling a task plus adding
    // file-based (global) tasks affects the resulting task ordering.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Provide a worktree context so worktree-scoped tasks can resolve.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings checks: the nested `b/.zed/settings.json` overrides the
            // root `.zed/settings.json` for files under `b/`.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files are discovered; the nested one sorts first.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the topmost task as recently scheduled, and add a file-based
    // (global) task to the inventory.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently scheduled task now sorts first, followed by the other
    // worktree task, then the newly added global task with its env.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
403
404#[gpui::test]
405async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
406 init_test(cx);
407 TaskStore::init(None);
408
409 let fs = FakeFs::new(cx.executor());
410 fs.insert_tree(
411 path!("/dir"),
412 json!({
413 ".zed": {
414 "tasks.json": r#"[{
415 "label": "test worktree root",
416 "command": "echo $ZED_WORKTREE_ROOT"
417 }]"#,
418 },
419 "a": {
420 "a.rs": "fn a() {\n A\n}"
421 },
422 }),
423 )
424 .await;
425
426 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
427 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
428
429 cx.executor().run_until_parked();
430 let worktree_id = cx.update(|cx| {
431 project.update(cx, |project, cx| {
432 project.worktrees(cx).next().unwrap().read(cx).id()
433 })
434 });
435
436 let active_non_worktree_item_tasks = cx.update(|cx| {
437 get_all_tasks(
438 &project,
439 &TaskContexts {
440 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
441 active_worktree_context: None,
442 other_worktree_contexts: Vec::new(),
443 },
444 cx,
445 )
446 });
447 assert!(
448 active_non_worktree_item_tasks.is_empty(),
449 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
450 );
451
452 let active_worktree_tasks = cx.update(|cx| {
453 get_all_tasks(
454 &project,
455 &TaskContexts {
456 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
457 active_worktree_context: Some((worktree_id, {
458 let mut worktree_context = TaskContext::default();
459 worktree_context
460 .task_variables
461 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
462 worktree_context
463 })),
464 other_worktree_contexts: Vec::new(),
465 },
466 cx,
467 )
468 });
469 assert_eq!(
470 active_worktree_tasks
471 .into_iter()
472 .map(|(source_kind, task)| {
473 let resolved = task.resolved.unwrap();
474 (source_kind, resolved.command)
475 })
476 .collect::<Vec<_>>(),
477 vec![(
478 TaskSourceKind::Worktree {
479 id: worktree_id,
480 directory_in_worktree: PathBuf::from(separator!(".zed")),
481 id_base: if cfg!(windows) {
482 "local worktree tasks from directory \".zed\"".into()
483 } else {
484 "local worktree tasks from directory \".zed\"".into()
485 },
486 },
487 "echo /dir".to_string(),
488 )]
489 );
490}
491
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end check of language-server lifecycle management: servers start
    // lazily when a matching buffer opens, buffers are configured from server
    // capabilities, edits/saves/renames are routed only to the matching
    // servers, renames that change a file's language move the buffer between
    // servers (clearing diagnostics), restarts reopen documents in the new
    // server instances, and dropping a buffer's handle sends a close
    // notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake Rust and JSON servers with distinct completion triggers so
    // we can tell which server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed the buffer with a diagnostic so we can verify it is cleared when
    // the buffer changes language below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive shutdown requests before new instances start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
893
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // Verifies `workspace/didChangeWatchedFiles` support: gitignored paths are
    // not scanned until a server registers a watcher covering them, and only
    // FS events matching the registered glob patterns are forwarded.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: an exact path, a glob under src/, and a
    // recursive glob inside the gitignored target/y directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
1093
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that when a project consists of two single-file worktrees,
    // published LSP diagnostics are routed to the correct buffer and surface
    // with the expected severity in each buffer's highlighted chunks.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open each file as its own single-file worktree.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Publish one diagnostic per file: an error in a.rs, a warning in b.rs.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer shows only its own diagnostic, over the variable name.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1195
1196#[gpui::test]
1197async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1198 init_test(cx);
1199
1200 let fs = FakeFs::new(cx.executor());
1201 fs.insert_tree(
1202 path!("/root"),
1203 json!({
1204 "dir": {
1205 ".git": {
1206 "HEAD": "ref: refs/heads/main",
1207 },
1208 ".gitignore": "b.rs",
1209 "a.rs": "let a = 1;",
1210 "b.rs": "let b = 2;",
1211 },
1212 "other.rs": "let b = c;"
1213 }),
1214 )
1215 .await;
1216
1217 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1218 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1219 let (worktree, _) = project
1220 .update(cx, |project, cx| {
1221 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1222 })
1223 .await
1224 .unwrap();
1225 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1226
1227 let (worktree, _) = project
1228 .update(cx, |project, cx| {
1229 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1230 })
1231 .await
1232 .unwrap();
1233 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1234
1235 let server_id = LanguageServerId(0);
1236 lsp_store.update(cx, |lsp_store, cx| {
1237 lsp_store
1238 .update_diagnostics(
1239 server_id,
1240 lsp::PublishDiagnosticsParams {
1241 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1242 version: None,
1243 diagnostics: vec![lsp::Diagnostic {
1244 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1245 severity: Some(lsp::DiagnosticSeverity::ERROR),
1246 message: "unused variable 'b'".to_string(),
1247 ..Default::default()
1248 }],
1249 },
1250 &[],
1251 cx,
1252 )
1253 .unwrap();
1254 lsp_store
1255 .update_diagnostics(
1256 server_id,
1257 lsp::PublishDiagnosticsParams {
1258 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1259 version: None,
1260 diagnostics: vec![lsp::Diagnostic {
1261 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1262 severity: Some(lsp::DiagnosticSeverity::ERROR),
1263 message: "unknown variable 'c'".to_string(),
1264 ..Default::default()
1265 }],
1266 },
1267 &[],
1268 cx,
1269 )
1270 .unwrap();
1271 });
1272
1273 let main_ignored_buffer = project
1274 .update(cx, |project, cx| {
1275 project.open_buffer((main_worktree_id, "b.rs"), cx)
1276 })
1277 .await
1278 .unwrap();
1279 main_ignored_buffer.update(cx, |buffer, _| {
1280 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1281 assert_eq!(
1282 chunks
1283 .iter()
1284 .map(|(s, d)| (s.as_str(), *d))
1285 .collect::<Vec<_>>(),
1286 &[
1287 ("let ", None),
1288 ("b", Some(DiagnosticSeverity::ERROR)),
1289 (" = 2;", None),
1290 ],
1291 "Gigitnored buffers should still get in-buffer diagnostics",
1292 );
1293 });
1294 let other_buffer = project
1295 .update(cx, |project, cx| {
1296 project.open_buffer((other_worktree_id, ""), cx)
1297 })
1298 .await
1299 .unwrap();
1300 other_buffer.update(cx, |buffer, _| {
1301 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1302 assert_eq!(
1303 chunks
1304 .iter()
1305 .map(|(s, d)| (s.as_str(), *d))
1306 .collect::<Vec<_>>(),
1307 &[
1308 ("let b = ", None),
1309 ("c", Some(DiagnosticSeverity::ERROR)),
1310 (";", None),
1311 ],
1312 "Buffers from hidden projects should still get in-buffer diagnostics"
1313 );
1314 });
1315
1316 project.update(cx, |project, cx| {
1317 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1318 assert_eq!(
1319 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1320 vec![(
1321 ProjectPath {
1322 worktree_id: main_worktree_id,
1323 path: Arc::from(Path::new("b.rs")),
1324 },
1325 server_id,
1326 DiagnosticSummary {
1327 error_count: 1,
1328 warning_count: 0,
1329 }
1330 )]
1331 );
1332 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1333 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1334 });
1335}
1336
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Exercises the disk-based-diagnostics progress lifecycle: a server-side
    // progress token with the configured prefix should emit
    // DiskBasedDiagnosticsStarted/Finished events around DiagnosticsUpdated,
    // and publishing identical empty diagnostics twice must only produce one
    // update event.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning a progress whose token starts with the configured prefix marks
    // disk-based diagnostics as running.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the progress completes the disk-based diagnostics pass.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is visible on the freshly opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // The second identical empty publish must not yield another event.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1472
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Restarting a language server while its disk-based diagnostics are still
    // in progress must not leave the project stuck in a "diagnosing" state:
    // the old server's unfinished progress is discarded and only the new
    // server's progress lifecycle is tracked.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The replacement server gets a fresh id (1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1559
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    // Verifies that restarting a language server clears the diagnostics it
    // previously published, both from the buffer and from the project-level
    // diagnostic summary.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Confirm the diagnostic reached the buffer and the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1640
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    // A server publishing diagnostics with a bogus (unknown) document version
    // must not corrupt version tracking: after restarting, the new server's
    // didOpen should report the buffer at version 0.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The replacement server re-opens the document starting from version 0.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
1680
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    // Verifies that cancelling language-server work for a buffer sends a
    // window/workDoneProgress/cancel only for progress marked cancellable,
    // not for non-cancellable progress.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // One non-cancellable progress...
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // ...and one cancellable progress.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token should receive a cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1745
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    // Toggling `enable_language_server` per-language in the settings should
    // stop only the affected server (Exit notification) and re-start it when
    // re-enabled (fresh DidOpen for the already-open buffer), without touching
    // servers for other languages.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    // Both servers start up and receive didOpen for their respective files.
    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A brand-new Rust server instance re-opens the buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // ...while the JavaScript server shuts down.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1863
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics published against an older document version
    // are translated through the buffer edits made since that version:
    // ranges move with edits, overlapping diagnostics highlight correctly,
    // and out-of-order/over-edited ranges are adjusted rather than dropped.
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (The two inserted newlines shift every reported row by 2.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // The narrower ERROR takes precedence over the wider WARNING where
        // they overlap; the WARNING remains on the trailing span.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2149
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies how zero-width diagnostic ranges are rendered: an empty range
    // is widened so the user has something visible to highlight.
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Two empty-range errors: one mid-line (before ";"), one at end of line.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                ..Default::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                ..Default::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2222
// Diagnostics published by distinct language servers for the same file are
// tracked per-server: two errors on the same range from servers 0 and 1 must
// both appear in the project-wide summary, not be deduplicated by path.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // An error reported by server 0.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // An error reported by server 1, covering the exact same range.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' errors are counted.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2279
// When a language server computes edits against an older snapshot of a
// document (identified by its LSP version number), `edits_from_lsp` must
// interpret the edits in that old snapshot's coordinates and rebase them over
// the buffer edits made since, so they apply cleanly to the current text.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the edits
    // below are tagged with this (soon-to-be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // All ranges below are expressed in the ORIGINAL (pre-edit) document's
    // coordinates, as a real server would after computing against the old
    // version.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits must preserve the user's intervening comments
    // while landing the server's changes (f10, f200, f4000).
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2434
// A server may express a tiny change as a huge rewrite (delete-and-reinsert
// most of the file). `edits_from_lsp` must diff the proposed text against the
// old text and reduce it to minimal edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The sprawling delete/reinsert collapses into just two minimal edits:
        // the import rewrite and the removal of the now-duplicate second line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2545
// `edits_from_lsp` must tolerate malformed server output: edits arriving out
// of order, a range whose start comes after its end, and a range pointing
// past the end of the file — and still produce correct minimal edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) is after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position is far past the end of the 8-line file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same two minimal,
        // ordered edits a well-formed request would have produced.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2652
2653fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2654 buffer: &Buffer,
2655 range: Range<T>,
2656) -> Vec<(String, Option<DiagnosticSeverity>)> {
2657 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2658 for chunk in buffer.snapshot().chunks(range, true) {
2659 if chunks.last().map_or(false, |prev_chunk| {
2660 prev_chunk.1 == chunk.diagnostic_severity
2661 }) {
2662 chunks.last_mut().unwrap().0.push_str(chunk.text);
2663 } else {
2664 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2665 }
2666 }
2667 chunks
2668}
2669
// Go-to-definition can land in a file outside the project. That file is added
// as an invisible worktree which lives only as long as the returned
// definition; dropping the definition releases the worktree.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs exists on disk but is outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server answers the definition request with a location in a.rs,
    // i.e. outside the single-file worktree.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was added as an invisible (`false`) worktree alongside the
        // visible (`true`) one for b.rs.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition released the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: the project's worktrees as (absolute path, is_visible) pairs.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2767
// When a completion item carries no LSP `textEdit`, the range of text to
// replace must be inferred from the token adjacent to the cursor.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing at the end of the word "fqn". The item supplies only
    // `insert_text`, no edit range.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers the 3-character word "fqn" before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal, cursor just before the
    // closing quote. The item has neither insert_text nor an edit range.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers "cmp" — the token between the `/` and the
    // cursor — not the whole string contents.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2861
// Completion insert text containing `\r` or `\r\n` line endings must be
// normalized to `\n` before it reaches the buffer.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server responds with insert text containing both a bare `\r` and a
    // `\r\n` sequence.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both `\r` and `\r\n` were normalized to `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2924
// A code action may carry no edits at all — only a command. Applying such an
// action must resolve it, execute the command, and capture any workspace
// edits the server pushes back (via `workspace/applyEdit`) into the returned
// project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        // Actions must be resolved before execution.
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: prepend "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a single unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3061
3062#[gpui::test(iterations = 10)]
3063async fn test_save_file(cx: &mut gpui::TestAppContext) {
3064 init_test(cx);
3065
3066 let fs = FakeFs::new(cx.executor());
3067 fs.insert_tree(
3068 path!("/dir"),
3069 json!({
3070 "file1": "the old contents",
3071 }),
3072 )
3073 .await;
3074
3075 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3076 let buffer = project
3077 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3078 .await
3079 .unwrap();
3080 buffer.update(cx, |buffer, cx| {
3081 assert_eq!(buffer.text(), "the old contents");
3082 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3083 });
3084
3085 project
3086 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3087 .await
3088 .unwrap();
3089
3090 let new_text = fs
3091 .load(Path::new(path!("/dir/file1")))
3092 .await
3093 .unwrap()
3094 .replace("\r\n", "\n");
3095 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3096}
3097
3098#[gpui::test(iterations = 30)]
3099async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3100 init_test(cx);
3101
3102 let fs = FakeFs::new(cx.executor().clone());
3103 fs.insert_tree(
3104 path!("/dir"),
3105 json!({
3106 "file1": "the original contents",
3107 }),
3108 )
3109 .await;
3110
3111 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3112 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3113 let buffer = project
3114 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3115 .await
3116 .unwrap();
3117
3118 // Simulate buffer diffs being slow, so that they don't complete before
3119 // the next file change occurs.
3120 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3121
3122 // Change the buffer's file on disk, and then wait for the file change
3123 // to be detected by the worktree, so that the buffer starts reloading.
3124 fs.save(
3125 path!("/dir/file1").as_ref(),
3126 &"the first contents".into(),
3127 Default::default(),
3128 )
3129 .await
3130 .unwrap();
3131 worktree.next_event(cx).await;
3132
3133 // Change the buffer's file again. Depending on the random seed, the
3134 // previous file change may still be in progress.
3135 fs.save(
3136 path!("/dir/file1").as_ref(),
3137 &"the second contents".into(),
3138 Default::default(),
3139 )
3140 .await
3141 .unwrap();
3142 worktree.next_event(cx).await;
3143
3144 cx.executor().run_until_parked();
3145 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3146 buffer.read_with(cx, |buffer, _| {
3147 assert_eq!(buffer.text(), on_disk_text);
3148 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3149 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3150 });
3151}
3152
3153#[gpui::test(iterations = 30)]
3154async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3155 init_test(cx);
3156
3157 let fs = FakeFs::new(cx.executor().clone());
3158 fs.insert_tree(
3159 path!("/dir"),
3160 json!({
3161 "file1": "the original contents",
3162 }),
3163 )
3164 .await;
3165
3166 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3167 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3168 let buffer = project
3169 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3170 .await
3171 .unwrap();
3172
3173 // Simulate buffer diffs being slow, so that they don't complete before
3174 // the next file change occurs.
3175 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3176
3177 // Change the buffer's file on disk, and then wait for the file change
3178 // to be detected by the worktree, so that the buffer starts reloading.
3179 fs.save(
3180 path!("/dir/file1").as_ref(),
3181 &"the first contents".into(),
3182 Default::default(),
3183 )
3184 .await
3185 .unwrap();
3186 worktree.next_event(cx).await;
3187
3188 cx.executor()
3189 .spawn(cx.executor().simulate_random_delay())
3190 .await;
3191
3192 // Perform a noop edit, causing the buffer's version to increase.
3193 buffer.update(cx, |buffer, cx| {
3194 buffer.edit([(0..0, " ")], None, cx);
3195 buffer.undo(cx);
3196 });
3197
3198 cx.executor().run_until_parked();
3199 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3200 buffer.read_with(cx, |buffer, _| {
3201 let buffer_text = buffer.text();
3202 if buffer_text == on_disk_text {
3203 assert!(
3204 !buffer.is_dirty() && !buffer.has_conflict(),
3205 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3206 );
3207 }
3208 // If the file change occurred while the buffer was processing the first
3209 // change, the buffer will be in a conflicting state.
3210 else {
3211 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3212 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3213 }
3214 });
3215}
3216
3217#[gpui::test]
3218async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3219 init_test(cx);
3220
3221 let fs = FakeFs::new(cx.executor());
3222 fs.insert_tree(
3223 path!("/dir"),
3224 json!({
3225 "file1": "the old contents",
3226 }),
3227 )
3228 .await;
3229
3230 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3231 let buffer = project
3232 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3233 .await
3234 .unwrap();
3235 buffer.update(cx, |buffer, cx| {
3236 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3237 });
3238
3239 project
3240 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3241 .await
3242 .unwrap();
3243
3244 let new_text = fs
3245 .load(Path::new(path!("/dir/file1")))
3246 .await
3247 .unwrap()
3248 .replace("\r\n", "\n");
3249 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3250}
3251
3252#[gpui::test]
3253async fn test_save_as(cx: &mut gpui::TestAppContext) {
3254 init_test(cx);
3255
3256 let fs = FakeFs::new(cx.executor());
3257 fs.insert_tree("/dir", json!({})).await;
3258
3259 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3260
3261 let languages = project.update(cx, |project, _| project.languages().clone());
3262 languages.add(rust_lang());
3263
3264 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3265 buffer.update(cx, |buffer, cx| {
3266 buffer.edit([(0..0, "abc")], None, cx);
3267 assert!(buffer.is_dirty());
3268 assert!(!buffer.has_conflict());
3269 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3270 });
3271 project
3272 .update(cx, |project, cx| {
3273 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3274 let path = ProjectPath {
3275 worktree_id,
3276 path: Arc::from(Path::new("file1.rs")),
3277 };
3278 project.save_buffer_as(buffer.clone(), path, cx)
3279 })
3280 .await
3281 .unwrap();
3282 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3283
3284 cx.executor().run_until_parked();
3285 buffer.update(cx, |buffer, cx| {
3286 assert_eq!(
3287 buffer.file().unwrap().full_path(cx),
3288 Path::new("dir/file1.rs")
3289 );
3290 assert!(!buffer.is_dirty());
3291 assert!(!buffer.has_conflict());
3292 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3293 });
3294
3295 let opened_buffer = project
3296 .update(cx, |project, cx| {
3297 project.open_local_buffer("/dir/file1.rs", cx)
3298 })
3299 .await
3300 .unwrap();
3301 assert_eq!(opened_buffer, buffer);
3302}
3303
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    // Exercises worktree rescanning against the *real* filesystem: stable
    // entry ids and open buffers must track files across renames/deletions,
    // and a remote (replica) worktree fed the observed update stream must
    // converge to the same set of paths. Retries because real FS event
    // delivery timing can be flaky on CI.
    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Opens a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Resolves a worktree path to its stable entry id, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so they can be replayed
    // into the remote replica below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the new on-disk layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids are preserved across renames (including the directory move).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths; the deleted file's
    // buffer keeps its old path but reports DiskState::Deleted.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3469
3470#[gpui::test(iterations = 10)]
3471async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3472 init_test(cx);
3473
3474 let fs = FakeFs::new(cx.executor());
3475 fs.insert_tree(
3476 path!("/dir"),
3477 json!({
3478 "a": {
3479 "file1": "",
3480 }
3481 }),
3482 )
3483 .await;
3484
3485 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3486 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3487 let tree_id = tree.update(cx, |tree, _| tree.id());
3488
3489 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3490 project.update(cx, |project, cx| {
3491 let tree = project.worktrees(cx).next().unwrap();
3492 tree.read(cx)
3493 .entry_for_path(path)
3494 .unwrap_or_else(|| panic!("no entry for path {}", path))
3495 .id
3496 })
3497 };
3498
3499 let dir_id = id_for_path("a", cx);
3500 let file_id = id_for_path("a/file1", cx);
3501 let buffer = project
3502 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3503 .await
3504 .unwrap();
3505 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3506
3507 project
3508 .update(cx, |project, cx| {
3509 project.rename_entry(dir_id, Path::new("b"), cx)
3510 })
3511 .unwrap()
3512 .await
3513 .to_included()
3514 .unwrap();
3515 cx.executor().run_until_parked();
3516
3517 assert_eq!(id_for_path("b", cx), dir_id);
3518 assert_eq!(id_for_path("b/file1", cx), file_id);
3519 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3520}
3521
3522#[gpui::test]
3523async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3524 init_test(cx);
3525
3526 let fs = FakeFs::new(cx.executor());
3527 fs.insert_tree(
3528 "/dir",
3529 json!({
3530 "a.txt": "a-contents",
3531 "b.txt": "b-contents",
3532 }),
3533 )
3534 .await;
3535
3536 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3537
3538 // Spawn multiple tasks to open paths, repeating some paths.
3539 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3540 (
3541 p.open_local_buffer("/dir/a.txt", cx),
3542 p.open_local_buffer("/dir/b.txt", cx),
3543 p.open_local_buffer("/dir/a.txt", cx),
3544 )
3545 });
3546
3547 let buffer_a_1 = buffer_a_1.await.unwrap();
3548 let buffer_a_2 = buffer_a_2.await.unwrap();
3549 let buffer_b = buffer_b.await.unwrap();
3550 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3551 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3552
3553 // There is only one buffer per path.
3554 let buffer_a_id = buffer_a_1.entity_id();
3555 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3556
3557 // Open the same path again while it is still open.
3558 drop(buffer_a_1);
3559 let buffer_a_3 = project
3560 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3561 .await
3562 .unwrap();
3563
3564 // There's still only one buffer per path.
3565 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3566}
3567
3568#[gpui::test]
3569async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3570 init_test(cx);
3571
3572 let fs = FakeFs::new(cx.executor());
3573 fs.insert_tree(
3574 path!("/dir"),
3575 json!({
3576 "file1": "abc",
3577 "file2": "def",
3578 "file3": "ghi",
3579 }),
3580 )
3581 .await;
3582
3583 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3584
3585 let buffer1 = project
3586 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3587 .await
3588 .unwrap();
3589 let events = Arc::new(Mutex::new(Vec::new()));
3590
3591 // initially, the buffer isn't dirty.
3592 buffer1.update(cx, |buffer, cx| {
3593 cx.subscribe(&buffer1, {
3594 let events = events.clone();
3595 move |_, _, event, _| match event {
3596 BufferEvent::Operation { .. } => {}
3597 _ => events.lock().push(event.clone()),
3598 }
3599 })
3600 .detach();
3601
3602 assert!(!buffer.is_dirty());
3603 assert!(events.lock().is_empty());
3604
3605 buffer.edit([(1..2, "")], None, cx);
3606 });
3607
3608 // after the first edit, the buffer is dirty, and emits a dirtied event.
3609 buffer1.update(cx, |buffer, cx| {
3610 assert!(buffer.text() == "ac");
3611 assert!(buffer.is_dirty());
3612 assert_eq!(
3613 *events.lock(),
3614 &[
3615 language::BufferEvent::Edited,
3616 language::BufferEvent::DirtyChanged
3617 ]
3618 );
3619 events.lock().clear();
3620 buffer.did_save(
3621 buffer.version(),
3622 buffer.file().unwrap().disk_state().mtime(),
3623 cx,
3624 );
3625 });
3626
3627 // after saving, the buffer is not dirty, and emits a saved event.
3628 buffer1.update(cx, |buffer, cx| {
3629 assert!(!buffer.is_dirty());
3630 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
3631 events.lock().clear();
3632
3633 buffer.edit([(1..1, "B")], None, cx);
3634 buffer.edit([(2..2, "D")], None, cx);
3635 });
3636
3637 // after editing again, the buffer is dirty, and emits another dirty event.
3638 buffer1.update(cx, |buffer, cx| {
3639 assert!(buffer.text() == "aBDc");
3640 assert!(buffer.is_dirty());
3641 assert_eq!(
3642 *events.lock(),
3643 &[
3644 language::BufferEvent::Edited,
3645 language::BufferEvent::DirtyChanged,
3646 language::BufferEvent::Edited,
3647 ],
3648 );
3649 events.lock().clear();
3650
3651 // After restoring the buffer to its previously-saved state,
3652 // the buffer is not considered dirty anymore.
3653 buffer.edit([(1..3, "")], None, cx);
3654 assert!(buffer.text() == "ac");
3655 assert!(!buffer.is_dirty());
3656 });
3657
3658 assert_eq!(
3659 *events.lock(),
3660 &[
3661 language::BufferEvent::Edited,
3662 language::BufferEvent::DirtyChanged
3663 ]
3664 );
3665
3666 // When a file is deleted, the buffer is considered dirty.
3667 let events = Arc::new(Mutex::new(Vec::new()));
3668 let buffer2 = project
3669 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
3670 .await
3671 .unwrap();
3672 buffer2.update(cx, |_, cx| {
3673 cx.subscribe(&buffer2, {
3674 let events = events.clone();
3675 move |_, _, event, _| events.lock().push(event.clone())
3676 })
3677 .detach();
3678 });
3679
3680 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
3681 .await
3682 .unwrap();
3683 cx.executor().run_until_parked();
3684 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3685 assert_eq!(
3686 *events.lock(),
3687 &[
3688 language::BufferEvent::DirtyChanged,
3689 language::BufferEvent::FileHandleChanged
3690 ]
3691 );
3692
3693 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3694 let events = Arc::new(Mutex::new(Vec::new()));
3695 let buffer3 = project
3696 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
3697 .await
3698 .unwrap();
3699 buffer3.update(cx, |_, cx| {
3700 cx.subscribe(&buffer3, {
3701 let events = events.clone();
3702 move |_, _, event, _| events.lock().push(event.clone())
3703 })
3704 .detach();
3705 });
3706
3707 buffer3.update(cx, |buffer, cx| {
3708 buffer.edit([(0..0, "x")], None, cx);
3709 });
3710 events.lock().clear();
3711 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
3712 .await
3713 .unwrap();
3714 cx.executor().run_until_parked();
3715 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
3716 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3717}
3718
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how an open buffer reacts to its file changing on disk:
    // a clean buffer reloads (preserving anchors via a diff of old vs. new
    // contents), while a dirty buffer keeps its edits and is marked as
    // conflicted instead.
    init_test(cx);

    // `ˇ` markers yield offsets at which anchors are created below.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Anchors created before the reload land at the corresponding
        // positions in the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
3801
3802#[gpui::test]
3803async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3804 init_test(cx);
3805
3806 let fs = FakeFs::new(cx.executor());
3807 fs.insert_tree(
3808 path!("/dir"),
3809 json!({
3810 "file1": "a\nb\nc\n",
3811 "file2": "one\r\ntwo\r\nthree\r\n",
3812 }),
3813 )
3814 .await;
3815
3816 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3817 let buffer1 = project
3818 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3819 .await
3820 .unwrap();
3821 let buffer2 = project
3822 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
3823 .await
3824 .unwrap();
3825
3826 buffer1.update(cx, |buffer, _| {
3827 assert_eq!(buffer.text(), "a\nb\nc\n");
3828 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3829 });
3830 buffer2.update(cx, |buffer, _| {
3831 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3832 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3833 });
3834
3835 // Change a file's line endings on disk from unix to windows. The buffer's
3836 // state updates correctly.
3837 fs.save(
3838 path!("/dir/file1").as_ref(),
3839 &"aaa\nb\nc\n".into(),
3840 LineEnding::Windows,
3841 )
3842 .await
3843 .unwrap();
3844 cx.executor().run_until_parked();
3845 buffer1.update(cx, |buffer, _| {
3846 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3847 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3848 });
3849
3850 // Save a file with windows line endings. The file is written correctly.
3851 buffer2.update(cx, |buffer, cx| {
3852 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3853 });
3854 project
3855 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3856 .await
3857 .unwrap();
3858 assert_eq!(
3859 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
3860 "one\r\ntwo\r\nthree\r\nfour\r\n",
3861 );
3862}
3863
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP diagnostics whose `related_information` entries
    // reference one another are grouped: each primary diagnostic and its
    // hints share a `group_id`, and `diagnostic_group` returns all members
    // of a group in range order.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Two diagnostic "groups": a warning ("error 1") with one hint, and an
    // error ("error 2") with two hints. Hints point back at their primary
    // via a related-information entry whose message matches the primary.
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics come back in range order, tagged with group ids:
    // group 1 is the warning + its hint, group 0 is the error + its hints.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Fetching a single group returns its hints and primary in range order.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
4106
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the LSP file-operation protocol around renames: when a server
    // registers for will/didRenameFiles, renaming a matching entry sends a
    // `workspace/willRenameFiles` request (whose returned WorkspaceEdit is
    // applied) followed by a `workspace/didRenameFiles` notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters the server registers for: all `.rs` files, and all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; it must wait for the willRename response below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // Edit the server returns from willRenameFiles; the project is expected
    // to resolve/apply it as part of the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .handle_request::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server receives didRenameFiles.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4235
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Verifies symbol renaming through LSP: `prepare_rename` resolves the
    // renameable range, and `perform_rename` applies the server's
    // WorkspaceEdit across multiple buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the server answers with
    // the range covering the symbol.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server returns edits in both one.rs (the
    // definition) and two.rs (the two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each affected buffer to its applied edits.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4375
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Verifies project-wide text search: matches come from files on disk,
    // and unsaved (dirty) buffer contents take precedence over disk contents.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Whole-word search for "TWO" matches the definition in two.rs and the
    // reference in three.rs (but not e.g. "TWO" inside other identifiers).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so it now references two::TWO.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The same search now also finds the matches in the dirty buffer.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40]),
            (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
4450
4451#[gpui::test]
4452async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4453 init_test(cx);
4454
4455 let search_query = "file";
4456
4457 let fs = FakeFs::new(cx.executor());
4458 fs.insert_tree(
4459 path!("/dir"),
4460 json!({
4461 "one.rs": r#"// Rust file one"#,
4462 "one.ts": r#"// TypeScript file one"#,
4463 "two.rs": r#"// Rust file two"#,
4464 "two.ts": r#"// TypeScript file two"#,
4465 }),
4466 )
4467 .await;
4468 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4469
4470 assert!(
4471 search(
4472 &project,
4473 SearchQuery::text(
4474 search_query,
4475 false,
4476 true,
4477 false,
4478 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4479 Default::default(),
4480 None
4481 )
4482 .unwrap(),
4483 cx
4484 )
4485 .await
4486 .unwrap()
4487 .is_empty(),
4488 "If no inclusions match, no files should be returned"
4489 );
4490
4491 assert_eq!(
4492 search(
4493 &project,
4494 SearchQuery::text(
4495 search_query,
4496 false,
4497 true,
4498 false,
4499 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4500 Default::default(),
4501 None
4502 )
4503 .unwrap(),
4504 cx
4505 )
4506 .await
4507 .unwrap(),
4508 HashMap::from_iter([
4509 (separator!("dir/one.rs").to_string(), vec![8..12]),
4510 (separator!("dir/two.rs").to_string(), vec![8..12]),
4511 ]),
4512 "Rust only search should give only Rust files"
4513 );
4514
4515 assert_eq!(
4516 search(
4517 &project,
4518 SearchQuery::text(
4519 search_query,
4520 false,
4521 true,
4522 false,
4523
4524 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4525
4526 Default::default(),
4527 None,
4528 ).unwrap(),
4529 cx
4530 )
4531 .await
4532 .unwrap(),
4533 HashMap::from_iter([
4534 (separator!("dir/one.ts").to_string(), vec![14..18]),
4535 (separator!("dir/two.ts").to_string(), vec![14..18]),
4536 ]),
4537 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4538 );
4539
4540 assert_eq!(
4541 search(
4542 &project,
4543 SearchQuery::text(
4544 search_query,
4545 false,
4546 true,
4547 false,
4548
4549 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4550
4551 Default::default(),
4552 None,
4553 ).unwrap(),
4554 cx
4555 )
4556 .await
4557 .unwrap(),
4558 HashMap::from_iter([
4559 (separator!("dir/two.ts").to_string(), vec![14..18]),
4560 (separator!("dir/one.rs").to_string(), vec![8..12]),
4561 (separator!("dir/one.ts").to_string(), vec![14..18]),
4562 (separator!("dir/two.rs").to_string(), vec![8..12]),
4563 ]),
4564 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4565 );
4566}
4567
4568#[gpui::test]
4569async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4570 init_test(cx);
4571
4572 let search_query = "file";
4573
4574 let fs = FakeFs::new(cx.executor());
4575 fs.insert_tree(
4576 path!("/dir"),
4577 json!({
4578 "one.rs": r#"// Rust file one"#,
4579 "one.ts": r#"// TypeScript file one"#,
4580 "two.rs": r#"// Rust file two"#,
4581 "two.ts": r#"// TypeScript file two"#,
4582 }),
4583 )
4584 .await;
4585 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4586
4587 assert_eq!(
4588 search(
4589 &project,
4590 SearchQuery::text(
4591 search_query,
4592 false,
4593 true,
4594 false,
4595 Default::default(),
4596 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4597 None,
4598 )
4599 .unwrap(),
4600 cx
4601 )
4602 .await
4603 .unwrap(),
4604 HashMap::from_iter([
4605 (separator!("dir/one.rs").to_string(), vec![8..12]),
4606 (separator!("dir/one.ts").to_string(), vec![14..18]),
4607 (separator!("dir/two.rs").to_string(), vec![8..12]),
4608 (separator!("dir/two.ts").to_string(), vec![14..18]),
4609 ]),
4610 "If no exclusions match, all files should be returned"
4611 );
4612
4613 assert_eq!(
4614 search(
4615 &project,
4616 SearchQuery::text(
4617 search_query,
4618 false,
4619 true,
4620 false,
4621 Default::default(),
4622 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4623 None,
4624 )
4625 .unwrap(),
4626 cx
4627 )
4628 .await
4629 .unwrap(),
4630 HashMap::from_iter([
4631 (separator!("dir/one.ts").to_string(), vec![14..18]),
4632 (separator!("dir/two.ts").to_string(), vec![14..18]),
4633 ]),
4634 "Rust exclusion search should give only TypeScript files"
4635 );
4636
4637 assert_eq!(
4638 search(
4639 &project,
4640 SearchQuery::text(
4641 search_query,
4642 false,
4643 true,
4644 false,
4645 Default::default(),
4646 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4647 None,
4648 ).unwrap(),
4649 cx
4650 )
4651 .await
4652 .unwrap(),
4653 HashMap::from_iter([
4654 (separator!("dir/one.rs").to_string(), vec![8..12]),
4655 (separator!("dir/two.rs").to_string(), vec![8..12]),
4656 ]),
4657 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4658 );
4659
4660 assert!(
4661 search(
4662 &project,
4663 SearchQuery::text(
4664 search_query,
4665 false,
4666 true,
4667 false,
4668 Default::default(),
4669
4670 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4671 None,
4672
4673 ).unwrap(),
4674 cx
4675 )
4676 .await
4677 .unwrap().is_empty(),
4678 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4679 );
4680}
4681
4682#[gpui::test]
4683async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4684 init_test(cx);
4685
4686 let search_query = "file";
4687
4688 let fs = FakeFs::new(cx.executor());
4689 fs.insert_tree(
4690 path!("/dir"),
4691 json!({
4692 "one.rs": r#"// Rust file one"#,
4693 "one.ts": r#"// TypeScript file one"#,
4694 "two.rs": r#"// Rust file two"#,
4695 "two.ts": r#"// TypeScript file two"#,
4696 }),
4697 )
4698 .await;
4699 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4700
4701 assert!(
4702 search(
4703 &project,
4704 SearchQuery::text(
4705 search_query,
4706 false,
4707 true,
4708 false,
4709 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4710 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4711 None,
4712 )
4713 .unwrap(),
4714 cx
4715 )
4716 .await
4717 .unwrap()
4718 .is_empty(),
4719 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4720 );
4721
4722 assert!(
4723 search(
4724 &project,
4725 SearchQuery::text(
4726 search_query,
4727 false,
4728 true,
4729 false,
4730 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4731 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4732 None,
4733 ).unwrap(),
4734 cx
4735 )
4736 .await
4737 .unwrap()
4738 .is_empty(),
4739 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4740 );
4741
4742 assert!(
4743 search(
4744 &project,
4745 SearchQuery::text(
4746 search_query,
4747 false,
4748 true,
4749 false,
4750 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4751 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4752 None,
4753 )
4754 .unwrap(),
4755 cx
4756 )
4757 .await
4758 .unwrap()
4759 .is_empty(),
4760 "Non-matching inclusions and exclusions should not change that."
4761 );
4762
4763 assert_eq!(
4764 search(
4765 &project,
4766 SearchQuery::text(
4767 search_query,
4768 false,
4769 true,
4770 false,
4771 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4772 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4773 None,
4774 )
4775 .unwrap(),
4776 cx
4777 )
4778 .await
4779 .unwrap(),
4780 HashMap::from_iter([
4781 (separator!("dir/one.ts").to_string(), vec![14..18]),
4782 (separator!("dir/two.ts").to_string(), vec![14..18]),
4783 ]),
4784 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4785 );
4786}
4787
4788#[gpui::test]
4789async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4790 init_test(cx);
4791
4792 let fs = FakeFs::new(cx.executor());
4793 fs.insert_tree(
4794 path!("/worktree-a"),
4795 json!({
4796 "haystack.rs": r#"// NEEDLE"#,
4797 "haystack.ts": r#"// NEEDLE"#,
4798 }),
4799 )
4800 .await;
4801 fs.insert_tree(
4802 path!("/worktree-b"),
4803 json!({
4804 "haystack.rs": r#"// NEEDLE"#,
4805 "haystack.ts": r#"// NEEDLE"#,
4806 }),
4807 )
4808 .await;
4809
4810 let project = Project::test(
4811 fs.clone(),
4812 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
4813 cx,
4814 )
4815 .await;
4816
4817 assert_eq!(
4818 search(
4819 &project,
4820 SearchQuery::text(
4821 "NEEDLE",
4822 false,
4823 true,
4824 false,
4825 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4826 Default::default(),
4827 None,
4828 )
4829 .unwrap(),
4830 cx
4831 )
4832 .await
4833 .unwrap(),
4834 HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
4835 "should only return results from included worktree"
4836 );
4837 assert_eq!(
4838 search(
4839 &project,
4840 SearchQuery::text(
4841 "NEEDLE",
4842 false,
4843 true,
4844 false,
4845 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4846 Default::default(),
4847 None,
4848 )
4849 .unwrap(),
4850 cx
4851 )
4852 .await
4853 .unwrap(),
4854 HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
4855 "should only return results from included worktree"
4856 );
4857
4858 assert_eq!(
4859 search(
4860 &project,
4861 SearchQuery::text(
4862 "NEEDLE",
4863 false,
4864 true,
4865 false,
4866 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4867 Default::default(),
4868 None,
4869 )
4870 .unwrap(),
4871 cx
4872 )
4873 .await
4874 .unwrap(),
4875 HashMap::from_iter([
4876 (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
4877 (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
4878 ]),
4879 "should return results from both worktrees"
4880 );
4881}
4882
4883#[gpui::test]
4884async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
4885 init_test(cx);
4886
4887 let fs = FakeFs::new(cx.background_executor.clone());
4888 fs.insert_tree(
4889 path!("/dir"),
4890 json!({
4891 ".git": {},
4892 ".gitignore": "**/target\n/node_modules\n",
4893 "target": {
4894 "index.txt": "index_key:index_value"
4895 },
4896 "node_modules": {
4897 "eslint": {
4898 "index.ts": "const eslint_key = 'eslint value'",
4899 "package.json": r#"{ "some_key": "some value" }"#,
4900 },
4901 "prettier": {
4902 "index.ts": "const prettier_key = 'prettier value'",
4903 "package.json": r#"{ "other_key": "other value" }"#,
4904 },
4905 },
4906 "package.json": r#"{ "main_key": "main value" }"#,
4907 }),
4908 )
4909 .await;
4910 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4911
4912 let query = "key";
4913 assert_eq!(
4914 search(
4915 &project,
4916 SearchQuery::text(
4917 query,
4918 false,
4919 false,
4920 false,
4921 Default::default(),
4922 Default::default(),
4923 None,
4924 )
4925 .unwrap(),
4926 cx
4927 )
4928 .await
4929 .unwrap(),
4930 HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
4931 "Only one non-ignored file should have the query"
4932 );
4933
4934 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4935 assert_eq!(
4936 search(
4937 &project,
4938 SearchQuery::text(
4939 query,
4940 false,
4941 false,
4942 true,
4943 Default::default(),
4944 Default::default(),
4945 None,
4946 )
4947 .unwrap(),
4948 cx
4949 )
4950 .await
4951 .unwrap(),
4952 HashMap::from_iter([
4953 (separator!("dir/package.json").to_string(), vec![8..11]),
4954 (separator!("dir/target/index.txt").to_string(), vec![6..9]),
4955 (
4956 separator!("dir/node_modules/prettier/package.json").to_string(),
4957 vec![9..12]
4958 ),
4959 (
4960 separator!("dir/node_modules/prettier/index.ts").to_string(),
4961 vec![15..18]
4962 ),
4963 (
4964 separator!("dir/node_modules/eslint/index.ts").to_string(),
4965 vec![13..16]
4966 ),
4967 (
4968 separator!("dir/node_modules/eslint/package.json").to_string(),
4969 vec![8..11]
4970 ),
4971 ]),
4972 "Unrestricted search with ignored directories should find every file with the query"
4973 );
4974
4975 let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
4976 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
4977 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4978 assert_eq!(
4979 search(
4980 &project,
4981 SearchQuery::text(
4982 query,
4983 false,
4984 false,
4985 true,
4986 files_to_include,
4987 files_to_exclude,
4988 None,
4989 )
4990 .unwrap(),
4991 cx
4992 )
4993 .await
4994 .unwrap(),
4995 HashMap::from_iter([(
4996 separator!("dir/node_modules/prettier/package.json").to_string(),
4997 vec![9..12]
4998 )]),
4999 "With search including ignored prettier directory and excluding TS files, only one file should be found"
5000 );
5001}
5002
5003#[gpui::test]
5004async fn test_create_entry(cx: &mut gpui::TestAppContext) {
5005 init_test(cx);
5006
5007 let fs = FakeFs::new(cx.executor().clone());
5008 fs.insert_tree(
5009 "/one/two",
5010 json!({
5011 "three": {
5012 "a.txt": "",
5013 "four": {}
5014 },
5015 "c.rs": ""
5016 }),
5017 )
5018 .await;
5019
5020 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
5021 project
5022 .update(cx, |project, cx| {
5023 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5024 project.create_entry((id, "b.."), true, cx)
5025 })
5026 .await
5027 .unwrap()
5028 .to_included()
5029 .unwrap();
5030
5031 // Can't create paths outside the project
5032 let result = project
5033 .update(cx, |project, cx| {
5034 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5035 project.create_entry((id, "../../boop"), true, cx)
5036 })
5037 .await;
5038 assert!(result.is_err());
5039
5040 // Can't create paths with '..'
5041 let result = project
5042 .update(cx, |project, cx| {
5043 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5044 project.create_entry((id, "four/../beep"), true, cx)
5045 })
5046 .await;
5047 assert!(result.is_err());
5048
5049 assert_eq!(
5050 fs.paths(true),
5051 vec![
5052 PathBuf::from(path!("/")),
5053 PathBuf::from(path!("/one")),
5054 PathBuf::from(path!("/one/two")),
5055 PathBuf::from(path!("/one/two/c.rs")),
5056 PathBuf::from(path!("/one/two/three")),
5057 PathBuf::from(path!("/one/two/three/a.txt")),
5058 PathBuf::from(path!("/one/two/three/b..")),
5059 PathBuf::from(path!("/one/two/three/four")),
5060 ]
5061 );
5062
5063 // And we cannot open buffers with '..'
5064 let result = project
5065 .update(cx, |project, cx| {
5066 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5067 project.open_buffer((id, "../c.rs"), cx)
5068 })
5069 .await;
5070 assert!(result.is_err())
5071}
5072
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four fake servers for the same "tsx" language: the first three advertise
    // hover support, the last one advertises no hover capability at all.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Open the buffer with LSP support and let background work settle before
    // collecting the started servers.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Collect each started server and install a hover handler appropriate to
    // its role in the test. Each server name must only appear once.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two answer with a real hover payload that embeds the
            // server's own name, so the final assertion can tell them apart.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            // ESLint is queried (it has the hover capability) but returns no
            // hover, so it must not contribute to the final result.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // This server declared no hover capability; receiving a hover
            // request here would be a bug. Note: its handler is deliberately
            // NOT added to `servers_with_hover_requests`.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue the hover while the fake servers are still pending, then wait for
    // every hover-capable server to receive its request before awaiting the
    // aggregated result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned a non-empty hover contribute.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5225
5226#[gpui::test]
5227async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5228 init_test(cx);
5229
5230 let fs = FakeFs::new(cx.executor());
5231 fs.insert_tree(
5232 path!("/dir"),
5233 json!({
5234 "a.ts": "a",
5235 }),
5236 )
5237 .await;
5238
5239 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5240
5241 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5242 language_registry.add(typescript_lang());
5243 let mut fake_language_servers = language_registry.register_fake_lsp(
5244 "TypeScript",
5245 FakeLspAdapter {
5246 capabilities: lsp::ServerCapabilities {
5247 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5248 ..lsp::ServerCapabilities::default()
5249 },
5250 ..FakeLspAdapter::default()
5251 },
5252 );
5253
5254 let (buffer, _handle) = project
5255 .update(cx, |p, cx| {
5256 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5257 })
5258 .await
5259 .unwrap();
5260 cx.executor().run_until_parked();
5261
5262 let fake_server = fake_language_servers
5263 .next()
5264 .await
5265 .expect("failed to get the language server");
5266
5267 let mut request_handled =
5268 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
5269 Ok(Some(lsp::Hover {
5270 contents: lsp::HoverContents::Array(vec![
5271 lsp::MarkedString::String("".to_string()),
5272 lsp::MarkedString::String(" ".to_string()),
5273 lsp::MarkedString::String("\n\n\n".to_string()),
5274 ]),
5275 range: None,
5276 }))
5277 });
5278
5279 let hover_task = project.update(cx, |project, cx| {
5280 project.hover(&buffer, Point::new(0, 0), cx)
5281 });
5282 let () = request_handled
5283 .next()
5284 .await
5285 .expect("All hover requests should have been triggered");
5286 assert_eq!(
5287 Vec::<String>::new(),
5288 hover_task
5289 .await
5290 .into_iter()
5291 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5292 .sorted()
5293 .collect::<Vec<_>>(),
5294 "Empty hover parts should be ignored"
5295 );
5296}
5297
5298#[gpui::test]
5299async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
5300 init_test(cx);
5301
5302 let fs = FakeFs::new(cx.executor());
5303 fs.insert_tree(
5304 path!("/dir"),
5305 json!({
5306 "a.ts": "a",
5307 }),
5308 )
5309 .await;
5310
5311 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5312
5313 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5314 language_registry.add(typescript_lang());
5315 let mut fake_language_servers = language_registry.register_fake_lsp(
5316 "TypeScript",
5317 FakeLspAdapter {
5318 capabilities: lsp::ServerCapabilities {
5319 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5320 ..lsp::ServerCapabilities::default()
5321 },
5322 ..FakeLspAdapter::default()
5323 },
5324 );
5325
5326 let (buffer, _handle) = project
5327 .update(cx, |p, cx| {
5328 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5329 })
5330 .await
5331 .unwrap();
5332 cx.executor().run_until_parked();
5333
5334 let fake_server = fake_language_servers
5335 .next()
5336 .await
5337 .expect("failed to get the language server");
5338
5339 let mut request_handled = fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5340 move |_, _| async move {
5341 Ok(Some(vec![
5342 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5343 title: "organize imports".to_string(),
5344 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
5345 ..lsp::CodeAction::default()
5346 }),
5347 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5348 title: "fix code".to_string(),
5349 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
5350 ..lsp::CodeAction::default()
5351 }),
5352 ]))
5353 },
5354 );
5355
5356 let code_actions_task = project.update(cx, |project, cx| {
5357 project.code_actions(
5358 &buffer,
5359 0..buffer.read(cx).len(),
5360 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
5361 cx,
5362 )
5363 });
5364
5365 let () = request_handled
5366 .next()
5367 .await
5368 .expect("The code action request should have been triggered");
5369
5370 let code_actions = code_actions_task.await.unwrap();
5371 assert_eq!(code_actions.len(), 1);
5372 assert_eq!(
5373 code_actions[0].lsp_action.kind,
5374 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
5375 );
5376}
5377
5378#[gpui::test]
5379async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5380 init_test(cx);
5381
5382 let fs = FakeFs::new(cx.executor());
5383 fs.insert_tree(
5384 path!("/dir"),
5385 json!({
5386 "a.tsx": "a",
5387 }),
5388 )
5389 .await;
5390
5391 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5392
5393 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5394 language_registry.add(tsx_lang());
5395 let language_server_names = [
5396 "TypeScriptServer",
5397 "TailwindServer",
5398 "ESLintServer",
5399 "NoActionsCapabilitiesServer",
5400 ];
5401
5402 let mut language_server_rxs = [
5403 language_registry.register_fake_lsp(
5404 "tsx",
5405 FakeLspAdapter {
5406 name: language_server_names[0],
5407 capabilities: lsp::ServerCapabilities {
5408 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5409 ..lsp::ServerCapabilities::default()
5410 },
5411 ..FakeLspAdapter::default()
5412 },
5413 ),
5414 language_registry.register_fake_lsp(
5415 "tsx",
5416 FakeLspAdapter {
5417 name: language_server_names[1],
5418 capabilities: lsp::ServerCapabilities {
5419 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5420 ..lsp::ServerCapabilities::default()
5421 },
5422 ..FakeLspAdapter::default()
5423 },
5424 ),
5425 language_registry.register_fake_lsp(
5426 "tsx",
5427 FakeLspAdapter {
5428 name: language_server_names[2],
5429 capabilities: lsp::ServerCapabilities {
5430 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5431 ..lsp::ServerCapabilities::default()
5432 },
5433 ..FakeLspAdapter::default()
5434 },
5435 ),
5436 language_registry.register_fake_lsp(
5437 "tsx",
5438 FakeLspAdapter {
5439 name: language_server_names[3],
5440 capabilities: lsp::ServerCapabilities {
5441 code_action_provider: None,
5442 ..lsp::ServerCapabilities::default()
5443 },
5444 ..FakeLspAdapter::default()
5445 },
5446 ),
5447 ];
5448
5449 let (buffer, _handle) = project
5450 .update(cx, |p, cx| {
5451 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5452 })
5453 .await
5454 .unwrap();
5455 cx.executor().run_until_parked();
5456
5457 let mut servers_with_actions_requests = HashMap::default();
5458 for i in 0..language_server_names.len() {
5459 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5460 panic!(
5461 "Failed to get language server #{i} with name {}",
5462 &language_server_names[i]
5463 )
5464 });
5465 let new_server_name = new_server.server.name();
5466
5467 assert!(
5468 !servers_with_actions_requests.contains_key(&new_server_name),
5469 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5470 );
5471 match new_server_name.0.as_ref() {
5472 "TailwindServer" | "TypeScriptServer" => {
5473 servers_with_actions_requests.insert(
5474 new_server_name.clone(),
5475 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5476 move |_, _| {
5477 let name = new_server_name.clone();
5478 async move {
5479 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5480 lsp::CodeAction {
5481 title: format!("{name} code action"),
5482 ..lsp::CodeAction::default()
5483 },
5484 )]))
5485 }
5486 },
5487 ),
5488 );
5489 }
5490 "ESLintServer" => {
5491 servers_with_actions_requests.insert(
5492 new_server_name,
5493 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5494 |_, _| async move { Ok(None) },
5495 ),
5496 );
5497 }
5498 "NoActionsCapabilitiesServer" => {
5499 let _never_handled = new_server
5500 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5501 panic!(
5502 "Should not call for code actions server with no corresponding capabilities"
5503 )
5504 });
5505 }
5506 unexpected => panic!("Unexpected server name: {unexpected}"),
5507 }
5508 }
5509
5510 let code_actions_task = project.update(cx, |project, cx| {
5511 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5512 });
5513
5514 // cx.run_until_parked();
5515 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5516 |mut code_actions_request| async move {
5517 code_actions_request
5518 .next()
5519 .await
5520 .expect("All code actions requests should have been triggered")
5521 },
5522 ))
5523 .await;
5524 assert_eq!(
5525 vec!["TailwindServer code action", "TypeScriptServer code action"],
5526 code_actions_task
5527 .await
5528 .unwrap()
5529 .into_iter()
5530 .map(|code_action| code_action.lsp_action.title)
5531 .sorted()
5532 .collect::<Vec<_>>(),
5533 "Should receive code actions responses from all related servers with hover capabilities"
5534 );
5535}
5536
5537#[gpui::test]
5538async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5539 init_test(cx);
5540
5541 let fs = FakeFs::new(cx.executor());
5542 fs.insert_tree(
5543 "/dir",
5544 json!({
5545 "a.rs": "let a = 1;",
5546 "b.rs": "let b = 2;",
5547 "c.rs": "let c = 2;",
5548 }),
5549 )
5550 .await;
5551
5552 let project = Project::test(
5553 fs,
5554 [
5555 "/dir/a.rs".as_ref(),
5556 "/dir/b.rs".as_ref(),
5557 "/dir/c.rs".as_ref(),
5558 ],
5559 cx,
5560 )
5561 .await;
5562
5563 // check the initial state and get the worktrees
5564 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5565 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5566 assert_eq!(worktrees.len(), 3);
5567
5568 let worktree_a = worktrees[0].read(cx);
5569 let worktree_b = worktrees[1].read(cx);
5570 let worktree_c = worktrees[2].read(cx);
5571
5572 // check they start in the right order
5573 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5574 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5575 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5576
5577 (
5578 worktrees[0].clone(),
5579 worktrees[1].clone(),
5580 worktrees[2].clone(),
5581 )
5582 });
5583
5584 // move first worktree to after the second
5585 // [a, b, c] -> [b, a, c]
5586 project
5587 .update(cx, |project, cx| {
5588 let first = worktree_a.read(cx);
5589 let second = worktree_b.read(cx);
5590 project.move_worktree(first.id(), second.id(), cx)
5591 })
5592 .expect("moving first after second");
5593
5594 // check the state after moving
5595 project.update(cx, |project, cx| {
5596 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5597 assert_eq!(worktrees.len(), 3);
5598
5599 let first = worktrees[0].read(cx);
5600 let second = worktrees[1].read(cx);
5601 let third = worktrees[2].read(cx);
5602
5603 // check they are now in the right order
5604 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5605 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5606 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5607 });
5608
5609 // move the second worktree to before the first
5610 // [b, a, c] -> [a, b, c]
5611 project
5612 .update(cx, |project, cx| {
5613 let second = worktree_a.read(cx);
5614 let first = worktree_b.read(cx);
5615 project.move_worktree(first.id(), second.id(), cx)
5616 })
5617 .expect("moving second before first");
5618
5619 // check the state after moving
5620 project.update(cx, |project, cx| {
5621 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5622 assert_eq!(worktrees.len(), 3);
5623
5624 let first = worktrees[0].read(cx);
5625 let second = worktrees[1].read(cx);
5626 let third = worktrees[2].read(cx);
5627
5628 // check they are now in the right order
5629 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5630 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5631 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5632 });
5633
5634 // move the second worktree to after the third
5635 // [a, b, c] -> [a, c, b]
5636 project
5637 .update(cx, |project, cx| {
5638 let second = worktree_b.read(cx);
5639 let third = worktree_c.read(cx);
5640 project.move_worktree(second.id(), third.id(), cx)
5641 })
5642 .expect("moving second after third");
5643
5644 // check the state after moving
5645 project.update(cx, |project, cx| {
5646 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5647 assert_eq!(worktrees.len(), 3);
5648
5649 let first = worktrees[0].read(cx);
5650 let second = worktrees[1].read(cx);
5651 let third = worktrees[2].read(cx);
5652
5653 // check they are now in the right order
5654 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5655 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5656 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5657 });
5658
5659 // move the third worktree to before the second
5660 // [a, c, b] -> [a, b, c]
5661 project
5662 .update(cx, |project, cx| {
5663 let third = worktree_c.read(cx);
5664 let second = worktree_b.read(cx);
5665 project.move_worktree(third.id(), second.id(), cx)
5666 })
5667 .expect("moving third before second");
5668
5669 // check the state after moving
5670 project.update(cx, |project, cx| {
5671 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5672 assert_eq!(worktrees.len(), 3);
5673
5674 let first = worktrees[0].read(cx);
5675 let second = worktrees[1].read(cx);
5676 let third = worktrees[2].read(cx);
5677
5678 // check they are now in the right order
5679 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5680 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5681 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5682 });
5683
5684 // move the first worktree to after the third
5685 // [a, b, c] -> [b, c, a]
5686 project
5687 .update(cx, |project, cx| {
5688 let first = worktree_a.read(cx);
5689 let third = worktree_c.read(cx);
5690 project.move_worktree(first.id(), third.id(), cx)
5691 })
5692 .expect("moving first after third");
5693
5694 // check the state after moving
5695 project.update(cx, |project, cx| {
5696 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5697 assert_eq!(worktrees.len(), 3);
5698
5699 let first = worktrees[0].read(cx);
5700 let second = worktrees[1].read(cx);
5701 let third = worktrees[2].read(cx);
5702
5703 // check they are now in the right order
5704 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5705 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5706 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5707 });
5708
5709 // move the third worktree to before the first
5710 // [b, c, a] -> [a, b, c]
5711 project
5712 .update(cx, |project, cx| {
5713 let third = worktree_a.read(cx);
5714 let first = worktree_b.read(cx);
5715 project.move_worktree(third.id(), first.id(), cx)
5716 })
5717 .expect("moving third before first");
5718
5719 // check the state after moving
5720 project.update(cx, |project, cx| {
5721 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5722 assert_eq!(worktrees.len(), 3);
5723
5724 let first = worktrees[0].read(cx);
5725 let second = worktrees[1].read(cx);
5726 let third = worktrees[2].read(cx);
5727
5728 // check they are now in the right order
5729 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5730 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5731 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5732 });
5733}
5734
5735#[gpui::test]
5736async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
5737 init_test(cx);
5738
5739 let staged_contents = r#"
5740 fn main() {
5741 println!("hello world");
5742 }
5743 "#
5744 .unindent();
5745 let file_contents = r#"
5746 // print goodbye
5747 fn main() {
5748 println!("goodbye world");
5749 }
5750 "#
5751 .unindent();
5752
5753 let fs = FakeFs::new(cx.background_executor.clone());
5754 fs.insert_tree(
5755 "/dir",
5756 json!({
5757 ".git": {},
5758 "src": {
5759 "main.rs": file_contents,
5760 }
5761 }),
5762 )
5763 .await;
5764
5765 fs.set_index_for_repo(
5766 Path::new("/dir/.git"),
5767 &[("src/main.rs".into(), staged_contents)],
5768 );
5769
5770 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5771
5772 let buffer = project
5773 .update(cx, |project, cx| {
5774 project.open_local_buffer("/dir/src/main.rs", cx)
5775 })
5776 .await
5777 .unwrap();
5778 let unstaged_diff = project
5779 .update(cx, |project, cx| {
5780 project.open_unstaged_diff(buffer.clone(), cx)
5781 })
5782 .await
5783 .unwrap();
5784
5785 cx.run_until_parked();
5786 unstaged_diff.update(cx, |unstaged_diff, cx| {
5787 let snapshot = buffer.read(cx).snapshot();
5788 assert_hunks(
5789 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
5790 &snapshot,
5791 &unstaged_diff.base_text_string().unwrap(),
5792 &[
5793 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
5794 (
5795 2..3,
5796 " println!(\"hello world\");\n",
5797 " println!(\"goodbye world\");\n",
5798 DiffHunkStatus::modified_none(),
5799 ),
5800 ],
5801 );
5802 });
5803
5804 let staged_contents = r#"
5805 // print goodbye
5806 fn main() {
5807 }
5808 "#
5809 .unindent();
5810
5811 fs.set_index_for_repo(
5812 Path::new("/dir/.git"),
5813 &[("src/main.rs".into(), staged_contents)],
5814 );
5815
5816 cx.run_until_parked();
5817 unstaged_diff.update(cx, |unstaged_diff, cx| {
5818 let snapshot = buffer.read(cx).snapshot();
5819 assert_hunks(
5820 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
5821 &snapshot,
5822 &unstaged_diff.base_text().unwrap().text(),
5823 &[(
5824 2..3,
5825 "",
5826 " println!(\"goodbye world\");\n",
5827 DiffHunkStatus::added_none(),
5828 )],
5829 );
5830 });
5831}
5832
5833#[gpui::test]
5834async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
5835 init_test(cx);
5836
5837 let committed_contents = r#"
5838 fn main() {
5839 println!("hello world");
5840 }
5841 "#
5842 .unindent();
5843 let staged_contents = r#"
5844 fn main() {
5845 println!("goodbye world");
5846 }
5847 "#
5848 .unindent();
5849 let file_contents = r#"
5850 // print goodbye
5851 fn main() {
5852 println!("goodbye world");
5853 }
5854 "#
5855 .unindent();
5856
5857 let fs = FakeFs::new(cx.background_executor.clone());
5858 fs.insert_tree(
5859 "/dir",
5860 json!({
5861 ".git": {},
5862 "src": {
5863 "main.rs": file_contents,
5864 }
5865 }),
5866 )
5867 .await;
5868
5869 fs.set_index_for_repo(
5870 Path::new("/dir/.git"),
5871 &[("src/main.rs".into(), staged_contents)],
5872 );
5873 fs.set_head_for_repo(
5874 Path::new("/dir/.git"),
5875 &[("src/main.rs".into(), committed_contents)],
5876 );
5877
5878 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5879 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5880 let language = rust_lang();
5881 language_registry.add(language.clone());
5882
5883 let buffer = project
5884 .update(cx, |project, cx| {
5885 project.open_local_buffer("/dir/src/main.rs", cx)
5886 })
5887 .await
5888 .unwrap();
5889 let uncommitted_diff = project
5890 .update(cx, |project, cx| {
5891 project.open_uncommitted_diff(buffer.clone(), cx)
5892 })
5893 .await
5894 .unwrap();
5895
5896 uncommitted_diff.read_with(cx, |diff, _| {
5897 assert_eq!(
5898 diff.base_text().and_then(|base| base.language().cloned()),
5899 Some(language)
5900 )
5901 });
5902
5903 cx.run_until_parked();
5904 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
5905 let snapshot = buffer.read(cx).snapshot();
5906 assert_hunks(
5907 uncommitted_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
5908 &snapshot,
5909 &uncommitted_diff.base_text_string().unwrap(),
5910 &[
5911 (
5912 0..1,
5913 "",
5914 "// print goodbye\n",
5915 DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
5916 ),
5917 (
5918 2..3,
5919 " println!(\"hello world\");\n",
5920 " println!(\"goodbye world\");\n",
5921 DiffHunkStatus::modified_none(),
5922 ),
5923 ],
5924 );
5925 });
5926
5927 let committed_contents = r#"
5928 // print goodbye
5929 fn main() {
5930 }
5931 "#
5932 .unindent();
5933
5934 fs.set_head_for_repo(
5935 Path::new("/dir/.git"),
5936 &[("src/main.rs".into(), committed_contents)],
5937 );
5938
5939 cx.run_until_parked();
5940 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
5941 let snapshot = buffer.read(cx).snapshot();
5942 assert_hunks(
5943 uncommitted_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
5944 &snapshot,
5945 &uncommitted_diff.base_text().unwrap().text(),
5946 &[(
5947 2..3,
5948 "",
5949 " println!(\"goodbye world\");\n",
5950 DiffHunkStatus::added_none(),
5951 )],
5952 );
5953 });
5954}
5955
5956#[gpui::test]
5957async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
5958 init_test(cx);
5959
5960 let committed_contents = r#"
5961 fn main() {
5962 println!("hello from HEAD");
5963 }
5964 "#
5965 .unindent();
5966 let file_contents = r#"
5967 fn main() {
5968 println!("hello from the working copy");
5969 }
5970 "#
5971 .unindent();
5972
5973 let fs = FakeFs::new(cx.background_executor.clone());
5974 fs.insert_tree(
5975 "/dir",
5976 json!({
5977 ".git": {},
5978 "src": {
5979 "main.rs": file_contents,
5980 }
5981 }),
5982 )
5983 .await;
5984
5985 fs.set_head_for_repo(
5986 Path::new("/dir/.git"),
5987 &[("src/main.rs".into(), committed_contents)],
5988 );
5989
5990 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
5991
5992 let buffer = project
5993 .update(cx, |project, cx| {
5994 project.open_local_buffer("/dir/src/main.rs", cx)
5995 })
5996 .await
5997 .unwrap();
5998 let uncommitted_diff = project
5999 .update(cx, |project, cx| {
6000 project.open_uncommitted_diff(buffer.clone(), cx)
6001 })
6002 .await
6003 .unwrap();
6004
6005 cx.run_until_parked();
6006 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
6007 let snapshot = buffer.read(cx).snapshot();
6008 assert_hunks(
6009 uncommitted_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
6010 &snapshot,
6011 &uncommitted_diff.base_text_string().unwrap(),
6012 &[(
6013 1..2,
6014 " println!(\"hello from HEAD\");\n",
6015 " println!(\"hello from the working copy\");\n",
6016 DiffHunkStatus::modified_none(),
6017 )],
6018 );
6019 });
6020}
6021
6022async fn search(
6023 project: &Entity<Project>,
6024 query: SearchQuery,
6025 cx: &mut gpui::TestAppContext,
6026) -> Result<HashMap<String, Vec<Range<usize>>>> {
6027 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
6028 let mut results = HashMap::default();
6029 while let Ok(search_result) = search_rx.recv().await {
6030 match search_result {
6031 SearchResult::Buffer { buffer, ranges } => {
6032 results.entry(buffer).or_insert(ranges);
6033 }
6034 SearchResult::LimitReached => {}
6035 }
6036 }
6037 Ok(results
6038 .into_iter()
6039 .map(|(buffer, ranges)| {
6040 buffer.update(cx, |buffer, cx| {
6041 let path = buffer
6042 .file()
6043 .unwrap()
6044 .full_path(cx)
6045 .to_string_lossy()
6046 .to_string();
6047 let ranges = ranges
6048 .into_iter()
6049 .map(|range| range.to_offset(buffer))
6050 .collect::<Vec<_>>();
6051 (path, ranges)
6052 })
6053 })
6054 .collect())
6055}
6056
6057pub fn init_test(cx: &mut gpui::TestAppContext) {
6058 if std::env::var("RUST_LOG").is_ok() {
6059 env_logger::try_init().ok();
6060 }
6061
6062 cx.update(|cx| {
6063 let settings_store = SettingsStore::test(cx);
6064 cx.set_global(settings_store);
6065 release_channel::init(SemanticVersion::default(), cx);
6066 language::init(cx);
6067 Project::init_settings(cx);
6068 });
6069}
6070
6071fn json_lang() -> Arc<Language> {
6072 Arc::new(Language::new(
6073 LanguageConfig {
6074 name: "JSON".into(),
6075 matcher: LanguageMatcher {
6076 path_suffixes: vec!["json".to_string()],
6077 ..Default::default()
6078 },
6079 ..Default::default()
6080 },
6081 None,
6082 ))
6083}
6084
6085fn js_lang() -> Arc<Language> {
6086 Arc::new(Language::new(
6087 LanguageConfig {
6088 name: "JavaScript".into(),
6089 matcher: LanguageMatcher {
6090 path_suffixes: vec!["js".to_string()],
6091 ..Default::default()
6092 },
6093 ..Default::default()
6094 },
6095 None,
6096 ))
6097}
6098
6099fn rust_lang() -> Arc<Language> {
6100 Arc::new(Language::new(
6101 LanguageConfig {
6102 name: "Rust".into(),
6103 matcher: LanguageMatcher {
6104 path_suffixes: vec!["rs".to_string()],
6105 ..Default::default()
6106 },
6107 ..Default::default()
6108 },
6109 Some(tree_sitter_rust::LANGUAGE.into()),
6110 ))
6111}
6112
6113fn typescript_lang() -> Arc<Language> {
6114 Arc::new(Language::new(
6115 LanguageConfig {
6116 name: "TypeScript".into(),
6117 matcher: LanguageMatcher {
6118 path_suffixes: vec!["ts".to_string()],
6119 ..Default::default()
6120 },
6121 ..Default::default()
6122 },
6123 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
6124 ))
6125}
6126
6127fn tsx_lang() -> Arc<Language> {
6128 Arc::new(Language::new(
6129 LanguageConfig {
6130 name: "tsx".into(),
6131 matcher: LanguageMatcher {
6132 path_suffixes: vec!["tsx".to_string()],
6133 ..Default::default()
6134 },
6135 ..Default::default()
6136 },
6137 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
6138 ))
6139}
6140
6141fn get_all_tasks(
6142 project: &Entity<Project>,
6143 task_contexts: &TaskContexts,
6144 cx: &mut App,
6145) -> Vec<(TaskSourceKind, ResolvedTask)> {
6146 let (mut old, new) = project.update(cx, |project, cx| {
6147 project
6148 .task_store
6149 .read(cx)
6150 .task_inventory()
6151 .unwrap()
6152 .read(cx)
6153 .used_and_current_resolved_tasks(task_contexts, cx)
6154 });
6155 old.extend(new);
6156 old
6157}