1use crate::{task_inventory::TaskContexts, Event, *};
2use buffer_diff::{assert_hunks, DiffHunkSecondaryStatus, DiffHunkStatus};
3use fs::FakeFs;
4use futures::{future, StreamExt};
5use gpui::{App, SemanticVersion, UpdateGlobal};
6use http_client::Url;
7use language::{
8 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
9 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
10 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
11 OffsetRangeExt, Point, ToPoint,
12};
13use lsp::{
14 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
15 NumberOrString, TextDocumentEdit, WillRenameFiles,
16};
17use parking_lot::Mutex;
18use pretty_assertions::{assert_eq, assert_matches};
19use serde_json::json;
20#[cfg(not(windows))]
21use std::os;
22use std::{str::FromStr, sync::OnceLock};
23
24use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
25use task::{ResolvedTask, TaskContext};
26use unindent::Unindent as _;
27use util::{
28 assert_set_eq, path,
29 paths::PathMatcher,
30 separator,
31 test::{marked_text_offsets, TempTree},
32 uri, TryFutureExt as _,
33};
34
35#[gpui::test]
36async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
37 cx.executor().allow_parking();
38
39 let (tx, mut rx) = futures::channel::mpsc::unbounded();
40 let _thread = std::thread::spawn(move || {
41 #[cfg(not(target_os = "windows"))]
42 std::fs::metadata("/tmp").unwrap();
43 #[cfg(target_os = "windows")]
44 std::fs::metadata("C:/Windows").unwrap();
45 std::thread::sleep(Duration::from_millis(1000));
46 tx.unbounded_send(1).unwrap();
47 });
48 rx.next().await.unwrap();
49}
50
51#[gpui::test]
52async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
53 cx.executor().allow_parking();
54
55 let io_task = smol::unblock(move || {
56 println!("sleeping on thread {:?}", std::thread::current().id());
57 std::thread::sleep(Duration::from_millis(10));
58 1
59 });
60
61 let task = cx.foreground_executor().spawn(async move {
62 io_task.await;
63 });
64
65 task.await;
66}
67
68#[cfg(not(windows))]
69#[gpui::test]
70async fn test_symlinks(cx: &mut gpui::TestAppContext) {
71 init_test(cx);
72 cx.executor().allow_parking();
73
74 let dir = TempTree::new(json!({
75 "root": {
76 "apple": "",
77 "banana": {
78 "carrot": {
79 "date": "",
80 "endive": "",
81 }
82 },
83 "fennel": {
84 "grape": "",
85 }
86 }
87 }));
88
89 let root_link_path = dir.path().join("root_link");
90 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
91 os::unix::fs::symlink(
92 dir.path().join("root/fennel"),
93 dir.path().join("root/finnochio"),
94 )
95 .unwrap();
96
97 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
98
99 project.update(cx, |project, cx| {
100 let tree = project.worktrees(cx).next().unwrap().read(cx);
101 assert_eq!(tree.file_count(), 5);
102 assert_eq!(
103 tree.inode_for_path("fennel/grape"),
104 tree.inode_for_path("finnochio/grape")
105 );
106 });
107}
108
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Layout: a root `.editorconfig` (root = true) plus `.zed/settings.json`,
    // and a nested `b/.editorconfig` that overrides the root config for Rust.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into a FakeFs so the project can watch it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by .editorconfig's glob "*.rs",
        // so the .zed/settings.json tab_size of 8 applies.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
198
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Two `.zed` directories: one at the worktree root and one nested under
    // `b/`, each with its own settings.json (tab_size) and tasks.json.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_contexts = TaskContexts::default();

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // Source kind of the tasks defined by the root-level `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolution: each file picks up the nearest `.zed`
            // settings.json above it.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files are discovered; the nested one sorts first.
    // NOTE(review): the doubled backslash in the windows id_base reflects how
    // the path is quoted on that platform — confirm against TaskSourceKind.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Schedule the root task (making it most-recently-used) and register a
    // global (file-based) tasks.json with one extra task.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // The recently-scheduled root task now sorts first, followed by the nested
    // worktree task, then the newly added global task.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
400
401#[gpui::test]
402async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
403 init_test(cx);
404 TaskStore::init(None);
405
406 let fs = FakeFs::new(cx.executor());
407 fs.insert_tree(
408 path!("/dir"),
409 json!({
410 ".zed": {
411 "tasks.json": r#"[{
412 "label": "test worktree root",
413 "command": "echo $ZED_WORKTREE_ROOT"
414 }]"#,
415 },
416 "a": {
417 "a.rs": "fn a() {\n A\n}"
418 },
419 }),
420 )
421 .await;
422
423 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
424 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
425
426 cx.executor().run_until_parked();
427 let worktree_id = cx.update(|cx| {
428 project.update(cx, |project, cx| {
429 project.worktrees(cx).next().unwrap().read(cx).id()
430 })
431 });
432
433 let active_non_worktree_item_tasks = cx.update(|cx| {
434 get_all_tasks(
435 &project,
436 Some(worktree_id),
437 &TaskContexts {
438 active_item_context: Some((Some(worktree_id), TaskContext::default())),
439 active_worktree_context: None,
440 other_worktree_contexts: Vec::new(),
441 },
442 cx,
443 )
444 });
445 assert!(
446 active_non_worktree_item_tasks.is_empty(),
447 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
448 );
449
450 let active_worktree_tasks = cx.update(|cx| {
451 get_all_tasks(
452 &project,
453 Some(worktree_id),
454 &TaskContexts {
455 active_item_context: Some((Some(worktree_id), TaskContext::default())),
456 active_worktree_context: Some((worktree_id, {
457 let mut worktree_context = TaskContext::default();
458 worktree_context
459 .task_variables
460 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
461 worktree_context
462 })),
463 other_worktree_contexts: Vec::new(),
464 },
465 cx,
466 )
467 });
468 assert_eq!(
469 active_worktree_tasks
470 .into_iter()
471 .map(|(source_kind, task)| {
472 let resolved = task.resolved.unwrap();
473 (source_kind, resolved.command)
474 })
475 .collect::<Vec<_>>(),
476 vec![(
477 TaskSourceKind::Worktree {
478 id: worktree_id,
479 directory_in_worktree: PathBuf::from(separator!(".zed")),
480 id_base: if cfg!(windows) {
481 "local worktree tasks from directory \".zed\"".into()
482 } else {
483 "local worktree tasks from directory \".zed\"".into()
484 },
485 },
486 "echo /dir".to_string(),
487 )]
488 );
489}
490
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Four files: two Rust, one JSON, and a TOML file with no language server.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake LSP servers for Rust and JSON, each advertising distinct
    // completion trigger characters and save notifications, so the test can
    // tell which server a buffer was configured by.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so no triggers were configured.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    // A same-extension rename shows up as a close of the old URI followed by
    // an open of the new one, on the same (Rust) server.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared by the language switch below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers must receive a shutdown request before the restart completes.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two DidOpen notifications is unspecified, hence assert_set_eq).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
892
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // `target/` is gitignored; its contents should stay unloaded until a
    // language server explicitly registers a watch inside it.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for counting the directory scans triggered by the watch registration.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: an exact path, a glob within src/, and a
    // recursive glob inside the ignored target/y directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Accumulate watched-file events in URI order for deterministic assertions.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // No events yet, but the registration caused target/y to be scanned.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
1092
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open each file as its own single-file worktree; diagnostics published
    // for one URI must land only in the matching buffer.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Publish one diagnostic per file: an ERROR on `a` in a.rs and a WARNING
    // on `b` in b.rs, both covering columns 4..5 of line 0.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer shows only its own diagnostic, at the expected span.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1194
1195#[gpui::test]
1196async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1197 init_test(cx);
1198
1199 let fs = FakeFs::new(cx.executor());
1200 fs.insert_tree(
1201 path!("/root"),
1202 json!({
1203 "dir": {
1204 ".git": {
1205 "HEAD": "ref: refs/heads/main",
1206 },
1207 ".gitignore": "b.rs",
1208 "a.rs": "let a = 1;",
1209 "b.rs": "let b = 2;",
1210 },
1211 "other.rs": "let b = c;"
1212 }),
1213 )
1214 .await;
1215
1216 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1217 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1218 let (worktree, _) = project
1219 .update(cx, |project, cx| {
1220 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1221 })
1222 .await
1223 .unwrap();
1224 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1225
1226 let (worktree, _) = project
1227 .update(cx, |project, cx| {
1228 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1229 })
1230 .await
1231 .unwrap();
1232 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1233
1234 let server_id = LanguageServerId(0);
1235 lsp_store.update(cx, |lsp_store, cx| {
1236 lsp_store
1237 .update_diagnostics(
1238 server_id,
1239 lsp::PublishDiagnosticsParams {
1240 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1241 version: None,
1242 diagnostics: vec![lsp::Diagnostic {
1243 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1244 severity: Some(lsp::DiagnosticSeverity::ERROR),
1245 message: "unused variable 'b'".to_string(),
1246 ..Default::default()
1247 }],
1248 },
1249 &[],
1250 cx,
1251 )
1252 .unwrap();
1253 lsp_store
1254 .update_diagnostics(
1255 server_id,
1256 lsp::PublishDiagnosticsParams {
1257 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1258 version: None,
1259 diagnostics: vec![lsp::Diagnostic {
1260 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1261 severity: Some(lsp::DiagnosticSeverity::ERROR),
1262 message: "unknown variable 'c'".to_string(),
1263 ..Default::default()
1264 }],
1265 },
1266 &[],
1267 cx,
1268 )
1269 .unwrap();
1270 });
1271
1272 let main_ignored_buffer = project
1273 .update(cx, |project, cx| {
1274 project.open_buffer((main_worktree_id, "b.rs"), cx)
1275 })
1276 .await
1277 .unwrap();
1278 main_ignored_buffer.update(cx, |buffer, _| {
1279 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1280 assert_eq!(
1281 chunks
1282 .iter()
1283 .map(|(s, d)| (s.as_str(), *d))
1284 .collect::<Vec<_>>(),
1285 &[
1286 ("let ", None),
1287 ("b", Some(DiagnosticSeverity::ERROR)),
1288 (" = 2;", None),
1289 ],
1290 "Gigitnored buffers should still get in-buffer diagnostics",
1291 );
1292 });
1293 let other_buffer = project
1294 .update(cx, |project, cx| {
1295 project.open_buffer((other_worktree_id, ""), cx)
1296 })
1297 .await
1298 .unwrap();
1299 other_buffer.update(cx, |buffer, _| {
1300 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1301 assert_eq!(
1302 chunks
1303 .iter()
1304 .map(|(s, d)| (s.as_str(), *d))
1305 .collect::<Vec<_>>(),
1306 &[
1307 ("let b = ", None),
1308 ("c", Some(DiagnosticSeverity::ERROR)),
1309 (";", None),
1310 ],
1311 "Buffers from hidden projects should still get in-buffer diagnostics"
1312 );
1313 });
1314
1315 project.update(cx, |project, cx| {
1316 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1317 assert_eq!(
1318 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1319 vec![(
1320 ProjectPath {
1321 worktree_id: main_worktree_id,
1322 path: Arc::from(Path::new("b.rs")),
1323 },
1324 server_id,
1325 DiagnosticSummary {
1326 error_count: 1,
1327 warning_count: 0,
1328 }
1329 )]
1330 );
1331 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1332 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1333 });
1334}
1335
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Checks the project-level event sequence produced while a language
    // server runs disk-based diagnostics under its registered progress token,
    // and that repeated empty publishes do not emit duplicate update events.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting progress with the registered token marks the beginning of a
    // disk-based diagnostics pass.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics mid-pass emits a DiagnosticsUpdated event for
    // the affected path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the progress token finishes the disk-based diagnostics pass.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is visible in the newly opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // The second empty publish was a no-op: no further event is pending.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1471
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Restarting a language server while its disk-based diagnostics pass is
    // still in flight must not leave the project reporting stale progress:
    // the old server's unfinished token is discarded and only the new
    // server's progress is tracked.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // The replacement server gets a fresh id (1).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1558
1559#[gpui::test]
1560async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1561 init_test(cx);
1562
1563 let fs = FakeFs::new(cx.executor());
1564 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
1565
1566 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1567
1568 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1569 language_registry.add(rust_lang());
1570 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1571
1572 let (buffer, _) = project
1573 .update(cx, |project, cx| {
1574 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1575 })
1576 .await
1577 .unwrap();
1578
1579 // Publish diagnostics
1580 let fake_server = fake_servers.next().await.unwrap();
1581 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1582 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1583 version: None,
1584 diagnostics: vec![lsp::Diagnostic {
1585 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1586 severity: Some(lsp::DiagnosticSeverity::ERROR),
1587 message: "the message".to_string(),
1588 ..Default::default()
1589 }],
1590 });
1591
1592 cx.executor().run_until_parked();
1593 buffer.update(cx, |buffer, _| {
1594 assert_eq!(
1595 buffer
1596 .snapshot()
1597 .diagnostics_in_range::<_, usize>(0..1, false)
1598 .map(|entry| entry.diagnostic.message.clone())
1599 .collect::<Vec<_>>(),
1600 ["the message".to_string()]
1601 );
1602 });
1603 project.update(cx, |project, cx| {
1604 assert_eq!(
1605 project.diagnostic_summary(false, cx),
1606 DiagnosticSummary {
1607 error_count: 1,
1608 warning_count: 0,
1609 }
1610 );
1611 });
1612
1613 project.update(cx, |project, cx| {
1614 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1615 });
1616
1617 // The diagnostics are cleared.
1618 cx.executor().run_until_parked();
1619 buffer.update(cx, |buffer, _| {
1620 assert_eq!(
1621 buffer
1622 .snapshot()
1623 .diagnostics_in_range::<_, usize>(0..1, false)
1624 .map(|entry| entry.diagnostic.message.clone())
1625 .collect::<Vec<_>>(),
1626 Vec::<String>::new(),
1627 );
1628 });
1629 project.update(cx, |project, cx| {
1630 assert_eq!(
1631 project.diagnostic_summary(false, cx),
1632 DiagnosticSummary {
1633 error_count: 0,
1634 warning_count: 0,
1635 }
1636 );
1637 });
1638}
1639
1640#[gpui::test]
1641async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1642 init_test(cx);
1643
1644 let fs = FakeFs::new(cx.executor());
1645 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1646
1647 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1648 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1649
1650 language_registry.add(rust_lang());
1651 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1652
1653 let (buffer, _handle) = project
1654 .update(cx, |project, cx| {
1655 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1656 })
1657 .await
1658 .unwrap();
1659
1660 // Before restarting the server, report diagnostics with an unknown buffer version.
1661 let fake_server = fake_servers.next().await.unwrap();
1662 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1663 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1664 version: Some(10000),
1665 diagnostics: Vec::new(),
1666 });
1667 cx.executor().run_until_parked();
1668 project.update(cx, |project, cx| {
1669 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1670 });
1671
1672 let mut fake_server = fake_servers.next().await.unwrap();
1673 let notification = fake_server
1674 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1675 .await
1676 .text_document;
1677 assert_eq!(notification.version, 0);
1678}
1679
1680#[gpui::test]
1681async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
1682 init_test(cx);
1683
1684 let progress_token = "the-progress-token";
1685
1686 let fs = FakeFs::new(cx.executor());
1687 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1688
1689 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1690
1691 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1692 language_registry.add(rust_lang());
1693 let mut fake_servers = language_registry.register_fake_lsp(
1694 "Rust",
1695 FakeLspAdapter {
1696 name: "the-language-server",
1697 disk_based_diagnostics_sources: vec!["disk".into()],
1698 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1699 ..Default::default()
1700 },
1701 );
1702
1703 let (buffer, _handle) = project
1704 .update(cx, |project, cx| {
1705 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1706 })
1707 .await
1708 .unwrap();
1709
1710 // Simulate diagnostics starting to update.
1711 let mut fake_server = fake_servers.next().await.unwrap();
1712 fake_server
1713 .start_progress_with(
1714 "another-token",
1715 lsp::WorkDoneProgressBegin {
1716 cancellable: Some(false),
1717 ..Default::default()
1718 },
1719 )
1720 .await;
1721 fake_server
1722 .start_progress_with(
1723 progress_token,
1724 lsp::WorkDoneProgressBegin {
1725 cancellable: Some(true),
1726 ..Default::default()
1727 },
1728 )
1729 .await;
1730 cx.executor().run_until_parked();
1731
1732 project.update(cx, |project, cx| {
1733 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
1734 });
1735
1736 let cancel_notification = fake_server
1737 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
1738 .await;
1739 assert_eq!(
1740 cancel_notification.token,
1741 NumberOrString::String(progress_token.into())
1742 );
1743}
1744
1745#[gpui::test]
1746async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1747 init_test(cx);
1748
1749 let fs = FakeFs::new(cx.executor());
1750 fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
1751 .await;
1752
1753 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1754 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1755
1756 let mut fake_rust_servers = language_registry.register_fake_lsp(
1757 "Rust",
1758 FakeLspAdapter {
1759 name: "rust-lsp",
1760 ..Default::default()
1761 },
1762 );
1763 let mut fake_js_servers = language_registry.register_fake_lsp(
1764 "JavaScript",
1765 FakeLspAdapter {
1766 name: "js-lsp",
1767 ..Default::default()
1768 },
1769 );
1770 language_registry.add(rust_lang());
1771 language_registry.add(js_lang());
1772
1773 let _rs_buffer = project
1774 .update(cx, |project, cx| {
1775 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1776 })
1777 .await
1778 .unwrap();
1779 let _js_buffer = project
1780 .update(cx, |project, cx| {
1781 project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
1782 })
1783 .await
1784 .unwrap();
1785
1786 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1787 assert_eq!(
1788 fake_rust_server_1
1789 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1790 .await
1791 .text_document
1792 .uri
1793 .as_str(),
1794 uri!("file:///dir/a.rs")
1795 );
1796
1797 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1798 assert_eq!(
1799 fake_js_server
1800 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1801 .await
1802 .text_document
1803 .uri
1804 .as_str(),
1805 uri!("file:///dir/b.js")
1806 );
1807
1808 // Disable Rust language server, ensuring only that server gets stopped.
1809 cx.update(|cx| {
1810 SettingsStore::update_global(cx, |settings, cx| {
1811 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1812 settings.languages.insert(
1813 "Rust".into(),
1814 LanguageSettingsContent {
1815 enable_language_server: Some(false),
1816 ..Default::default()
1817 },
1818 );
1819 });
1820 })
1821 });
1822 fake_rust_server_1
1823 .receive_notification::<lsp::notification::Exit>()
1824 .await;
1825
1826 // Enable Rust and disable JavaScript language servers, ensuring that the
1827 // former gets started again and that the latter stops.
1828 cx.update(|cx| {
1829 SettingsStore::update_global(cx, |settings, cx| {
1830 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1831 settings.languages.insert(
1832 LanguageName::new("Rust"),
1833 LanguageSettingsContent {
1834 enable_language_server: Some(true),
1835 ..Default::default()
1836 },
1837 );
1838 settings.languages.insert(
1839 LanguageName::new("JavaScript"),
1840 LanguageSettingsContent {
1841 enable_language_server: Some(false),
1842 ..Default::default()
1843 },
1844 );
1845 });
1846 })
1847 });
1848 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1849 assert_eq!(
1850 fake_rust_server_2
1851 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1852 .await
1853 .text_document
1854 .uri
1855 .as_str(),
1856 uri!("file:///dir/a.rs")
1857 );
1858 fake_js_server
1859 .receive_notification::<lsp::notification::Exit>()
1860 .await;
1861}
1862
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics published against an *older* buffer version
    // are translated through the edits made since that version, that
    // overlapping diagnostics highlight correctly, and that out-of-order
    // diagnostic lists are handled.
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (Positions shift by the two inserted newlines; group_ids reflect the
    // order in which the diagnostic groups were created.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // The error takes precedence where the two ranges overlap; the
        // warning covers the remainder of its own range.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    // Results are reported sorted by buffer position, with ranges adjusted
    // for the edits made after this publish's buffer version.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2148
// Regression test: diagnostics whose LSP range is empty (start == end) must
// still be visible when the buffer is rendered. The buffer widens such
// ranges to cover an adjacent character, as asserted on the chunked output
// below.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish two zero-width diagnostics: one in the middle of line 0 (before
    // the `;`) and one at the very end of line 1.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                ..Default::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                ..Default::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2221
// Diagnostics published by two different language servers for the same file
// must be tracked independently: the summary below counts one error from
// each server rather than letting one server's entries replace the other's.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Server 0 reports one error on the same range as server 1 below.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // Server 1 reports its own error; it must not clobber server 0's.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' errors contribute to the aggregate summary.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2278
// When a language server computes edits against an older document version,
// `edits_from_lsp` must translate those edits through every buffer change
// made since that version, so they still land in the intended locations.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw when the file was opened;
    // the edits below will be expressed against this (soon-to-be-stale)
    // version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // All positions in these edits refer to the original (pre-edit) version
    // of the document, identified by `lsp_document_version`.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the interleaved comments
    // that were inserted after the server computed its edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2433
// `edits_from_lsp` should reduce a sprawling server-provided diff (delete
// and reinsert most of the file) down to the minimal set of buffer edits,
// so that undo history and anchors are not needlessly disturbed.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four server edits collapse into just two minimal buffer edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2544
// `edits_from_lsp` must tolerate malformed server output: unordered edits,
// an inverted range (end before start), and a range whose end line lies
// beyond the end of the document. The result should still be the same
// minimal, well-formed pair of edits as in the well-behaved case above.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0, 4) precedes start (0, 8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Out-of-bounds end line (99); should be clipped to the
                    // end of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the normalized edits are minimal and
        // sorted.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2651
2652fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2653 buffer: &Buffer,
2654 range: Range<T>,
2655) -> Vec<(String, Option<DiagnosticSeverity>)> {
2656 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2657 for chunk in buffer.snapshot().chunks(range, true) {
2658 if chunks.last().map_or(false, |prev_chunk| {
2659 prev_chunk.1 == chunk.diagnostic_severity
2660 }) {
2661 chunks.last_mut().unwrap().0.push_str(chunk.text);
2662 } else {
2663 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2664 }
2665 }
2666 chunks
2667}
2668
// Go-to-definition into a file outside the project should open the target in
// an invisible single-file worktree, reuse the existing language server
// (rather than starting a new one), and drop that worktree once the last
// reference to the definition is released.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project; `a.rs` exists on disk but is not in
    // any worktree yet.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server resolves the definition to a location inside `a.rs`.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // `a.rs` was added as an invisible worktree to host the target buffer.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Returns each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2766
// When the server returns completion items without a `textEdit` (only a
// label and optional `insertText`), the client must compute the replacement
// range itself from the word surrounding the cursor. Covers both an
// identifier suffix (`fqn`) and a partial string token (`cmp`).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completion at the end of the identifier `fqn`. The item
    // supplies `insert_text`, which should be used as the new text.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The computed range covers the 3-character word `fqn` before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completion inside a string literal, with no `insert_text`; the
    // label itself becomes the new text.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The computed range covers `cmp` before the closing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2860
// Completion text that contains `\r` or `\r\n` line endings must be
// normalized to `\n` before insertion, matching the buffer's line-ending
// handling.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert text mixes bare `\r` and `\r\n` line endings.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both `\r` and `\r\n` are normalized to `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2923
// Applying a code action that carries a command (and no edits) must resolve
// the action, execute its command, honor the server's `workspace/applyEdit`
// request issued during command execution, and surface those edits in the
// returned project transaction (so they are undoable as a unit).
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        // The action below must be resolved before applying.
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3060
3061#[gpui::test(iterations = 10)]
3062async fn test_save_file(cx: &mut gpui::TestAppContext) {
3063 init_test(cx);
3064
3065 let fs = FakeFs::new(cx.executor());
3066 fs.insert_tree(
3067 path!("/dir"),
3068 json!({
3069 "file1": "the old contents",
3070 }),
3071 )
3072 .await;
3073
3074 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3075 let buffer = project
3076 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3077 .await
3078 .unwrap();
3079 buffer.update(cx, |buffer, cx| {
3080 assert_eq!(buffer.text(), "the old contents");
3081 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3082 });
3083
3084 project
3085 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3086 .await
3087 .unwrap();
3088
3089 let new_text = fs
3090 .load(Path::new(path!("/dir/file1")))
3091 .await
3092 .unwrap()
3093 .replace("\r\n", "\n");
3094 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3095}
3096
3097#[gpui::test(iterations = 30)]
3098async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3099 init_test(cx);
3100
3101 let fs = FakeFs::new(cx.executor().clone());
3102 fs.insert_tree(
3103 path!("/dir"),
3104 json!({
3105 "file1": "the original contents",
3106 }),
3107 )
3108 .await;
3109
3110 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3111 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3112 let buffer = project
3113 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3114 .await
3115 .unwrap();
3116
3117 // Simulate buffer diffs being slow, so that they don't complete before
3118 // the next file change occurs.
3119 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3120
3121 // Change the buffer's file on disk, and then wait for the file change
3122 // to be detected by the worktree, so that the buffer starts reloading.
3123 fs.save(
3124 path!("/dir/file1").as_ref(),
3125 &"the first contents".into(),
3126 Default::default(),
3127 )
3128 .await
3129 .unwrap();
3130 worktree.next_event(cx).await;
3131
3132 // Change the buffer's file again. Depending on the random seed, the
3133 // previous file change may still be in progress.
3134 fs.save(
3135 path!("/dir/file1").as_ref(),
3136 &"the second contents".into(),
3137 Default::default(),
3138 )
3139 .await
3140 .unwrap();
3141 worktree.next_event(cx).await;
3142
3143 cx.executor().run_until_parked();
3144 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3145 buffer.read_with(cx, |buffer, _| {
3146 assert_eq!(buffer.text(), on_disk_text);
3147 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3148 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3149 });
3150}
3151
3152#[gpui::test(iterations = 30)]
3153async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3154 init_test(cx);
3155
3156 let fs = FakeFs::new(cx.executor().clone());
3157 fs.insert_tree(
3158 path!("/dir"),
3159 json!({
3160 "file1": "the original contents",
3161 }),
3162 )
3163 .await;
3164
3165 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3166 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3167 let buffer = project
3168 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3169 .await
3170 .unwrap();
3171
3172 // Simulate buffer diffs being slow, so that they don't complete before
3173 // the next file change occurs.
3174 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3175
3176 // Change the buffer's file on disk, and then wait for the file change
3177 // to be detected by the worktree, so that the buffer starts reloading.
3178 fs.save(
3179 path!("/dir/file1").as_ref(),
3180 &"the first contents".into(),
3181 Default::default(),
3182 )
3183 .await
3184 .unwrap();
3185 worktree.next_event(cx).await;
3186
3187 cx.executor()
3188 .spawn(cx.executor().simulate_random_delay())
3189 .await;
3190
3191 // Perform a noop edit, causing the buffer's version to increase.
3192 buffer.update(cx, |buffer, cx| {
3193 buffer.edit([(0..0, " ")], None, cx);
3194 buffer.undo(cx);
3195 });
3196
3197 cx.executor().run_until_parked();
3198 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3199 buffer.read_with(cx, |buffer, _| {
3200 let buffer_text = buffer.text();
3201 if buffer_text == on_disk_text {
3202 assert!(
3203 !buffer.is_dirty() && !buffer.has_conflict(),
3204 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3205 );
3206 }
3207 // If the file change occurred while the buffer was processing the first
3208 // change, the buffer will be in a conflicting state.
3209 else {
3210 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3211 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3212 }
3213 });
3214}
3215
3216#[gpui::test]
3217async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3218 init_test(cx);
3219
3220 let fs = FakeFs::new(cx.executor());
3221 fs.insert_tree(
3222 path!("/dir"),
3223 json!({
3224 "file1": "the old contents",
3225 }),
3226 )
3227 .await;
3228
3229 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3230 let buffer = project
3231 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3232 .await
3233 .unwrap();
3234 buffer.update(cx, |buffer, cx| {
3235 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3236 });
3237
3238 project
3239 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3240 .await
3241 .unwrap();
3242
3243 let new_text = fs
3244 .load(Path::new(path!("/dir/file1")))
3245 .await
3246 .unwrap()
3247 .replace("\r\n", "\n");
3248 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3249}
3250
// Saving an untitled buffer to a path should: write its contents to disk,
// clear the dirty flag, re-detect the language from the new file name
// (Plain Text -> Rust), and register the buffer so that opening the same
// path yields the identical buffer entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer has no file name, so it starts out as Plain Text.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    // After saving, the buffer is clean and its language is re-detected from
    // the `.rs` extension.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the saved path returns the same buffer entity, not a copy.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3302
// Verifies two things after files are renamed/deleted on the real filesystem:
// (1) worktree entries and open buffers keep stable ids/paths across the
// rescan, and (2) a remote copy of the worktree converges to the same paths
// once it replays the local worktree's update stream.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // This test uses TempTree + RealFs, so blocking filesystem work is allowed.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: current worktree entry id for a path (panics if the entry is gone).
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    // Entry ids captured before any filesystem mutation; they must survive the
    // renames below.
    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so they can be replayed
    // into the remote copy later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the post-rename layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Renamed/moved entries keep their original ids.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers track their files to the new paths...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // ...while the deleted file's buffer keeps its last-known path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3468
// Renaming a directory must preserve the worktree entry ids of both the
// directory and the files inside it, and must not dirty a buffer that was
// opened under the old path.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: current worktree entry id for a path (panics if absent).
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    // Capture ids before the rename so we can assert they are stable.
    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the containing directory "a" -> "b" through the project API.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids are unchanged under the new paths, and the open buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3520
3521#[gpui::test]
3522async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3523 init_test(cx);
3524
3525 let fs = FakeFs::new(cx.executor());
3526 fs.insert_tree(
3527 "/dir",
3528 json!({
3529 "a.txt": "a-contents",
3530 "b.txt": "b-contents",
3531 }),
3532 )
3533 .await;
3534
3535 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3536
3537 // Spawn multiple tasks to open paths, repeating some paths.
3538 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3539 (
3540 p.open_local_buffer("/dir/a.txt", cx),
3541 p.open_local_buffer("/dir/b.txt", cx),
3542 p.open_local_buffer("/dir/a.txt", cx),
3543 )
3544 });
3545
3546 let buffer_a_1 = buffer_a_1.await.unwrap();
3547 let buffer_a_2 = buffer_a_2.await.unwrap();
3548 let buffer_b = buffer_b.await.unwrap();
3549 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3550 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3551
3552 // There is only one buffer per path.
3553 let buffer_a_id = buffer_a_1.entity_id();
3554 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3555
3556 // Open the same path again while it is still open.
3557 drop(buffer_a_1);
3558 let buffer_a_3 = project
3559 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3560 .await
3561 .unwrap();
3562
3563 // There's still only one buffer per path.
3564 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3565}
3566
3567#[gpui::test]
3568async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3569 init_test(cx);
3570
3571 let fs = FakeFs::new(cx.executor());
3572 fs.insert_tree(
3573 path!("/dir"),
3574 json!({
3575 "file1": "abc",
3576 "file2": "def",
3577 "file3": "ghi",
3578 }),
3579 )
3580 .await;
3581
3582 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3583
3584 let buffer1 = project
3585 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3586 .await
3587 .unwrap();
3588 let events = Arc::new(Mutex::new(Vec::new()));
3589
3590 // initially, the buffer isn't dirty.
3591 buffer1.update(cx, |buffer, cx| {
3592 cx.subscribe(&buffer1, {
3593 let events = events.clone();
3594 move |_, _, event, _| match event {
3595 BufferEvent::Operation { .. } => {}
3596 _ => events.lock().push(event.clone()),
3597 }
3598 })
3599 .detach();
3600
3601 assert!(!buffer.is_dirty());
3602 assert!(events.lock().is_empty());
3603
3604 buffer.edit([(1..2, "")], None, cx);
3605 });
3606
3607 // after the first edit, the buffer is dirty, and emits a dirtied event.
3608 buffer1.update(cx, |buffer, cx| {
3609 assert!(buffer.text() == "ac");
3610 assert!(buffer.is_dirty());
3611 assert_eq!(
3612 *events.lock(),
3613 &[
3614 language::BufferEvent::Edited,
3615 language::BufferEvent::DirtyChanged
3616 ]
3617 );
3618 events.lock().clear();
3619 buffer.did_save(
3620 buffer.version(),
3621 buffer.file().unwrap().disk_state().mtime(),
3622 cx,
3623 );
3624 });
3625
3626 // after saving, the buffer is not dirty, and emits a saved event.
3627 buffer1.update(cx, |buffer, cx| {
3628 assert!(!buffer.is_dirty());
3629 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
3630 events.lock().clear();
3631
3632 buffer.edit([(1..1, "B")], None, cx);
3633 buffer.edit([(2..2, "D")], None, cx);
3634 });
3635
3636 // after editing again, the buffer is dirty, and emits another dirty event.
3637 buffer1.update(cx, |buffer, cx| {
3638 assert!(buffer.text() == "aBDc");
3639 assert!(buffer.is_dirty());
3640 assert_eq!(
3641 *events.lock(),
3642 &[
3643 language::BufferEvent::Edited,
3644 language::BufferEvent::DirtyChanged,
3645 language::BufferEvent::Edited,
3646 ],
3647 );
3648 events.lock().clear();
3649
3650 // After restoring the buffer to its previously-saved state,
3651 // the buffer is not considered dirty anymore.
3652 buffer.edit([(1..3, "")], None, cx);
3653 assert!(buffer.text() == "ac");
3654 assert!(!buffer.is_dirty());
3655 });
3656
3657 assert_eq!(
3658 *events.lock(),
3659 &[
3660 language::BufferEvent::Edited,
3661 language::BufferEvent::DirtyChanged
3662 ]
3663 );
3664
3665 // When a file is deleted, the buffer is considered dirty.
3666 let events = Arc::new(Mutex::new(Vec::new()));
3667 let buffer2 = project
3668 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
3669 .await
3670 .unwrap();
3671 buffer2.update(cx, |_, cx| {
3672 cx.subscribe(&buffer2, {
3673 let events = events.clone();
3674 move |_, _, event, _| events.lock().push(event.clone())
3675 })
3676 .detach();
3677 });
3678
3679 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
3680 .await
3681 .unwrap();
3682 cx.executor().run_until_parked();
3683 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3684 assert_eq!(
3685 *events.lock(),
3686 &[
3687 language::BufferEvent::DirtyChanged,
3688 language::BufferEvent::FileHandleChanged
3689 ]
3690 );
3691
3692 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3693 let events = Arc::new(Mutex::new(Vec::new()));
3694 let buffer3 = project
3695 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
3696 .await
3697 .unwrap();
3698 buffer3.update(cx, |_, cx| {
3699 cx.subscribe(&buffer3, {
3700 let events = events.clone();
3701 move |_, _, event, _| events.lock().push(event.clone())
3702 })
3703 .detach();
3704 });
3705
3706 buffer3.update(cx, |buffer, cx| {
3707 buffer.edit([(0..0, "x")], None, cx);
3708 });
3709 events.lock().clear();
3710 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
3711 .await
3712 .unwrap();
3713 cx.executor().run_until_parked();
3714 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
3715 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3716}
3717
// A clean buffer silently reloads when its file changes on disk (with anchors
// remapped through the diff); a dirty buffer keeps its contents and is marked
// as conflicted instead.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The ˇ markers give us offsets to anchor at; the text between markers
    // will be edited on disk.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Place an anchor at each marked offset so we can check they survive the reload.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Anchors land at the marked positions of the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
3800
// Line endings are normalized to "\n" inside buffers, the detected ending is
// tracked per buffer, follows changes on disk, and is re-applied when saving.
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();

    // In-memory text is always "\n"-separated; the original ending is
    // remembered on the buffer.
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        path!("/dir/file1").as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
3862
// Publishes LSP diagnostics whose `relatedInformation` links hints back to
// their primary diagnostics, and asserts that they are grouped: each group
// gets one id, one primary entry, and its hints; `diagnostic_group` returns
// exactly the members of one group.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload with two logical groups:
    // - "error 1" (warning) + its hint, linked both ways via relatedInformation
    // - "error 2" (error) + two hints, likewise cross-linked
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries, ordered by position; each carries its group id and whether
    // it is the group's primary diagnostic.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 is "error 2" with its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 is "error 1" with its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
4105
// When a file is renamed through the project, a language server that has
// registered file-operation capabilities must receive `workspace/willRenameFiles`
// (a request whose returned WorkspaceEdit is applied) followed by
// `workspace/didRenameFiles` (a notification).
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters the fake server registers for: all .rs files plus all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; the response is awaited only after the server has
    // answered willRenameFiles below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // Edit the server will return from willRenameFiles.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set once the willRenameFiles handler runs, proving it was called.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .handle_request::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server is notified via didRenameFiles.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4234
// Exercises symbol rename against a fake LSP server: prepare_rename returns
// the symbol's range, then perform_rename applies a multi-file WorkspaceEdit
// and yields a transaction covering both edited buffers.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // prepare_rename at offset 7 (inside "ONE") should yield the symbol range.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // perform_rename: the server answers with edits in both one.rs and two.rs.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each affected buffer to its undo entry; both
    // buffers now reflect the rename.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4374
// Project-wide text search returns per-file match ranges, and reflects
// unsaved edits in open buffers rather than only the on-disk contents.
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Whole-word search for "TWO" matches the definition and the qualified use.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so it now mentions "TWO" twice.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The same search now also finds the unsaved matches in four.rs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40]),
            (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
4449
4450#[gpui::test]
4451async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4452 init_test(cx);
4453
4454 let search_query = "file";
4455
4456 let fs = FakeFs::new(cx.executor());
4457 fs.insert_tree(
4458 path!("/dir"),
4459 json!({
4460 "one.rs": r#"// Rust file one"#,
4461 "one.ts": r#"// TypeScript file one"#,
4462 "two.rs": r#"// Rust file two"#,
4463 "two.ts": r#"// TypeScript file two"#,
4464 }),
4465 )
4466 .await;
4467 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4468
4469 assert!(
4470 search(
4471 &project,
4472 SearchQuery::text(
4473 search_query,
4474 false,
4475 true,
4476 false,
4477 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4478 Default::default(),
4479 None
4480 )
4481 .unwrap(),
4482 cx
4483 )
4484 .await
4485 .unwrap()
4486 .is_empty(),
4487 "If no inclusions match, no files should be returned"
4488 );
4489
4490 assert_eq!(
4491 search(
4492 &project,
4493 SearchQuery::text(
4494 search_query,
4495 false,
4496 true,
4497 false,
4498 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4499 Default::default(),
4500 None
4501 )
4502 .unwrap(),
4503 cx
4504 )
4505 .await
4506 .unwrap(),
4507 HashMap::from_iter([
4508 (separator!("dir/one.rs").to_string(), vec![8..12]),
4509 (separator!("dir/two.rs").to_string(), vec![8..12]),
4510 ]),
4511 "Rust only search should give only Rust files"
4512 );
4513
4514 assert_eq!(
4515 search(
4516 &project,
4517 SearchQuery::text(
4518 search_query,
4519 false,
4520 true,
4521 false,
4522
4523 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4524
4525 Default::default(),
4526 None,
4527 ).unwrap(),
4528 cx
4529 )
4530 .await
4531 .unwrap(),
4532 HashMap::from_iter([
4533 (separator!("dir/one.ts").to_string(), vec![14..18]),
4534 (separator!("dir/two.ts").to_string(), vec![14..18]),
4535 ]),
4536 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4537 );
4538
4539 assert_eq!(
4540 search(
4541 &project,
4542 SearchQuery::text(
4543 search_query,
4544 false,
4545 true,
4546 false,
4547
4548 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4549
4550 Default::default(),
4551 None,
4552 ).unwrap(),
4553 cx
4554 )
4555 .await
4556 .unwrap(),
4557 HashMap::from_iter([
4558 (separator!("dir/two.ts").to_string(), vec![14..18]),
4559 (separator!("dir/one.rs").to_string(), vec![8..12]),
4560 (separator!("dir/one.ts").to_string(), vec![14..18]),
4561 (separator!("dir/two.rs").to_string(), vec![8..12]),
4562 ]),
4563 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4564 );
4565}
4566
4567#[gpui::test]
4568async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4569 init_test(cx);
4570
4571 let search_query = "file";
4572
4573 let fs = FakeFs::new(cx.executor());
4574 fs.insert_tree(
4575 path!("/dir"),
4576 json!({
4577 "one.rs": r#"// Rust file one"#,
4578 "one.ts": r#"// TypeScript file one"#,
4579 "two.rs": r#"// Rust file two"#,
4580 "two.ts": r#"// TypeScript file two"#,
4581 }),
4582 )
4583 .await;
4584 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4585
4586 assert_eq!(
4587 search(
4588 &project,
4589 SearchQuery::text(
4590 search_query,
4591 false,
4592 true,
4593 false,
4594 Default::default(),
4595 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4596 None,
4597 )
4598 .unwrap(),
4599 cx
4600 )
4601 .await
4602 .unwrap(),
4603 HashMap::from_iter([
4604 (separator!("dir/one.rs").to_string(), vec![8..12]),
4605 (separator!("dir/one.ts").to_string(), vec![14..18]),
4606 (separator!("dir/two.rs").to_string(), vec![8..12]),
4607 (separator!("dir/two.ts").to_string(), vec![14..18]),
4608 ]),
4609 "If no exclusions match, all files should be returned"
4610 );
4611
4612 assert_eq!(
4613 search(
4614 &project,
4615 SearchQuery::text(
4616 search_query,
4617 false,
4618 true,
4619 false,
4620 Default::default(),
4621 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4622 None,
4623 )
4624 .unwrap(),
4625 cx
4626 )
4627 .await
4628 .unwrap(),
4629 HashMap::from_iter([
4630 (separator!("dir/one.ts").to_string(), vec![14..18]),
4631 (separator!("dir/two.ts").to_string(), vec![14..18]),
4632 ]),
4633 "Rust exclusion search should give only TypeScript files"
4634 );
4635
4636 assert_eq!(
4637 search(
4638 &project,
4639 SearchQuery::text(
4640 search_query,
4641 false,
4642 true,
4643 false,
4644 Default::default(),
4645 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4646 None,
4647 ).unwrap(),
4648 cx
4649 )
4650 .await
4651 .unwrap(),
4652 HashMap::from_iter([
4653 (separator!("dir/one.rs").to_string(), vec![8..12]),
4654 (separator!("dir/two.rs").to_string(), vec![8..12]),
4655 ]),
4656 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4657 );
4658
4659 assert!(
4660 search(
4661 &project,
4662 SearchQuery::text(
4663 search_query,
4664 false,
4665 true,
4666 false,
4667 Default::default(),
4668
4669 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4670 None,
4671
4672 ).unwrap(),
4673 cx
4674 )
4675 .await
4676 .unwrap().is_empty(),
4677 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4678 );
4679}
4680
4681#[gpui::test]
4682async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4683 init_test(cx);
4684
4685 let search_query = "file";
4686
4687 let fs = FakeFs::new(cx.executor());
4688 fs.insert_tree(
4689 path!("/dir"),
4690 json!({
4691 "one.rs": r#"// Rust file one"#,
4692 "one.ts": r#"// TypeScript file one"#,
4693 "two.rs": r#"// Rust file two"#,
4694 "two.ts": r#"// TypeScript file two"#,
4695 }),
4696 )
4697 .await;
4698 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4699
4700 assert!(
4701 search(
4702 &project,
4703 SearchQuery::text(
4704 search_query,
4705 false,
4706 true,
4707 false,
4708 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4709 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4710 None,
4711 )
4712 .unwrap(),
4713 cx
4714 )
4715 .await
4716 .unwrap()
4717 .is_empty(),
4718 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4719 );
4720
4721 assert!(
4722 search(
4723 &project,
4724 SearchQuery::text(
4725 search_query,
4726 false,
4727 true,
4728 false,
4729 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4730 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4731 None,
4732 ).unwrap(),
4733 cx
4734 )
4735 .await
4736 .unwrap()
4737 .is_empty(),
4738 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4739 );
4740
4741 assert!(
4742 search(
4743 &project,
4744 SearchQuery::text(
4745 search_query,
4746 false,
4747 true,
4748 false,
4749 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4750 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4751 None,
4752 )
4753 .unwrap(),
4754 cx
4755 )
4756 .await
4757 .unwrap()
4758 .is_empty(),
4759 "Non-matching inclusions and exclusions should not change that."
4760 );
4761
4762 assert_eq!(
4763 search(
4764 &project,
4765 SearchQuery::text(
4766 search_query,
4767 false,
4768 true,
4769 false,
4770 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4771 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4772 None,
4773 )
4774 .unwrap(),
4775 cx
4776 )
4777 .await
4778 .unwrap(),
4779 HashMap::from_iter([
4780 (separator!("dir/one.ts").to_string(), vec![14..18]),
4781 (separator!("dir/two.ts").to_string(), vec![14..18]),
4782 ]),
4783 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4784 );
4785}
4786
4787#[gpui::test]
4788async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4789 init_test(cx);
4790
4791 let fs = FakeFs::new(cx.executor());
4792 fs.insert_tree(
4793 path!("/worktree-a"),
4794 json!({
4795 "haystack.rs": r#"// NEEDLE"#,
4796 "haystack.ts": r#"// NEEDLE"#,
4797 }),
4798 )
4799 .await;
4800 fs.insert_tree(
4801 path!("/worktree-b"),
4802 json!({
4803 "haystack.rs": r#"// NEEDLE"#,
4804 "haystack.ts": r#"// NEEDLE"#,
4805 }),
4806 )
4807 .await;
4808
4809 let project = Project::test(
4810 fs.clone(),
4811 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
4812 cx,
4813 )
4814 .await;
4815
4816 assert_eq!(
4817 search(
4818 &project,
4819 SearchQuery::text(
4820 "NEEDLE",
4821 false,
4822 true,
4823 false,
4824 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4825 Default::default(),
4826 None,
4827 )
4828 .unwrap(),
4829 cx
4830 )
4831 .await
4832 .unwrap(),
4833 HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
4834 "should only return results from included worktree"
4835 );
4836 assert_eq!(
4837 search(
4838 &project,
4839 SearchQuery::text(
4840 "NEEDLE",
4841 false,
4842 true,
4843 false,
4844 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4845 Default::default(),
4846 None,
4847 )
4848 .unwrap(),
4849 cx
4850 )
4851 .await
4852 .unwrap(),
4853 HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
4854 "should only return results from included worktree"
4855 );
4856
4857 assert_eq!(
4858 search(
4859 &project,
4860 SearchQuery::text(
4861 "NEEDLE",
4862 false,
4863 true,
4864 false,
4865 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4866 Default::default(),
4867 None,
4868 )
4869 .unwrap(),
4870 cx
4871 )
4872 .await
4873 .unwrap(),
4874 HashMap::from_iter([
4875 (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
4876 (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
4877 ]),
4878 "should return results from both worktrees"
4879 );
4880}
4881
// Verifies that project search honors `.gitignore` by default, can opt in to
// ignored files, and still applies include/exclude path filters to them.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Worktree layout: `.gitignore` ignores `target` everywhere and
    // `/node_modules` at the root. Matches for the query exist both inside
    // and outside the ignored directories.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Scenario 1: default flags — only the non-ignored root `package.json`
    // is searched, so exactly one match is reported.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Scenario 2: a fresh project (so state from the previous search cannot
    // leak in) and the fourth flag flipped to `true` — ignored files under
    // `target/` and `node_modules/` now appear in the results as well.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/package.json").to_string(), vec![8..11]),
            (separator!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                separator!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                separator!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                separator!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                separator!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Scenario 3: ignored files included, but filtered — only the prettier
    // directory is included and `.ts` files are excluded, leaving exactly
    // one matching file.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            separator!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
5001
5002#[gpui::test]
5003async fn test_create_entry(cx: &mut gpui::TestAppContext) {
5004 init_test(cx);
5005
5006 let fs = FakeFs::new(cx.executor().clone());
5007 fs.insert_tree(
5008 "/one/two",
5009 json!({
5010 "three": {
5011 "a.txt": "",
5012 "four": {}
5013 },
5014 "c.rs": ""
5015 }),
5016 )
5017 .await;
5018
5019 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
5020 project
5021 .update(cx, |project, cx| {
5022 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5023 project.create_entry((id, "b.."), true, cx)
5024 })
5025 .await
5026 .unwrap()
5027 .to_included()
5028 .unwrap();
5029
5030 // Can't create paths outside the project
5031 let result = project
5032 .update(cx, |project, cx| {
5033 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5034 project.create_entry((id, "../../boop"), true, cx)
5035 })
5036 .await;
5037 assert!(result.is_err());
5038
5039 // Can't create paths with '..'
5040 let result = project
5041 .update(cx, |project, cx| {
5042 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5043 project.create_entry((id, "four/../beep"), true, cx)
5044 })
5045 .await;
5046 assert!(result.is_err());
5047
5048 assert_eq!(
5049 fs.paths(true),
5050 vec![
5051 PathBuf::from(path!("/")),
5052 PathBuf::from(path!("/one")),
5053 PathBuf::from(path!("/one/two")),
5054 PathBuf::from(path!("/one/two/c.rs")),
5055 PathBuf::from(path!("/one/two/three")),
5056 PathBuf::from(path!("/one/two/three/a.txt")),
5057 PathBuf::from(path!("/one/two/three/b..")),
5058 PathBuf::from(path!("/one/two/three/four")),
5059 ]
5060 );
5061
5062 // And we cannot open buffers with '..'
5063 let result = project
5064 .update(cx, |project, cx| {
5065 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5066 project.open_buffer((id, "../c.rs"), cx)
5067 })
5068 .await;
5069 assert!(result.is_err())
5070}
5071
// Verifies that a hover request fans out to every language server registered
// for the buffer's language that advertises hover support, and that the final
// result aggregates only the non-empty responses.
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four fake servers for the same language: three advertise hover support
    // (one of which will answer `None`), and one advertises no hover
    // capability at all and must never be queried.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts the registered language servers.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Wait for each server to initialize, then install a hover handler
    // appropriate to its role in the test.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two reply with a hover naming the server, so the final
            // assertion can tell which servers actually contributed.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            // Queried, but returns no hover — must not appear in the result.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // No hover capability — receiving a request here is a bug.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue one hover and drive every capable server's handler to completion.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned hover content should contribute.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5224
5225#[gpui::test]
5226async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5227 init_test(cx);
5228
5229 let fs = FakeFs::new(cx.executor());
5230 fs.insert_tree(
5231 path!("/dir"),
5232 json!({
5233 "a.ts": "a",
5234 }),
5235 )
5236 .await;
5237
5238 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5239
5240 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5241 language_registry.add(typescript_lang());
5242 let mut fake_language_servers = language_registry.register_fake_lsp(
5243 "TypeScript",
5244 FakeLspAdapter {
5245 capabilities: lsp::ServerCapabilities {
5246 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5247 ..lsp::ServerCapabilities::default()
5248 },
5249 ..FakeLspAdapter::default()
5250 },
5251 );
5252
5253 let (buffer, _handle) = project
5254 .update(cx, |p, cx| {
5255 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5256 })
5257 .await
5258 .unwrap();
5259 cx.executor().run_until_parked();
5260
5261 let fake_server = fake_language_servers
5262 .next()
5263 .await
5264 .expect("failed to get the language server");
5265
5266 let mut request_handled =
5267 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
5268 Ok(Some(lsp::Hover {
5269 contents: lsp::HoverContents::Array(vec![
5270 lsp::MarkedString::String("".to_string()),
5271 lsp::MarkedString::String(" ".to_string()),
5272 lsp::MarkedString::String("\n\n\n".to_string()),
5273 ]),
5274 range: None,
5275 }))
5276 });
5277
5278 let hover_task = project.update(cx, |project, cx| {
5279 project.hover(&buffer, Point::new(0, 0), cx)
5280 });
5281 let () = request_handled
5282 .next()
5283 .await
5284 .expect("All hover requests should have been triggered");
5285 assert_eq!(
5286 Vec::<String>::new(),
5287 hover_task
5288 .await
5289 .into_iter()
5290 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5291 .sorted()
5292 .collect::<Vec<_>>(),
5293 "Empty hover parts should be ignored"
5294 );
5295}
5296
5297#[gpui::test]
5298async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
5299 init_test(cx);
5300
5301 let fs = FakeFs::new(cx.executor());
5302 fs.insert_tree(
5303 path!("/dir"),
5304 json!({
5305 "a.ts": "a",
5306 }),
5307 )
5308 .await;
5309
5310 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5311
5312 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5313 language_registry.add(typescript_lang());
5314 let mut fake_language_servers = language_registry.register_fake_lsp(
5315 "TypeScript",
5316 FakeLspAdapter {
5317 capabilities: lsp::ServerCapabilities {
5318 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5319 ..lsp::ServerCapabilities::default()
5320 },
5321 ..FakeLspAdapter::default()
5322 },
5323 );
5324
5325 let (buffer, _handle) = project
5326 .update(cx, |p, cx| {
5327 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5328 })
5329 .await
5330 .unwrap();
5331 cx.executor().run_until_parked();
5332
5333 let fake_server = fake_language_servers
5334 .next()
5335 .await
5336 .expect("failed to get the language server");
5337
5338 let mut request_handled = fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5339 move |_, _| async move {
5340 Ok(Some(vec![
5341 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5342 title: "organize imports".to_string(),
5343 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
5344 ..lsp::CodeAction::default()
5345 }),
5346 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5347 title: "fix code".to_string(),
5348 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
5349 ..lsp::CodeAction::default()
5350 }),
5351 ]))
5352 },
5353 );
5354
5355 let code_actions_task = project.update(cx, |project, cx| {
5356 project.code_actions(
5357 &buffer,
5358 0..buffer.read(cx).len(),
5359 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
5360 cx,
5361 )
5362 });
5363
5364 let () = request_handled
5365 .next()
5366 .await
5367 .expect("The code action request should have been triggered");
5368
5369 let code_actions = code_actions_task.await.unwrap();
5370 assert_eq!(code_actions.len(), 1);
5371 assert_eq!(
5372 code_actions[0].lsp_action.kind,
5373 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
5374 );
5375}
5376
5377#[gpui::test]
5378async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5379 init_test(cx);
5380
5381 let fs = FakeFs::new(cx.executor());
5382 fs.insert_tree(
5383 path!("/dir"),
5384 json!({
5385 "a.tsx": "a",
5386 }),
5387 )
5388 .await;
5389
5390 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5391
5392 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5393 language_registry.add(tsx_lang());
5394 let language_server_names = [
5395 "TypeScriptServer",
5396 "TailwindServer",
5397 "ESLintServer",
5398 "NoActionsCapabilitiesServer",
5399 ];
5400
5401 let mut language_server_rxs = [
5402 language_registry.register_fake_lsp(
5403 "tsx",
5404 FakeLspAdapter {
5405 name: language_server_names[0],
5406 capabilities: lsp::ServerCapabilities {
5407 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5408 ..lsp::ServerCapabilities::default()
5409 },
5410 ..FakeLspAdapter::default()
5411 },
5412 ),
5413 language_registry.register_fake_lsp(
5414 "tsx",
5415 FakeLspAdapter {
5416 name: language_server_names[1],
5417 capabilities: lsp::ServerCapabilities {
5418 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5419 ..lsp::ServerCapabilities::default()
5420 },
5421 ..FakeLspAdapter::default()
5422 },
5423 ),
5424 language_registry.register_fake_lsp(
5425 "tsx",
5426 FakeLspAdapter {
5427 name: language_server_names[2],
5428 capabilities: lsp::ServerCapabilities {
5429 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5430 ..lsp::ServerCapabilities::default()
5431 },
5432 ..FakeLspAdapter::default()
5433 },
5434 ),
5435 language_registry.register_fake_lsp(
5436 "tsx",
5437 FakeLspAdapter {
5438 name: language_server_names[3],
5439 capabilities: lsp::ServerCapabilities {
5440 code_action_provider: None,
5441 ..lsp::ServerCapabilities::default()
5442 },
5443 ..FakeLspAdapter::default()
5444 },
5445 ),
5446 ];
5447
5448 let (buffer, _handle) = project
5449 .update(cx, |p, cx| {
5450 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5451 })
5452 .await
5453 .unwrap();
5454 cx.executor().run_until_parked();
5455
5456 let mut servers_with_actions_requests = HashMap::default();
5457 for i in 0..language_server_names.len() {
5458 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5459 panic!(
5460 "Failed to get language server #{i} with name {}",
5461 &language_server_names[i]
5462 )
5463 });
5464 let new_server_name = new_server.server.name();
5465
5466 assert!(
5467 !servers_with_actions_requests.contains_key(&new_server_name),
5468 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5469 );
5470 match new_server_name.0.as_ref() {
5471 "TailwindServer" | "TypeScriptServer" => {
5472 servers_with_actions_requests.insert(
5473 new_server_name.clone(),
5474 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5475 move |_, _| {
5476 let name = new_server_name.clone();
5477 async move {
5478 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5479 lsp::CodeAction {
5480 title: format!("{name} code action"),
5481 ..lsp::CodeAction::default()
5482 },
5483 )]))
5484 }
5485 },
5486 ),
5487 );
5488 }
5489 "ESLintServer" => {
5490 servers_with_actions_requests.insert(
5491 new_server_name,
5492 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5493 |_, _| async move { Ok(None) },
5494 ),
5495 );
5496 }
5497 "NoActionsCapabilitiesServer" => {
5498 let _never_handled = new_server
5499 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5500 panic!(
5501 "Should not call for code actions server with no corresponding capabilities"
5502 )
5503 });
5504 }
5505 unexpected => panic!("Unexpected server name: {unexpected}"),
5506 }
5507 }
5508
5509 let code_actions_task = project.update(cx, |project, cx| {
5510 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5511 });
5512
5513 // cx.run_until_parked();
5514 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5515 |mut code_actions_request| async move {
5516 code_actions_request
5517 .next()
5518 .await
5519 .expect("All code actions requests should have been triggered")
5520 },
5521 ))
5522 .await;
5523 assert_eq!(
5524 vec!["TailwindServer code action", "TypeScriptServer code action"],
5525 code_actions_task
5526 .await
5527 .unwrap()
5528 .into_iter()
5529 .map(|code_action| code_action.lsp_action.title)
5530 .sorted()
5531 .collect::<Vec<_>>(),
5532 "Should receive code actions responses from all related servers with hover capabilities"
5533 );
5534}
5535
5536#[gpui::test]
5537async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5538 init_test(cx);
5539
5540 let fs = FakeFs::new(cx.executor());
5541 fs.insert_tree(
5542 "/dir",
5543 json!({
5544 "a.rs": "let a = 1;",
5545 "b.rs": "let b = 2;",
5546 "c.rs": "let c = 2;",
5547 }),
5548 )
5549 .await;
5550
5551 let project = Project::test(
5552 fs,
5553 [
5554 "/dir/a.rs".as_ref(),
5555 "/dir/b.rs".as_ref(),
5556 "/dir/c.rs".as_ref(),
5557 ],
5558 cx,
5559 )
5560 .await;
5561
5562 // check the initial state and get the worktrees
5563 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5564 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5565 assert_eq!(worktrees.len(), 3);
5566
5567 let worktree_a = worktrees[0].read(cx);
5568 let worktree_b = worktrees[1].read(cx);
5569 let worktree_c = worktrees[2].read(cx);
5570
5571 // check they start in the right order
5572 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5573 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5574 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5575
5576 (
5577 worktrees[0].clone(),
5578 worktrees[1].clone(),
5579 worktrees[2].clone(),
5580 )
5581 });
5582
5583 // move first worktree to after the second
5584 // [a, b, c] -> [b, a, c]
5585 project
5586 .update(cx, |project, cx| {
5587 let first = worktree_a.read(cx);
5588 let second = worktree_b.read(cx);
5589 project.move_worktree(first.id(), second.id(), cx)
5590 })
5591 .expect("moving first after second");
5592
5593 // check the state after moving
5594 project.update(cx, |project, cx| {
5595 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5596 assert_eq!(worktrees.len(), 3);
5597
5598 let first = worktrees[0].read(cx);
5599 let second = worktrees[1].read(cx);
5600 let third = worktrees[2].read(cx);
5601
5602 // check they are now in the right order
5603 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5604 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5605 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5606 });
5607
5608 // move the second worktree to before the first
5609 // [b, a, c] -> [a, b, c]
5610 project
5611 .update(cx, |project, cx| {
5612 let second = worktree_a.read(cx);
5613 let first = worktree_b.read(cx);
5614 project.move_worktree(first.id(), second.id(), cx)
5615 })
5616 .expect("moving second before first");
5617
5618 // check the state after moving
5619 project.update(cx, |project, cx| {
5620 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5621 assert_eq!(worktrees.len(), 3);
5622
5623 let first = worktrees[0].read(cx);
5624 let second = worktrees[1].read(cx);
5625 let third = worktrees[2].read(cx);
5626
5627 // check they are now in the right order
5628 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5629 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5630 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5631 });
5632
5633 // move the second worktree to after the third
5634 // [a, b, c] -> [a, c, b]
5635 project
5636 .update(cx, |project, cx| {
5637 let second = worktree_b.read(cx);
5638 let third = worktree_c.read(cx);
5639 project.move_worktree(second.id(), third.id(), cx)
5640 })
5641 .expect("moving second after third");
5642
5643 // check the state after moving
5644 project.update(cx, |project, cx| {
5645 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5646 assert_eq!(worktrees.len(), 3);
5647
5648 let first = worktrees[0].read(cx);
5649 let second = worktrees[1].read(cx);
5650 let third = worktrees[2].read(cx);
5651
5652 // check they are now in the right order
5653 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5654 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5655 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5656 });
5657
5658 // move the third worktree to before the second
5659 // [a, c, b] -> [a, b, c]
5660 project
5661 .update(cx, |project, cx| {
5662 let third = worktree_c.read(cx);
5663 let second = worktree_b.read(cx);
5664 project.move_worktree(third.id(), second.id(), cx)
5665 })
5666 .expect("moving third before second");
5667
5668 // check the state after moving
5669 project.update(cx, |project, cx| {
5670 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5671 assert_eq!(worktrees.len(), 3);
5672
5673 let first = worktrees[0].read(cx);
5674 let second = worktrees[1].read(cx);
5675 let third = worktrees[2].read(cx);
5676
5677 // check they are now in the right order
5678 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5679 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5680 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5681 });
5682
5683 // move the first worktree to after the third
5684 // [a, b, c] -> [b, c, a]
5685 project
5686 .update(cx, |project, cx| {
5687 let first = worktree_a.read(cx);
5688 let third = worktree_c.read(cx);
5689 project.move_worktree(first.id(), third.id(), cx)
5690 })
5691 .expect("moving first after third");
5692
5693 // check the state after moving
5694 project.update(cx, |project, cx| {
5695 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5696 assert_eq!(worktrees.len(), 3);
5697
5698 let first = worktrees[0].read(cx);
5699 let second = worktrees[1].read(cx);
5700 let third = worktrees[2].read(cx);
5701
5702 // check they are now in the right order
5703 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5704 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5705 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5706 });
5707
5708 // move the third worktree to before the first
5709 // [b, c, a] -> [a, b, c]
5710 project
5711 .update(cx, |project, cx| {
5712 let third = worktree_a.read(cx);
5713 let first = worktree_b.read(cx);
5714 project.move_worktree(third.id(), first.id(), cx)
5715 })
5716 .expect("moving third before first");
5717
5718 // check the state after moving
5719 project.update(cx, |project, cx| {
5720 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5721 assert_eq!(worktrees.len(), 3);
5722
5723 let first = worktrees[0].read(cx);
5724 let second = worktrees[1].read(cx);
5725 let third = worktrees[2].read(cx);
5726
5727 // check they are now in the right order
5728 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5729 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5730 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5731 });
5732}
5733
/// Verifies `Project::open_unstaged_diff`: the diff compares the working copy
/// against the git index, and recomputes its hunks when the index changes.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Contents of the file as staged in the git index.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy contents: one added line (the comment) and one modified
    // line (the println) relative to the index.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();
    // Expect one added hunk (the comment) and one modified hunk (the println)
    // when diffing the buffer against the index.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Stage the comment (but not the println line) by rewriting the index.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    cx.run_until_parked();
    // After the index update only the println line remains unstaged, so a
    // single added hunk is expected.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().unwrap().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
5831
/// Verifies `Project::open_uncommitted_diff`: the diff compares the working
/// copy against HEAD, marks hunks that are not yet staged via their secondary
/// status, inherits the buffer's language for its base text, and recomputes
/// when HEAD changes.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD contents: no comment, "hello world".
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index contents: the println change is staged, the comment is not.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working copy: comment added and println changed relative to HEAD.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    // Register the Rust language so the buffer (and the diff's base text)
    // pick it up by the ".rs" path suffix.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The diff's base text should carry the same language as the buffer.
    uncommitted_diff.read_with(cx, |diff, _| {
        assert_eq!(
            diff.base_text().and_then(|base| base.language().cloned()),
            Some(language)
        )
    });

    cx.run_until_parked();
    // Against HEAD: the comment hunk is added but not staged (it differs from
    // the index, hence HasSecondaryHunk); the println hunk is already staged,
    // so it has no secondary hunk.
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &uncommitted_diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Move HEAD forward so that only the println line still differs from it.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents)],
    );

    cx.run_until_parked();
    // After the HEAD update, a single added hunk remains.
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &uncommitted_diff.base_text().unwrap().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
5954
/// Verifies that uncommitted diffs work when the project's worktree root is a
/// single file (rather than a directory containing the repository).
#[gpui::test]
async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD contents of the file.
    let committed_contents = r#"
        fn main() {
            println!("hello from HEAD");
        }
    "#
    .unindent();
    // Working-copy contents: the println line is modified.
    let file_contents = r#"
        fn main() {
            println!("hello from the working copy");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents)],
    );

    // Open the project rooted at the file itself, not at "/dir".
    let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();
    // The repository above the single-file worktree is still discovered, so
    // the modified println shows up as one hunk against HEAD.
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &uncommitted_diff.base_text_string().unwrap(),
            &[(
                1..2,
                "    println!(\"hello from HEAD\");\n",
                "    println!(\"hello from the working copy\");\n",
                DiffHunkStatus::modified_none(),
            )],
        );
    });
}
6020
6021async fn search(
6022 project: &Entity<Project>,
6023 query: SearchQuery,
6024 cx: &mut gpui::TestAppContext,
6025) -> Result<HashMap<String, Vec<Range<usize>>>> {
6026 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
6027 let mut results = HashMap::default();
6028 while let Ok(search_result) = search_rx.recv().await {
6029 match search_result {
6030 SearchResult::Buffer { buffer, ranges } => {
6031 results.entry(buffer).or_insert(ranges);
6032 }
6033 SearchResult::LimitReached => {}
6034 }
6035 }
6036 Ok(results
6037 .into_iter()
6038 .map(|(buffer, ranges)| {
6039 buffer.update(cx, |buffer, cx| {
6040 let path = buffer
6041 .file()
6042 .unwrap()
6043 .full_path(cx)
6044 .to_string_lossy()
6045 .to_string();
6046 let ranges = ranges
6047 .into_iter()
6048 .map(|range| range.to_offset(buffer))
6049 .collect::<Vec<_>>();
6050 (path, ranges)
6051 })
6052 })
6053 .collect())
6054}
6055
6056pub fn init_test(cx: &mut gpui::TestAppContext) {
6057 if std::env::var("RUST_LOG").is_ok() {
6058 env_logger::try_init().ok();
6059 }
6060
6061 cx.update(|cx| {
6062 let settings_store = SettingsStore::test(cx);
6063 cx.set_global(settings_store);
6064 release_channel::init(SemanticVersion::default(), cx);
6065 language::init(cx);
6066 Project::init_settings(cx);
6067 });
6068}
6069
6070fn json_lang() -> Arc<Language> {
6071 Arc::new(Language::new(
6072 LanguageConfig {
6073 name: "JSON".into(),
6074 matcher: LanguageMatcher {
6075 path_suffixes: vec!["json".to_string()],
6076 ..Default::default()
6077 },
6078 ..Default::default()
6079 },
6080 None,
6081 ))
6082}
6083
6084fn js_lang() -> Arc<Language> {
6085 Arc::new(Language::new(
6086 LanguageConfig {
6087 name: "JavaScript".into(),
6088 matcher: LanguageMatcher {
6089 path_suffixes: vec!["js".to_string()],
6090 ..Default::default()
6091 },
6092 ..Default::default()
6093 },
6094 None,
6095 ))
6096}
6097
6098fn rust_lang() -> Arc<Language> {
6099 Arc::new(Language::new(
6100 LanguageConfig {
6101 name: "Rust".into(),
6102 matcher: LanguageMatcher {
6103 path_suffixes: vec!["rs".to_string()],
6104 ..Default::default()
6105 },
6106 ..Default::default()
6107 },
6108 Some(tree_sitter_rust::LANGUAGE.into()),
6109 ))
6110}
6111
6112fn typescript_lang() -> Arc<Language> {
6113 Arc::new(Language::new(
6114 LanguageConfig {
6115 name: "TypeScript".into(),
6116 matcher: LanguageMatcher {
6117 path_suffixes: vec!["ts".to_string()],
6118 ..Default::default()
6119 },
6120 ..Default::default()
6121 },
6122 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
6123 ))
6124}
6125
6126fn tsx_lang() -> Arc<Language> {
6127 Arc::new(Language::new(
6128 LanguageConfig {
6129 name: "tsx".into(),
6130 matcher: LanguageMatcher {
6131 path_suffixes: vec!["tsx".to_string()],
6132 ..Default::default()
6133 },
6134 ..Default::default()
6135 },
6136 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
6137 ))
6138}
6139
6140fn get_all_tasks(
6141 project: &Entity<Project>,
6142 worktree_id: Option<WorktreeId>,
6143 task_contexts: &TaskContexts,
6144 cx: &mut App,
6145) -> Vec<(TaskSourceKind, ResolvedTask)> {
6146 let (mut old, new) = project.update(cx, |project, cx| {
6147 project
6148 .task_store
6149 .read(cx)
6150 .task_inventory()
6151 .unwrap()
6152 .read(cx)
6153 .used_and_current_resolved_tasks(worktree_id, None, task_contexts, cx)
6154 });
6155 old.extend(new);
6156 old
6157}