1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use git::diff::assert_hunks;
5use gpui::{AppContext, SemanticVersion, UpdateGlobal};
6use http_client::Url;
7use language::{
8 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
9 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
10 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
11 OffsetRangeExt, Point, ToPoint,
12};
13use lsp::{
14 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
15 NumberOrString, TextDocumentEdit, WillRenameFiles,
16};
17use parking_lot::Mutex;
18use pretty_assertions::{assert_eq, assert_matches};
19use serde_json::json;
20#[cfg(not(windows))]
21use std::os;
22use std::{str::FromStr, sync::OnceLock};
23
24use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
25use task::{ResolvedTask, TaskContext};
26use unindent::Unindent as _;
27use util::{assert_set_eq, paths::PathMatcher, test::temp_tree, TryFutureExt as _};
28
29#[gpui::test]
30async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
31 cx.executor().allow_parking();
32
33 let (tx, mut rx) = futures::channel::mpsc::unbounded();
34 let _thread = std::thread::spawn(move || {
35 std::fs::metadata("/tmp").unwrap();
36 std::thread::sleep(Duration::from_millis(1000));
37 tx.unbounded_send(1).unwrap();
38 });
39 rx.next().await.unwrap();
40}
41
42#[gpui::test]
43async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
44 cx.executor().allow_parking();
45
46 let io_task = smol::unblock(move || {
47 println!("sleeping on thread {:?}", std::thread::current().id());
48 std::thread::sleep(Duration::from_millis(10));
49 1
50 });
51
52 let task = cx.foreground_executor().spawn(async move {
53 io_task.await;
54 });
55
56 task.await;
57}
58
59#[cfg(not(windows))]
60#[gpui::test]
61async fn test_symlinks(cx: &mut gpui::TestAppContext) {
62 init_test(cx);
63 cx.executor().allow_parking();
64
65 let dir = temp_tree(json!({
66 "root": {
67 "apple": "",
68 "banana": {
69 "carrot": {
70 "date": "",
71 "endive": "",
72 }
73 },
74 "fennel": {
75 "grape": "",
76 }
77 }
78 }));
79
80 let root_link_path = dir.path().join("root_link");
81 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
82 os::unix::fs::symlink(
83 dir.path().join("root/fennel"),
84 dir.path().join("root/finnochio"),
85 )
86 .unwrap();
87
88 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
89
90 project.update(cx, |project, cx| {
91 let tree = project.worktrees(cx).next().unwrap().read(cx);
92 assert_eq!(tree.file_count(), 5);
93 assert_eq!(
94 tree.inode_for_path("fennel/grape"),
95 tree.inode_for_path("finnochio/grape")
96 );
97 });
98}
99
// Verifies .editorconfig resolution: editorconfig values override
// .zed/settings.json, nested .editorconfig files override parent ones,
// `tab_width` is used as a fallback for `indent_size`, and globs only
// apply to matching file types.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Fixture: root .editorconfig + .zed settings, plus a nested
    // .editorconfig in `b/` that overrides the root indent size for Rust.
    let dir = temp_tree(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n    A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n    B\n}",
        },
        "c.js": "def c\n  C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into a FakeFs so the project can load it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by .editorconfig's glob "*.rs",
        // so the .zed/settings.json tab size applies.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
189
// Verifies per-directory .zed settings (tab_size) and the task inventory:
// worktree tasks from nested .zed/tasks.json files are surfaced, recently
// scheduled tasks are re-ordered to the front, and file-based (global)
// tasks are appended after worktree tasks.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Fixture: root .zed (settings + tasks) and a nested b/.zed that
    // overrides settings and defines its own task.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n    A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n    B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    // Let settings and task files be scanned before querying them.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // Source kind for tasks declared in the root-level ".zed" directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolve per-file: a/a.rs sees the root settings,
            // b/b.rs sees the nested override.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Before anything is scheduled, tasks appear in directory order:
    // the nested b/.zed task first, then the root .zed task.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Schedule the root task and register a global (file-based) task.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Now the recently scheduled root task is promoted to the front, and
    // the global file-based task (with its env) is appended last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
383
// End-to-end lifecycle test for language servers: lazy startup on buffer
// open, capability-driven buffer configuration, routing of change/save/
// close notifications to the right server, behavior on file renames
// (including cross-language renames that move a buffer between servers),
// and server restarts. Notification ORDER is asserted throughout; edits
// to this test must preserve the exact sequence of LSP messages.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake servers for Rust and JSON, each advertising distinct
    // completion trigger characters so we can tell which server configured
    // which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    // A same-language rename is modeled as close (old path) + open (new path)
    // on the same server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared by the
    // cross-language rename below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is unspecified, hence set equality).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
781
// Verifies `workspace/didChangeWatchedFiles` support: gitignored paths are
// only loaded once a language server registers a watcher that covers them,
// and subsequent FS mutations are forwarded to the server only when they
// match one of its registered glob patterns.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // `target/` is gitignored; only `target/y` will later be watched.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline, so we can count the directory reads triggered by watching.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Collect (and keep sorted by URI) every change the server is told about.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    // Registering the watchers alone produces no change events, but does
    // trigger scanning of the newly watched ignored directories.
    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
975
// Verifies that diagnostics published for two single-file worktrees are
// routed to the correct buffer: each buffer only shows the diagnostic
// published against its own path, with the published severity.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Each file is opened as its own single-file worktree.
    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Publish one ERROR for a.rs and one WARNING for b.rs from the same
    // (fake) server id.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // a.rs: only the variable `a` is marked, as an ERROR.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    // b.rs: only the variable `b` is marked, as a WARNING.
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1068
1069#[gpui::test]
1070async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1071 init_test(cx);
1072
1073 let fs = FakeFs::new(cx.executor());
1074 fs.insert_tree(
1075 "/root",
1076 json!({
1077 "dir": {
1078 ".git": {
1079 "HEAD": "ref: refs/heads/main",
1080 },
1081 ".gitignore": "b.rs",
1082 "a.rs": "let a = 1;",
1083 "b.rs": "let b = 2;",
1084 },
1085 "other.rs": "let b = c;"
1086 }),
1087 )
1088 .await;
1089
1090 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1091 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1092 let (worktree, _) = project
1093 .update(cx, |project, cx| {
1094 project.find_or_create_worktree("/root/dir", true, cx)
1095 })
1096 .await
1097 .unwrap();
1098 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1099
1100 let (worktree, _) = project
1101 .update(cx, |project, cx| {
1102 project.find_or_create_worktree("/root/other.rs", false, cx)
1103 })
1104 .await
1105 .unwrap();
1106 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1107
1108 let server_id = LanguageServerId(0);
1109 lsp_store.update(cx, |lsp_store, cx| {
1110 lsp_store
1111 .update_diagnostics(
1112 server_id,
1113 lsp::PublishDiagnosticsParams {
1114 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1115 version: None,
1116 diagnostics: vec![lsp::Diagnostic {
1117 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1118 severity: Some(lsp::DiagnosticSeverity::ERROR),
1119 message: "unused variable 'b'".to_string(),
1120 ..Default::default()
1121 }],
1122 },
1123 &[],
1124 cx,
1125 )
1126 .unwrap();
1127 lsp_store
1128 .update_diagnostics(
1129 server_id,
1130 lsp::PublishDiagnosticsParams {
1131 uri: Url::from_file_path("/root/other.rs").unwrap(),
1132 version: None,
1133 diagnostics: vec![lsp::Diagnostic {
1134 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1135 severity: Some(lsp::DiagnosticSeverity::ERROR),
1136 message: "unknown variable 'c'".to_string(),
1137 ..Default::default()
1138 }],
1139 },
1140 &[],
1141 cx,
1142 )
1143 .unwrap();
1144 });
1145
1146 let main_ignored_buffer = project
1147 .update(cx, |project, cx| {
1148 project.open_buffer((main_worktree_id, "b.rs"), cx)
1149 })
1150 .await
1151 .unwrap();
1152 main_ignored_buffer.update(cx, |buffer, _| {
1153 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1154 assert_eq!(
1155 chunks
1156 .iter()
1157 .map(|(s, d)| (s.as_str(), *d))
1158 .collect::<Vec<_>>(),
1159 &[
1160 ("let ", None),
1161 ("b", Some(DiagnosticSeverity::ERROR)),
1162 (" = 2;", None),
1163 ],
1164 "Gigitnored buffers should still get in-buffer diagnostics",
1165 );
1166 });
1167 let other_buffer = project
1168 .update(cx, |project, cx| {
1169 project.open_buffer((other_worktree_id, ""), cx)
1170 })
1171 .await
1172 .unwrap();
1173 other_buffer.update(cx, |buffer, _| {
1174 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1175 assert_eq!(
1176 chunks
1177 .iter()
1178 .map(|(s, d)| (s.as_str(), *d))
1179 .collect::<Vec<_>>(),
1180 &[
1181 ("let b = ", None),
1182 ("c", Some(DiagnosticSeverity::ERROR)),
1183 (";", None),
1184 ],
1185 "Buffers from hidden projects should still get in-buffer diagnostics"
1186 );
1187 });
1188
1189 project.update(cx, |project, cx| {
1190 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1191 assert_eq!(
1192 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1193 vec![(
1194 ProjectPath {
1195 worktree_id: main_worktree_id,
1196 path: Arc::from(Path::new("b.rs")),
1197 },
1198 server_id,
1199 DiagnosticSummary {
1200 error_count: 1,
1201 warning_count: 0,
1202 }
1203 )]
1204 );
1205 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1206 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1207 });
1208}
1209
// Verifies the project-level event sequence around a disk-based diagnostics
// pass: `LanguageServerAdded` → `DiskBasedDiagnosticsStarted` (on progress
// begin with the configured token) → `DiagnosticsUpdated` per publish →
// `DiskBasedDiagnosticsFinished` (on progress end), and that publishing empty
// diagnostics twice only emits a single update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // Register a fake server whose progress notifications with
    // `progress_token` are treated as a disk-based diagnostics pass.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress on the disk-based token emits the "started" event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics for a.rs emits a per-path update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending progress on the token emits the "finished" event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Open the diagnosed buffer and check the diagnostic landed at the
    // published range.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Second identical empty publish: no further event should be pending.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1342
// Restarting a language server while its disk-based diagnostics pass is still
// in progress must not leave the project stuck in a "diagnostics running"
// state: the new server (id 1) drives its own started/finished events, and
// once it finishes, no server is reported as running disk-based diagnostics —
// even though the old server's pass never ended.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server (id 1) is tracked as running diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1427
// Verifies that diagnostics already published by a language server are cleared
// — both from the buffer and from the project's diagnostic summary — when that
// server is restarted.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic is visible in the buffer and counted in the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1506
// A server may publish diagnostics with a bogus (unknown) buffer version; this
// must not poison the buffer's version tracking. After restarting the server,
// the buffer is re-opened on the new server with version 0.
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });
    // The new server receives a fresh didOpen with version 0, not the bogus
    // version reported earlier.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
1544
// Cancelling language-server work for a buffer should send a
// WorkDoneProgressCancel only for progress tokens the server marked as
// cancellable — the non-cancellable "another-token" work is left alone.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // Start one non-cancellable and one cancellable piece of work.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token is cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1607
1608#[gpui::test]
1609async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1610 init_test(cx);
1611
1612 let fs = FakeFs::new(cx.executor());
1613 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1614 .await;
1615
1616 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1617 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1618
1619 let mut fake_rust_servers = language_registry.register_fake_lsp(
1620 "Rust",
1621 FakeLspAdapter {
1622 name: "rust-lsp",
1623 ..Default::default()
1624 },
1625 );
1626 let mut fake_js_servers = language_registry.register_fake_lsp(
1627 "JavaScript",
1628 FakeLspAdapter {
1629 name: "js-lsp",
1630 ..Default::default()
1631 },
1632 );
1633 language_registry.add(rust_lang());
1634 language_registry.add(js_lang());
1635
1636 let _rs_buffer = project
1637 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1638 .await
1639 .unwrap();
1640 let _js_buffer = project
1641 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1642 .await
1643 .unwrap();
1644
1645 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1646 assert_eq!(
1647 fake_rust_server_1
1648 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1649 .await
1650 .text_document
1651 .uri
1652 .as_str(),
1653 "file:///dir/a.rs"
1654 );
1655
1656 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1657 assert_eq!(
1658 fake_js_server
1659 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1660 .await
1661 .text_document
1662 .uri
1663 .as_str(),
1664 "file:///dir/b.js"
1665 );
1666
1667 // Disable Rust language server, ensuring only that server gets stopped.
1668 cx.update(|cx| {
1669 SettingsStore::update_global(cx, |settings, cx| {
1670 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1671 settings.languages.insert(
1672 "Rust".into(),
1673 LanguageSettingsContent {
1674 enable_language_server: Some(false),
1675 ..Default::default()
1676 },
1677 );
1678 });
1679 })
1680 });
1681 fake_rust_server_1
1682 .receive_notification::<lsp::notification::Exit>()
1683 .await;
1684
1685 // Enable Rust and disable JavaScript language servers, ensuring that the
1686 // former gets started again and that the latter stops.
1687 cx.update(|cx| {
1688 SettingsStore::update_global(cx, |settings, cx| {
1689 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1690 settings.languages.insert(
1691 LanguageName::new("Rust"),
1692 LanguageSettingsContent {
1693 enable_language_server: Some(true),
1694 ..Default::default()
1695 },
1696 );
1697 settings.languages.insert(
1698 LanguageName::new("JavaScript"),
1699 LanguageSettingsContent {
1700 enable_language_server: Some(false),
1701 ..Default::default()
1702 },
1703 );
1704 });
1705 })
1706 });
1707 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1708 assert_eq!(
1709 fake_rust_server_2
1710 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1711 .await
1712 .text_document
1713 .uri
1714 .as_str(),
1715 "file:///dir/a.rs"
1716 );
1717 fake_js_server
1718 .receive_notification::<lsp::notification::Exit>()
1719 .await;
1720}
1721
// Exercises how published diagnostics are translated through buffer edits:
// diagnostics tagged with an old buffer version are mapped forward onto the
// current text, overlapping diagnostics highlight correctly, and disk-based
// diagnostics track unsaved edits made after the reported version.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (the two inserted newlines shift every range down by two rows)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The narrower ERROR wins inside the overlap; the wider WARNING
        // covers the remainder of its range.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2001
// Verifies highlighting behavior for zero-width diagnostic ranges: an empty
// range mid-line is extended forward over the following character, and an
// empty range at end-of-line is extended backward over the preceding one.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two =  \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Inject two zero-width diagnostics directly into the LSP store: one
    // mid-line (0,10) and one at end-of-line (1,10).
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                ..Default::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                ..Default::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2074
// Diagnostics reported by different language servers for the same path and
// range must be counted independently in the diagnostic summary (two errors,
// not one).
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Same file, same range — but from server 0.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // ...and from server 1.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' errors contribute to the summary.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2131
// Verifies `edits_from_lsp` when the server's edits reference an older
// document version: LSP edits computed against the version the server last
// saw are remapped through the buffer's subsequent local edits, so applying
// them to the current text yields the intended result (including merging two
// adjacent server edits on the same position).
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the LSP edits
    // below will be tagged with this (now stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Convert LSP edits (expressed against the stale version) into buffer
    // edits against the current text.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the remapped edits preserves the user's local edits while
    // realizing the server's intended changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2284
// Verifies that a "small change expressed as a huge diff" from a language
// server (rust-analyzer does this for merge-imports) is minimized by
// `edits_from_lsp` down to only the ranges that actually differ.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four overlapping LSP edits above collapse into just two minimal
        // edits: the import change itself, and removal of the old second line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2393
// Verifies that `edits_from_lsp` tolerates malformed server responses:
// out-of-order edit lists, inverted ranges (end before start), and ranges
// that point past the end of the document are reordered/clipped rather than
// causing an error, and still minimize to the same two edits as the
// well-formed case above.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end precedes start.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range extending far past the end of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2498
2499fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2500 buffer: &Buffer,
2501 range: Range<T>,
2502) -> Vec<(String, Option<DiagnosticSeverity>)> {
2503 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2504 for chunk in buffer.snapshot().chunks(range, true) {
2505 if chunks.last().map_or(false, |prev_chunk| {
2506 prev_chunk.1 == chunk.diagnostic_severity
2507 }) {
2508 chunks.last_mut().unwrap().0.push_str(chunk.text);
2509 } else {
2510 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2511 }
2512 }
2513 chunks
2514}
2515
// End-to-end go-to-definition test. The fake server returns a location in a
// file OUTSIDE the project's worktrees; that file must be added as an
// invisible worktree, and the worktree must be released again once the last
// handle to the definition result is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs exists on disk but is outside it.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        // Point at the definition in the out-of-project file a.rs.
        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // The definition's file was added as an invisible (`false`) worktree
        // alongside the original visible one.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // Dropping the last reference to the definition releases the invisible
    // worktree that was created for it.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: all of the project's worktrees as (abs path, is_visible) pairs.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2609
// When completion items arrive with no explicit text-edit range, the range
// to replace must be inferred from the word (or partial word) adjacent to
// the cursor.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: cursor at the end of the word "fqn" — the whole word should be
    // the inferred replacement range.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item carries `insert_text` but no text edit, so no range.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The 3-character word "fqn" before the cursor is replaced.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: cursor inside a string literal, after the partial word "cmp" —
    // only that partial word should be the inferred replacement range.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // No `insert_text` either: the label itself is the new text.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2701
// Completion `insert_text` containing bare '\r' or '\r\n' line endings must
// be normalized to '\n' before being applied to the buffer.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The insert_text mixes a bare '\r' and a '\r\n'.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both carriage-return variants were normalized to '\n'.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2762
// Exercises the command-based code-action flow: a code action resolves to a
// command rather than edits; executing the command makes the server send a
// `workspace/applyEdit` back to the client; those edits must be captured as
// the action's project transaction (and be undoable as a unit).
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        // The action must be resolved before it can be applied.
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated applyEdit: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The whole command-driven change undoes as a single transaction.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2897
2898#[gpui::test(iterations = 10)]
2899async fn test_save_file(cx: &mut gpui::TestAppContext) {
2900 init_test(cx);
2901
2902 let fs = FakeFs::new(cx.executor());
2903 fs.insert_tree(
2904 "/dir",
2905 json!({
2906 "file1": "the old contents",
2907 }),
2908 )
2909 .await;
2910
2911 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2912 let buffer = project
2913 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2914 .await
2915 .unwrap();
2916 buffer.update(cx, |buffer, cx| {
2917 assert_eq!(buffer.text(), "the old contents");
2918 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2919 });
2920
2921 project
2922 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2923 .await
2924 .unwrap();
2925
2926 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2927 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2928}
2929
2930#[gpui::test(iterations = 30)]
2931async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2932 init_test(cx);
2933
2934 let fs = FakeFs::new(cx.executor().clone());
2935 fs.insert_tree(
2936 "/dir",
2937 json!({
2938 "file1": "the original contents",
2939 }),
2940 )
2941 .await;
2942
2943 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2944 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2945 let buffer = project
2946 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2947 .await
2948 .unwrap();
2949
2950 // Simulate buffer diffs being slow, so that they don't complete before
2951 // the next file change occurs.
2952 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2953
2954 // Change the buffer's file on disk, and then wait for the file change
2955 // to be detected by the worktree, so that the buffer starts reloading.
2956 fs.save(
2957 "/dir/file1".as_ref(),
2958 &"the first contents".into(),
2959 Default::default(),
2960 )
2961 .await
2962 .unwrap();
2963 worktree.next_event(cx).await;
2964
2965 // Change the buffer's file again. Depending on the random seed, the
2966 // previous file change may still be in progress.
2967 fs.save(
2968 "/dir/file1".as_ref(),
2969 &"the second contents".into(),
2970 Default::default(),
2971 )
2972 .await
2973 .unwrap();
2974 worktree.next_event(cx).await;
2975
2976 cx.executor().run_until_parked();
2977 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2978 buffer.read_with(cx, |buffer, _| {
2979 assert_eq!(buffer.text(), on_disk_text);
2980 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2981 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2982 });
2983}
2984
2985#[gpui::test(iterations = 30)]
2986async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2987 init_test(cx);
2988
2989 let fs = FakeFs::new(cx.executor().clone());
2990 fs.insert_tree(
2991 "/dir",
2992 json!({
2993 "file1": "the original contents",
2994 }),
2995 )
2996 .await;
2997
2998 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2999 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3000 let buffer = project
3001 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3002 .await
3003 .unwrap();
3004
3005 // Simulate buffer diffs being slow, so that they don't complete before
3006 // the next file change occurs.
3007 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3008
3009 // Change the buffer's file on disk, and then wait for the file change
3010 // to be detected by the worktree, so that the buffer starts reloading.
3011 fs.save(
3012 "/dir/file1".as_ref(),
3013 &"the first contents".into(),
3014 Default::default(),
3015 )
3016 .await
3017 .unwrap();
3018 worktree.next_event(cx).await;
3019
3020 cx.executor()
3021 .spawn(cx.executor().simulate_random_delay())
3022 .await;
3023
3024 // Perform a noop edit, causing the buffer's version to increase.
3025 buffer.update(cx, |buffer, cx| {
3026 buffer.edit([(0..0, " ")], None, cx);
3027 buffer.undo(cx);
3028 });
3029
3030 cx.executor().run_until_parked();
3031 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3032 buffer.read_with(cx, |buffer, _| {
3033 let buffer_text = buffer.text();
3034 if buffer_text == on_disk_text {
3035 assert!(
3036 !buffer.is_dirty() && !buffer.has_conflict(),
3037 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3038 );
3039 }
3040 // If the file change occurred while the buffer was processing the first
3041 // change, the buffer will be in a conflicting state.
3042 else {
3043 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3044 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3045 }
3046 });
3047}
3048
3049#[gpui::test]
3050async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3051 init_test(cx);
3052
3053 let fs = FakeFs::new(cx.executor());
3054 fs.insert_tree(
3055 "/dir",
3056 json!({
3057 "file1": "the old contents",
3058 }),
3059 )
3060 .await;
3061
3062 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
3063 let buffer = project
3064 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3065 .await
3066 .unwrap();
3067 buffer.update(cx, |buffer, cx| {
3068 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3069 });
3070
3071 project
3072 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3073 .await
3074 .unwrap();
3075
3076 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3077 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3078}
3079
// Saving an untitled buffer via `save_buffer_as`: the buffer is written to
// disk, becomes clean, gets its language re-inferred from the new file
// extension, and subsequent opens of the same path return the same buffer.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // Create an untitled buffer and dirty it.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // No path yet, so no language can be inferred.
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    // Let the post-save bookkeeping (file assignment, language detection) run.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // The ".rs" extension now selects the Rust language.
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the newly saved path must dedupe to the same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3131
// Renames and deletions on the real file system must: preserve worktree entry
// ids across renames (direct and via a renamed ancestor), update open
// buffers' paths and disk state accordingly, and replicate faithfully to a
// remote copy of the worktree via streamed updates.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    // Real FS: this test exercises actual file-system event watching.
    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Helper: open a buffer for a worktree-relative path.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a path (panics if missing).
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits, to be replayed onto the
    // remote copy at the end of the test.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids survive renames — both a direct rename (file2) and renames of
    // an ancestor directory (file3, file4 moved when "b/c" became "d").
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files' new paths; the deleted file's buffer
    // keeps its last-known path but reports `DiskState::Deleted`.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3291
// Renaming a directory through `Project::rename_entry` must preserve the
// entry ids of the directory and its descendants, and a buffer opened under
// the old path must remain clean (not spuriously marked dirty).
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up the worktree entry id for a path (panics if missing).
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the parent directory "a" -> "b".
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Both the directory and its child keep their ids under the new path,
    // and the open buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3343
3344#[gpui::test]
3345async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3346 init_test(cx);
3347
3348 let fs = FakeFs::new(cx.executor());
3349 fs.insert_tree(
3350 "/dir",
3351 json!({
3352 "a.txt": "a-contents",
3353 "b.txt": "b-contents",
3354 }),
3355 )
3356 .await;
3357
3358 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3359
3360 // Spawn multiple tasks to open paths, repeating some paths.
3361 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3362 (
3363 p.open_local_buffer("/dir/a.txt", cx),
3364 p.open_local_buffer("/dir/b.txt", cx),
3365 p.open_local_buffer("/dir/a.txt", cx),
3366 )
3367 });
3368
3369 let buffer_a_1 = buffer_a_1.await.unwrap();
3370 let buffer_a_2 = buffer_a_2.await.unwrap();
3371 let buffer_b = buffer_b.await.unwrap();
3372 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3373 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3374
3375 // There is only one buffer per path.
3376 let buffer_a_id = buffer_a_1.entity_id();
3377 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3378
3379 // Open the same path again while it is still open.
3380 drop(buffer_a_1);
3381 let buffer_a_3 = project
3382 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3383 .await
3384 .unwrap();
3385
3386 // There's still only one buffer per path.
3387 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3388}
3389
// Verifies the buffer dirty-state machine: editing dirties a buffer, saving
// cleans it, restoring saved content cleans it, and deleting the backing file
// dirties it — each transition emitting the expected sequence of events.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        // Record all buffer events except `Operation`, which fires for every
        // text operation and would drown out the events under test.
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version at the file's
        // current mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    // Note: only the first edit after a save emits DirtyChanged; the second
    // edit emits Edited alone since the buffer is already dirty.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer before deleting its file, then discard the edit events.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3540
3541#[gpui::test]
3542async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3543 init_test(cx);
3544
3545 let initial_contents = "aaa\nbbbbb\nc\n";
3546 let fs = FakeFs::new(cx.executor());
3547 fs.insert_tree(
3548 "/dir",
3549 json!({
3550 "the-file": initial_contents,
3551 }),
3552 )
3553 .await;
3554 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3555 let buffer = project
3556 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3557 .await
3558 .unwrap();
3559
3560 let anchors = (0..3)
3561 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3562 .collect::<Vec<_>>();
3563
3564 // Change the file on disk, adding two new lines of text, and removing
3565 // one line.
3566 buffer.update(cx, |buffer, _| {
3567 assert!(!buffer.is_dirty());
3568 assert!(!buffer.has_conflict());
3569 });
3570 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3571 fs.save(
3572 "/dir/the-file".as_ref(),
3573 &new_contents.into(),
3574 LineEnding::Unix,
3575 )
3576 .await
3577 .unwrap();
3578
3579 // Because the buffer was not modified, it is reloaded from disk. Its
3580 // contents are edited according to the diff between the old and new
3581 // file contents.
3582 cx.executor().run_until_parked();
3583 buffer.update(cx, |buffer, _| {
3584 assert_eq!(buffer.text(), new_contents);
3585 assert!(!buffer.is_dirty());
3586 assert!(!buffer.has_conflict());
3587
3588 let anchor_positions = anchors
3589 .iter()
3590 .map(|anchor| anchor.to_point(&*buffer))
3591 .collect::<Vec<_>>();
3592 assert_eq!(
3593 anchor_positions,
3594 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3595 );
3596 });
3597
3598 // Modify the buffer
3599 buffer.update(cx, |buffer, cx| {
3600 buffer.edit([(0..0, " ")], None, cx);
3601 assert!(buffer.is_dirty());
3602 assert!(!buffer.has_conflict());
3603 });
3604
3605 // Change the file on disk again, adding blank lines to the beginning.
3606 fs.save(
3607 "/dir/the-file".as_ref(),
3608 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3609 LineEnding::Unix,
3610 )
3611 .await
3612 .unwrap();
3613
3614 // Because the buffer is modified, it doesn't reload from disk, but is
3615 // marked as having a conflict.
3616 cx.executor().run_until_parked();
3617 buffer.update(cx, |buffer, _| {
3618 assert!(buffer.has_conflict());
3619 });
3620}
3621
3622#[gpui::test]
3623async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3624 init_test(cx);
3625
3626 let fs = FakeFs::new(cx.executor());
3627 fs.insert_tree(
3628 "/dir",
3629 json!({
3630 "file1": "a\nb\nc\n",
3631 "file2": "one\r\ntwo\r\nthree\r\n",
3632 }),
3633 )
3634 .await;
3635
3636 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3637 let buffer1 = project
3638 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3639 .await
3640 .unwrap();
3641 let buffer2 = project
3642 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3643 .await
3644 .unwrap();
3645
3646 buffer1.update(cx, |buffer, _| {
3647 assert_eq!(buffer.text(), "a\nb\nc\n");
3648 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3649 });
3650 buffer2.update(cx, |buffer, _| {
3651 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3652 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3653 });
3654
3655 // Change a file's line endings on disk from unix to windows. The buffer's
3656 // state updates correctly.
3657 fs.save(
3658 "/dir/file1".as_ref(),
3659 &"aaa\nb\nc\n".into(),
3660 LineEnding::Windows,
3661 )
3662 .await
3663 .unwrap();
3664 cx.executor().run_until_parked();
3665 buffer1.update(cx, |buffer, _| {
3666 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3667 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3668 });
3669
3670 // Save a file with windows line endings. The file is written correctly.
3671 buffer2.update(cx, |buffer, cx| {
3672 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3673 });
3674 project
3675 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3676 .await
3677 .unwrap();
3678 assert_eq!(
3679 fs.load("/dir/file2".as_ref()).await.unwrap(),
3680 "one\r\ntwo\r\nthree\r\nfour\r\n",
3681 );
3682}
3683
// Verifies that LSP diagnostics carrying `relatedInformation` are grouped:
// each primary diagnostic and its hints share a `group_id`, hints are marked
// non-primary, and `diagnostic_group` returns each group's entries in order.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload with two groups: "error 1" (warning)
    // plus one hint, and "error 2" (error) plus two hints. Hints reference
    // their primary via relatedInformation, and vice versa.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries, ordered by position: "error 2"'s hints/primary get group 0,
    // "error 1"'s get group 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 is "error 2" and its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 is "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3926
// Verifies the file-rename LSP protocol: when a watched file is renamed, the
// project sends `workspace/willRenameFiles`, applies the returned workspace
// edit, and then sends the `workspace/didRenameFiles` notification.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Register interest in rename operations on *.rs files and all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Start the rename; it won't complete until the server answers
    // willRenameFiles below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the server will return from willRenameFiles; the project is
    // expected to apply it.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str("file:///dir/two/two.rs").unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .handle_request::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, "file:///dir/one.rs");
                    assert_eq!(params.files[0].new_uri, "file:///dir/three.rs");
                    // Record that the request was actually received.
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server is notified via didRenameFiles.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, "file:///dir/one.rs");
            assert_eq!(params.files[0].new_uri, "file:///dir/three.rs");
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4055
4056#[gpui::test]
4057async fn test_rename(cx: &mut gpui::TestAppContext) {
4058 // hi
4059 init_test(cx);
4060
4061 let fs = FakeFs::new(cx.executor());
4062 fs.insert_tree(
4063 "/dir",
4064 json!({
4065 "one.rs": "const ONE: usize = 1;",
4066 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4067 }),
4068 )
4069 .await;
4070
4071 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4072
4073 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4074 language_registry.add(rust_lang());
4075 let mut fake_servers = language_registry.register_fake_lsp(
4076 "Rust",
4077 FakeLspAdapter {
4078 capabilities: lsp::ServerCapabilities {
4079 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
4080 prepare_provider: Some(true),
4081 work_done_progress_options: Default::default(),
4082 })),
4083 ..Default::default()
4084 },
4085 ..Default::default()
4086 },
4087 );
4088
4089 let buffer = project
4090 .update(cx, |project, cx| {
4091 project.open_local_buffer("/dir/one.rs", cx)
4092 })
4093 .await
4094 .unwrap();
4095
4096 let fake_server = fake_servers.next().await.unwrap();
4097
4098 let response = project.update(cx, |project, cx| {
4099 project.prepare_rename(buffer.clone(), 7, cx)
4100 });
4101 fake_server
4102 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
4103 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
4104 assert_eq!(params.position, lsp::Position::new(0, 7));
4105 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4106 lsp::Position::new(0, 6),
4107 lsp::Position::new(0, 9),
4108 ))))
4109 })
4110 .next()
4111 .await
4112 .unwrap();
4113 let range = response.await.unwrap().unwrap();
4114 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
4115 assert_eq!(range, 6..9);
4116
4117 let response = project.update(cx, |project, cx| {
4118 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
4119 });
4120 fake_server
4121 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
4122 assert_eq!(
4123 params.text_document_position.text_document.uri.as_str(),
4124 "file:///dir/one.rs"
4125 );
4126 assert_eq!(
4127 params.text_document_position.position,
4128 lsp::Position::new(0, 7)
4129 );
4130 assert_eq!(params.new_name, "THREE");
4131 Ok(Some(lsp::WorkspaceEdit {
4132 changes: Some(
4133 [
4134 (
4135 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
4136 vec![lsp::TextEdit::new(
4137 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
4138 "THREE".to_string(),
4139 )],
4140 ),
4141 (
4142 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
4143 vec![
4144 lsp::TextEdit::new(
4145 lsp::Range::new(
4146 lsp::Position::new(0, 24),
4147 lsp::Position::new(0, 27),
4148 ),
4149 "THREE".to_string(),
4150 ),
4151 lsp::TextEdit::new(
4152 lsp::Range::new(
4153 lsp::Position::new(0, 35),
4154 lsp::Position::new(0, 38),
4155 ),
4156 "THREE".to_string(),
4157 ),
4158 ],
4159 ),
4160 ]
4161 .into_iter()
4162 .collect(),
4163 ),
4164 ..Default::default()
4165 }))
4166 })
4167 .next()
4168 .await
4169 .unwrap();
4170 let mut transaction = response.await.unwrap().0;
4171 assert_eq!(transaction.len(), 2);
4172 assert_eq!(
4173 transaction
4174 .remove_entry(&buffer)
4175 .unwrap()
4176 .0
4177 .update(cx, |buffer, _| buffer.text()),
4178 "const THREE: usize = 1;"
4179 );
4180 assert_eq!(
4181 transaction
4182 .into_keys()
4183 .next()
4184 .unwrap()
4185 .update(cx, |buffer, _| buffer.text()),
4186 "const TWO: usize = one::THREE + one::THREE;"
4187 );
4188}
4189
4190#[gpui::test]
4191async fn test_search(cx: &mut gpui::TestAppContext) {
4192 init_test(cx);
4193
4194 let fs = FakeFs::new(cx.executor());
4195 fs.insert_tree(
4196 "/dir",
4197 json!({
4198 "one.rs": "const ONE: usize = 1;",
4199 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4200 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4201 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4202 }),
4203 )
4204 .await;
4205 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4206 assert_eq!(
4207 search(
4208 &project,
4209 SearchQuery::text(
4210 "TWO",
4211 false,
4212 true,
4213 false,
4214 Default::default(),
4215 Default::default(),
4216 None
4217 )
4218 .unwrap(),
4219 cx
4220 )
4221 .await
4222 .unwrap(),
4223 HashMap::from_iter([
4224 ("dir/two.rs".to_string(), vec![6..9]),
4225 ("dir/three.rs".to_string(), vec![37..40])
4226 ])
4227 );
4228
4229 let buffer_4 = project
4230 .update(cx, |project, cx| {
4231 project.open_local_buffer("/dir/four.rs", cx)
4232 })
4233 .await
4234 .unwrap();
4235 buffer_4.update(cx, |buffer, cx| {
4236 let text = "two::TWO";
4237 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4238 });
4239
4240 assert_eq!(
4241 search(
4242 &project,
4243 SearchQuery::text(
4244 "TWO",
4245 false,
4246 true,
4247 false,
4248 Default::default(),
4249 Default::default(),
4250 None,
4251 )
4252 .unwrap(),
4253 cx
4254 )
4255 .await
4256 .unwrap(),
4257 HashMap::from_iter([
4258 ("dir/two.rs".to_string(), vec![6..9]),
4259 ("dir/three.rs".to_string(), vec![37..40]),
4260 ("dir/four.rs".to_string(), vec![25..28, 36..39])
4261 ])
4262 );
4263}
4264
4265#[gpui::test]
4266async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4267 init_test(cx);
4268
4269 let search_query = "file";
4270
4271 let fs = FakeFs::new(cx.executor());
4272 fs.insert_tree(
4273 "/dir",
4274 json!({
4275 "one.rs": r#"// Rust file one"#,
4276 "one.ts": r#"// TypeScript file one"#,
4277 "two.rs": r#"// Rust file two"#,
4278 "two.ts": r#"// TypeScript file two"#,
4279 }),
4280 )
4281 .await;
4282 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4283
4284 assert!(
4285 search(
4286 &project,
4287 SearchQuery::text(
4288 search_query,
4289 false,
4290 true,
4291 false,
4292 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4293 Default::default(),
4294 None
4295 )
4296 .unwrap(),
4297 cx
4298 )
4299 .await
4300 .unwrap()
4301 .is_empty(),
4302 "If no inclusions match, no files should be returned"
4303 );
4304
4305 assert_eq!(
4306 search(
4307 &project,
4308 SearchQuery::text(
4309 search_query,
4310 false,
4311 true,
4312 false,
4313 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4314 Default::default(),
4315 None
4316 )
4317 .unwrap(),
4318 cx
4319 )
4320 .await
4321 .unwrap(),
4322 HashMap::from_iter([
4323 ("dir/one.rs".to_string(), vec![8..12]),
4324 ("dir/two.rs".to_string(), vec![8..12]),
4325 ]),
4326 "Rust only search should give only Rust files"
4327 );
4328
4329 assert_eq!(
4330 search(
4331 &project,
4332 SearchQuery::text(
4333 search_query,
4334 false,
4335 true,
4336 false,
4337
4338 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4339
4340 Default::default(),
4341 None,
4342 ).unwrap(),
4343 cx
4344 )
4345 .await
4346 .unwrap(),
4347 HashMap::from_iter([
4348 ("dir/one.ts".to_string(), vec![14..18]),
4349 ("dir/two.ts".to_string(), vec![14..18]),
4350 ]),
4351 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4352 );
4353
4354 assert_eq!(
4355 search(
4356 &project,
4357 SearchQuery::text(
4358 search_query,
4359 false,
4360 true,
4361 false,
4362
4363 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4364
4365 Default::default(),
4366 None,
4367 ).unwrap(),
4368 cx
4369 )
4370 .await
4371 .unwrap(),
4372 HashMap::from_iter([
4373 ("dir/two.ts".to_string(), vec![14..18]),
4374 ("dir/one.rs".to_string(), vec![8..12]),
4375 ("dir/one.ts".to_string(), vec![14..18]),
4376 ("dir/two.rs".to_string(), vec![8..12]),
4377 ]),
4378 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4379 );
4380}
4381
4382#[gpui::test]
4383async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4384 init_test(cx);
4385
4386 let search_query = "file";
4387
4388 let fs = FakeFs::new(cx.executor());
4389 fs.insert_tree(
4390 "/dir",
4391 json!({
4392 "one.rs": r#"// Rust file one"#,
4393 "one.ts": r#"// TypeScript file one"#,
4394 "two.rs": r#"// Rust file two"#,
4395 "two.ts": r#"// TypeScript file two"#,
4396 }),
4397 )
4398 .await;
4399 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4400
4401 assert_eq!(
4402 search(
4403 &project,
4404 SearchQuery::text(
4405 search_query,
4406 false,
4407 true,
4408 false,
4409 Default::default(),
4410 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4411 None,
4412 )
4413 .unwrap(),
4414 cx
4415 )
4416 .await
4417 .unwrap(),
4418 HashMap::from_iter([
4419 ("dir/one.rs".to_string(), vec![8..12]),
4420 ("dir/one.ts".to_string(), vec![14..18]),
4421 ("dir/two.rs".to_string(), vec![8..12]),
4422 ("dir/two.ts".to_string(), vec![14..18]),
4423 ]),
4424 "If no exclusions match, all files should be returned"
4425 );
4426
4427 assert_eq!(
4428 search(
4429 &project,
4430 SearchQuery::text(
4431 search_query,
4432 false,
4433 true,
4434 false,
4435 Default::default(),
4436 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4437 None,
4438 )
4439 .unwrap(),
4440 cx
4441 )
4442 .await
4443 .unwrap(),
4444 HashMap::from_iter([
4445 ("dir/one.ts".to_string(), vec![14..18]),
4446 ("dir/two.ts".to_string(), vec![14..18]),
4447 ]),
4448 "Rust exclusion search should give only TypeScript files"
4449 );
4450
4451 assert_eq!(
4452 search(
4453 &project,
4454 SearchQuery::text(
4455 search_query,
4456 false,
4457 true,
4458 false,
4459 Default::default(),
4460 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4461 None,
4462 ).unwrap(),
4463 cx
4464 )
4465 .await
4466 .unwrap(),
4467 HashMap::from_iter([
4468 ("dir/one.rs".to_string(), vec![8..12]),
4469 ("dir/two.rs".to_string(), vec![8..12]),
4470 ]),
4471 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4472 );
4473
4474 assert!(
4475 search(
4476 &project,
4477 SearchQuery::text(
4478 search_query,
4479 false,
4480 true,
4481 false,
4482 Default::default(),
4483
4484 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4485 None,
4486
4487 ).unwrap(),
4488 cx
4489 )
4490 .await
4491 .unwrap().is_empty(),
4492 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4493 );
4494}
4495
4496#[gpui::test]
4497async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4498 init_test(cx);
4499
4500 let search_query = "file";
4501
4502 let fs = FakeFs::new(cx.executor());
4503 fs.insert_tree(
4504 "/dir",
4505 json!({
4506 "one.rs": r#"// Rust file one"#,
4507 "one.ts": r#"// TypeScript file one"#,
4508 "two.rs": r#"// Rust file two"#,
4509 "two.ts": r#"// TypeScript file two"#,
4510 }),
4511 )
4512 .await;
4513 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4514
4515 assert!(
4516 search(
4517 &project,
4518 SearchQuery::text(
4519 search_query,
4520 false,
4521 true,
4522 false,
4523 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4524 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4525 None,
4526 )
4527 .unwrap(),
4528 cx
4529 )
4530 .await
4531 .unwrap()
4532 .is_empty(),
4533 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4534 );
4535
4536 assert!(
4537 search(
4538 &project,
4539 SearchQuery::text(
4540 search_query,
4541 false,
4542 true,
4543 false,
4544 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4545 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4546 None,
4547 ).unwrap(),
4548 cx
4549 )
4550 .await
4551 .unwrap()
4552 .is_empty(),
4553 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4554 );
4555
4556 assert!(
4557 search(
4558 &project,
4559 SearchQuery::text(
4560 search_query,
4561 false,
4562 true,
4563 false,
4564 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4565 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4566 None,
4567 )
4568 .unwrap(),
4569 cx
4570 )
4571 .await
4572 .unwrap()
4573 .is_empty(),
4574 "Non-matching inclusions and exclusions should not change that."
4575 );
4576
4577 assert_eq!(
4578 search(
4579 &project,
4580 SearchQuery::text(
4581 search_query,
4582 false,
4583 true,
4584 false,
4585 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4586 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4587 None,
4588 )
4589 .unwrap(),
4590 cx
4591 )
4592 .await
4593 .unwrap(),
4594 HashMap::from_iter([
4595 ("dir/one.ts".to_string(), vec![14..18]),
4596 ("dir/two.ts".to_string(), vec![14..18]),
4597 ]),
4598 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4599 );
4600}
4601
// Verifies that `files_to_include` patterns are matched against paths that are
// prefixed with the worktree root name ("worktree-a/…"), so a pattern can
// target a single worktree, while a bare glob ("*.ts") spans all worktrees.
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two worktrees with identical contents, so any asymmetry in the results
    // must come from the inclusion pattern rather than the data.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/worktree-a",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        "/worktree-b",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
        cx,
    )
    .await;

    // Worktree-qualified pattern: only worktree-a's Rust file matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Same pattern aimed at the other worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // Unqualified glob: matches the TypeScript file in every worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("worktree-a/haystack.ts".to_string(), vec![3..9]),
            ("worktree-b/haystack.ts".to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
4696
// Verifies the `include_ignored` flag of `SearchQuery::text`: by default
// gitignored directories are skipped, turning the flag on searches them, and
// include/exclude path matchers still apply to ignored files.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // include_ignored = false: only the non-ignored top-level file is searched.
    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): each search below uses a freshly created project,
    // presumably to avoid reusing state from the previous search — confirm.
    // include_ignored = true: ignored `target/` and `node_modules/` are
    // searched as well.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include/exclude matchers are applied on top of include_ignored = true:
    // only prettier's non-TypeScript file survives both filters.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4813
// Verifies `Project::create_entry` path validation: a literal file name
// containing dots ("b..") is allowed, but any path that resolves outside the
// worktree or contains a `..` component is rejected; `open_buffer` rejects
// `..` as well.
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The worktree root is /one/two/three; /one/two/c.rs is outside it.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." is a valid file name (trailing dots are not a traversal).
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only "b.." was actually created on disk; the rejected paths were not.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
4883
// Verifies `Project::hover` fan-out across multiple language servers attached
// to the same buffer: servers advertising hover capability are all queried,
// a server returning `None` contributes nothing, and a server without hover
// capability is never asked at all.
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Four fake servers for "tsx": the first three advertise hover support,
    // the last one explicitly does not.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer triggers startup of all registered servers.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler on each started server according to its role.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two answer with a hover naming themselves, so the final
            // assertion can attribute each response to its server.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            // Queried, but returns no hover content.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // Must never be queried: it declared no hover capability.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Wait until every capable server has actually received a hover request.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5034
// Verifies that hover responses consisting only of empty/whitespace-only
// marked strings are discarded instead of producing blank hover popups.
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Respond with hover parts that are empty, blank, or newline-only.
    let mut request_handled =
        fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        });

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Make sure the server actually received the request before asserting.
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    // All-empty parts must collapse to no hover at all.
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
5104
// Verifies that passing a kinds filter to `Project::code_actions` returns only
// the actions whose `CodeActionKind` is in the requested set.
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions of different kinds; only one of them is
    // requested below.
    let mut request_handled = fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
        move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        },
    );

    // Request only SOURCE_ORGANIZE_IMPORTS actions.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // The SOURCE_FIX_ALL action must have been filtered out.
    let code_actions = code_actions_task.await.unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.kind,
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
5182
5183#[gpui::test]
5184async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5185 init_test(cx);
5186
5187 let fs = FakeFs::new(cx.executor());
5188 fs.insert_tree(
5189 "/dir",
5190 json!({
5191 "a.tsx": "a",
5192 }),
5193 )
5194 .await;
5195
5196 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5197
5198 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5199 language_registry.add(tsx_lang());
5200 let language_server_names = [
5201 "TypeScriptServer",
5202 "TailwindServer",
5203 "ESLintServer",
5204 "NoActionsCapabilitiesServer",
5205 ];
5206
5207 let mut language_server_rxs = [
5208 language_registry.register_fake_lsp(
5209 "tsx",
5210 FakeLspAdapter {
5211 name: language_server_names[0],
5212 capabilities: lsp::ServerCapabilities {
5213 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5214 ..lsp::ServerCapabilities::default()
5215 },
5216 ..FakeLspAdapter::default()
5217 },
5218 ),
5219 language_registry.register_fake_lsp(
5220 "tsx",
5221 FakeLspAdapter {
5222 name: language_server_names[1],
5223 capabilities: lsp::ServerCapabilities {
5224 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5225 ..lsp::ServerCapabilities::default()
5226 },
5227 ..FakeLspAdapter::default()
5228 },
5229 ),
5230 language_registry.register_fake_lsp(
5231 "tsx",
5232 FakeLspAdapter {
5233 name: language_server_names[2],
5234 capabilities: lsp::ServerCapabilities {
5235 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5236 ..lsp::ServerCapabilities::default()
5237 },
5238 ..FakeLspAdapter::default()
5239 },
5240 ),
5241 language_registry.register_fake_lsp(
5242 "tsx",
5243 FakeLspAdapter {
5244 name: language_server_names[3],
5245 capabilities: lsp::ServerCapabilities {
5246 code_action_provider: None,
5247 ..lsp::ServerCapabilities::default()
5248 },
5249 ..FakeLspAdapter::default()
5250 },
5251 ),
5252 ];
5253
5254 let buffer = project
5255 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
5256 .await
5257 .unwrap();
5258 cx.executor().run_until_parked();
5259
5260 let mut servers_with_actions_requests = HashMap::default();
5261 for i in 0..language_server_names.len() {
5262 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5263 panic!(
5264 "Failed to get language server #{i} with name {}",
5265 &language_server_names[i]
5266 )
5267 });
5268 let new_server_name = new_server.server.name();
5269
5270 assert!(
5271 !servers_with_actions_requests.contains_key(&new_server_name),
5272 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5273 );
5274 match new_server_name.0.as_ref() {
5275 "TailwindServer" | "TypeScriptServer" => {
5276 servers_with_actions_requests.insert(
5277 new_server_name.clone(),
5278 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5279 move |_, _| {
5280 let name = new_server_name.clone();
5281 async move {
5282 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5283 lsp::CodeAction {
5284 title: format!("{name} code action"),
5285 ..lsp::CodeAction::default()
5286 },
5287 )]))
5288 }
5289 },
5290 ),
5291 );
5292 }
5293 "ESLintServer" => {
5294 servers_with_actions_requests.insert(
5295 new_server_name,
5296 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5297 |_, _| async move { Ok(None) },
5298 ),
5299 );
5300 }
5301 "NoActionsCapabilitiesServer" => {
5302 let _never_handled = new_server
5303 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5304 panic!(
5305 "Should not call for code actions server with no corresponding capabilities"
5306 )
5307 });
5308 }
5309 unexpected => panic!("Unexpected server name: {unexpected}"),
5310 }
5311 }
5312
5313 let code_actions_task = project.update(cx, |project, cx| {
5314 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5315 });
5316
5317 // cx.run_until_parked();
5318 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5319 |mut code_actions_request| async move {
5320 code_actions_request
5321 .next()
5322 .await
5323 .expect("All code actions requests should have been triggered")
5324 },
5325 ))
5326 .await;
5327 assert_eq!(
5328 vec!["TailwindServer code action", "TypeScriptServer code action"],
5329 code_actions_task
5330 .await
5331 .unwrap()
5332 .into_iter()
5333 .map(|code_action| code_action.lsp_action.title)
5334 .sorted()
5335 .collect::<Vec<_>>(),
5336 "Should receive code actions responses from all related servers with hover capabilities"
5337 );
5338}
5339
// Exercises `Project::move_worktree` through every adjacent and non-adjacent
// reordering direction. From the assertions below, `move_worktree(src, dst)`
// places the worktree `src` at the position currently occupied by `dst`.
#[gpui::test]
async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;",
            "c.rs": "let c = 2;",
        }),
    )
    .await;

    // Three single-file worktrees, one per file.
    let project = Project::test(
        fs,
        [
            "/dir/a.rs".as_ref(),
            "/dir/b.rs".as_ref(),
            "/dir/c.rs".as_ref(),
        ],
        cx,
    )
    .await;

    // check the initial state and get the worktrees
    let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let worktree_a = worktrees[0].read(cx);
        let worktree_b = worktrees[1].read(cx);
        let worktree_c = worktrees[2].read(cx);

        // check they start in the right order
        assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");

        (
            worktrees[0].clone(),
            worktrees[1].clone(),
            worktrees[2].clone(),
        )
    });

    // move first worktree to after the second
    // [a, b, c] -> [b, a, c]
    project
        .update(cx, |project, cx| {
            let first = worktree_a.read(cx);
            let second = worktree_b.read(cx);
            project.move_worktree(first.id(), second.id(), cx)
        })
        .expect("moving first after second");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the second worktree to before the first
    // [b, a, c] -> [a, b, c]
    project
        .update(cx, |project, cx| {
            let second = worktree_a.read(cx);
            let first = worktree_b.read(cx);
            project.move_worktree(first.id(), second.id(), cx)
        })
        .expect("moving second before first");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the second worktree to after the third
    // [a, b, c] -> [a, c, b]
    project
        .update(cx, |project, cx| {
            let second = worktree_b.read(cx);
            let third = worktree_c.read(cx);
            project.move_worktree(second.id(), third.id(), cx)
        })
        .expect("moving second after third");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
    });

    // move the third worktree to before the second
    // [a, c, b] -> [a, b, c]
    project
        .update(cx, |project, cx| {
            let third = worktree_c.read(cx);
            let second = worktree_b.read(cx);
            project.move_worktree(third.id(), second.id(), cx)
        })
        .expect("moving third before second");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the first worktree to after the third
    // [a, b, c] -> [b, c, a]
    project
        .update(cx, |project, cx| {
            let first = worktree_a.read(cx);
            let third = worktree_c.read(cx);
            project.move_worktree(first.id(), third.id(), cx)
        })
        .expect("moving first after third");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
    });

    // move the third worktree to before the first
    // [b, c, a] -> [a, b, c]
    project
        .update(cx, |project, cx| {
            let third = worktree_a.read(cx);
            let first = worktree_b.read(cx);
            project.move_worktree(third.id(), first.id(), cx)
        })
        .expect("moving third before first");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });
}
5537
// Verifies that the unstaged-changes diff for a buffer is computed against the
// git index contents and is recomputed when the index changes.
#[gpui::test]
async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index (staged) version of the file.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy version: one added line, one modified line.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[(Path::new("src/main.rs"), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_changes = project
        .update(cx, |project, cx| {
            project.open_unstaged_changes(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Expect an insertion hunk (the comment line) and a modification hunk
    // (hello -> goodbye).
    cx.run_until_parked();
    unstaged_changes.update(cx, |unstaged_changes, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_changes.base_text.as_ref().unwrap().read(cx).text(),
            &[
                (0..1, "", "// print goodbye\n"),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                ),
            ],
        );
    });

    // Update the index so only the println! line differs from the buffer.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[(Path::new("src/main.rs"), staged_contents)],
    );

    // The diff should refresh to a single insertion hunk.
    cx.run_until_parked();
    unstaged_changes.update(cx, |unstaged_changes, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_changes.base_text.as_ref().unwrap().read(cx).text(),
            &[(2..3, "", " println!(\"goodbye world\");\n")],
        );
    });
}
5629
5630async fn search(
5631 project: &Model<Project>,
5632 query: SearchQuery,
5633 cx: &mut gpui::TestAppContext,
5634) -> Result<HashMap<String, Vec<Range<usize>>>> {
5635 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
5636 let mut results = HashMap::default();
5637 while let Some(search_result) = search_rx.next().await {
5638 match search_result {
5639 SearchResult::Buffer { buffer, ranges } => {
5640 results.entry(buffer).or_insert(ranges);
5641 }
5642 SearchResult::LimitReached => {}
5643 }
5644 }
5645 Ok(results
5646 .into_iter()
5647 .map(|(buffer, ranges)| {
5648 buffer.update(cx, |buffer, cx| {
5649 let path = buffer
5650 .file()
5651 .unwrap()
5652 .full_path(cx)
5653 .to_string_lossy()
5654 .to_string();
5655 let ranges = ranges
5656 .into_iter()
5657 .map(|range| range.to_offset(buffer))
5658 .collect::<Vec<_>>();
5659 (path, ranges)
5660 })
5661 })
5662 .collect())
5663}
5664
5665pub fn init_test(cx: &mut gpui::TestAppContext) {
5666 if std::env::var("RUST_LOG").is_ok() {
5667 env_logger::try_init().ok();
5668 }
5669
5670 cx.update(|cx| {
5671 let settings_store = SettingsStore::test(cx);
5672 cx.set_global(settings_store);
5673 release_channel::init(SemanticVersion::default(), cx);
5674 language::init(cx);
5675 Project::init_settings(cx);
5676 });
5677}
5678
5679fn json_lang() -> Arc<Language> {
5680 Arc::new(Language::new(
5681 LanguageConfig {
5682 name: "JSON".into(),
5683 matcher: LanguageMatcher {
5684 path_suffixes: vec!["json".to_string()],
5685 ..Default::default()
5686 },
5687 ..Default::default()
5688 },
5689 None,
5690 ))
5691}
5692
5693fn js_lang() -> Arc<Language> {
5694 Arc::new(Language::new(
5695 LanguageConfig {
5696 name: "JavaScript".into(),
5697 matcher: LanguageMatcher {
5698 path_suffixes: vec!["js".to_string()],
5699 ..Default::default()
5700 },
5701 ..Default::default()
5702 },
5703 None,
5704 ))
5705}
5706
5707fn rust_lang() -> Arc<Language> {
5708 Arc::new(Language::new(
5709 LanguageConfig {
5710 name: "Rust".into(),
5711 matcher: LanguageMatcher {
5712 path_suffixes: vec!["rs".to_string()],
5713 ..Default::default()
5714 },
5715 ..Default::default()
5716 },
5717 Some(tree_sitter_rust::LANGUAGE.into()),
5718 ))
5719}
5720
5721fn typescript_lang() -> Arc<Language> {
5722 Arc::new(Language::new(
5723 LanguageConfig {
5724 name: "TypeScript".into(),
5725 matcher: LanguageMatcher {
5726 path_suffixes: vec!["ts".to_string()],
5727 ..Default::default()
5728 },
5729 ..Default::default()
5730 },
5731 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
5732 ))
5733}
5734
5735fn tsx_lang() -> Arc<Language> {
5736 Arc::new(Language::new(
5737 LanguageConfig {
5738 name: "tsx".into(),
5739 matcher: LanguageMatcher {
5740 path_suffixes: vec!["tsx".to_string()],
5741 ..Default::default()
5742 },
5743 ..Default::default()
5744 },
5745 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
5746 ))
5747}
5748
5749fn get_all_tasks(
5750 project: &Model<Project>,
5751 worktree_id: Option<WorktreeId>,
5752 task_context: &TaskContext,
5753 cx: &mut AppContext,
5754) -> Vec<(TaskSourceKind, ResolvedTask)> {
5755 let (mut old, new) = project.update(cx, |project, cx| {
5756 project
5757 .task_store
5758 .read(cx)
5759 .task_inventory()
5760 .unwrap()
5761 .read(cx)
5762 .used_and_current_resolved_tasks(worktree_id, None, task_context, cx)
5763 });
5764 old.extend(new);
5765 old
5766}