1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use git::diff::assert_hunks;
5use gpui::{AppContext, SemanticVersion, UpdateGlobal};
6use http_client::Url;
7use language::{
8 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
9 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticSet, DiskState, FakeLspAdapter,
10 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
11};
12use lsp::{
13 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
14 NumberOrString, TextDocumentEdit, WillRenameFiles,
15};
16use parking_lot::Mutex;
17use pretty_assertions::{assert_eq, assert_matches};
18use serde_json::json;
19#[cfg(not(windows))]
20use std::os;
21use std::{str::FromStr, sync::OnceLock};
22
23use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
24use task::{ResolvedTask, TaskContext};
25use unindent::Unindent as _;
26use util::{assert_set_eq, paths::PathMatcher, test::temp_tree, TryFutureExt as _};
27
28#[gpui::test]
29async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
30 cx.executor().allow_parking();
31
32 let (tx, mut rx) = futures::channel::mpsc::unbounded();
33 let _thread = std::thread::spawn(move || {
34 std::fs::metadata("/tmp").unwrap();
35 std::thread::sleep(Duration::from_millis(1000));
36 tx.unbounded_send(1).unwrap();
37 });
38 rx.next().await.unwrap();
39}
40
41#[gpui::test]
42async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
43 cx.executor().allow_parking();
44
45 let io_task = smol::unblock(move || {
46 println!("sleeping on thread {:?}", std::thread::current().id());
47 std::thread::sleep(Duration::from_millis(10));
48 1
49 });
50
51 let task = cx.foreground_executor().spawn(async move {
52 io_task.await;
53 });
54
55 task.await;
56}
57
58#[cfg(not(windows))]
59#[gpui::test]
60async fn test_symlinks(cx: &mut gpui::TestAppContext) {
61 init_test(cx);
62 cx.executor().allow_parking();
63
64 let dir = temp_tree(json!({
65 "root": {
66 "apple": "",
67 "banana": {
68 "carrot": {
69 "date": "",
70 "endive": "",
71 }
72 },
73 "fennel": {
74 "grape": "",
75 }
76 }
77 }));
78
79 let root_link_path = dir.path().join("root_link");
80 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
81 os::unix::fs::symlink(
82 dir.path().join("root/fennel"),
83 dir.path().join("root/finnochio"),
84 )
85 .unwrap();
86
87 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
88
89 project.update(cx, |project, cx| {
90 let tree = project.worktrees(cx).next().unwrap().read(cx);
91 assert_eq!(tree.file_count(), 5);
92 assert_eq!(
93 tree.inode_for_path("fennel/grape"),
94 tree.inode_for_path("finnochio/grape")
95 );
96 });
97}
98
// End-to-end check of EditorConfig support: .editorconfig values override
// .zed/settings.json for matching files, a nested .editorconfig overrides its
// parent, `tab_width` is used when `indent_size` is absent, and files that no
// glob matches fall back to the project settings.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let dir = temp_tree(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n    A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n    B\n}",
        },
        "c.js": "def c\n  C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into the fake FS so settings files are watched.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let the worktree scan and the settings observers settle.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by .editorconfig's glob "*.rs",
        // so it keeps the tab_size from .zed/settings.json.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
188
// Local project settings and tasks live in `.zed/` directories: the nearest
// settings.json wins per-file, while tasks.json files from every level are
// aggregated. Also verifies task ordering after a task is scheduled and after
// global (file-based) tasks are added.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n    A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n    B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    // Wait for the settings/tasks files to be discovered and parsed.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // Identifies tasks coming from the worktree-root `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    // Collect (source, label, args, env) for every available task, while also
    // checking that each file sees the tab_size from its nearest settings.json.
    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Tasks from both `.zed` directories are present; the nested one sorts first.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as recently scheduled, and add a global tasks file.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // The just-scheduled task now sorts first, and the new global task — with
    // its env — appears last.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
382
// Language server lifecycle management: servers start lazily when a buffer of
// their language opens, buffers are configured from server capabilities,
// edits/saves/renames are routed only to matching servers, renames across
// language boundaries close/reopen the document on the right server (resetting
// the document version and clearing stale diagnostics), and restarting servers
// reopens all their documents on the fresh instances.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Two fake servers with distinct completion trigger characters, so we can
    // tell which server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    // A same-extension rename is seen by the server as close + reopen.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic on the renamed buffer so we can verify it is cleared
    // when the buffer later changes language.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
780
// A language server can register `workspace/didChangeWatchedFiles` watchers.
// Watching a path inside a gitignored directory causes that directory to be
// loaded into the worktree, and subsequent FS mutations are forwarded to the
// server only when they match one of its watch globs.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: a single file, a glob over src, and a glob
    // inside the gitignored `target/y` directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Accumulate reported file events, sorted by URI for stable assertions.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    // Loading the watched ignored directory required additional directory scans.
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
974
975#[gpui::test]
976async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
977 init_test(cx);
978
979 let fs = FakeFs::new(cx.executor());
980 fs.insert_tree(
981 "/dir",
982 json!({
983 "a.rs": "let a = 1;",
984 "b.rs": "let b = 2;"
985 }),
986 )
987 .await;
988
989 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
990
991 let buffer_a = project
992 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
993 .await
994 .unwrap();
995 let buffer_b = project
996 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
997 .await
998 .unwrap();
999
1000 project.update(cx, |project, cx| {
1001 project
1002 .update_diagnostics(
1003 LanguageServerId(0),
1004 lsp::PublishDiagnosticsParams {
1005 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1006 version: None,
1007 diagnostics: vec![lsp::Diagnostic {
1008 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1009 severity: Some(lsp::DiagnosticSeverity::ERROR),
1010 message: "error 1".to_string(),
1011 ..Default::default()
1012 }],
1013 },
1014 &[],
1015 cx,
1016 )
1017 .unwrap();
1018 project
1019 .update_diagnostics(
1020 LanguageServerId(0),
1021 lsp::PublishDiagnosticsParams {
1022 uri: Url::from_file_path("/dir/b.rs").unwrap(),
1023 version: None,
1024 diagnostics: vec![lsp::Diagnostic {
1025 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1026 severity: Some(DiagnosticSeverity::WARNING),
1027 message: "error 2".to_string(),
1028 ..Default::default()
1029 }],
1030 },
1031 &[],
1032 cx,
1033 )
1034 .unwrap();
1035 });
1036
1037 buffer_a.update(cx, |buffer, _| {
1038 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1039 assert_eq!(
1040 chunks
1041 .iter()
1042 .map(|(s, d)| (s.as_str(), *d))
1043 .collect::<Vec<_>>(),
1044 &[
1045 ("let ", None),
1046 ("a", Some(DiagnosticSeverity::ERROR)),
1047 (" = 1;", None),
1048 ]
1049 );
1050 });
1051 buffer_b.update(cx, |buffer, _| {
1052 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1053 assert_eq!(
1054 chunks
1055 .iter()
1056 .map(|(s, d)| (s.as_str(), *d))
1057 .collect::<Vec<_>>(),
1058 &[
1059 ("let ", None),
1060 ("b", Some(DiagnosticSeverity::WARNING)),
1061 (" = 2;", None),
1062 ]
1063 );
1064 });
1065}
1066
1067#[gpui::test]
1068async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1069 init_test(cx);
1070
1071 let fs = FakeFs::new(cx.executor());
1072 fs.insert_tree(
1073 "/root",
1074 json!({
1075 "dir": {
1076 ".git": {
1077 "HEAD": "ref: refs/heads/main",
1078 },
1079 ".gitignore": "b.rs",
1080 "a.rs": "let a = 1;",
1081 "b.rs": "let b = 2;",
1082 },
1083 "other.rs": "let b = c;"
1084 }),
1085 )
1086 .await;
1087
1088 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1089 let (worktree, _) = project
1090 .update(cx, |project, cx| {
1091 project.find_or_create_worktree("/root/dir", true, cx)
1092 })
1093 .await
1094 .unwrap();
1095 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1096
1097 let (worktree, _) = project
1098 .update(cx, |project, cx| {
1099 project.find_or_create_worktree("/root/other.rs", false, cx)
1100 })
1101 .await
1102 .unwrap();
1103 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1104
1105 let server_id = LanguageServerId(0);
1106 project.update(cx, |project, cx| {
1107 project
1108 .update_diagnostics(
1109 server_id,
1110 lsp::PublishDiagnosticsParams {
1111 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1112 version: None,
1113 diagnostics: vec![lsp::Diagnostic {
1114 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1115 severity: Some(lsp::DiagnosticSeverity::ERROR),
1116 message: "unused variable 'b'".to_string(),
1117 ..Default::default()
1118 }],
1119 },
1120 &[],
1121 cx,
1122 )
1123 .unwrap();
1124 project
1125 .update_diagnostics(
1126 server_id,
1127 lsp::PublishDiagnosticsParams {
1128 uri: Url::from_file_path("/root/other.rs").unwrap(),
1129 version: None,
1130 diagnostics: vec![lsp::Diagnostic {
1131 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1132 severity: Some(lsp::DiagnosticSeverity::ERROR),
1133 message: "unknown variable 'c'".to_string(),
1134 ..Default::default()
1135 }],
1136 },
1137 &[],
1138 cx,
1139 )
1140 .unwrap();
1141 });
1142
1143 let main_ignored_buffer = project
1144 .update(cx, |project, cx| {
1145 project.open_buffer((main_worktree_id, "b.rs"), cx)
1146 })
1147 .await
1148 .unwrap();
1149 main_ignored_buffer.update(cx, |buffer, _| {
1150 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1151 assert_eq!(
1152 chunks
1153 .iter()
1154 .map(|(s, d)| (s.as_str(), *d))
1155 .collect::<Vec<_>>(),
1156 &[
1157 ("let ", None),
1158 ("b", Some(DiagnosticSeverity::ERROR)),
1159 (" = 2;", None),
1160 ],
1161 "Gigitnored buffers should still get in-buffer diagnostics",
1162 );
1163 });
1164 let other_buffer = project
1165 .update(cx, |project, cx| {
1166 project.open_buffer((other_worktree_id, ""), cx)
1167 })
1168 .await
1169 .unwrap();
1170 other_buffer.update(cx, |buffer, _| {
1171 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1172 assert_eq!(
1173 chunks
1174 .iter()
1175 .map(|(s, d)| (s.as_str(), *d))
1176 .collect::<Vec<_>>(),
1177 &[
1178 ("let b = ", None),
1179 ("c", Some(DiagnosticSeverity::ERROR)),
1180 (";", None),
1181 ],
1182 "Buffers from hidden projects should still get in-buffer diagnostics"
1183 );
1184 });
1185
1186 project.update(cx, |project, cx| {
1187 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1188 assert_eq!(
1189 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1190 vec![(
1191 ProjectPath {
1192 worktree_id: main_worktree_id,
1193 path: Arc::from(Path::new("b.rs")),
1194 },
1195 server_id,
1196 DiagnosticSummary {
1197 error_count: 1,
1198 warning_count: 0,
1199 }
1200 )]
1201 );
1202 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1203 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1204 });
1205}
1206
// Verifies that LSP $/progress notifications carrying the adapter's registered
// disk-based-diagnostics token are translated into project events
// (DiskBasedDiagnosticsStarted/Finished), and that re-publishing empty
// diagnostics does not emit a redundant DiagnosticsUpdated event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // A progress "begin" whose token is prefixed with the registered token
    // is surfaced as a disk-based-diagnostics start event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics emits a per-path DiagnosticsUpdated event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the progress completes the disk-based diagnostics cycle.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The previously published diagnostic is present in the newly opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // No further event should be pending: empty -> empty is a no-op.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1339
// Verifies that restarting a language server while its disk-based diagnostics
// progress is still open does not leave the project stuck in a "diagnostics
// running" state: only the new server's progress is tracked.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The replacement server gets a fresh id (1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server (id 1) is reported as running diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1424
// Verifies that diagnostics previously published by a language server are
// cleared (both in the buffer and in the project summary) when that server
// is restarted.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The error is visible in the buffer and counted in the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1503
1504#[gpui::test]
1505async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1506 init_test(cx);
1507
1508 let fs = FakeFs::new(cx.executor());
1509 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1510
1511 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1512 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1513
1514 language_registry.add(rust_lang());
1515 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1516
1517 let buffer = project
1518 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1519 .await
1520 .unwrap();
1521
1522 // Before restarting the server, report diagnostics with an unknown buffer version.
1523 let fake_server = fake_servers.next().await.unwrap();
1524 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1525 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1526 version: Some(10000),
1527 diagnostics: Vec::new(),
1528 });
1529 cx.executor().run_until_parked();
1530
1531 project.update(cx, |project, cx| {
1532 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1533 });
1534 let mut fake_server = fake_servers.next().await.unwrap();
1535 let notification = fake_server
1536 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1537 .await
1538 .text_document;
1539 assert_eq!(notification.version, 0);
1540}
1541
// Verifies that cancelling language-server work only sends a
// WorkDoneProgressCancel for progress that was begun as cancellable.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First token: explicitly NOT cancellable — must never receive a cancel.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // Second token: cancellable — the only one a cancel may target.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token's progress gets a cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1604
1605#[gpui::test]
1606async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1607 init_test(cx);
1608
1609 let fs = FakeFs::new(cx.executor());
1610 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1611 .await;
1612
1613 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1614 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1615
1616 let mut fake_rust_servers = language_registry.register_fake_lsp(
1617 "Rust",
1618 FakeLspAdapter {
1619 name: "rust-lsp",
1620 ..Default::default()
1621 },
1622 );
1623 let mut fake_js_servers = language_registry.register_fake_lsp(
1624 "JavaScript",
1625 FakeLspAdapter {
1626 name: "js-lsp",
1627 ..Default::default()
1628 },
1629 );
1630 language_registry.add(rust_lang());
1631 language_registry.add(js_lang());
1632
1633 let _rs_buffer = project
1634 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1635 .await
1636 .unwrap();
1637 let _js_buffer = project
1638 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1639 .await
1640 .unwrap();
1641
1642 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1643 assert_eq!(
1644 fake_rust_server_1
1645 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1646 .await
1647 .text_document
1648 .uri
1649 .as_str(),
1650 "file:///dir/a.rs"
1651 );
1652
1653 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1654 assert_eq!(
1655 fake_js_server
1656 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1657 .await
1658 .text_document
1659 .uri
1660 .as_str(),
1661 "file:///dir/b.js"
1662 );
1663
1664 // Disable Rust language server, ensuring only that server gets stopped.
1665 cx.update(|cx| {
1666 SettingsStore::update_global(cx, |settings, cx| {
1667 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1668 settings.languages.insert(
1669 "Rust".into(),
1670 LanguageSettingsContent {
1671 enable_language_server: Some(false),
1672 ..Default::default()
1673 },
1674 );
1675 });
1676 })
1677 });
1678 fake_rust_server_1
1679 .receive_notification::<lsp::notification::Exit>()
1680 .await;
1681
1682 // Enable Rust and disable JavaScript language servers, ensuring that the
1683 // former gets started again and that the latter stops.
1684 cx.update(|cx| {
1685 SettingsStore::update_global(cx, |settings, cx| {
1686 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1687 settings.languages.insert(
1688 LanguageName::new("Rust"),
1689 LanguageSettingsContent {
1690 enable_language_server: Some(true),
1691 ..Default::default()
1692 },
1693 );
1694 settings.languages.insert(
1695 LanguageName::new("JavaScript"),
1696 LanguageSettingsContent {
1697 enable_language_server: Some(false),
1698 ..Default::default()
1699 },
1700 );
1701 });
1702 })
1703 });
1704 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1705 assert_eq!(
1706 fake_rust_server_2
1707 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1708 .await
1709 .text_document
1710 .uri
1711 .as_str(),
1712 "file:///dir/a.rs"
1713 );
1714 fake_js_server
1715 .receive_notification::<lsp::notification::Exit>()
1716 .await;
1717}
1718
// Verifies that published diagnostics are transformed through edits made after
// the version they were reported against: ranges shift with buffer edits,
// overlapping diagnostics highlight correctly, and out-of-order/stale-version
// publishes are reconciled. Group ids in the assertions increase across
// successive publishes.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Diagnostics reported at rows 1-2 now appear at rows 3-4, shifted by
        // the two inserted newlines.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A sub-range query truncates chunks at the query boundaries.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the error takes precedence in
        // the rendered chunks; the warning covers the remainder of its range.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Final state: diagnostics are sorted by buffer position regardless of
        // the order they were published in.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1998
1999#[gpui::test]
2000async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2001 init_test(cx);
2002
2003 let text = concat!(
2004 "let one = ;\n", //
2005 "let two = \n",
2006 "let three = 3;\n",
2007 );
2008
2009 let fs = FakeFs::new(cx.executor());
2010 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2011
2012 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2013 let buffer = project
2014 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2015 .await
2016 .unwrap();
2017
2018 project.update(cx, |project, cx| {
2019 project.lsp_store.update(cx, |lsp_store, cx| {
2020 lsp_store
2021 .update_buffer_diagnostics(
2022 &buffer,
2023 LanguageServerId(0),
2024 None,
2025 vec![
2026 DiagnosticEntry {
2027 range: Unclipped(PointUtf16::new(0, 10))
2028 ..Unclipped(PointUtf16::new(0, 10)),
2029 diagnostic: Diagnostic {
2030 severity: DiagnosticSeverity::ERROR,
2031 message: "syntax error 1".to_string(),
2032 ..Default::default()
2033 },
2034 },
2035 DiagnosticEntry {
2036 range: Unclipped(PointUtf16::new(1, 10))
2037 ..Unclipped(PointUtf16::new(1, 10)),
2038 diagnostic: Diagnostic {
2039 severity: DiagnosticSeverity::ERROR,
2040 message: "syntax error 2".to_string(),
2041 ..Default::default()
2042 },
2043 },
2044 ],
2045 cx,
2046 )
2047 .unwrap();
2048 })
2049 });
2050
2051 // An empty range is extended forward to include the following character.
2052 // At the end of a line, an empty range is extended backward to include
2053 // the preceding character.
2054 buffer.update(cx, |buffer, _| {
2055 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2056 assert_eq!(
2057 chunks
2058 .iter()
2059 .map(|(s, d)| (s.as_str(), *d))
2060 .collect::<Vec<_>>(),
2061 &[
2062 ("let one = ", None),
2063 (";", Some(DiagnosticSeverity::ERROR)),
2064 ("\nlet two =", None),
2065 (" ", Some(DiagnosticSeverity::ERROR)),
2066 ("\nlet three = 3;\n", None)
2067 ]
2068 );
2069 });
2070}
2071
2072#[gpui::test]
2073async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2074 init_test(cx);
2075
2076 let fs = FakeFs::new(cx.executor());
2077 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2078 .await;
2079
2080 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2081
2082 project.update(cx, |project, cx| {
2083 project
2084 .update_diagnostic_entries(
2085 LanguageServerId(0),
2086 Path::new("/dir/a.rs").to_owned(),
2087 None,
2088 vec![DiagnosticEntry {
2089 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2090 diagnostic: Diagnostic {
2091 severity: DiagnosticSeverity::ERROR,
2092 is_primary: true,
2093 message: "syntax error a1".to_string(),
2094 ..Default::default()
2095 },
2096 }],
2097 cx,
2098 )
2099 .unwrap();
2100 project
2101 .update_diagnostic_entries(
2102 LanguageServerId(1),
2103 Path::new("/dir/a.rs").to_owned(),
2104 None,
2105 vec![DiagnosticEntry {
2106 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2107 diagnostic: Diagnostic {
2108 severity: DiagnosticSeverity::ERROR,
2109 is_primary: true,
2110 message: "syntax error b1".to_string(),
2111 ..Default::default()
2112 },
2113 }],
2114 cx,
2115 )
2116 .unwrap();
2117
2118 assert_eq!(
2119 project.diagnostic_summary(false, cx),
2120 DiagnosticSummary {
2121 error_count: 2,
2122 warning_count: 0,
2123 }
2124 );
2125 });
2126}
2127
// Verifies that LSP text edits computed against an OLD document version are
// correctly rebased through the buffer edits made since that version before
// being applied.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the document version the server will compute its edits against.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // These edits use coordinates from the OLD version (`lsp_document_version`);
    // `edits_from_lsp` must translate them into the current buffer.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits preserves the user's interleaved comments.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2280
// Verifies that a huge, overlapping LSP diff that amounts to a small change
// is collapsed by `edits_from_lsp` into a minimal set of buffer edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the four verbose LSP edits above, the result is reduced to
        // just two buffer edits: the import rewrite and a two-line deletion.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2389
// Verifies that `edits_from_lsp` tolerates malformed server output:
// out-of-order edits, an inverted range (end before start), and a range
// extending past the end of the document. The expected result below shows
// that the edits are normalized and clipped to the buffer's actual extent.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0, 4) precedes start (0, 8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range extends past the last line of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The invalid input is normalized into the same minimal pair of edits
        // as the well-formed variant of this test produces.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2494
2495fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2496 buffer: &Buffer,
2497 range: Range<T>,
2498) -> Vec<(String, Option<DiagnosticSeverity>)> {
2499 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2500 for chunk in buffer.snapshot().chunks(range, true) {
2501 if chunks.last().map_or(false, |prev_chunk| {
2502 prev_chunk.1 == chunk.diagnostic_severity
2503 }) {
2504 chunks.last_mut().unwrap().0.push_str(chunk.text);
2505 } else {
2506 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2507 }
2508 }
2509 chunks
2510}
2511
// Verifies go-to-definition across files: the response targets a file outside
// the visible worktree, which is opened via an invisible worktree that is
// released once the definition result is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs exists on disk but is not open.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server answers the definition request with a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // The target file was loaded through an extra, non-visible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // Dropping the definition released the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: snapshot the project's worktrees as (abs_path, is_visible) pairs.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2605
// Verifies completions whose items carry no explicit text-edit range: the
// replacement range must be inferred from the text around the cursor, both
// for a plain identifier ("fqn") and for a cursor inside a string literal.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing the identifier "fqn" at the end of the line.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Register the handler, then wait for it to service the request issued
    // above before awaiting the completions future.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred old range covers exactly the 3-character word "fqn".
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: cursor just before the closing quote of a string literal.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred old range covers "cmp" — the segment after the slash,
    // not the whole string contents.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2697
// Verifies that carriage returns in a completion item's `insert_text`
// (both lone "\r" and "\r\n") are normalized to "\n" in the resulting
// completion's `new_text`.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert_text mixes "\r" and "\r\n" line endings.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both kinds of carriage return were converted to plain newlines.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2758
// Verifies the full command-backed code-action flow: the action carries no
// edits, so applying it resolves the action, executes its command, accepts
// the server's `workspace/applyEdit` request issued during execution, and
// wraps the resulting edits in an undoable project transaction.
//
// Note: each client-side future (`code_actions`, `apply`) is created first,
// and the matching request handler is registered afterwards; `.next().await`
// on the handler stream waits until the handler has serviced the request.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2893
2894#[gpui::test(iterations = 10)]
2895async fn test_save_file(cx: &mut gpui::TestAppContext) {
2896 init_test(cx);
2897
2898 let fs = FakeFs::new(cx.executor());
2899 fs.insert_tree(
2900 "/dir",
2901 json!({
2902 "file1": "the old contents",
2903 }),
2904 )
2905 .await;
2906
2907 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2908 let buffer = project
2909 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2910 .await
2911 .unwrap();
2912 buffer.update(cx, |buffer, cx| {
2913 assert_eq!(buffer.text(), "the old contents");
2914 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2915 });
2916
2917 project
2918 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2919 .await
2920 .unwrap();
2921
2922 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2923 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2924}
2925
2926#[gpui::test(iterations = 30)]
2927async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2928 init_test(cx);
2929
2930 let fs = FakeFs::new(cx.executor().clone());
2931 fs.insert_tree(
2932 "/dir",
2933 json!({
2934 "file1": "the original contents",
2935 }),
2936 )
2937 .await;
2938
2939 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2940 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2941 let buffer = project
2942 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2943 .await
2944 .unwrap();
2945
2946 // Simulate buffer diffs being slow, so that they don't complete before
2947 // the next file change occurs.
2948 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2949
2950 // Change the buffer's file on disk, and then wait for the file change
2951 // to be detected by the worktree, so that the buffer starts reloading.
2952 fs.save(
2953 "/dir/file1".as_ref(),
2954 &"the first contents".into(),
2955 Default::default(),
2956 )
2957 .await
2958 .unwrap();
2959 worktree.next_event(cx).await;
2960
2961 // Change the buffer's file again. Depending on the random seed, the
2962 // previous file change may still be in progress.
2963 fs.save(
2964 "/dir/file1".as_ref(),
2965 &"the second contents".into(),
2966 Default::default(),
2967 )
2968 .await
2969 .unwrap();
2970 worktree.next_event(cx).await;
2971
2972 cx.executor().run_until_parked();
2973 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2974 buffer.read_with(cx, |buffer, _| {
2975 assert_eq!(buffer.text(), on_disk_text);
2976 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2977 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2978 });
2979}
2980
2981#[gpui::test(iterations = 30)]
2982async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2983 init_test(cx);
2984
2985 let fs = FakeFs::new(cx.executor().clone());
2986 fs.insert_tree(
2987 "/dir",
2988 json!({
2989 "file1": "the original contents",
2990 }),
2991 )
2992 .await;
2993
2994 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2995 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2996 let buffer = project
2997 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2998 .await
2999 .unwrap();
3000
3001 // Simulate buffer diffs being slow, so that they don't complete before
3002 // the next file change occurs.
3003 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3004
3005 // Change the buffer's file on disk, and then wait for the file change
3006 // to be detected by the worktree, so that the buffer starts reloading.
3007 fs.save(
3008 "/dir/file1".as_ref(),
3009 &"the first contents".into(),
3010 Default::default(),
3011 )
3012 .await
3013 .unwrap();
3014 worktree.next_event(cx).await;
3015
3016 cx.executor()
3017 .spawn(cx.executor().simulate_random_delay())
3018 .await;
3019
3020 // Perform a noop edit, causing the buffer's version to increase.
3021 buffer.update(cx, |buffer, cx| {
3022 buffer.edit([(0..0, " ")], None, cx);
3023 buffer.undo(cx);
3024 });
3025
3026 cx.executor().run_until_parked();
3027 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3028 buffer.read_with(cx, |buffer, _| {
3029 let buffer_text = buffer.text();
3030 if buffer_text == on_disk_text {
3031 assert!(
3032 !buffer.is_dirty() && !buffer.has_conflict(),
3033 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3034 );
3035 }
3036 // If the file change occurred while the buffer was processing the first
3037 // change, the buffer will be in a conflicting state.
3038 else {
3039 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3040 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3041 }
3042 });
3043}
3044
3045#[gpui::test]
3046async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3047 init_test(cx);
3048
3049 let fs = FakeFs::new(cx.executor());
3050 fs.insert_tree(
3051 "/dir",
3052 json!({
3053 "file1": "the old contents",
3054 }),
3055 )
3056 .await;
3057
3058 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
3059 let buffer = project
3060 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3061 .await
3062 .unwrap();
3063 buffer.update(cx, |buffer, cx| {
3064 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3065 });
3066
3067 project
3068 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3069 .await
3070 .unwrap();
3071
3072 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3073 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3074}
3075
3076#[gpui::test]
3077async fn test_save_as(cx: &mut gpui::TestAppContext) {
3078 init_test(cx);
3079
3080 let fs = FakeFs::new(cx.executor());
3081 fs.insert_tree("/dir", json!({})).await;
3082
3083 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3084
3085 let languages = project.update(cx, |project, _| project.languages().clone());
3086 languages.add(rust_lang());
3087
3088 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3089 buffer.update(cx, |buffer, cx| {
3090 buffer.edit([(0..0, "abc")], None, cx);
3091 assert!(buffer.is_dirty());
3092 assert!(!buffer.has_conflict());
3093 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3094 });
3095 project
3096 .update(cx, |project, cx| {
3097 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3098 let path = ProjectPath {
3099 worktree_id,
3100 path: Arc::from(Path::new("file1.rs")),
3101 };
3102 project.save_buffer_as(buffer.clone(), path, cx)
3103 })
3104 .await
3105 .unwrap();
3106 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3107
3108 cx.executor().run_until_parked();
3109 buffer.update(cx, |buffer, cx| {
3110 assert_eq!(
3111 buffer.file().unwrap().full_path(cx),
3112 Path::new("dir/file1.rs")
3113 );
3114 assert!(!buffer.is_dirty());
3115 assert!(!buffer.has_conflict());
3116 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3117 });
3118
3119 let opened_buffer = project
3120 .update(cx, |project, cx| {
3121 project.open_local_buffer("/dir/file1.rs", cx)
3122 })
3123 .await
3124 .unwrap();
3125 assert_eq!(opened_buffer, buffer);
3126}
3127
// Exercises a real-filesystem rescan after a batch of renames and deletions:
// worktree entry ids must remain stable across renames, open buffers must
// track their files' new paths (or report `DiskState::Deleted`), and a
// remote replica fed the observed update stream must converge to the same
// set of paths as the local worktree.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: resolve a worktree-relative path to its entry id.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so it can later be
    // replayed into the remote replica.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids survive renames of both the files and their parent dirs.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // The deleted file's buffer keeps its last-known path…
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        // …but its disk state reflects the deletion.
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3287
3288#[gpui::test(iterations = 10)]
3289async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3290 init_test(cx);
3291
3292 let fs = FakeFs::new(cx.executor());
3293 fs.insert_tree(
3294 "/dir",
3295 json!({
3296 "a": {
3297 "file1": "",
3298 }
3299 }),
3300 )
3301 .await;
3302
3303 let project = Project::test(fs, [Path::new("/dir")], cx).await;
3304 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3305 let tree_id = tree.update(cx, |tree, _| tree.id());
3306
3307 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3308 project.update(cx, |project, cx| {
3309 let tree = project.worktrees(cx).next().unwrap();
3310 tree.read(cx)
3311 .entry_for_path(path)
3312 .unwrap_or_else(|| panic!("no entry for path {}", path))
3313 .id
3314 })
3315 };
3316
3317 let dir_id = id_for_path("a", cx);
3318 let file_id = id_for_path("a/file1", cx);
3319 let buffer = project
3320 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3321 .await
3322 .unwrap();
3323 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3324
3325 project
3326 .update(cx, |project, cx| {
3327 project.rename_entry(dir_id, Path::new("b"), cx)
3328 })
3329 .unwrap()
3330 .await
3331 .to_included()
3332 .unwrap();
3333 cx.executor().run_until_parked();
3334
3335 assert_eq!(id_for_path("b", cx), dir_id);
3336 assert_eq!(id_for_path("b/file1", cx), file_id);
3337 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3338}
3339
3340#[gpui::test]
3341async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3342 init_test(cx);
3343
3344 let fs = FakeFs::new(cx.executor());
3345 fs.insert_tree(
3346 "/dir",
3347 json!({
3348 "a.txt": "a-contents",
3349 "b.txt": "b-contents",
3350 }),
3351 )
3352 .await;
3353
3354 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3355
3356 // Spawn multiple tasks to open paths, repeating some paths.
3357 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3358 (
3359 p.open_local_buffer("/dir/a.txt", cx),
3360 p.open_local_buffer("/dir/b.txt", cx),
3361 p.open_local_buffer("/dir/a.txt", cx),
3362 )
3363 });
3364
3365 let buffer_a_1 = buffer_a_1.await.unwrap();
3366 let buffer_a_2 = buffer_a_2.await.unwrap();
3367 let buffer_b = buffer_b.await.unwrap();
3368 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3369 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3370
3371 // There is only one buffer per path.
3372 let buffer_a_id = buffer_a_1.entity_id();
3373 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3374
3375 // Open the same path again while it is still open.
3376 drop(buffer_a_1);
3377 let buffer_a_3 = project
3378 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3379 .await
3380 .unwrap();
3381
3382 // There's still only one buffer per path.
3383 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3384}
3385
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    // Verifies the buffer dirty-state lifecycle: edits mark a buffer dirty,
    // saving clears it, restoring the saved content clears it again, and
    // deleting the backing file marks it dirty — each transition emitting
    // the expected sequence of buffer events.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    // Accumulates every non-Operation event emitted by buffer1.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                // Operation events accompany every edit; filter them out so
                // the assertions below see only the state-change events.
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version and the
        // file's on-disk mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Only the first of the two consecutive edits toggles the dirty
        // flag, so DirtyChanged appears once between the two Edited events.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    // Let the worktree observe the deletion and notify the buffer.
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer first, then clear the event log so only the events
    // caused by the deletion itself are asserted below.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3536
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how an open buffer responds to its file changing on disk:
    // a clean buffer is reloaded via a diff-based edit (preserving anchors),
    // while a dirty buffer keeps its contents and reports a conflict.
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the three initial lines, so we
    // can check below that anchors survive the diff-based reload.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the text they were attached to: rows
        // shifted by the inserted lines, and the anchor on the removed
        // line ("c") landed at the nearest surviving position.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3617
3618#[gpui::test]
3619async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3620 init_test(cx);
3621
3622 let fs = FakeFs::new(cx.executor());
3623 fs.insert_tree(
3624 "/dir",
3625 json!({
3626 "file1": "a\nb\nc\n",
3627 "file2": "one\r\ntwo\r\nthree\r\n",
3628 }),
3629 )
3630 .await;
3631
3632 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3633 let buffer1 = project
3634 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3635 .await
3636 .unwrap();
3637 let buffer2 = project
3638 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3639 .await
3640 .unwrap();
3641
3642 buffer1.update(cx, |buffer, _| {
3643 assert_eq!(buffer.text(), "a\nb\nc\n");
3644 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3645 });
3646 buffer2.update(cx, |buffer, _| {
3647 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3648 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3649 });
3650
3651 // Change a file's line endings on disk from unix to windows. The buffer's
3652 // state updates correctly.
3653 fs.save(
3654 "/dir/file1".as_ref(),
3655 &"aaa\nb\nc\n".into(),
3656 LineEnding::Windows,
3657 )
3658 .await
3659 .unwrap();
3660 cx.executor().run_until_parked();
3661 buffer1.update(cx, |buffer, _| {
3662 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3663 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3664 });
3665
3666 // Save a file with windows line endings. The file is written correctly.
3667 buffer2.update(cx, |buffer, cx| {
3668 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3669 });
3670 project
3671 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3672 .await
3673 .unwrap();
3674 assert_eq!(
3675 fs.load("/dir/file2".as_ref()).await.unwrap(),
3676 "one\r\ntwo\r\nthree\r\nfour\r\n",
3677 );
3678}
3679
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics published with `relatedInformation` are
    // grouped: a primary diagnostic and the hint diagnostics that reference
    // it share a `group_id`, exactly one entry per group is `is_primary`,
    // and `diagnostic_group` returns each group's members.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    // Two diagnostic families: "error 1" (warning + one hint) and "error 2"
    // (error + two hints). Each hint carries related_information pointing
    // back at its primary, which is what drives the grouping below.
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, ordered by position: "error 2" and its hints landed
    // in group 0, "error 1" and its hint in group 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 is the "error 2" family: both hints plus the primary error.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 is the "error 1" family: the primary warning plus its hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3921
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies that renaming a worktree entry sends the LSP
    // `workspace/willRenameFiles` request (and applies the workspace edit
    // the server returns) followed by the `workspace/didRenameFiles`
    // notification, for a server whose registered file-operation filters
    // match the renamed path.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server registers interest in "*.rs" files and in all folders;
    // the renamed "one.rs" matches the first filter.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Start the rename; it stays pending until the willRenameFiles handler
    // below responds.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The workspace edit the server returns from willRenameFiles; the test
    // asserts at the end that exactly this edit reached the project.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str("file:///dir/two/two.rs").unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .handle_request::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request must describe the old and new file URIs.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, "file:///dir/one.rs");
                    assert_eq!(params.files[0].new_uri, "file:///dir/three.rs");
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server is notified via didRenameFiles
    // with the same old/new URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, "file:///dir/one.rs");
            assert_eq!(params.files[0].new_uri, "file:///dir/three.rs");
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4050
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Verifies symbol renaming via LSP: `prepare_rename` resolves the range
    // of the symbol under the cursor, and `perform_rename` applies the
    // server's multi-file workspace edit to the affected buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the fake server reports
    // the renameable range 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server responds with edits spanning both
    // "one.rs" (the definition) and "two.rs" (the two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each edited buffer to its undo entry; both
    // buffers now contain the renamed symbol.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4184
4185#[gpui::test]
4186async fn test_search(cx: &mut gpui::TestAppContext) {
4187 init_test(cx);
4188
4189 let fs = FakeFs::new(cx.executor());
4190 fs.insert_tree(
4191 "/dir",
4192 json!({
4193 "one.rs": "const ONE: usize = 1;",
4194 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4195 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4196 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4197 }),
4198 )
4199 .await;
4200 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4201 assert_eq!(
4202 search(
4203 &project,
4204 SearchQuery::text(
4205 "TWO",
4206 false,
4207 true,
4208 false,
4209 Default::default(),
4210 Default::default(),
4211 None
4212 )
4213 .unwrap(),
4214 cx
4215 )
4216 .await
4217 .unwrap(),
4218 HashMap::from_iter([
4219 ("dir/two.rs".to_string(), vec![6..9]),
4220 ("dir/three.rs".to_string(), vec![37..40])
4221 ])
4222 );
4223
4224 let buffer_4 = project
4225 .update(cx, |project, cx| {
4226 project.open_local_buffer("/dir/four.rs", cx)
4227 })
4228 .await
4229 .unwrap();
4230 buffer_4.update(cx, |buffer, cx| {
4231 let text = "two::TWO";
4232 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4233 });
4234
4235 assert_eq!(
4236 search(
4237 &project,
4238 SearchQuery::text(
4239 "TWO",
4240 false,
4241 true,
4242 false,
4243 Default::default(),
4244 Default::default(),
4245 None,
4246 )
4247 .unwrap(),
4248 cx
4249 )
4250 .await
4251 .unwrap(),
4252 HashMap::from_iter([
4253 ("dir/two.rs".to_string(), vec![6..9]),
4254 ("dir/three.rs".to_string(), vec![37..40]),
4255 ("dir/four.rs".to_string(), vec![25..28, 36..39])
4256 ])
4257 );
4258}
4259
4260#[gpui::test]
4261async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4262 init_test(cx);
4263
4264 let search_query = "file";
4265
4266 let fs = FakeFs::new(cx.executor());
4267 fs.insert_tree(
4268 "/dir",
4269 json!({
4270 "one.rs": r#"// Rust file one"#,
4271 "one.ts": r#"// TypeScript file one"#,
4272 "two.rs": r#"// Rust file two"#,
4273 "two.ts": r#"// TypeScript file two"#,
4274 }),
4275 )
4276 .await;
4277 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4278
4279 assert!(
4280 search(
4281 &project,
4282 SearchQuery::text(
4283 search_query,
4284 false,
4285 true,
4286 false,
4287 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4288 Default::default(),
4289 None
4290 )
4291 .unwrap(),
4292 cx
4293 )
4294 .await
4295 .unwrap()
4296 .is_empty(),
4297 "If no inclusions match, no files should be returned"
4298 );
4299
4300 assert_eq!(
4301 search(
4302 &project,
4303 SearchQuery::text(
4304 search_query,
4305 false,
4306 true,
4307 false,
4308 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4309 Default::default(),
4310 None
4311 )
4312 .unwrap(),
4313 cx
4314 )
4315 .await
4316 .unwrap(),
4317 HashMap::from_iter([
4318 ("dir/one.rs".to_string(), vec![8..12]),
4319 ("dir/two.rs".to_string(), vec![8..12]),
4320 ]),
4321 "Rust only search should give only Rust files"
4322 );
4323
4324 assert_eq!(
4325 search(
4326 &project,
4327 SearchQuery::text(
4328 search_query,
4329 false,
4330 true,
4331 false,
4332
4333 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4334
4335 Default::default(),
4336 None,
4337 ).unwrap(),
4338 cx
4339 )
4340 .await
4341 .unwrap(),
4342 HashMap::from_iter([
4343 ("dir/one.ts".to_string(), vec![14..18]),
4344 ("dir/two.ts".to_string(), vec![14..18]),
4345 ]),
4346 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4347 );
4348
4349 assert_eq!(
4350 search(
4351 &project,
4352 SearchQuery::text(
4353 search_query,
4354 false,
4355 true,
4356 false,
4357
4358 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4359
4360 Default::default(),
4361 None,
4362 ).unwrap(),
4363 cx
4364 )
4365 .await
4366 .unwrap(),
4367 HashMap::from_iter([
4368 ("dir/two.ts".to_string(), vec![14..18]),
4369 ("dir/one.rs".to_string(), vec![8..12]),
4370 ("dir/one.ts".to_string(), vec![14..18]),
4371 ("dir/two.rs".to_string(), vec![8..12]),
4372 ]),
4373 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4374 );
4375}
4376
4377#[gpui::test]
4378async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4379 init_test(cx);
4380
4381 let search_query = "file";
4382
4383 let fs = FakeFs::new(cx.executor());
4384 fs.insert_tree(
4385 "/dir",
4386 json!({
4387 "one.rs": r#"// Rust file one"#,
4388 "one.ts": r#"// TypeScript file one"#,
4389 "two.rs": r#"// Rust file two"#,
4390 "two.ts": r#"// TypeScript file two"#,
4391 }),
4392 )
4393 .await;
4394 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4395
4396 assert_eq!(
4397 search(
4398 &project,
4399 SearchQuery::text(
4400 search_query,
4401 false,
4402 true,
4403 false,
4404 Default::default(),
4405 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4406 None,
4407 )
4408 .unwrap(),
4409 cx
4410 )
4411 .await
4412 .unwrap(),
4413 HashMap::from_iter([
4414 ("dir/one.rs".to_string(), vec![8..12]),
4415 ("dir/one.ts".to_string(), vec![14..18]),
4416 ("dir/two.rs".to_string(), vec![8..12]),
4417 ("dir/two.ts".to_string(), vec![14..18]),
4418 ]),
4419 "If no exclusions match, all files should be returned"
4420 );
4421
4422 assert_eq!(
4423 search(
4424 &project,
4425 SearchQuery::text(
4426 search_query,
4427 false,
4428 true,
4429 false,
4430 Default::default(),
4431 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4432 None,
4433 )
4434 .unwrap(),
4435 cx
4436 )
4437 .await
4438 .unwrap(),
4439 HashMap::from_iter([
4440 ("dir/one.ts".to_string(), vec![14..18]),
4441 ("dir/two.ts".to_string(), vec![14..18]),
4442 ]),
4443 "Rust exclusion search should give only TypeScript files"
4444 );
4445
4446 assert_eq!(
4447 search(
4448 &project,
4449 SearchQuery::text(
4450 search_query,
4451 false,
4452 true,
4453 false,
4454 Default::default(),
4455 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4456 None,
4457 ).unwrap(),
4458 cx
4459 )
4460 .await
4461 .unwrap(),
4462 HashMap::from_iter([
4463 ("dir/one.rs".to_string(), vec![8..12]),
4464 ("dir/two.rs".to_string(), vec![8..12]),
4465 ]),
4466 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4467 );
4468
4469 assert!(
4470 search(
4471 &project,
4472 SearchQuery::text(
4473 search_query,
4474 false,
4475 true,
4476 false,
4477 Default::default(),
4478
4479 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4480 None,
4481
4482 ).unwrap(),
4483 cx
4484 )
4485 .await
4486 .unwrap().is_empty(),
4487 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4488 );
4489}
4490
4491#[gpui::test]
4492async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4493 init_test(cx);
4494
4495 let search_query = "file";
4496
4497 let fs = FakeFs::new(cx.executor());
4498 fs.insert_tree(
4499 "/dir",
4500 json!({
4501 "one.rs": r#"// Rust file one"#,
4502 "one.ts": r#"// TypeScript file one"#,
4503 "two.rs": r#"// Rust file two"#,
4504 "two.ts": r#"// TypeScript file two"#,
4505 }),
4506 )
4507 .await;
4508 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4509
4510 assert!(
4511 search(
4512 &project,
4513 SearchQuery::text(
4514 search_query,
4515 false,
4516 true,
4517 false,
4518 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4519 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4520 None,
4521 )
4522 .unwrap(),
4523 cx
4524 )
4525 .await
4526 .unwrap()
4527 .is_empty(),
4528 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4529 );
4530
4531 assert!(
4532 search(
4533 &project,
4534 SearchQuery::text(
4535 search_query,
4536 false,
4537 true,
4538 false,
4539 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4540 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4541 None,
4542 ).unwrap(),
4543 cx
4544 )
4545 .await
4546 .unwrap()
4547 .is_empty(),
4548 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4549 );
4550
4551 assert!(
4552 search(
4553 &project,
4554 SearchQuery::text(
4555 search_query,
4556 false,
4557 true,
4558 false,
4559 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4560 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4561 None,
4562 )
4563 .unwrap(),
4564 cx
4565 )
4566 .await
4567 .unwrap()
4568 .is_empty(),
4569 "Non-matching inclusions and exclusions should not change that."
4570 );
4571
4572 assert_eq!(
4573 search(
4574 &project,
4575 SearchQuery::text(
4576 search_query,
4577 false,
4578 true,
4579 false,
4580 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4581 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4582 None,
4583 )
4584 .unwrap(),
4585 cx
4586 )
4587 .await
4588 .unwrap(),
4589 HashMap::from_iter([
4590 ("dir/one.ts".to_string(), vec![14..18]),
4591 ("dir/two.ts".to_string(), vec![14..18]),
4592 ]),
4593 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4594 );
4595}
4596
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies that `files_to_include` patterns can be scoped to a single
    // worktree by prefixing the pattern with the worktree root name, while
    // unprefixed patterns apply across every worktree in the project.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/worktree-a",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        "/worktree-b",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
        cx,
    )
    .await;

    // Inclusion pattern prefixed with "worktree-a/" limits results to that worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Same, scoped to worktree-b.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // An unprefixed pattern matches paths in every worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("worktree-a/haystack.ts".to_string(), vec![3..9]),
            ("worktree-b/haystack.ts".to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
4691
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Verifies the `include_ignored` search flag: by default gitignored files
    // are skipped, with the flag set they are searched too, and path
    // inclusions/exclusions are still honored for ignored files.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
    // Default search (include_ignored = false) skips everything the
    // .gitignore covers.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project searched with include_ignored = true finds matches in
    // the gitignored directories as well.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusions and exclusions still filter ignored files: include only the
    // prettier directory, then exclude its TypeScript file.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4808
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    // Sanity checks for `Project::create_entry`: creating entries inside the
    // worktree works (including names containing dots), while paths that
    // escape the worktree root or contain `..` components are rejected.
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // Worktree root is /one/two/three; "b.." is a valid (if odd) file name.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only the successful "b.." creation should be visible on disk.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
4878
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // Registers four fake language servers for the same language: two that
    // answer hover requests, one that answers with `None`, and one that does
    // not advertise hover capabilities and therefore must never be queried.
    // The project-level hover call should aggregate the non-empty responses.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // All four adapters target "tsx"; only the last one omits hover support.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all registered servers.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Wire up a hover handler per server, keyed by server name.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two respond with a hover labeled by server name.
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            "ESLintServer" => {
                // Queried, but responds with no hover content.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // No hover capability: receiving a request at all is a bug.
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Every server with hover capabilities must have been queried.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5029
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // Verifies that hover content consisting solely of empty or
    // whitespace-only parts is filtered out entirely.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server responds with three hover parts that are all blank after
    // trimming: an empty string, spaces, and newlines.
    let mut request_handled =
        fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        });

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
5099
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // Verifies that requesting code actions with an explicit kinds filter
    // only surfaces actions of those kinds, even when the server returns
    // more.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions of different kinds; only one kind will
    // be requested below.
    let mut request_handled = fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
        move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        },
    );

    // Request only SOURCE_ORGANIZE_IMPORTS actions over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the organize-imports action should survive the kinds filter.
    let code_actions = code_actions_task.await.unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.kind,
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
5177
5178#[gpui::test]
5179async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5180 init_test(cx);
5181
5182 let fs = FakeFs::new(cx.executor());
5183 fs.insert_tree(
5184 "/dir",
5185 json!({
5186 "a.tsx": "a",
5187 }),
5188 )
5189 .await;
5190
5191 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5192
5193 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5194 language_registry.add(tsx_lang());
5195 let language_server_names = [
5196 "TypeScriptServer",
5197 "TailwindServer",
5198 "ESLintServer",
5199 "NoActionsCapabilitiesServer",
5200 ];
5201
5202 let mut language_server_rxs = [
5203 language_registry.register_fake_lsp(
5204 "tsx",
5205 FakeLspAdapter {
5206 name: language_server_names[0],
5207 capabilities: lsp::ServerCapabilities {
5208 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5209 ..lsp::ServerCapabilities::default()
5210 },
5211 ..FakeLspAdapter::default()
5212 },
5213 ),
5214 language_registry.register_fake_lsp(
5215 "tsx",
5216 FakeLspAdapter {
5217 name: language_server_names[1],
5218 capabilities: lsp::ServerCapabilities {
5219 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5220 ..lsp::ServerCapabilities::default()
5221 },
5222 ..FakeLspAdapter::default()
5223 },
5224 ),
5225 language_registry.register_fake_lsp(
5226 "tsx",
5227 FakeLspAdapter {
5228 name: language_server_names[2],
5229 capabilities: lsp::ServerCapabilities {
5230 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5231 ..lsp::ServerCapabilities::default()
5232 },
5233 ..FakeLspAdapter::default()
5234 },
5235 ),
5236 language_registry.register_fake_lsp(
5237 "tsx",
5238 FakeLspAdapter {
5239 name: language_server_names[3],
5240 capabilities: lsp::ServerCapabilities {
5241 code_action_provider: None,
5242 ..lsp::ServerCapabilities::default()
5243 },
5244 ..FakeLspAdapter::default()
5245 },
5246 ),
5247 ];
5248
5249 let buffer = project
5250 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
5251 .await
5252 .unwrap();
5253 cx.executor().run_until_parked();
5254
5255 let mut servers_with_actions_requests = HashMap::default();
5256 for i in 0..language_server_names.len() {
5257 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5258 panic!(
5259 "Failed to get language server #{i} with name {}",
5260 &language_server_names[i]
5261 )
5262 });
5263 let new_server_name = new_server.server.name();
5264
5265 assert!(
5266 !servers_with_actions_requests.contains_key(&new_server_name),
5267 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5268 );
5269 match new_server_name.0.as_ref() {
5270 "TailwindServer" | "TypeScriptServer" => {
5271 servers_with_actions_requests.insert(
5272 new_server_name.clone(),
5273 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5274 move |_, _| {
5275 let name = new_server_name.clone();
5276 async move {
5277 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5278 lsp::CodeAction {
5279 title: format!("{name} code action"),
5280 ..lsp::CodeAction::default()
5281 },
5282 )]))
5283 }
5284 },
5285 ),
5286 );
5287 }
5288 "ESLintServer" => {
5289 servers_with_actions_requests.insert(
5290 new_server_name,
5291 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5292 |_, _| async move { Ok(None) },
5293 ),
5294 );
5295 }
5296 "NoActionsCapabilitiesServer" => {
5297 let _never_handled = new_server
5298 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5299 panic!(
5300 "Should not call for code actions server with no corresponding capabilities"
5301 )
5302 });
5303 }
5304 unexpected => panic!("Unexpected server name: {unexpected}"),
5305 }
5306 }
5307
5308 let code_actions_task = project.update(cx, |project, cx| {
5309 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5310 });
5311
5312 // cx.run_until_parked();
5313 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5314 |mut code_actions_request| async move {
5315 code_actions_request
5316 .next()
5317 .await
5318 .expect("All code actions requests should have been triggered")
5319 },
5320 ))
5321 .await;
5322 assert_eq!(
5323 vec!["TailwindServer code action", "TypeScriptServer code action"],
5324 code_actions_task
5325 .await
5326 .unwrap()
5327 .into_iter()
5328 .map(|code_action| code_action.lsp_action.title)
5329 .sorted()
5330 .collect::<Vec<_>>(),
5331 "Should receive code actions responses from all related servers with hover capabilities"
5332 );
5333}
5334
5335#[gpui::test]
5336async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5337 init_test(cx);
5338
5339 let fs = FakeFs::new(cx.executor());
5340 fs.insert_tree(
5341 "/dir",
5342 json!({
5343 "a.rs": "let a = 1;",
5344 "b.rs": "let b = 2;",
5345 "c.rs": "let c = 2;",
5346 }),
5347 )
5348 .await;
5349
5350 let project = Project::test(
5351 fs,
5352 [
5353 "/dir/a.rs".as_ref(),
5354 "/dir/b.rs".as_ref(),
5355 "/dir/c.rs".as_ref(),
5356 ],
5357 cx,
5358 )
5359 .await;
5360
5361 // check the initial state and get the worktrees
5362 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5363 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5364 assert_eq!(worktrees.len(), 3);
5365
5366 let worktree_a = worktrees[0].read(cx);
5367 let worktree_b = worktrees[1].read(cx);
5368 let worktree_c = worktrees[2].read(cx);
5369
5370 // check they start in the right order
5371 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5372 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5373 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5374
5375 (
5376 worktrees[0].clone(),
5377 worktrees[1].clone(),
5378 worktrees[2].clone(),
5379 )
5380 });
5381
5382 // move first worktree to after the second
5383 // [a, b, c] -> [b, a, c]
5384 project
5385 .update(cx, |project, cx| {
5386 let first = worktree_a.read(cx);
5387 let second = worktree_b.read(cx);
5388 project.move_worktree(first.id(), second.id(), cx)
5389 })
5390 .expect("moving first after second");
5391
5392 // check the state after moving
5393 project.update(cx, |project, cx| {
5394 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5395 assert_eq!(worktrees.len(), 3);
5396
5397 let first = worktrees[0].read(cx);
5398 let second = worktrees[1].read(cx);
5399 let third = worktrees[2].read(cx);
5400
5401 // check they are now in the right order
5402 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5403 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5404 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5405 });
5406
5407 // move the second worktree to before the first
5408 // [b, a, c] -> [a, b, c]
5409 project
5410 .update(cx, |project, cx| {
5411 let second = worktree_a.read(cx);
5412 let first = worktree_b.read(cx);
5413 project.move_worktree(first.id(), second.id(), cx)
5414 })
5415 .expect("moving second before first");
5416
5417 // check the state after moving
5418 project.update(cx, |project, cx| {
5419 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5420 assert_eq!(worktrees.len(), 3);
5421
5422 let first = worktrees[0].read(cx);
5423 let second = worktrees[1].read(cx);
5424 let third = worktrees[2].read(cx);
5425
5426 // check they are now in the right order
5427 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5428 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5429 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5430 });
5431
5432 // move the second worktree to after the third
5433 // [a, b, c] -> [a, c, b]
5434 project
5435 .update(cx, |project, cx| {
5436 let second = worktree_b.read(cx);
5437 let third = worktree_c.read(cx);
5438 project.move_worktree(second.id(), third.id(), cx)
5439 })
5440 .expect("moving second after third");
5441
5442 // check the state after moving
5443 project.update(cx, |project, cx| {
5444 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5445 assert_eq!(worktrees.len(), 3);
5446
5447 let first = worktrees[0].read(cx);
5448 let second = worktrees[1].read(cx);
5449 let third = worktrees[2].read(cx);
5450
5451 // check they are now in the right order
5452 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5453 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5454 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5455 });
5456
5457 // move the third worktree to before the second
5458 // [a, c, b] -> [a, b, c]
5459 project
5460 .update(cx, |project, cx| {
5461 let third = worktree_c.read(cx);
5462 let second = worktree_b.read(cx);
5463 project.move_worktree(third.id(), second.id(), cx)
5464 })
5465 .expect("moving third before second");
5466
5467 // check the state after moving
5468 project.update(cx, |project, cx| {
5469 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5470 assert_eq!(worktrees.len(), 3);
5471
5472 let first = worktrees[0].read(cx);
5473 let second = worktrees[1].read(cx);
5474 let third = worktrees[2].read(cx);
5475
5476 // check they are now in the right order
5477 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5478 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5479 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5480 });
5481
5482 // move the first worktree to after the third
5483 // [a, b, c] -> [b, c, a]
5484 project
5485 .update(cx, |project, cx| {
5486 let first = worktree_a.read(cx);
5487 let third = worktree_c.read(cx);
5488 project.move_worktree(first.id(), third.id(), cx)
5489 })
5490 .expect("moving first after third");
5491
5492 // check the state after moving
5493 project.update(cx, |project, cx| {
5494 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5495 assert_eq!(worktrees.len(), 3);
5496
5497 let first = worktrees[0].read(cx);
5498 let second = worktrees[1].read(cx);
5499 let third = worktrees[2].read(cx);
5500
5501 // check they are now in the right order
5502 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5503 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5504 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5505 });
5506
5507 // move the third worktree to before the first
5508 // [b, c, a] -> [a, b, c]
5509 project
5510 .update(cx, |project, cx| {
5511 let third = worktree_a.read(cx);
5512 let first = worktree_b.read(cx);
5513 project.move_worktree(third.id(), first.id(), cx)
5514 })
5515 .expect("moving third before first");
5516
5517 // check the state after moving
5518 project.update(cx, |project, cx| {
5519 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5520 assert_eq!(worktrees.len(), 3);
5521
5522 let first = worktrees[0].read(cx);
5523 let second = worktrees[1].read(cx);
5524 let third = worktrees[2].read(cx);
5525
5526 // check they are now in the right order
5527 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5528 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5529 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5530 });
5531}
5532
#[gpui::test]
async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that unstaged-change hunks for a buffer are computed against
    // the git index, and that they update when the index contents change.
    init_test(cx);

    // Index holds the original program; the working copy adds a comment and
    // changes the printed string.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[(Path::new("src/main.rs"), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_changes = project
        .update(cx, |project, cx| {
            project.open_unstaged_changes(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Expect one insertion hunk (the comment) and one modification hunk (the
    // println line).
    cx.run_until_parked();
    unstaged_changes.update(cx, |unstaged_changes, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_changes.base_text.as_ref().unwrap().read(cx).text(),
            &[
                (0..1, "", "// print goodbye\n"),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                ),
            ],
        );
    });

    // Re-stage a version that already contains the comment but not the
    // println line; only the println insertion should remain unstaged.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[(Path::new("src/main.rs"), staged_contents)],
    );

    cx.run_until_parked();
    unstaged_changes.update(cx, |unstaged_changes, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_changes.base_text.as_ref().unwrap().read(cx).text(),
            &[(2..3, "", " println!(\"goodbye world\");\n")],
        );
    });
}
5624
5625async fn search(
5626 project: &Model<Project>,
5627 query: SearchQuery,
5628 cx: &mut gpui::TestAppContext,
5629) -> Result<HashMap<String, Vec<Range<usize>>>> {
5630 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
5631 let mut results = HashMap::default();
5632 while let Some(search_result) = search_rx.next().await {
5633 match search_result {
5634 SearchResult::Buffer { buffer, ranges } => {
5635 results.entry(buffer).or_insert(ranges);
5636 }
5637 SearchResult::LimitReached => {}
5638 }
5639 }
5640 Ok(results
5641 .into_iter()
5642 .map(|(buffer, ranges)| {
5643 buffer.update(cx, |buffer, cx| {
5644 let path = buffer
5645 .file()
5646 .unwrap()
5647 .full_path(cx)
5648 .to_string_lossy()
5649 .to_string();
5650 let ranges = ranges
5651 .into_iter()
5652 .map(|range| range.to_offset(buffer))
5653 .collect::<Vec<_>>();
5654 (path, ranges)
5655 })
5656 })
5657 .collect())
5658}
5659
5660pub fn init_test(cx: &mut gpui::TestAppContext) {
5661 if std::env::var("RUST_LOG").is_ok() {
5662 env_logger::try_init().ok();
5663 }
5664
5665 cx.update(|cx| {
5666 let settings_store = SettingsStore::test(cx);
5667 cx.set_global(settings_store);
5668 release_channel::init(SemanticVersion::default(), cx);
5669 language::init(cx);
5670 Project::init_settings(cx);
5671 });
5672}
5673
5674fn json_lang() -> Arc<Language> {
5675 Arc::new(Language::new(
5676 LanguageConfig {
5677 name: "JSON".into(),
5678 matcher: LanguageMatcher {
5679 path_suffixes: vec!["json".to_string()],
5680 ..Default::default()
5681 },
5682 ..Default::default()
5683 },
5684 None,
5685 ))
5686}
5687
5688fn js_lang() -> Arc<Language> {
5689 Arc::new(Language::new(
5690 LanguageConfig {
5691 name: "JavaScript".into(),
5692 matcher: LanguageMatcher {
5693 path_suffixes: vec!["js".to_string()],
5694 ..Default::default()
5695 },
5696 ..Default::default()
5697 },
5698 None,
5699 ))
5700}
5701
5702fn rust_lang() -> Arc<Language> {
5703 Arc::new(Language::new(
5704 LanguageConfig {
5705 name: "Rust".into(),
5706 matcher: LanguageMatcher {
5707 path_suffixes: vec!["rs".to_string()],
5708 ..Default::default()
5709 },
5710 ..Default::default()
5711 },
5712 Some(tree_sitter_rust::LANGUAGE.into()),
5713 ))
5714}
5715
5716fn typescript_lang() -> Arc<Language> {
5717 Arc::new(Language::new(
5718 LanguageConfig {
5719 name: "TypeScript".into(),
5720 matcher: LanguageMatcher {
5721 path_suffixes: vec!["ts".to_string()],
5722 ..Default::default()
5723 },
5724 ..Default::default()
5725 },
5726 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
5727 ))
5728}
5729
5730fn tsx_lang() -> Arc<Language> {
5731 Arc::new(Language::new(
5732 LanguageConfig {
5733 name: "tsx".into(),
5734 matcher: LanguageMatcher {
5735 path_suffixes: vec!["tsx".to_string()],
5736 ..Default::default()
5737 },
5738 ..Default::default()
5739 },
5740 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
5741 ))
5742}
5743
5744fn get_all_tasks(
5745 project: &Model<Project>,
5746 worktree_id: Option<WorktreeId>,
5747 task_context: &TaskContext,
5748 cx: &mut AppContext,
5749) -> Vec<(TaskSourceKind, ResolvedTask)> {
5750 let (mut old, new) = project.update(cx, |project, cx| {
5751 project
5752 .task_store
5753 .read(cx)
5754 .task_inventory()
5755 .unwrap()
5756 .read(cx)
5757 .used_and_current_resolved_tasks(worktree_id, None, task_context, cx)
5758 });
5759 old.extend(new);
5760 old
5761}