1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::{AppContext, SemanticVersion, UpdateGlobal};
5use http_client::Url;
6use language::{
7 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
8 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticSet, DiskState, FakeLspAdapter,
9 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
10};
11use lsp::{DiagnosticSeverity, NumberOrString};
12use parking_lot::Mutex;
13use pretty_assertions::{assert_eq, assert_matches};
14use serde_json::json;
15#[cfg(not(windows))]
16use std::os;
17
18use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
19use task::{ResolvedTask, TaskContext};
20use unindent::Unindent as _;
21use util::{assert_set_eq, paths::PathMatcher, test::temp_tree, TryFutureExt as _};
22
23#[gpui::test]
24async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
25 cx.executor().allow_parking();
26
27 let (tx, mut rx) = futures::channel::mpsc::unbounded();
28 let _thread = std::thread::spawn(move || {
29 std::fs::metadata("/tmp").unwrap();
30 std::thread::sleep(Duration::from_millis(1000));
31 tx.unbounded_send(1).unwrap();
32 });
33 rx.next().await.unwrap();
34}
35
36#[gpui::test]
37async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
38 cx.executor().allow_parking();
39
40 let io_task = smol::unblock(move || {
41 println!("sleeping on thread {:?}", std::thread::current().id());
42 std::thread::sleep(Duration::from_millis(10));
43 1
44 });
45
46 let task = cx.foreground_executor().spawn(async move {
47 io_task.await;
48 });
49
50 task.await;
51}
52
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    // Verifies that a worktree rooted at a symlink works, and that a
    // directory symlink inside the tree is followed when scanning.
    // Uses the real filesystem (not FakeFs), hence allow_parking.
    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // "root_link" -> "root" (worktree root via symlink);
    // "root/finnochio" -> "root/fennel" (directory symlink inside the tree).
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // apple + banana/carrot/{date,endive} + fennel/grape = 5 files.
        assert_eq!(tree.file_count(), 5);
        // The symlinked path must resolve to the same inode as the target.
        assert_eq!(
            tree.inode_for_path("fennel/grape"),
            tree.inode_for_path("finnochio/grape")
        );
    });
}
93
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // Verifies .editorconfig handling: its settings override
    // .zed/settings.json for matching globs, nested .editorconfig files
    // override parent ones, and non-matching files keep Zed settings.
    init_test(cx);

    let dir = temp_tree(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n    A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n    B\n}",
        },
        "c.js": "def c\n  C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the on-disk tree into a FakeFs so the project sees it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for a path in the tree.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by .editorconfig's glob "*.rs",
        // so it keeps the tab_size from .zed/settings.json.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
183
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Verifies per-directory .zed settings (tab_size) and the layering of
    // task sources: worktree-local tasks from nested .zed/tasks.json files
    // plus global file-based tasks, including re-ordering after a task is
    // scheduled (most recently used first).
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n    A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n    B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    // Let settings and tasks from the tree be discovered before querying.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // The task source corresponding to the root-level .zed/tasks.json.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            // a/ inherits the root settings; b/ has its own .zed override.
            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files are visible; deeper directory sorts first.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Schedule the root task, and also register a global file-based task.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The scheduled task is now first; the new global task sorts last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
377
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end lifecycle test for language servers: startup on first
    // matching buffer, capability-based buffer configuration, edit/save
    // notifications routed per language, buffer hand-off between servers
    // when a rename changes the file extension, server restarts, and
    // didClose on buffer drop.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server advertising "." and "::" completion triggers.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server advertising ":" as its completion trigger.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Attach a diagnostic so we can verify it is cleared when the buffer's
    // language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers must receive a shutdown request before the new
    // instances come up.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
775
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // Verifies workspace/didChangeWatchedFiles support: ignored directories
    // are scanned lazily only once a server registers a watcher covering
    // them, and subsequent FS mutations are reported only when they match
    // one of the registered glob patterns.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Record every didChangeWatchedFiles notification, sorted by URI so the
    // assertions below are deterministic.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
969
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics published for two single-file worktrees are
    // routed to the correct buffer and rendered with the right severity.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Each file is opened as its own single-file worktree.
    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Publish one ERROR for a.rs and one WARNING for b.rs from the same
    // (fake) language server.
    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer shows only its own diagnostic, covering the variable name.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1061
1062#[gpui::test]
1063async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1064 init_test(cx);
1065
1066 let fs = FakeFs::new(cx.executor());
1067 fs.insert_tree(
1068 "/root",
1069 json!({
1070 "dir": {
1071 ".git": {
1072 "HEAD": "ref: refs/heads/main",
1073 },
1074 ".gitignore": "b.rs",
1075 "a.rs": "let a = 1;",
1076 "b.rs": "let b = 2;",
1077 },
1078 "other.rs": "let b = c;"
1079 }),
1080 )
1081 .await;
1082
1083 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1084 let (worktree, _) = project
1085 .update(cx, |project, cx| {
1086 project.find_or_create_worktree("/root/dir", true, cx)
1087 })
1088 .await
1089 .unwrap();
1090 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1091
1092 let (worktree, _) = project
1093 .update(cx, |project, cx| {
1094 project.find_or_create_worktree("/root/other.rs", false, cx)
1095 })
1096 .await
1097 .unwrap();
1098 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1099
1100 let server_id = LanguageServerId(0);
1101 project.update(cx, |project, cx| {
1102 project
1103 .update_diagnostics(
1104 server_id,
1105 lsp::PublishDiagnosticsParams {
1106 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1107 version: None,
1108 diagnostics: vec![lsp::Diagnostic {
1109 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1110 severity: Some(lsp::DiagnosticSeverity::ERROR),
1111 message: "unused variable 'b'".to_string(),
1112 ..Default::default()
1113 }],
1114 },
1115 &[],
1116 cx,
1117 )
1118 .unwrap();
1119 project
1120 .update_diagnostics(
1121 server_id,
1122 lsp::PublishDiagnosticsParams {
1123 uri: Url::from_file_path("/root/other.rs").unwrap(),
1124 version: None,
1125 diagnostics: vec![lsp::Diagnostic {
1126 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1127 severity: Some(lsp::DiagnosticSeverity::ERROR),
1128 message: "unknown variable 'c'".to_string(),
1129 ..Default::default()
1130 }],
1131 },
1132 &[],
1133 cx,
1134 )
1135 .unwrap();
1136 });
1137
1138 let main_ignored_buffer = project
1139 .update(cx, |project, cx| {
1140 project.open_buffer((main_worktree_id, "b.rs"), cx)
1141 })
1142 .await
1143 .unwrap();
1144 main_ignored_buffer.update(cx, |buffer, _| {
1145 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1146 assert_eq!(
1147 chunks
1148 .iter()
1149 .map(|(s, d)| (s.as_str(), *d))
1150 .collect::<Vec<_>>(),
1151 &[
1152 ("let ", None),
1153 ("b", Some(DiagnosticSeverity::ERROR)),
1154 (" = 2;", None),
1155 ],
1156 "Gigitnored buffers should still get in-buffer diagnostics",
1157 );
1158 });
1159 let other_buffer = project
1160 .update(cx, |project, cx| {
1161 project.open_buffer((other_worktree_id, ""), cx)
1162 })
1163 .await
1164 .unwrap();
1165 other_buffer.update(cx, |buffer, _| {
1166 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1167 assert_eq!(
1168 chunks
1169 .iter()
1170 .map(|(s, d)| (s.as_str(), *d))
1171 .collect::<Vec<_>>(),
1172 &[
1173 ("let b = ", None),
1174 ("c", Some(DiagnosticSeverity::ERROR)),
1175 (";", None),
1176 ],
1177 "Buffers from hidden projects should still get in-buffer diagnostics"
1178 );
1179 });
1180
1181 project.update(cx, |project, cx| {
1182 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1183 assert_eq!(
1184 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1185 vec![(
1186 ProjectPath {
1187 worktree_id: main_worktree_id,
1188 path: Arc::from(Path::new("b.rs")),
1189 },
1190 server_id,
1191 DiagnosticSummary {
1192 error_count: 1,
1193 warning_count: 0,
1194 }
1195 )]
1196 );
1197 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1198 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1199 });
1200}
1201
// Verifies the lifecycle of disk-based diagnostics progress reporting:
// starting/ending a progress task with the registered token emits
// DiskBasedDiagnosticsStarted/Finished events, published diagnostics are
// applied to open buffers, and re-publishing an unchanged (empty) set of
// diagnostics does not emit a duplicate DiagnosticsUpdated event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // Register a fake server whose progress token identifies disk-based
    // diagnostics work (e.g. a `cargo check`-style task).
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Subscribe to project events before the server reports any progress, so
    // no event is missed.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress with the registered token marks disk-based
    // diagnostics as in-flight.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics for a file emits DiagnosticsUpdated for that path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the progress task marks disk-based diagnostics as finished.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The published diagnostic is present in the buffer's snapshot at the
    // reported range.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second identical (empty) publish must not produce another event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1334
// Verifies that restarting a language server while its disk-based
// diagnostics task is still in progress does not leave the project stuck in
// an "updating" state: the replacement server (id 1) drives its own
// Started/Finished events, and the old server's never-completed progress
// token is discarded along with it.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Opening the buffer starts the first server (id 0).
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server (id 1) should be tracked as running diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1419
// Verifies that diagnostics a language server has published are cleared from
// both the buffer and the project-level summary when that server is
// restarted.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Let the notification propagate, then confirm it reached both the buffer
    // and the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1498
// Verifies that a diagnostics report carrying an unknown/stale buffer
// version does not corrupt document-sync state: after restarting the server,
// the replacement server receives the buffer freshly opened at version 0.
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });
    // The new server re-opens the document starting from version 0, not the
    // bogus version reported above.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
1536
// Verifies that cancelling language-server work only targets progress tasks
// that were marked cancellable, and that cancellation is delivered as a
// WorkDoneProgressCancel notification carrying the matching token.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First task is NOT cancellable — it must be skipped by the cancel request.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // Second task IS cancellable — this is the one we expect to be cancelled.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable task's token is sent in the cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1599
// Verifies that toggling the per-language `enable_language_server` setting
// stops and restarts only the affected language's server, leaving servers
// for other languages running untouched.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // One fake server per language so we can observe them independently.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer of each language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A second Rust server instance starts and re-opens the Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    // Meanwhile the JavaScript server is shut down.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1713
// Verifies that LSP diagnostics are translated through buffer edits:
// diagnostics reported against an older document version are mapped forward
// to their current positions, overlapping diagnostics are chunked/highlighted
// correctly, and diagnostic group ids keep incrementing across successive
// publishes.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (Rows 0..2 in the report map to rows 2..4 after the "\n\n" prepend.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Group ids continue from the previous publish (3 and 4); the wider
        // warning sorts before the nested error.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Within the overlap, the highest-severity diagnostic wins the chunk.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1993
1994#[gpui::test]
1995async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1996 init_test(cx);
1997
1998 let text = concat!(
1999 "let one = ;\n", //
2000 "let two = \n",
2001 "let three = 3;\n",
2002 );
2003
2004 let fs = FakeFs::new(cx.executor());
2005 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2006
2007 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2008 let buffer = project
2009 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2010 .await
2011 .unwrap();
2012
2013 project.update(cx, |project, cx| {
2014 project.lsp_store.update(cx, |lsp_store, cx| {
2015 lsp_store
2016 .update_buffer_diagnostics(
2017 &buffer,
2018 LanguageServerId(0),
2019 None,
2020 vec![
2021 DiagnosticEntry {
2022 range: Unclipped(PointUtf16::new(0, 10))
2023 ..Unclipped(PointUtf16::new(0, 10)),
2024 diagnostic: Diagnostic {
2025 severity: DiagnosticSeverity::ERROR,
2026 message: "syntax error 1".to_string(),
2027 ..Default::default()
2028 },
2029 },
2030 DiagnosticEntry {
2031 range: Unclipped(PointUtf16::new(1, 10))
2032 ..Unclipped(PointUtf16::new(1, 10)),
2033 diagnostic: Diagnostic {
2034 severity: DiagnosticSeverity::ERROR,
2035 message: "syntax error 2".to_string(),
2036 ..Default::default()
2037 },
2038 },
2039 ],
2040 cx,
2041 )
2042 .unwrap();
2043 })
2044 });
2045
2046 // An empty range is extended forward to include the following character.
2047 // At the end of a line, an empty range is extended backward to include
2048 // the preceding character.
2049 buffer.update(cx, |buffer, _| {
2050 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2051 assert_eq!(
2052 chunks
2053 .iter()
2054 .map(|(s, d)| (s.as_str(), *d))
2055 .collect::<Vec<_>>(),
2056 &[
2057 ("let one = ", None),
2058 (";", Some(DiagnosticSeverity::ERROR)),
2059 ("\nlet two =", None),
2060 (" ", Some(DiagnosticSeverity::ERROR)),
2061 ("\nlet three = 3;\n", None)
2062 ]
2063 );
2064 });
2065}
2066
2067#[gpui::test]
2068async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2069 init_test(cx);
2070
2071 let fs = FakeFs::new(cx.executor());
2072 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2073 .await;
2074
2075 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2076
2077 project.update(cx, |project, cx| {
2078 project
2079 .update_diagnostic_entries(
2080 LanguageServerId(0),
2081 Path::new("/dir/a.rs").to_owned(),
2082 None,
2083 vec![DiagnosticEntry {
2084 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2085 diagnostic: Diagnostic {
2086 severity: DiagnosticSeverity::ERROR,
2087 is_primary: true,
2088 message: "syntax error a1".to_string(),
2089 ..Default::default()
2090 },
2091 }],
2092 cx,
2093 )
2094 .unwrap();
2095 project
2096 .update_diagnostic_entries(
2097 LanguageServerId(1),
2098 Path::new("/dir/a.rs").to_owned(),
2099 None,
2100 vec![DiagnosticEntry {
2101 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2102 diagnostic: Diagnostic {
2103 severity: DiagnosticSeverity::ERROR,
2104 is_primary: true,
2105 message: "syntax error b1".to_string(),
2106 ..Default::default()
2107 },
2108 }],
2109 cx,
2110 )
2111 .unwrap();
2112
2113 assert_eq!(
2114 project.diagnostic_summary(false, cx),
2115 DiagnosticSummary {
2116 error_count: 2,
2117 warning_count: 0,
2118 }
2119 );
2120 });
2121}
2122
// Verifies that edits computed by a language server against an OLDER
// document version (the original `DidOpen` version) are transformed through
// the user's subsequent local edits before being applied, and that multiple
// LSP edits at the same position compose in order.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the document version the server will (pretend to) compute
    // edits against.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Ask the LSP store to map the stale edits (expressed in coordinates of
    // `lsp_document_version`) into the buffer's current coordinate space.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the transformed edits must preserve the user's interleaved
    // comments while landing the server's changes in the right places.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2275
// Verifies that a large, mostly-redundant diff sent by a language server
// (delete-and-reinsert of unchanged text) is minimized by `edits_from_lsp`
// into just the spans that actually change.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four LSP edits above should collapse into just two real edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2384
// Verifies that `edits_from_lsp` tolerates malformed input from a language
// server: out-of-order edits, inverted ranges (end before start), and ranges
// pointing past the end of the document must still be normalized into a valid,
// minimal edit list.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end position precedes start.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range extends far beyond the end of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same minimal pair of
        // edits as in the well-formed case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2489
2490fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2491 buffer: &Buffer,
2492 range: Range<T>,
2493) -> Vec<(String, Option<DiagnosticSeverity>)> {
2494 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2495 for chunk in buffer.snapshot().chunks(range, true) {
2496 if chunks.last().map_or(false, |prev_chunk| {
2497 prev_chunk.1 == chunk.diagnostic_severity
2498 }) {
2499 chunks.last_mut().unwrap().0.push_str(chunk.text);
2500 } else {
2501 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2502 }
2503 }
2504 chunks
2505}
2506
// Verifies go-to-definition behavior across files: the definition request is
// served by the existing language server (no new server is started), the
// target buffer is opened in an invisible worktree, and that worktree is
// released once the definition (and its buffer handle) is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside the visible worktree.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was added as an *invisible* worktree to host the target buffer.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // Dropping the definition releases the temporary worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: lists each worktree's absolute path with its visibility flag.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2600
// Verifies that completion items lacking an explicit text edit range get a
// sensible replacement range inferred from the text around the cursor, for
// both identifier-like and string-path-like completion contexts.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing an identifier — `insert_text` is used and the old
    // range covers the partial word ("fqn") before the cursor.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal — the old range covers the
    // partial path segment ("cmp") before the closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2692
// Verifies that carriage returns in a completion item's `insert_text` are
// normalized to plain newlines before the completion is applied.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server responds with insert_text containing both a bare `\r` and a
    // `\r\n` sequence; both must be converted to `\n`.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2753
// Verifies the command-based code-action path: when resolving a code action
// yields a command instead of edits, the command is executed on the server,
// the server's resulting `workspace/applyEdit` request is applied to the
// buffer, and the whole exchange is captured in a single undoable transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The entire command-driven edit is a single undo step.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2888
// Verifies that saving a buffer writes its full in-memory contents to disk.
// The edit is deliberately large (~160 KB) to exercise multi-chunk writes.
#[gpui::test(iterations = 10)]
async fn test_save_file(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "the old contents",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "the old contents");
        buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
    });

    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();

    // On-disk contents must exactly match the buffer after save.
    let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
    assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
}
2920
2921#[gpui::test(iterations = 30)]
2922async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2923 init_test(cx);
2924
2925 let fs = FakeFs::new(cx.executor().clone());
2926 fs.insert_tree(
2927 "/dir",
2928 json!({
2929 "file1": "the original contents",
2930 }),
2931 )
2932 .await;
2933
2934 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2935 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2936 let buffer = project
2937 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2938 .await
2939 .unwrap();
2940
2941 // Simulate buffer diffs being slow, so that they don't complete before
2942 // the next file change occurs.
2943 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2944
2945 // Change the buffer's file on disk, and then wait for the file change
2946 // to be detected by the worktree, so that the buffer starts reloading.
2947 fs.save(
2948 "/dir/file1".as_ref(),
2949 &"the first contents".into(),
2950 Default::default(),
2951 )
2952 .await
2953 .unwrap();
2954 worktree.next_event(cx).await;
2955
2956 // Change the buffer's file again. Depending on the random seed, the
2957 // previous file change may still be in progress.
2958 fs.save(
2959 "/dir/file1".as_ref(),
2960 &"the second contents".into(),
2961 Default::default(),
2962 )
2963 .await
2964 .unwrap();
2965 worktree.next_event(cx).await;
2966
2967 cx.executor().run_until_parked();
2968 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2969 buffer.read_with(cx, |buffer, _| {
2970 assert_eq!(buffer.text(), on_disk_text);
2971 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2972 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2973 });
2974}
2975
2976#[gpui::test(iterations = 30)]
2977async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2978 init_test(cx);
2979
2980 let fs = FakeFs::new(cx.executor().clone());
2981 fs.insert_tree(
2982 "/dir",
2983 json!({
2984 "file1": "the original contents",
2985 }),
2986 )
2987 .await;
2988
2989 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2990 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2991 let buffer = project
2992 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2993 .await
2994 .unwrap();
2995
2996 // Simulate buffer diffs being slow, so that they don't complete before
2997 // the next file change occurs.
2998 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2999
3000 // Change the buffer's file on disk, and then wait for the file change
3001 // to be detected by the worktree, so that the buffer starts reloading.
3002 fs.save(
3003 "/dir/file1".as_ref(),
3004 &"the first contents".into(),
3005 Default::default(),
3006 )
3007 .await
3008 .unwrap();
3009 worktree.next_event(cx).await;
3010
3011 cx.executor()
3012 .spawn(cx.executor().simulate_random_delay())
3013 .await;
3014
3015 // Perform a noop edit, causing the buffer's version to increase.
3016 buffer.update(cx, |buffer, cx| {
3017 buffer.edit([(0..0, " ")], None, cx);
3018 buffer.undo(cx);
3019 });
3020
3021 cx.executor().run_until_parked();
3022 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3023 buffer.read_with(cx, |buffer, _| {
3024 let buffer_text = buffer.text();
3025 if buffer_text == on_disk_text {
3026 assert!(
3027 !buffer.is_dirty() && !buffer.has_conflict(),
3028 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3029 );
3030 }
3031 // If the file change occurred while the buffer was processing the first
3032 // change, the buffer will be in a conflicting state.
3033 else {
3034 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3035 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3036 }
3037 });
3038}
3039
// Verifies saving works when the worktree root *is* the file itself
// (single-file worktree) rather than a containing directory.
#[gpui::test]
async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "the old contents",
        }),
    )
    .await;

    // Note: the project root is the file, not "/dir".
    let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
    });

    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();

    // On-disk contents must exactly match the buffer after save.
    let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
    assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
}
3070
// Verifies "save as" for an untitled buffer: the file is written to disk, the
// buffer becomes clean, its language is re-detected from the new extension,
// and re-opening the same path returns the very same buffer entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An in-memory buffer with no file starts as Plain Text.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // The `.rs` extension now selects the Rust language.
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the saved path must dedupe to the same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3122
// Exercises worktree rescanning against a real filesystem: after renames and
// deletions, entry IDs are preserved across renames, open buffers track their
// files' new paths (or report Deleted), and a remote replica fed the observed
// update stream converges to the same set of paths as the local worktree.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Helper: opens a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: returns the worktree entry ID for a path, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree broadcasts, to replay on the
    // remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry IDs survive renames (including renames of an ancestor directory).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // The deleted file's buffer keeps its last known path...
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        // ...and disk state reflects presence/deletion accordingly.
        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3282
// Verifies that renaming a directory through the project preserves worktree
// entry IDs for the directory and its children, and that an open buffer under
// the renamed directory stays clean (the rename is not treated as an edit).
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: returns the worktree entry ID for a path, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory "a" to "b" via the project API.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry IDs are stable across the rename, and the buffer remains clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3334
// Verifies that concurrently (and repeatedly) opening the same path yields a
// single shared buffer entity, including when a previous handle has been
// dropped while the buffer is still open elsewhere.
#[gpui::test]
async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.txt": "a-contents",
            "b.txt": "b-contents",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Spawn multiple tasks to open paths, repeating some paths.
    let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
        (
            p.open_local_buffer("/dir/a.txt", cx),
            p.open_local_buffer("/dir/b.txt", cx),
            p.open_local_buffer("/dir/a.txt", cx),
        )
    });

    let buffer_a_1 = buffer_a_1.await.unwrap();
    let buffer_a_2 = buffer_a_2.await.unwrap();
    let buffer_b = buffer_b.await.unwrap();
    assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
    assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");

    // There is only one buffer per path.
    let buffer_a_id = buffer_a_1.entity_id();
    assert_eq!(buffer_a_2.entity_id(), buffer_a_id);

    // Open the same path again while it is still open.
    drop(buffer_a_1);
    let buffer_a_3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
        .await
        .unwrap();

    // There's still only one buffer per path.
    assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
}
3380
#[gpui::test]
// Exercises dirty-state tracking and the buffer event stream (`Edited`,
// `DirtyChanged`, `Saved`, `FileHandleChanged`) across edits, a simulated
// save, reverting to saved content, and on-disk file deletion.
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    // Accumulates every buffer event except `Operation`, which is filtered out
    // by the subscription below.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version with the
        // file's current on-disk mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only the first of the two consecutive edits produced a
        // `DirtyChanged`, since the buffer was already dirty for the second.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer first, then clear the log so only the deletion's
    // events are asserted below.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3531
#[gpui::test]
// Verifies how a buffer reacts to its file changing on disk: a clean buffer
// reloads (preserving anchor positions through the diff-based edit), while a
// dirty buffer keeps its contents and is flagged as conflicted instead.
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the first three rows, so we can
    // check they track their text through the disk reload.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the lines they were attached to ("aaa" is
        // now row 1; "bbbbb" is now row 3; the deleted "c" line's anchor
        // landed at the nearest surviving position).
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3612
3613#[gpui::test]
3614async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3615 init_test(cx);
3616
3617 let fs = FakeFs::new(cx.executor());
3618 fs.insert_tree(
3619 "/dir",
3620 json!({
3621 "file1": "a\nb\nc\n",
3622 "file2": "one\r\ntwo\r\nthree\r\n",
3623 }),
3624 )
3625 .await;
3626
3627 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3628 let buffer1 = project
3629 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3630 .await
3631 .unwrap();
3632 let buffer2 = project
3633 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3634 .await
3635 .unwrap();
3636
3637 buffer1.update(cx, |buffer, _| {
3638 assert_eq!(buffer.text(), "a\nb\nc\n");
3639 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3640 });
3641 buffer2.update(cx, |buffer, _| {
3642 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3643 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3644 });
3645
3646 // Change a file's line endings on disk from unix to windows. The buffer's
3647 // state updates correctly.
3648 fs.save(
3649 "/dir/file1".as_ref(),
3650 &"aaa\nb\nc\n".into(),
3651 LineEnding::Windows,
3652 )
3653 .await
3654 .unwrap();
3655 cx.executor().run_until_parked();
3656 buffer1.update(cx, |buffer, _| {
3657 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3658 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3659 });
3660
3661 // Save a file with windows line endings. The file is written correctly.
3662 buffer2.update(cx, |buffer, cx| {
3663 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3664 });
3665 project
3666 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3667 .await
3668 .unwrap();
3669 assert_eq!(
3670 fs.load("/dir/file2".as_ref()).await.unwrap(),
3671 "one\r\ntwo\r\nthree\r\nfour\r\n",
3672 );
3673}
3674
#[gpui::test]
// Verifies diagnostic grouping: LSP hint diagnostics that are linked to a
// primary diagnostic via `related_information` end up in the same group,
// the primary is flagged `is_primary`, and `diagnostic_group` returns each
// group's entries.
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Five diagnostics forming two logical groups:
    //   - "error 1" (warning) with one hint that points back at it.
    //   - "error 2" (error) with two hints; each hint's related_information
    //     points back at the primary ("original diagnostic").
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Feed the diagnostics in as if they came from language server 0.
    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries, ordered by position. Group 1 is "error 1" + its hint;
    // group 0 is "error 2" + its two hints. Exactly one entry per group has
    // `is_primary: true`.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: the "error 2" family, in positional order.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1: the "error 1" family.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3916
#[gpui::test]
// Exercises the LSP rename flow end to end against a fake language server:
// prepare_rename resolves the symbol's range, then perform_rename applies a
// multi-file WorkspaceEdit and returns the affected buffers.
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Register a fake Rust language server that advertises rename support
    // with prepare_provider enabled.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // prepare_rename at offset 7 (inside "ONE"); the fake server reports the
    // renameable range covering offsets 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // perform_rename "ONE" -> "THREE"; the fake server returns a
    // WorkspaceEdit touching both one.rs and two.rs.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each edited buffer to its undo transaction; both
    // files received the rename.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4050
4051#[gpui::test]
4052async fn test_search(cx: &mut gpui::TestAppContext) {
4053 init_test(cx);
4054
4055 let fs = FakeFs::new(cx.executor());
4056 fs.insert_tree(
4057 "/dir",
4058 json!({
4059 "one.rs": "const ONE: usize = 1;",
4060 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4061 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4062 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4063 }),
4064 )
4065 .await;
4066 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4067 assert_eq!(
4068 search(
4069 &project,
4070 SearchQuery::text(
4071 "TWO",
4072 false,
4073 true,
4074 false,
4075 Default::default(),
4076 Default::default(),
4077 None
4078 )
4079 .unwrap(),
4080 cx
4081 )
4082 .await
4083 .unwrap(),
4084 HashMap::from_iter([
4085 ("dir/two.rs".to_string(), vec![6..9]),
4086 ("dir/three.rs".to_string(), vec![37..40])
4087 ])
4088 );
4089
4090 let buffer_4 = project
4091 .update(cx, |project, cx| {
4092 project.open_local_buffer("/dir/four.rs", cx)
4093 })
4094 .await
4095 .unwrap();
4096 buffer_4.update(cx, |buffer, cx| {
4097 let text = "two::TWO";
4098 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4099 });
4100
4101 assert_eq!(
4102 search(
4103 &project,
4104 SearchQuery::text(
4105 "TWO",
4106 false,
4107 true,
4108 false,
4109 Default::default(),
4110 Default::default(),
4111 None,
4112 )
4113 .unwrap(),
4114 cx
4115 )
4116 .await
4117 .unwrap(),
4118 HashMap::from_iter([
4119 ("dir/two.rs".to_string(), vec![6..9]),
4120 ("dir/three.rs".to_string(), vec![37..40]),
4121 ("dir/four.rs".to_string(), vec![25..28, 36..39])
4122 ])
4123 );
4124}
4125
4126#[gpui::test]
4127async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4128 init_test(cx);
4129
4130 let search_query = "file";
4131
4132 let fs = FakeFs::new(cx.executor());
4133 fs.insert_tree(
4134 "/dir",
4135 json!({
4136 "one.rs": r#"// Rust file one"#,
4137 "one.ts": r#"// TypeScript file one"#,
4138 "two.rs": r#"// Rust file two"#,
4139 "two.ts": r#"// TypeScript file two"#,
4140 }),
4141 )
4142 .await;
4143 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4144
4145 assert!(
4146 search(
4147 &project,
4148 SearchQuery::text(
4149 search_query,
4150 false,
4151 true,
4152 false,
4153 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4154 Default::default(),
4155 None
4156 )
4157 .unwrap(),
4158 cx
4159 )
4160 .await
4161 .unwrap()
4162 .is_empty(),
4163 "If no inclusions match, no files should be returned"
4164 );
4165
4166 assert_eq!(
4167 search(
4168 &project,
4169 SearchQuery::text(
4170 search_query,
4171 false,
4172 true,
4173 false,
4174 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4175 Default::default(),
4176 None
4177 )
4178 .unwrap(),
4179 cx
4180 )
4181 .await
4182 .unwrap(),
4183 HashMap::from_iter([
4184 ("dir/one.rs".to_string(), vec![8..12]),
4185 ("dir/two.rs".to_string(), vec![8..12]),
4186 ]),
4187 "Rust only search should give only Rust files"
4188 );
4189
4190 assert_eq!(
4191 search(
4192 &project,
4193 SearchQuery::text(
4194 search_query,
4195 false,
4196 true,
4197 false,
4198
4199 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4200
4201 Default::default(),
4202 None,
4203 ).unwrap(),
4204 cx
4205 )
4206 .await
4207 .unwrap(),
4208 HashMap::from_iter([
4209 ("dir/one.ts".to_string(), vec![14..18]),
4210 ("dir/two.ts".to_string(), vec![14..18]),
4211 ]),
4212 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4213 );
4214
4215 assert_eq!(
4216 search(
4217 &project,
4218 SearchQuery::text(
4219 search_query,
4220 false,
4221 true,
4222 false,
4223
4224 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4225
4226 Default::default(),
4227 None,
4228 ).unwrap(),
4229 cx
4230 )
4231 .await
4232 .unwrap(),
4233 HashMap::from_iter([
4234 ("dir/two.ts".to_string(), vec![14..18]),
4235 ("dir/one.rs".to_string(), vec![8..12]),
4236 ("dir/one.ts".to_string(), vec![14..18]),
4237 ("dir/two.rs".to_string(), vec![8..12]),
4238 ]),
4239 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4240 );
4241}
4242
4243#[gpui::test]
4244async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4245 init_test(cx);
4246
4247 let search_query = "file";
4248
4249 let fs = FakeFs::new(cx.executor());
4250 fs.insert_tree(
4251 "/dir",
4252 json!({
4253 "one.rs": r#"// Rust file one"#,
4254 "one.ts": r#"// TypeScript file one"#,
4255 "two.rs": r#"// Rust file two"#,
4256 "two.ts": r#"// TypeScript file two"#,
4257 }),
4258 )
4259 .await;
4260 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4261
4262 assert_eq!(
4263 search(
4264 &project,
4265 SearchQuery::text(
4266 search_query,
4267 false,
4268 true,
4269 false,
4270 Default::default(),
4271 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4272 None,
4273 )
4274 .unwrap(),
4275 cx
4276 )
4277 .await
4278 .unwrap(),
4279 HashMap::from_iter([
4280 ("dir/one.rs".to_string(), vec![8..12]),
4281 ("dir/one.ts".to_string(), vec![14..18]),
4282 ("dir/two.rs".to_string(), vec![8..12]),
4283 ("dir/two.ts".to_string(), vec![14..18]),
4284 ]),
4285 "If no exclusions match, all files should be returned"
4286 );
4287
4288 assert_eq!(
4289 search(
4290 &project,
4291 SearchQuery::text(
4292 search_query,
4293 false,
4294 true,
4295 false,
4296 Default::default(),
4297 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4298 None,
4299 )
4300 .unwrap(),
4301 cx
4302 )
4303 .await
4304 .unwrap(),
4305 HashMap::from_iter([
4306 ("dir/one.ts".to_string(), vec![14..18]),
4307 ("dir/two.ts".to_string(), vec![14..18]),
4308 ]),
4309 "Rust exclusion search should give only TypeScript files"
4310 );
4311
4312 assert_eq!(
4313 search(
4314 &project,
4315 SearchQuery::text(
4316 search_query,
4317 false,
4318 true,
4319 false,
4320 Default::default(),
4321 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4322 None,
4323 ).unwrap(),
4324 cx
4325 )
4326 .await
4327 .unwrap(),
4328 HashMap::from_iter([
4329 ("dir/one.rs".to_string(), vec![8..12]),
4330 ("dir/two.rs".to_string(), vec![8..12]),
4331 ]),
4332 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4333 );
4334
4335 assert!(
4336 search(
4337 &project,
4338 SearchQuery::text(
4339 search_query,
4340 false,
4341 true,
4342 false,
4343 Default::default(),
4344
4345 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4346 None,
4347
4348 ).unwrap(),
4349 cx
4350 )
4351 .await
4352 .unwrap().is_empty(),
4353 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4354 );
4355}
4356
4357#[gpui::test]
4358async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4359 init_test(cx);
4360
4361 let search_query = "file";
4362
4363 let fs = FakeFs::new(cx.executor());
4364 fs.insert_tree(
4365 "/dir",
4366 json!({
4367 "one.rs": r#"// Rust file one"#,
4368 "one.ts": r#"// TypeScript file one"#,
4369 "two.rs": r#"// Rust file two"#,
4370 "two.ts": r#"// TypeScript file two"#,
4371 }),
4372 )
4373 .await;
4374 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4375
4376 assert!(
4377 search(
4378 &project,
4379 SearchQuery::text(
4380 search_query,
4381 false,
4382 true,
4383 false,
4384 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4385 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4386 None,
4387 )
4388 .unwrap(),
4389 cx
4390 )
4391 .await
4392 .unwrap()
4393 .is_empty(),
4394 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4395 );
4396
4397 assert!(
4398 search(
4399 &project,
4400 SearchQuery::text(
4401 search_query,
4402 false,
4403 true,
4404 false,
4405 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4406 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4407 None,
4408 ).unwrap(),
4409 cx
4410 )
4411 .await
4412 .unwrap()
4413 .is_empty(),
4414 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4415 );
4416
4417 assert!(
4418 search(
4419 &project,
4420 SearchQuery::text(
4421 search_query,
4422 false,
4423 true,
4424 false,
4425 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4426 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4427 None,
4428 )
4429 .unwrap(),
4430 cx
4431 )
4432 .await
4433 .unwrap()
4434 .is_empty(),
4435 "Non-matching inclusions and exclusions should not change that."
4436 );
4437
4438 assert_eq!(
4439 search(
4440 &project,
4441 SearchQuery::text(
4442 search_query,
4443 false,
4444 true,
4445 false,
4446 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4447 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4448 None,
4449 )
4450 .unwrap(),
4451 cx
4452 )
4453 .await
4454 .unwrap(),
4455 HashMap::from_iter([
4456 ("dir/one.ts".to_string(), vec![14..18]),
4457 ("dir/two.ts".to_string(), vec![14..18]),
4458 ]),
4459 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4460 );
4461}
4462
4463#[gpui::test]
4464async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4465 init_test(cx);
4466
4467 let fs = FakeFs::new(cx.executor());
4468 fs.insert_tree(
4469 "/worktree-a",
4470 json!({
4471 "haystack.rs": r#"// NEEDLE"#,
4472 "haystack.ts": r#"// NEEDLE"#,
4473 }),
4474 )
4475 .await;
4476 fs.insert_tree(
4477 "/worktree-b",
4478 json!({
4479 "haystack.rs": r#"// NEEDLE"#,
4480 "haystack.ts": r#"// NEEDLE"#,
4481 }),
4482 )
4483 .await;
4484
4485 let project = Project::test(
4486 fs.clone(),
4487 ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
4488 cx,
4489 )
4490 .await;
4491
4492 assert_eq!(
4493 search(
4494 &project,
4495 SearchQuery::text(
4496 "NEEDLE",
4497 false,
4498 true,
4499 false,
4500 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4501 Default::default(),
4502 None,
4503 )
4504 .unwrap(),
4505 cx
4506 )
4507 .await
4508 .unwrap(),
4509 HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
4510 "should only return results from included worktree"
4511 );
4512 assert_eq!(
4513 search(
4514 &project,
4515 SearchQuery::text(
4516 "NEEDLE",
4517 false,
4518 true,
4519 false,
4520 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4521 Default::default(),
4522 None,
4523 )
4524 .unwrap(),
4525 cx
4526 )
4527 .await
4528 .unwrap(),
4529 HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
4530 "should only return results from included worktree"
4531 );
4532
4533 assert_eq!(
4534 search(
4535 &project,
4536 SearchQuery::text(
4537 "NEEDLE",
4538 false,
4539 true,
4540 false,
4541 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4542 Default::default(),
4543 None,
4544 )
4545 .unwrap(),
4546 cx
4547 )
4548 .await
4549 .unwrap(),
4550 HashMap::from_iter([
4551 ("worktree-a/haystack.ts".to_string(), vec![3..9]),
4552 ("worktree-b/haystack.ts".to_string(), vec![3..9])
4553 ]),
4554 "should return results from both worktrees"
4555 );
4556}
4557
#[gpui::test]
// Verifies the `include_ignored` search flag: by default git-ignored files
// ("target", "node_modules") are skipped, with the flag set they are searched,
// and inclusion/exclusion matchers still apply on top of ignored entries.
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // include_ignored = false: ignored directories are not searched.
    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // include_ignored = true: every file, including those under ignored
    // directories, is searched.
    // NOTE(review): a fresh `Project` is constructed for each query below —
    // presumably so state from scanning ignored entries doesn't carry over
    // between searches; confirm before consolidating.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // include_ignored = true combined with inclusion and exclusion matchers:
    // only the included, non-excluded ignored file matches.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4674
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // Only `/one/two/three` is part of the project; its parent directories
    // exist on disk but are outside the worktree.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." is an ordinary file name here (the dots are not a parent
    // reference), so creating it succeeds and the entry is included.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Snapshot of the fake filesystem: "b.." was created, while neither of
    // the rejected "boop"/"beep" paths appears anywhere.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
4744
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    // Register four fake language servers for the same "tsx" language: two
    // that answer hovers with content, one (ESLint) that answers with None,
    // and one that does not advertise hover capability at all.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening a buffer of the matching language starts all four servers.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Wait for each server to come up and install a hover handler matching
    // its role. The no-capability server's handler panics, proving that a
    // server without hover capability is never queried.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Request hovers once, then make sure every hover-capable server actually
    // received the request before inspecting the merged result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned content contribute to the result;
    // ESLint's None and the incapable server are filtered out.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
4895
4896#[gpui::test]
4897async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
4898 init_test(cx);
4899
4900 let fs = FakeFs::new(cx.executor());
4901 fs.insert_tree(
4902 "/dir",
4903 json!({
4904 "a.ts": "a",
4905 }),
4906 )
4907 .await;
4908
4909 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4910
4911 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4912 language_registry.add(typescript_lang());
4913 let mut fake_language_servers = language_registry.register_fake_lsp(
4914 "TypeScript",
4915 FakeLspAdapter {
4916 capabilities: lsp::ServerCapabilities {
4917 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4918 ..lsp::ServerCapabilities::default()
4919 },
4920 ..FakeLspAdapter::default()
4921 },
4922 );
4923
4924 let buffer = project
4925 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4926 .await
4927 .unwrap();
4928 cx.executor().run_until_parked();
4929
4930 let fake_server = fake_language_servers
4931 .next()
4932 .await
4933 .expect("failed to get the language server");
4934
4935 let mut request_handled =
4936 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
4937 Ok(Some(lsp::Hover {
4938 contents: lsp::HoverContents::Array(vec![
4939 lsp::MarkedString::String("".to_string()),
4940 lsp::MarkedString::String(" ".to_string()),
4941 lsp::MarkedString::String("\n\n\n".to_string()),
4942 ]),
4943 range: None,
4944 }))
4945 });
4946
4947 let hover_task = project.update(cx, |project, cx| {
4948 project.hover(&buffer, Point::new(0, 0), cx)
4949 });
4950 let () = request_handled
4951 .next()
4952 .await
4953 .expect("All hover requests should have been triggered");
4954 assert_eq!(
4955 Vec::<String>::new(),
4956 hover_task
4957 .await
4958 .into_iter()
4959 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4960 .sorted()
4961 .collect::<Vec<_>>(),
4962 "Empty hover parts should be ignored"
4963 );
4964}
4965
4966#[gpui::test]
4967async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
4968 init_test(cx);
4969
4970 let fs = FakeFs::new(cx.executor());
4971 fs.insert_tree(
4972 "/dir",
4973 json!({
4974 "a.ts": "a",
4975 }),
4976 )
4977 .await;
4978
4979 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4980
4981 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4982 language_registry.add(typescript_lang());
4983 let mut fake_language_servers = language_registry.register_fake_lsp(
4984 "TypeScript",
4985 FakeLspAdapter {
4986 capabilities: lsp::ServerCapabilities {
4987 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4988 ..lsp::ServerCapabilities::default()
4989 },
4990 ..FakeLspAdapter::default()
4991 },
4992 );
4993
4994 let buffer = project
4995 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4996 .await
4997 .unwrap();
4998 cx.executor().run_until_parked();
4999
5000 let fake_server = fake_language_servers
5001 .next()
5002 .await
5003 .expect("failed to get the language server");
5004
5005 let mut request_handled = fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5006 move |_, _| async move {
5007 Ok(Some(vec![
5008 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5009 title: "organize imports".to_string(),
5010 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
5011 ..lsp::CodeAction::default()
5012 }),
5013 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5014 title: "fix code".to_string(),
5015 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
5016 ..lsp::CodeAction::default()
5017 }),
5018 ]))
5019 },
5020 );
5021
5022 let code_actions_task = project.update(cx, |project, cx| {
5023 project.code_actions(
5024 &buffer,
5025 0..buffer.read(cx).len(),
5026 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
5027 cx,
5028 )
5029 });
5030
5031 let () = request_handled
5032 .next()
5033 .await
5034 .expect("The code action request should have been triggered");
5035
5036 let code_actions = code_actions_task.await.unwrap();
5037 assert_eq!(code_actions.len(), 1);
5038 assert_eq!(
5039 code_actions[0].lsp_action.kind,
5040 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
5041 );
5042}
5043
5044#[gpui::test]
5045async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5046 init_test(cx);
5047
5048 let fs = FakeFs::new(cx.executor());
5049 fs.insert_tree(
5050 "/dir",
5051 json!({
5052 "a.tsx": "a",
5053 }),
5054 )
5055 .await;
5056
5057 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5058
5059 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5060 language_registry.add(tsx_lang());
5061 let language_server_names = [
5062 "TypeScriptServer",
5063 "TailwindServer",
5064 "ESLintServer",
5065 "NoActionsCapabilitiesServer",
5066 ];
5067
5068 let mut language_server_rxs = [
5069 language_registry.register_fake_lsp(
5070 "tsx",
5071 FakeLspAdapter {
5072 name: language_server_names[0],
5073 capabilities: lsp::ServerCapabilities {
5074 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5075 ..lsp::ServerCapabilities::default()
5076 },
5077 ..FakeLspAdapter::default()
5078 },
5079 ),
5080 language_registry.register_fake_lsp(
5081 "tsx",
5082 FakeLspAdapter {
5083 name: language_server_names[1],
5084 capabilities: lsp::ServerCapabilities {
5085 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5086 ..lsp::ServerCapabilities::default()
5087 },
5088 ..FakeLspAdapter::default()
5089 },
5090 ),
5091 language_registry.register_fake_lsp(
5092 "tsx",
5093 FakeLspAdapter {
5094 name: language_server_names[2],
5095 capabilities: lsp::ServerCapabilities {
5096 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5097 ..lsp::ServerCapabilities::default()
5098 },
5099 ..FakeLspAdapter::default()
5100 },
5101 ),
5102 language_registry.register_fake_lsp(
5103 "tsx",
5104 FakeLspAdapter {
5105 name: language_server_names[3],
5106 capabilities: lsp::ServerCapabilities {
5107 code_action_provider: None,
5108 ..lsp::ServerCapabilities::default()
5109 },
5110 ..FakeLspAdapter::default()
5111 },
5112 ),
5113 ];
5114
5115 let buffer = project
5116 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
5117 .await
5118 .unwrap();
5119 cx.executor().run_until_parked();
5120
5121 let mut servers_with_actions_requests = HashMap::default();
5122 for i in 0..language_server_names.len() {
5123 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5124 panic!(
5125 "Failed to get language server #{i} with name {}",
5126 &language_server_names[i]
5127 )
5128 });
5129 let new_server_name = new_server.server.name();
5130
5131 assert!(
5132 !servers_with_actions_requests.contains_key(&new_server_name),
5133 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5134 );
5135 match new_server_name.0.as_ref() {
5136 "TailwindServer" | "TypeScriptServer" => {
5137 servers_with_actions_requests.insert(
5138 new_server_name.clone(),
5139 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5140 move |_, _| {
5141 let name = new_server_name.clone();
5142 async move {
5143 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5144 lsp::CodeAction {
5145 title: format!("{name} code action"),
5146 ..lsp::CodeAction::default()
5147 },
5148 )]))
5149 }
5150 },
5151 ),
5152 );
5153 }
5154 "ESLintServer" => {
5155 servers_with_actions_requests.insert(
5156 new_server_name,
5157 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5158 |_, _| async move { Ok(None) },
5159 ),
5160 );
5161 }
5162 "NoActionsCapabilitiesServer" => {
5163 let _never_handled = new_server
5164 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5165 panic!(
5166 "Should not call for code actions server with no corresponding capabilities"
5167 )
5168 });
5169 }
5170 unexpected => panic!("Unexpected server name: {unexpected}"),
5171 }
5172 }
5173
5174 let code_actions_task = project.update(cx, |project, cx| {
5175 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5176 });
5177
5178 // cx.run_until_parked();
5179 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5180 |mut code_actions_request| async move {
5181 code_actions_request
5182 .next()
5183 .await
5184 .expect("All code actions requests should have been triggered")
5185 },
5186 ))
5187 .await;
5188 assert_eq!(
5189 vec!["TailwindServer code action", "TypeScriptServer code action"],
5190 code_actions_task
5191 .await
5192 .unwrap()
5193 .into_iter()
5194 .map(|code_action| code_action.lsp_action.title)
5195 .sorted()
5196 .collect::<Vec<_>>(),
5197 "Should receive code actions responses from all related servers with hover capabilities"
5198 );
5199}
5200
5201#[gpui::test]
5202async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5203 init_test(cx);
5204
5205 let fs = FakeFs::new(cx.executor());
5206 fs.insert_tree(
5207 "/dir",
5208 json!({
5209 "a.rs": "let a = 1;",
5210 "b.rs": "let b = 2;",
5211 "c.rs": "let c = 2;",
5212 }),
5213 )
5214 .await;
5215
5216 let project = Project::test(
5217 fs,
5218 [
5219 "/dir/a.rs".as_ref(),
5220 "/dir/b.rs".as_ref(),
5221 "/dir/c.rs".as_ref(),
5222 ],
5223 cx,
5224 )
5225 .await;
5226
5227 // check the initial state and get the worktrees
5228 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5229 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5230 assert_eq!(worktrees.len(), 3);
5231
5232 let worktree_a = worktrees[0].read(cx);
5233 let worktree_b = worktrees[1].read(cx);
5234 let worktree_c = worktrees[2].read(cx);
5235
5236 // check they start in the right order
5237 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5238 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5239 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5240
5241 (
5242 worktrees[0].clone(),
5243 worktrees[1].clone(),
5244 worktrees[2].clone(),
5245 )
5246 });
5247
5248 // move first worktree to after the second
5249 // [a, b, c] -> [b, a, c]
5250 project
5251 .update(cx, |project, cx| {
5252 let first = worktree_a.read(cx);
5253 let second = worktree_b.read(cx);
5254 project.move_worktree(first.id(), second.id(), cx)
5255 })
5256 .expect("moving first after second");
5257
5258 // check the state after moving
5259 project.update(cx, |project, cx| {
5260 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5261 assert_eq!(worktrees.len(), 3);
5262
5263 let first = worktrees[0].read(cx);
5264 let second = worktrees[1].read(cx);
5265 let third = worktrees[2].read(cx);
5266
5267 // check they are now in the right order
5268 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5269 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5270 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5271 });
5272
5273 // move the second worktree to before the first
5274 // [b, a, c] -> [a, b, c]
5275 project
5276 .update(cx, |project, cx| {
5277 let second = worktree_a.read(cx);
5278 let first = worktree_b.read(cx);
5279 project.move_worktree(first.id(), second.id(), cx)
5280 })
5281 .expect("moving second before first");
5282
5283 // check the state after moving
5284 project.update(cx, |project, cx| {
5285 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5286 assert_eq!(worktrees.len(), 3);
5287
5288 let first = worktrees[0].read(cx);
5289 let second = worktrees[1].read(cx);
5290 let third = worktrees[2].read(cx);
5291
5292 // check they are now in the right order
5293 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5294 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5295 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5296 });
5297
5298 // move the second worktree to after the third
5299 // [a, b, c] -> [a, c, b]
5300 project
5301 .update(cx, |project, cx| {
5302 let second = worktree_b.read(cx);
5303 let third = worktree_c.read(cx);
5304 project.move_worktree(second.id(), third.id(), cx)
5305 })
5306 .expect("moving second after third");
5307
5308 // check the state after moving
5309 project.update(cx, |project, cx| {
5310 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5311 assert_eq!(worktrees.len(), 3);
5312
5313 let first = worktrees[0].read(cx);
5314 let second = worktrees[1].read(cx);
5315 let third = worktrees[2].read(cx);
5316
5317 // check they are now in the right order
5318 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5319 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5320 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5321 });
5322
5323 // move the third worktree to before the second
5324 // [a, c, b] -> [a, b, c]
5325 project
5326 .update(cx, |project, cx| {
5327 let third = worktree_c.read(cx);
5328 let second = worktree_b.read(cx);
5329 project.move_worktree(third.id(), second.id(), cx)
5330 })
5331 .expect("moving third before second");
5332
5333 // check the state after moving
5334 project.update(cx, |project, cx| {
5335 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5336 assert_eq!(worktrees.len(), 3);
5337
5338 let first = worktrees[0].read(cx);
5339 let second = worktrees[1].read(cx);
5340 let third = worktrees[2].read(cx);
5341
5342 // check they are now in the right order
5343 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5344 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5345 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5346 });
5347
5348 // move the first worktree to after the third
5349 // [a, b, c] -> [b, c, a]
5350 project
5351 .update(cx, |project, cx| {
5352 let first = worktree_a.read(cx);
5353 let third = worktree_c.read(cx);
5354 project.move_worktree(first.id(), third.id(), cx)
5355 })
5356 .expect("moving first after third");
5357
5358 // check the state after moving
5359 project.update(cx, |project, cx| {
5360 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5361 assert_eq!(worktrees.len(), 3);
5362
5363 let first = worktrees[0].read(cx);
5364 let second = worktrees[1].read(cx);
5365 let third = worktrees[2].read(cx);
5366
5367 // check they are now in the right order
5368 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5369 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5370 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5371 });
5372
5373 // move the third worktree to before the first
5374 // [b, c, a] -> [a, b, c]
5375 project
5376 .update(cx, |project, cx| {
5377 let third = worktree_a.read(cx);
5378 let first = worktree_b.read(cx);
5379 project.move_worktree(third.id(), first.id(), cx)
5380 })
5381 .expect("moving third before first");
5382
5383 // check the state after moving
5384 project.update(cx, |project, cx| {
5385 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5386 assert_eq!(worktrees.len(), 3);
5387
5388 let first = worktrees[0].read(cx);
5389 let second = worktrees[1].read(cx);
5390 let third = worktrees[2].read(cx);
5391
5392 // check they are now in the right order
5393 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5394 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5395 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5396 });
5397}
5398
5399async fn search(
5400 project: &Model<Project>,
5401 query: SearchQuery,
5402 cx: &mut gpui::TestAppContext,
5403) -> Result<HashMap<String, Vec<Range<usize>>>> {
5404 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
5405 let mut results = HashMap::default();
5406 while let Some(search_result) = search_rx.next().await {
5407 match search_result {
5408 SearchResult::Buffer { buffer, ranges } => {
5409 results.entry(buffer).or_insert(ranges);
5410 }
5411 SearchResult::LimitReached => {}
5412 }
5413 }
5414 Ok(results
5415 .into_iter()
5416 .map(|(buffer, ranges)| {
5417 buffer.update(cx, |buffer, cx| {
5418 let path = buffer
5419 .file()
5420 .unwrap()
5421 .full_path(cx)
5422 .to_string_lossy()
5423 .to_string();
5424 let ranges = ranges
5425 .into_iter()
5426 .map(|range| range.to_offset(buffer))
5427 .collect::<Vec<_>>();
5428 (path, ranges)
5429 })
5430 })
5431 .collect())
5432}
5433
5434pub fn init_test(cx: &mut gpui::TestAppContext) {
5435 if std::env::var("RUST_LOG").is_ok() {
5436 env_logger::try_init().ok();
5437 }
5438
5439 cx.update(|cx| {
5440 let settings_store = SettingsStore::test(cx);
5441 cx.set_global(settings_store);
5442 release_channel::init(SemanticVersion::default(), cx);
5443 language::init(cx);
5444 Project::init_settings(cx);
5445 });
5446}
5447
5448fn json_lang() -> Arc<Language> {
5449 Arc::new(Language::new(
5450 LanguageConfig {
5451 name: "JSON".into(),
5452 matcher: LanguageMatcher {
5453 path_suffixes: vec!["json".to_string()],
5454 ..Default::default()
5455 },
5456 ..Default::default()
5457 },
5458 None,
5459 ))
5460}
5461
5462fn js_lang() -> Arc<Language> {
5463 Arc::new(Language::new(
5464 LanguageConfig {
5465 name: "JavaScript".into(),
5466 matcher: LanguageMatcher {
5467 path_suffixes: vec!["js".to_string()],
5468 ..Default::default()
5469 },
5470 ..Default::default()
5471 },
5472 None,
5473 ))
5474}
5475
5476fn rust_lang() -> Arc<Language> {
5477 Arc::new(Language::new(
5478 LanguageConfig {
5479 name: "Rust".into(),
5480 matcher: LanguageMatcher {
5481 path_suffixes: vec!["rs".to_string()],
5482 ..Default::default()
5483 },
5484 ..Default::default()
5485 },
5486 Some(tree_sitter_rust::LANGUAGE.into()),
5487 ))
5488}
5489
5490fn typescript_lang() -> Arc<Language> {
5491 Arc::new(Language::new(
5492 LanguageConfig {
5493 name: "TypeScript".into(),
5494 matcher: LanguageMatcher {
5495 path_suffixes: vec!["ts".to_string()],
5496 ..Default::default()
5497 },
5498 ..Default::default()
5499 },
5500 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
5501 ))
5502}
5503
5504fn tsx_lang() -> Arc<Language> {
5505 Arc::new(Language::new(
5506 LanguageConfig {
5507 name: "tsx".into(),
5508 matcher: LanguageMatcher {
5509 path_suffixes: vec!["tsx".to_string()],
5510 ..Default::default()
5511 },
5512 ..Default::default()
5513 },
5514 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
5515 ))
5516}
5517
5518fn get_all_tasks(
5519 project: &Model<Project>,
5520 worktree_id: Option<WorktreeId>,
5521 task_context: &TaskContext,
5522 cx: &mut AppContext,
5523) -> Vec<(TaskSourceKind, ResolvedTask)> {
5524 let (mut old, new) = project.update(cx, |project, cx| {
5525 project
5526 .task_store
5527 .read(cx)
5528 .task_inventory()
5529 .unwrap()
5530 .read(cx)
5531 .used_and_current_resolved_tasks(worktree_id, None, task_context, cx)
5532 });
5533 old.extend(new);
5534 old
5535}