1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::{AppContext, SemanticVersion, UpdateGlobal};
5use http_client::Url;
6use language::{
7 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
8 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticSet, FakeLspAdapter,
9 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
10};
11use lsp::{DiagnosticSeverity, NumberOrString};
12use parking_lot::Mutex;
13use pretty_assertions::assert_eq;
14use serde_json::json;
15#[cfg(not(windows))]
16use std::os;
17
18use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
19use task::{ResolvedTask, TaskContext};
20use unindent::Unindent as _;
21use util::{assert_set_eq, paths::PathMatcher, test::temp_tree, TryFutureExt as _};
22
23#[gpui::test]
24async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
25 cx.executor().allow_parking();
26
27 let (tx, mut rx) = futures::channel::mpsc::unbounded();
28 let _thread = std::thread::spawn(move || {
29 std::fs::metadata("/tmp").unwrap();
30 std::thread::sleep(Duration::from_millis(1000));
31 tx.unbounded_send(1).unwrap();
32 });
33 rx.next().await.unwrap();
34}
35
36#[gpui::test]
37async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
38 cx.executor().allow_parking();
39
40 let io_task = smol::unblock(move || {
41 println!("sleeping on thread {:?}", std::thread::current().id());
42 std::thread::sleep(Duration::from_millis(10));
43 1
44 });
45
46 let task = cx.foreground_executor().spawn(async move {
47 io_task.await;
48 });
49
50 task.await;
51}
52
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem (temp_tree + RealFs), which may block.
    cx.executor().allow_parking();

    // Real temp directory: empty strings are files, nested maps are dirs.
    let dir = temp_tree(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // One symlink pointing at the worktree root from outside it, and one
    // inside the tree ("root/finnochio" -> "root/fennel").
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project *through* the symlinked root path, on the real FS so
    // the links are actually traversed.
    let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // 5 files: apple, date, endive, fennel/grape, plus grape seen again
        // through the finnochio symlink — presumably symlinked directories
        // contribute their own entries; confirm against worktree semantics.
        assert_eq!(tree.file_count(), 5);
        // The same file reached via the real dir and via the symlinked dir
        // must resolve to the same inode.
        assert_eq!(
            tree.inode_for_path("fennel/grape"),
            tree.inode_for_path("finnochio/grape")
        );
    });
}
93
94#[gpui::test]
95async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
96 init_test(cx);
97
98 let dir = temp_tree(json!({
99 ".editorconfig": r#"
100 root = true
101 [*.rs]
102 indent_style = tab
103 indent_size = 3
104 end_of_line = lf
105 insert_final_newline = true
106 trim_trailing_whitespace = true
107 [*.js]
108 tab_width = 10
109 "#,
110 ".zed": {
111 "settings.json": r#"{
112 "tab_size": 8,
113 "hard_tabs": false,
114 "ensure_final_newline_on_save": false,
115 "remove_trailing_whitespace_on_save": false,
116 "soft_wrap": "editor_width"
117 }"#,
118 },
119 "a.rs": "fn a() {\n A\n}",
120 "b": {
121 ".editorconfig": r#"
122 [*.rs]
123 indent_size = 2
124 "#,
125 "b.rs": "fn b() {\n B\n}",
126 },
127 "c.js": "def c\n C\nend",
128 "README.json": "tabs are better\n",
129 }));
130
131 let path = dir.path();
132 let fs = FakeFs::new(cx.executor());
133 fs.insert_tree_from_real_fs(path, path).await;
134 let project = Project::test(fs, [path], cx).await;
135
136 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
137 language_registry.add(js_lang());
138 language_registry.add(json_lang());
139 language_registry.add(rust_lang());
140
141 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
142
143 cx.executor().run_until_parked();
144
145 cx.update(|cx| {
146 let tree = worktree.read(cx);
147 let settings_for = |path: &str| {
148 let file_entry = tree.entry_for_path(path).unwrap().clone();
149 let file = File::for_entry(file_entry, worktree.clone());
150 let file_language = project
151 .read(cx)
152 .languages()
153 .language_for_file_path(file.path.as_ref());
154 let file_language = cx
155 .background_executor()
156 .block(file_language)
157 .expect("Failed to get file language");
158 let file = file as _;
159 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
160 };
161
162 let settings_a = settings_for("a.rs");
163 let settings_b = settings_for("b/b.rs");
164 let settings_c = settings_for("c.js");
165 let settings_readme = settings_for("README.json");
166
167 // .editorconfig overrides .zed/settings
168 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
169 assert_eq!(settings_a.hard_tabs, true);
170 assert_eq!(settings_a.ensure_final_newline_on_save, true);
171 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
172
173 // .editorconfig in b/ overrides .editorconfig in root
174 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
175
176 // "indent_size" is not set, so "tab_width" is used
177 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
178
179 // README.md should not be affected by .editorconfig's globe "*.rs"
180 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
181 });
182}
183
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    // Worktree with a root-level `.zed` directory (settings + tasks) and a
    // nested `b/.zed` directory that overrides both for files under `b/`.
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // Source kind of tasks defined in the worktree's *root* `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolve per directory: `a/a.rs` sees the root
            // settings, `b/b.rs` sees the nested override.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task definitions are discovered.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root `.zed` task as most recently scheduled, and install a
    // global (file-based) tasks.json containing one more task.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // The just-scheduled task now sorts first, followed by the other
    // worktree task, then the newly added global task (with its env).
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
377
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake Rust and JSON language servers, each advertising distinct
    // completion trigger characters and save-notification support, so the
    // test can tell which server received which notification.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    // A same-language rename is surfaced to the server as close + reopen.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared when the buffer's
    // language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers must receive a shutdown request before the new ones start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is not guaranteed, hence set equality).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
760
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // `target/` is gitignored; only `target/y/**` is later watched by the server.
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // The server registers three watchers: an exact file path, a glob over
    // src/, and a glob inside the *ignored* target/y directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Record every DidChangeWatchedFiles notification, sorted by URI so the
    // assertions below are deterministic.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering the watchers alone produces no change events...
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    // ...but does trigger additional directory scans of the ignored path.
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
954
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Two *single-file* worktrees, one per file.
    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Publish one diagnostic per file: an ERROR on `a` in a.rs, and a
    // WARNING on `b` in b.rs (message text is just test data).
    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each diagnostic lands in the correct buffer, highlighting only the
    // variable name within its one-character range.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1046
1047#[gpui::test]
1048async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1049 init_test(cx);
1050
1051 let fs = FakeFs::new(cx.executor());
1052 fs.insert_tree(
1053 "/root",
1054 json!({
1055 "dir": {
1056 ".git": {
1057 "HEAD": "ref: refs/heads/main",
1058 },
1059 ".gitignore": "b.rs",
1060 "a.rs": "let a = 1;",
1061 "b.rs": "let b = 2;",
1062 },
1063 "other.rs": "let b = c;"
1064 }),
1065 )
1066 .await;
1067
1068 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1069 let (worktree, _) = project
1070 .update(cx, |project, cx| {
1071 project.find_or_create_worktree("/root/dir", true, cx)
1072 })
1073 .await
1074 .unwrap();
1075 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1076
1077 let (worktree, _) = project
1078 .update(cx, |project, cx| {
1079 project.find_or_create_worktree("/root/other.rs", false, cx)
1080 })
1081 .await
1082 .unwrap();
1083 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1084
1085 let server_id = LanguageServerId(0);
1086 project.update(cx, |project, cx| {
1087 project
1088 .update_diagnostics(
1089 server_id,
1090 lsp::PublishDiagnosticsParams {
1091 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1092 version: None,
1093 diagnostics: vec![lsp::Diagnostic {
1094 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1095 severity: Some(lsp::DiagnosticSeverity::ERROR),
1096 message: "unused variable 'b'".to_string(),
1097 ..Default::default()
1098 }],
1099 },
1100 &[],
1101 cx,
1102 )
1103 .unwrap();
1104 project
1105 .update_diagnostics(
1106 server_id,
1107 lsp::PublishDiagnosticsParams {
1108 uri: Url::from_file_path("/root/other.rs").unwrap(),
1109 version: None,
1110 diagnostics: vec![lsp::Diagnostic {
1111 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1112 severity: Some(lsp::DiagnosticSeverity::ERROR),
1113 message: "unknown variable 'c'".to_string(),
1114 ..Default::default()
1115 }],
1116 },
1117 &[],
1118 cx,
1119 )
1120 .unwrap();
1121 });
1122
1123 let main_ignored_buffer = project
1124 .update(cx, |project, cx| {
1125 project.open_buffer((main_worktree_id, "b.rs"), cx)
1126 })
1127 .await
1128 .unwrap();
1129 main_ignored_buffer.update(cx, |buffer, _| {
1130 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1131 assert_eq!(
1132 chunks
1133 .iter()
1134 .map(|(s, d)| (s.as_str(), *d))
1135 .collect::<Vec<_>>(),
1136 &[
1137 ("let ", None),
1138 ("b", Some(DiagnosticSeverity::ERROR)),
1139 (" = 2;", None),
1140 ],
1141 "Gigitnored buffers should still get in-buffer diagnostics",
1142 );
1143 });
1144 let other_buffer = project
1145 .update(cx, |project, cx| {
1146 project.open_buffer((other_worktree_id, ""), cx)
1147 })
1148 .await
1149 .unwrap();
1150 other_buffer.update(cx, |buffer, _| {
1151 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1152 assert_eq!(
1153 chunks
1154 .iter()
1155 .map(|(s, d)| (s.as_str(), *d))
1156 .collect::<Vec<_>>(),
1157 &[
1158 ("let b = ", None),
1159 ("c", Some(DiagnosticSeverity::ERROR)),
1160 (";", None),
1161 ],
1162 "Buffers from hidden projects should still get in-buffer diagnostics"
1163 );
1164 });
1165
1166 project.update(cx, |project, cx| {
1167 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1168 assert_eq!(
1169 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1170 vec![(
1171 ProjectPath {
1172 worktree_id: main_worktree_id,
1173 path: Arc::from(Path::new("b.rs")),
1174 },
1175 server_id,
1176 DiagnosticSummary {
1177 error_count: 1,
1178 warning_count: 0,
1179 }
1180 )]
1181 );
1182 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1183 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1184 });
1185}
1186
// Verifies that a language server's "disk-based diagnostics" progress token is
// surfaced as project events: `DiskBasedDiagnosticsStarted` when the matching
// work-done progress begins, `DiagnosticsUpdated` when diagnostics arrive, and
// `DiskBasedDiagnosticsFinished` when the progress ends. Also checks that
// republishing an identical empty diagnostic set emits no redundant event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Progress token the fake server will report disk-based diagnostics under.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Subscribe to project events before driving the server so none are missed.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name().into(),
            Some(worktree_id)
        ),
    );

    // Progress that begins under the disk-based token emits a "started" event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics emits an update event for the affected path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the progress emits a "finished" event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // The published diagnostic is visible on a freshly opened buffer.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second, identical empty publish produces no further events.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1319
// Verifies that restarting a language server while its disk-based diagnostics
// progress is still outstanding does not leave the project stuck in a
// "diagnosing" state: the replacement server's progress lifecycle fully
// supersedes the old server's unfinished one.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The replacement server is assigned the next server id (1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name().into(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1404
1405#[gpui::test]
1406async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1407 init_test(cx);
1408
1409 let fs = FakeFs::new(cx.executor());
1410 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1411
1412 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1413
1414 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1415 language_registry.add(rust_lang());
1416 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1417
1418 let buffer = project
1419 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1420 .await
1421 .unwrap();
1422
1423 // Publish diagnostics
1424 let fake_server = fake_servers.next().await.unwrap();
1425 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1426 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1427 version: None,
1428 diagnostics: vec![lsp::Diagnostic {
1429 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1430 severity: Some(lsp::DiagnosticSeverity::ERROR),
1431 message: "the message".to_string(),
1432 ..Default::default()
1433 }],
1434 });
1435
1436 cx.executor().run_until_parked();
1437 buffer.update(cx, |buffer, _| {
1438 assert_eq!(
1439 buffer
1440 .snapshot()
1441 .diagnostics_in_range::<_, usize>(0..1, false)
1442 .map(|entry| entry.diagnostic.message.clone())
1443 .collect::<Vec<_>>(),
1444 ["the message".to_string()]
1445 );
1446 });
1447 project.update(cx, |project, cx| {
1448 assert_eq!(
1449 project.diagnostic_summary(false, cx),
1450 DiagnosticSummary {
1451 error_count: 1,
1452 warning_count: 0,
1453 }
1454 );
1455 });
1456
1457 project.update(cx, |project, cx| {
1458 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1459 });
1460
1461 // The diagnostics are cleared.
1462 cx.executor().run_until_parked();
1463 buffer.update(cx, |buffer, _| {
1464 assert_eq!(
1465 buffer
1466 .snapshot()
1467 .diagnostics_in_range::<_, usize>(0..1, false)
1468 .map(|entry| entry.diagnostic.message.clone())
1469 .collect::<Vec<_>>(),
1470 Vec::<String>::new(),
1471 );
1472 });
1473 project.update(cx, |project, cx| {
1474 assert_eq!(
1475 project.diagnostic_summary(false, cx),
1476 DiagnosticSummary {
1477 error_count: 0,
1478 warning_count: 0,
1479 }
1480 );
1481 });
1482}
1483
1484#[gpui::test]
1485async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1486 init_test(cx);
1487
1488 let fs = FakeFs::new(cx.executor());
1489 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1490
1491 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1492 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1493
1494 language_registry.add(rust_lang());
1495 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1496
1497 let buffer = project
1498 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1499 .await
1500 .unwrap();
1501
1502 // Before restarting the server, report diagnostics with an unknown buffer version.
1503 let fake_server = fake_servers.next().await.unwrap();
1504 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1505 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1506 version: Some(10000),
1507 diagnostics: Vec::new(),
1508 });
1509 cx.executor().run_until_parked();
1510
1511 project.update(cx, |project, cx| {
1512 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1513 });
1514 let mut fake_server = fake_servers.next().await.unwrap();
1515 let notification = fake_server
1516 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1517 .await
1518 .text_document;
1519 assert_eq!(notification.version, 0);
1520}
1521
1522#[gpui::test]
1523async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
1524 init_test(cx);
1525
1526 let progress_token = "the-progress-token";
1527
1528 let fs = FakeFs::new(cx.executor());
1529 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1530
1531 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1532
1533 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1534 language_registry.add(rust_lang());
1535 let mut fake_servers = language_registry.register_fake_lsp(
1536 "Rust",
1537 FakeLspAdapter {
1538 name: "the-language-server",
1539 disk_based_diagnostics_sources: vec!["disk".into()],
1540 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1541 ..Default::default()
1542 },
1543 );
1544
1545 let buffer = project
1546 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1547 .await
1548 .unwrap();
1549
1550 // Simulate diagnostics starting to update.
1551 let mut fake_server = fake_servers.next().await.unwrap();
1552 fake_server
1553 .start_progress_with(
1554 "another-token",
1555 lsp::WorkDoneProgressBegin {
1556 cancellable: Some(false),
1557 ..Default::default()
1558 },
1559 )
1560 .await;
1561 fake_server
1562 .start_progress_with(
1563 progress_token,
1564 lsp::WorkDoneProgressBegin {
1565 cancellable: Some(true),
1566 ..Default::default()
1567 },
1568 )
1569 .await;
1570 cx.executor().run_until_parked();
1571
1572 project.update(cx, |project, cx| {
1573 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
1574 });
1575
1576 let cancel_notification = fake_server
1577 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
1578 .await;
1579 assert_eq!(
1580 cancel_notification.token,
1581 NumberOrString::String(progress_token.into())
1582 );
1583}
1584
1585#[gpui::test]
1586async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1587 init_test(cx);
1588
1589 let fs = FakeFs::new(cx.executor());
1590 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1591 .await;
1592
1593 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1594 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1595
1596 let mut fake_rust_servers = language_registry.register_fake_lsp(
1597 "Rust",
1598 FakeLspAdapter {
1599 name: "rust-lsp",
1600 ..Default::default()
1601 },
1602 );
1603 let mut fake_js_servers = language_registry.register_fake_lsp(
1604 "JavaScript",
1605 FakeLspAdapter {
1606 name: "js-lsp",
1607 ..Default::default()
1608 },
1609 );
1610 language_registry.add(rust_lang());
1611 language_registry.add(js_lang());
1612
1613 let _rs_buffer = project
1614 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1615 .await
1616 .unwrap();
1617 let _js_buffer = project
1618 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1619 .await
1620 .unwrap();
1621
1622 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1623 assert_eq!(
1624 fake_rust_server_1
1625 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1626 .await
1627 .text_document
1628 .uri
1629 .as_str(),
1630 "file:///dir/a.rs"
1631 );
1632
1633 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1634 assert_eq!(
1635 fake_js_server
1636 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1637 .await
1638 .text_document
1639 .uri
1640 .as_str(),
1641 "file:///dir/b.js"
1642 );
1643
1644 // Disable Rust language server, ensuring only that server gets stopped.
1645 cx.update(|cx| {
1646 SettingsStore::update_global(cx, |settings, cx| {
1647 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1648 settings.languages.insert(
1649 "Rust".into(),
1650 LanguageSettingsContent {
1651 enable_language_server: Some(false),
1652 ..Default::default()
1653 },
1654 );
1655 });
1656 })
1657 });
1658 fake_rust_server_1
1659 .receive_notification::<lsp::notification::Exit>()
1660 .await;
1661
1662 // Enable Rust and disable JavaScript language servers, ensuring that the
1663 // former gets started again and that the latter stops.
1664 cx.update(|cx| {
1665 SettingsStore::update_global(cx, |settings, cx| {
1666 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1667 settings.languages.insert(
1668 LanguageName::new("Rust"),
1669 LanguageSettingsContent {
1670 enable_language_server: Some(true),
1671 ..Default::default()
1672 },
1673 );
1674 settings.languages.insert(
1675 LanguageName::new("JavaScript"),
1676 LanguageSettingsContent {
1677 enable_language_server: Some(false),
1678 ..Default::default()
1679 },
1680 );
1681 });
1682 })
1683 });
1684 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1685 assert_eq!(
1686 fake_rust_server_2
1687 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1688 .await
1689 .text_document
1690 .uri
1691 .as_str(),
1692 "file:///dir/a.rs"
1693 );
1694 fake_js_server
1695 .receive_notification::<lsp::notification::Exit>()
1696 .await;
1697}
1698
// Verifies that published diagnostics are mapped through buffer edits: when a
// server reports diagnostics against an older document version, their ranges
// are translated onto the buffer's current contents. Also covers overlapping
// diagnostics, edits made after the report, and diagnostics arriving with
// ranges out of order.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (Reported at rows 0-2, now expected at rows 2-4 after the "\n\n" edit.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                // Note: the warning, whose range is wider, is yielded first,
                // even though both diagnostics start at the same position.
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1978
1979#[gpui::test]
1980async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1981 init_test(cx);
1982
1983 let text = concat!(
1984 "let one = ;\n", //
1985 "let two = \n",
1986 "let three = 3;\n",
1987 );
1988
1989 let fs = FakeFs::new(cx.executor());
1990 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1991
1992 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1993 let buffer = project
1994 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1995 .await
1996 .unwrap();
1997
1998 project.update(cx, |project, cx| {
1999 project.lsp_store.update(cx, |lsp_store, cx| {
2000 lsp_store
2001 .update_buffer_diagnostics(
2002 &buffer,
2003 LanguageServerId(0),
2004 None,
2005 vec![
2006 DiagnosticEntry {
2007 range: Unclipped(PointUtf16::new(0, 10))
2008 ..Unclipped(PointUtf16::new(0, 10)),
2009 diagnostic: Diagnostic {
2010 severity: DiagnosticSeverity::ERROR,
2011 message: "syntax error 1".to_string(),
2012 ..Default::default()
2013 },
2014 },
2015 DiagnosticEntry {
2016 range: Unclipped(PointUtf16::new(1, 10))
2017 ..Unclipped(PointUtf16::new(1, 10)),
2018 diagnostic: Diagnostic {
2019 severity: DiagnosticSeverity::ERROR,
2020 message: "syntax error 2".to_string(),
2021 ..Default::default()
2022 },
2023 },
2024 ],
2025 cx,
2026 )
2027 .unwrap();
2028 })
2029 });
2030
2031 // An empty range is extended forward to include the following character.
2032 // At the end of a line, an empty range is extended backward to include
2033 // the preceding character.
2034 buffer.update(cx, |buffer, _| {
2035 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2036 assert_eq!(
2037 chunks
2038 .iter()
2039 .map(|(s, d)| (s.as_str(), *d))
2040 .collect::<Vec<_>>(),
2041 &[
2042 ("let one = ", None),
2043 (";", Some(DiagnosticSeverity::ERROR)),
2044 ("\nlet two =", None),
2045 (" ", Some(DiagnosticSeverity::ERROR)),
2046 ("\nlet three = 3;\n", None)
2047 ]
2048 );
2049 });
2050}
2051
2052#[gpui::test]
2053async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2054 init_test(cx);
2055
2056 let fs = FakeFs::new(cx.executor());
2057 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2058 .await;
2059
2060 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2061
2062 project.update(cx, |project, cx| {
2063 project
2064 .update_diagnostic_entries(
2065 LanguageServerId(0),
2066 Path::new("/dir/a.rs").to_owned(),
2067 None,
2068 vec![DiagnosticEntry {
2069 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2070 diagnostic: Diagnostic {
2071 severity: DiagnosticSeverity::ERROR,
2072 is_primary: true,
2073 message: "syntax error a1".to_string(),
2074 ..Default::default()
2075 },
2076 }],
2077 cx,
2078 )
2079 .unwrap();
2080 project
2081 .update_diagnostic_entries(
2082 LanguageServerId(1),
2083 Path::new("/dir/a.rs").to_owned(),
2084 None,
2085 vec![DiagnosticEntry {
2086 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2087 diagnostic: Diagnostic {
2088 severity: DiagnosticSeverity::ERROR,
2089 is_primary: true,
2090 message: "syntax error b1".to_string(),
2091 ..Default::default()
2092 },
2093 }],
2094 cx,
2095 )
2096 .unwrap();
2097
2098 assert_eq!(
2099 project.diagnostic_summary(false, cx),
2100 DiagnosticSummary {
2101 error_count: 2,
2102 warning_count: 0,
2103 }
2104 );
2105 });
2106}
2107
// Verifies that `edits_from_lsp` correctly interprets LSP edits computed
// against an older document version: the edit positions are resolved in that
// older snapshot and mapped onto the current buffer, preserving edits the
// user made in the meantime.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Record the document version the server saw when the buffer was opened;
    // the edits below will be tagged with this (now stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Translate edits expressed against the stale document version onto the
    // buffer's current contents.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits preserves the user's intervening comments
    // while landing the server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2260
// Verifies that `LspStore::edits_from_lsp` collapses a large, diff-shaped set
// of LSP text edits down to a minimal set of buffer edits before they are
// applied.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the diff-shaped input above, only two minimal edits remain:
        // the import rewrite and the deletion of the now-redundant lines.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        // Applying the minimal edits yields the result the full diff described.
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2369
// Verifies that `LspStore::edits_from_lsp` tolerates malformed server edits:
// edits delivered out of order, a range whose start lies after its end, and a
// range extending past the end of the buffer. The assertions below show such
// input being normalized into a minimal, ordered edit set.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Out-of-bounds range: line 99 does not exist in the buffer.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The malformed input reduces to the same minimal, ordered pair of
        // edits that well-formed input would produce.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2474
2475fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2476 buffer: &Buffer,
2477 range: Range<T>,
2478) -> Vec<(String, Option<DiagnosticSeverity>)> {
2479 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2480 for chunk in buffer.snapshot().chunks(range, true) {
2481 if chunks.last().map_or(false, |prev_chunk| {
2482 prev_chunk.1 == chunk.diagnostic_severity
2483 }) {
2484 chunks.last_mut().unwrap().0.push_str(chunk.text);
2485 } else {
2486 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2487 }
2488 }
2489 chunks
2490}
2491
// Exercises go-to-definition into a file outside every visible worktree: the
// target file is surfaced through an extra invisible worktree, which is
// released once the last handle to the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is opened as a (visible) worktree root.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Respond to the definition request with a location in the other file.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // `a.rs` now appears as an additional, invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // Dropping the last handle to the definition released the invisible worktree.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2585
// Covers completions for which the server returns no `textEdit`: the client
// must compute the range to replace from the text around the cursor, and use
// `insert_text` (when present) rather than the label for the inserted text.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing the partial identifier "fqn" at the end of the line.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // `insert_text` wins over the label, and the replaced range is the
    // three-character word "fqn" immediately before the cursor.
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal with the cursor before the
    // closing quote; with no `insert_text`, the label is used verbatim.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The replaced range covers "cmp", the partial segment before the quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2677
// Completion text returned by a server may contain `\r` or `\r\n` line
// endings; the client must normalize them to `\n` before insertion.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert text mixes bare `\r` and `\r\n` line endings.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both `\r` and `\r\n` were normalized to `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2738
// Covers the code-action flow for actions that carry a command instead of
// edits: resolving the action populates its command, executing the command
// makes the server send a `workspace/applyEdit` request back to the client,
// and the resulting edits land in the project transaction (and are undoable).
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // The server advertises lazy resolution of code actions.
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying `data`).
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    // Prepend "X" at the start of `a.ts`.
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The applied edit is undoable like any local edit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2871
2872#[gpui::test(iterations = 10)]
2873async fn test_save_file(cx: &mut gpui::TestAppContext) {
2874 init_test(cx);
2875
2876 let fs = FakeFs::new(cx.executor());
2877 fs.insert_tree(
2878 "/dir",
2879 json!({
2880 "file1": "the old contents",
2881 }),
2882 )
2883 .await;
2884
2885 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2886 let buffer = project
2887 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2888 .await
2889 .unwrap();
2890 buffer.update(cx, |buffer, cx| {
2891 assert_eq!(buffer.text(), "the old contents");
2892 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2893 });
2894
2895 project
2896 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2897 .await
2898 .unwrap();
2899
2900 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2901 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2902}
2903
2904#[gpui::test(iterations = 30)]
2905async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2906 init_test(cx);
2907
2908 let fs = FakeFs::new(cx.executor().clone());
2909 fs.insert_tree(
2910 "/dir",
2911 json!({
2912 "file1": "the original contents",
2913 }),
2914 )
2915 .await;
2916
2917 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2918 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2919 let buffer = project
2920 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2921 .await
2922 .unwrap();
2923
2924 // Simulate buffer diffs being slow, so that they don't complete before
2925 // the next file change occurs.
2926 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2927
2928 // Change the buffer's file on disk, and then wait for the file change
2929 // to be detected by the worktree, so that the buffer starts reloading.
2930 fs.save(
2931 "/dir/file1".as_ref(),
2932 &"the first contents".into(),
2933 Default::default(),
2934 )
2935 .await
2936 .unwrap();
2937 worktree.next_event(cx).await;
2938
2939 // Change the buffer's file again. Depending on the random seed, the
2940 // previous file change may still be in progress.
2941 fs.save(
2942 "/dir/file1".as_ref(),
2943 &"the second contents".into(),
2944 Default::default(),
2945 )
2946 .await
2947 .unwrap();
2948 worktree.next_event(cx).await;
2949
2950 cx.executor().run_until_parked();
2951 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2952 buffer.read_with(cx, |buffer, _| {
2953 assert_eq!(buffer.text(), on_disk_text);
2954 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2955 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2956 });
2957}
2958
2959#[gpui::test(iterations = 30)]
2960async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2961 init_test(cx);
2962
2963 let fs = FakeFs::new(cx.executor().clone());
2964 fs.insert_tree(
2965 "/dir",
2966 json!({
2967 "file1": "the original contents",
2968 }),
2969 )
2970 .await;
2971
2972 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2973 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2974 let buffer = project
2975 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2976 .await
2977 .unwrap();
2978
2979 // Simulate buffer diffs being slow, so that they don't complete before
2980 // the next file change occurs.
2981 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2982
2983 // Change the buffer's file on disk, and then wait for the file change
2984 // to be detected by the worktree, so that the buffer starts reloading.
2985 fs.save(
2986 "/dir/file1".as_ref(),
2987 &"the first contents".into(),
2988 Default::default(),
2989 )
2990 .await
2991 .unwrap();
2992 worktree.next_event(cx).await;
2993
2994 cx.executor()
2995 .spawn(cx.executor().simulate_random_delay())
2996 .await;
2997
2998 // Perform a noop edit, causing the buffer's version to increase.
2999 buffer.update(cx, |buffer, cx| {
3000 buffer.edit([(0..0, " ")], None, cx);
3001 buffer.undo(cx);
3002 });
3003
3004 cx.executor().run_until_parked();
3005 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3006 buffer.read_with(cx, |buffer, _| {
3007 let buffer_text = buffer.text();
3008 if buffer_text == on_disk_text {
3009 assert!(
3010 !buffer.is_dirty() && !buffer.has_conflict(),
3011 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3012 );
3013 }
3014 // If the file change occurred while the buffer was processing the first
3015 // change, the buffer will be in a conflicting state.
3016 else {
3017 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3018 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3019 }
3020 });
3021}
3022
3023#[gpui::test]
3024async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3025 init_test(cx);
3026
3027 let fs = FakeFs::new(cx.executor());
3028 fs.insert_tree(
3029 "/dir",
3030 json!({
3031 "file1": "the old contents",
3032 }),
3033 )
3034 .await;
3035
3036 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
3037 let buffer = project
3038 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3039 .await
3040 .unwrap();
3041 buffer.update(cx, |buffer, cx| {
3042 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3043 });
3044
3045 project
3046 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3047 .await
3048 .unwrap();
3049
3050 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3051 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3052}
3053
3054#[gpui::test]
3055async fn test_save_as(cx: &mut gpui::TestAppContext) {
3056 init_test(cx);
3057
3058 let fs = FakeFs::new(cx.executor());
3059 fs.insert_tree("/dir", json!({})).await;
3060
3061 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3062
3063 let languages = project.update(cx, |project, _| project.languages().clone());
3064 languages.add(rust_lang());
3065
3066 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3067 buffer.update(cx, |buffer, cx| {
3068 buffer.edit([(0..0, "abc")], None, cx);
3069 assert!(buffer.is_dirty());
3070 assert!(!buffer.has_conflict());
3071 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3072 });
3073 project
3074 .update(cx, |project, cx| {
3075 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3076 let path = ProjectPath {
3077 worktree_id,
3078 path: Arc::from(Path::new("file1.rs")),
3079 };
3080 project.save_buffer_as(buffer.clone(), path, cx)
3081 })
3082 .await
3083 .unwrap();
3084 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3085
3086 cx.executor().run_until_parked();
3087 buffer.update(cx, |buffer, cx| {
3088 assert_eq!(
3089 buffer.file().unwrap().full_path(cx),
3090 Path::new("dir/file1.rs")
3091 );
3092 assert!(!buffer.is_dirty());
3093 assert!(!buffer.has_conflict());
3094 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3095 });
3096
3097 let opened_buffer = project
3098 .update(cx, |project, cx| {
3099 project.open_local_buffer("/dir/file1.rs", cx)
3100 })
3101 .await
3102 .unwrap();
3103 assert_eq!(opened_buffer, buffer);
3104}
3105
// Mutates a real on-disk worktree (file renames, deletions, directory moves)
// and verifies that entry ids survive renames, open buffers track their files,
// and a remote replica of the worktree converges to the same state by
// consuming the observed update stream.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    // This test uses the real filesystem (not FakeFs) to exercise fs events.
    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Resolves a path to its stable worktree entry id, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits, to replay on the replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids are stable across renames (including renames of ancestors).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths; the deleted file's
    // buffer keeps its old path but is marked deleted.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3253
3254#[gpui::test(iterations = 10)]
3255async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3256 init_test(cx);
3257
3258 let fs = FakeFs::new(cx.executor());
3259 fs.insert_tree(
3260 "/dir",
3261 json!({
3262 "a": {
3263 "file1": "",
3264 }
3265 }),
3266 )
3267 .await;
3268
3269 let project = Project::test(fs, [Path::new("/dir")], cx).await;
3270 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3271 let tree_id = tree.update(cx, |tree, _| tree.id());
3272
3273 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3274 project.update(cx, |project, cx| {
3275 let tree = project.worktrees(cx).next().unwrap();
3276 tree.read(cx)
3277 .entry_for_path(path)
3278 .unwrap_or_else(|| panic!("no entry for path {}", path))
3279 .id
3280 })
3281 };
3282
3283 let dir_id = id_for_path("a", cx);
3284 let file_id = id_for_path("a/file1", cx);
3285 let buffer = project
3286 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3287 .await
3288 .unwrap();
3289 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3290
3291 project
3292 .update(cx, |project, cx| {
3293 project.rename_entry(dir_id, Path::new("b"), cx)
3294 })
3295 .unwrap()
3296 .await
3297 .to_included()
3298 .unwrap();
3299 cx.executor().run_until_parked();
3300
3301 assert_eq!(id_for_path("b", cx), dir_id);
3302 assert_eq!(id_for_path("b/file1", cx), file_id);
3303 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3304}
3305
3306#[gpui::test]
3307async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3308 init_test(cx);
3309
3310 let fs = FakeFs::new(cx.executor());
3311 fs.insert_tree(
3312 "/dir",
3313 json!({
3314 "a.txt": "a-contents",
3315 "b.txt": "b-contents",
3316 }),
3317 )
3318 .await;
3319
3320 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3321
3322 // Spawn multiple tasks to open paths, repeating some paths.
3323 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3324 (
3325 p.open_local_buffer("/dir/a.txt", cx),
3326 p.open_local_buffer("/dir/b.txt", cx),
3327 p.open_local_buffer("/dir/a.txt", cx),
3328 )
3329 });
3330
3331 let buffer_a_1 = buffer_a_1.await.unwrap();
3332 let buffer_a_2 = buffer_a_2.await.unwrap();
3333 let buffer_b = buffer_b.await.unwrap();
3334 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3335 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3336
3337 // There is only one buffer per path.
3338 let buffer_a_id = buffer_a_1.entity_id();
3339 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3340
3341 // Open the same path again while it is still open.
3342 drop(buffer_a_1);
3343 let buffer_a_3 = project
3344 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3345 .await
3346 .unwrap();
3347
3348 // There's still only one buffer per path.
3349 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3350}
3351
// Verifies the buffer dirty-state lifecycle: editing marks a buffer dirty,
// saving clears it, restoring the previously-saved text clears it, and
// deleting the underlying file dirties a clean buffer. Also pins the exact
// sequence of buffer events emitted at each step.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    // Accumulates every non-Operation event emitted by buffer1.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                // Operation events are CRDT plumbing; irrelevant to this test.
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by reporting the current version and mtime back to
        // the buffer, without going through the project's save path.
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    // Note: only the first of the two consecutive edits fires DirtyChanged.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer first, then clear the recorded events so the deletion
    // below is observed in isolation.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3498
// Verifies how a buffer reacts to its file changing on disk: a clean buffer
// silently reloads (repositioning anchors via a diff against the new
// contents), while a dirty buffer keeps its edits and is flagged as
// conflicted instead.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the first three rows so we can
    // observe how anchors move across a reload-from-disk.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors followed the text they were attached to: the lines they
        // were on have shifted down, and the deleted line's anchor landed at
        // the nearest surviving position.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3579
3580#[gpui::test]
3581async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3582 init_test(cx);
3583
3584 let fs = FakeFs::new(cx.executor());
3585 fs.insert_tree(
3586 "/dir",
3587 json!({
3588 "file1": "a\nb\nc\n",
3589 "file2": "one\r\ntwo\r\nthree\r\n",
3590 }),
3591 )
3592 .await;
3593
3594 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3595 let buffer1 = project
3596 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3597 .await
3598 .unwrap();
3599 let buffer2 = project
3600 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3601 .await
3602 .unwrap();
3603
3604 buffer1.update(cx, |buffer, _| {
3605 assert_eq!(buffer.text(), "a\nb\nc\n");
3606 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3607 });
3608 buffer2.update(cx, |buffer, _| {
3609 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3610 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3611 });
3612
3613 // Change a file's line endings on disk from unix to windows. The buffer's
3614 // state updates correctly.
3615 fs.save(
3616 "/dir/file1".as_ref(),
3617 &"aaa\nb\nc\n".into(),
3618 LineEnding::Windows,
3619 )
3620 .await
3621 .unwrap();
3622 cx.executor().run_until_parked();
3623 buffer1.update(cx, |buffer, _| {
3624 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3625 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3626 });
3627
3628 // Save a file with windows line endings. The file is written correctly.
3629 buffer2.update(cx, |buffer, cx| {
3630 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3631 });
3632 project
3633 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3634 .await
3635 .unwrap();
3636 assert_eq!(
3637 fs.load("/dir/file2".as_ref()).await.unwrap(),
3638 "one\r\ntwo\r\nthree\r\nfour\r\n",
3639 );
3640}
3641
// Verifies that diagnostics published with relatedInformation are grouped:
// a primary diagnostic and its associated hints share a group_id, and hint
// diagnostics whose related information points back at an existing
// diagnostic ("original diagnostic") are folded into that group rather than
// forming their own.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Five LSP diagnostics: a warning ("error 1") with one hint, and an error
    // ("error 2") with two hints. Each hint is also published as its own HINT
    // diagnostic whose related information points back at its primary.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, in position order. The error and its two hints form
    // group 0; the warning and its hint form group 1. Exactly one entry per
    // group has is_primary == true.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 in isolation: the error plus its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 in isolation: the warning plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3883
3884#[gpui::test]
3885async fn test_rename(cx: &mut gpui::TestAppContext) {
3886 // hi
3887 init_test(cx);
3888
3889 let fs = FakeFs::new(cx.executor());
3890 fs.insert_tree(
3891 "/dir",
3892 json!({
3893 "one.rs": "const ONE: usize = 1;",
3894 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3895 }),
3896 )
3897 .await;
3898
3899 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3900
3901 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3902 language_registry.add(rust_lang());
3903 let mut fake_servers = language_registry.register_fake_lsp(
3904 "Rust",
3905 FakeLspAdapter {
3906 capabilities: lsp::ServerCapabilities {
3907 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3908 prepare_provider: Some(true),
3909 work_done_progress_options: Default::default(),
3910 })),
3911 ..Default::default()
3912 },
3913 ..Default::default()
3914 },
3915 );
3916
3917 let buffer = project
3918 .update(cx, |project, cx| {
3919 project.open_local_buffer("/dir/one.rs", cx)
3920 })
3921 .await
3922 .unwrap();
3923
3924 let fake_server = fake_servers.next().await.unwrap();
3925
3926 let response = project.update(cx, |project, cx| {
3927 project.prepare_rename(buffer.clone(), 7, cx)
3928 });
3929 fake_server
3930 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3931 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3932 assert_eq!(params.position, lsp::Position::new(0, 7));
3933 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3934 lsp::Position::new(0, 6),
3935 lsp::Position::new(0, 9),
3936 ))))
3937 })
3938 .next()
3939 .await
3940 .unwrap();
3941 let range = response.await.unwrap().unwrap();
3942 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
3943 assert_eq!(range, 6..9);
3944
3945 let response = project.update(cx, |project, cx| {
3946 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
3947 });
3948 fake_server
3949 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3950 assert_eq!(
3951 params.text_document_position.text_document.uri.as_str(),
3952 "file:///dir/one.rs"
3953 );
3954 assert_eq!(
3955 params.text_document_position.position,
3956 lsp::Position::new(0, 7)
3957 );
3958 assert_eq!(params.new_name, "THREE");
3959 Ok(Some(lsp::WorkspaceEdit {
3960 changes: Some(
3961 [
3962 (
3963 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3964 vec![lsp::TextEdit::new(
3965 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3966 "THREE".to_string(),
3967 )],
3968 ),
3969 (
3970 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3971 vec![
3972 lsp::TextEdit::new(
3973 lsp::Range::new(
3974 lsp::Position::new(0, 24),
3975 lsp::Position::new(0, 27),
3976 ),
3977 "THREE".to_string(),
3978 ),
3979 lsp::TextEdit::new(
3980 lsp::Range::new(
3981 lsp::Position::new(0, 35),
3982 lsp::Position::new(0, 38),
3983 ),
3984 "THREE".to_string(),
3985 ),
3986 ],
3987 ),
3988 ]
3989 .into_iter()
3990 .collect(),
3991 ),
3992 ..Default::default()
3993 }))
3994 })
3995 .next()
3996 .await
3997 .unwrap();
3998 let mut transaction = response.await.unwrap().0;
3999 assert_eq!(transaction.len(), 2);
4000 assert_eq!(
4001 transaction
4002 .remove_entry(&buffer)
4003 .unwrap()
4004 .0
4005 .update(cx, |buffer, _| buffer.text()),
4006 "const THREE: usize = 1;"
4007 );
4008 assert_eq!(
4009 transaction
4010 .into_keys()
4011 .next()
4012 .unwrap()
4013 .update(cx, |buffer, _| buffer.text()),
4014 "const TWO: usize = one::THREE + one::THREE;"
4015 );
4016}
4017
4018#[gpui::test]
4019async fn test_search(cx: &mut gpui::TestAppContext) {
4020 init_test(cx);
4021
4022 let fs = FakeFs::new(cx.executor());
4023 fs.insert_tree(
4024 "/dir",
4025 json!({
4026 "one.rs": "const ONE: usize = 1;",
4027 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4028 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4029 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4030 }),
4031 )
4032 .await;
4033 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4034 assert_eq!(
4035 search(
4036 &project,
4037 SearchQuery::text(
4038 "TWO",
4039 false,
4040 true,
4041 false,
4042 Default::default(),
4043 Default::default(),
4044 None
4045 )
4046 .unwrap(),
4047 cx
4048 )
4049 .await
4050 .unwrap(),
4051 HashMap::from_iter([
4052 ("dir/two.rs".to_string(), vec![6..9]),
4053 ("dir/three.rs".to_string(), vec![37..40])
4054 ])
4055 );
4056
4057 let buffer_4 = project
4058 .update(cx, |project, cx| {
4059 project.open_local_buffer("/dir/four.rs", cx)
4060 })
4061 .await
4062 .unwrap();
4063 buffer_4.update(cx, |buffer, cx| {
4064 let text = "two::TWO";
4065 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4066 });
4067
4068 assert_eq!(
4069 search(
4070 &project,
4071 SearchQuery::text(
4072 "TWO",
4073 false,
4074 true,
4075 false,
4076 Default::default(),
4077 Default::default(),
4078 None,
4079 )
4080 .unwrap(),
4081 cx
4082 )
4083 .await
4084 .unwrap(),
4085 HashMap::from_iter([
4086 ("dir/two.rs".to_string(), vec![6..9]),
4087 ("dir/three.rs".to_string(), vec![37..40]),
4088 ("dir/four.rs".to_string(), vec![25..28, 36..39])
4089 ])
4090 );
4091}
4092
4093#[gpui::test]
4094async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4095 init_test(cx);
4096
4097 let search_query = "file";
4098
4099 let fs = FakeFs::new(cx.executor());
4100 fs.insert_tree(
4101 "/dir",
4102 json!({
4103 "one.rs": r#"// Rust file one"#,
4104 "one.ts": r#"// TypeScript file one"#,
4105 "two.rs": r#"// Rust file two"#,
4106 "two.ts": r#"// TypeScript file two"#,
4107 }),
4108 )
4109 .await;
4110 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4111
4112 assert!(
4113 search(
4114 &project,
4115 SearchQuery::text(
4116 search_query,
4117 false,
4118 true,
4119 false,
4120 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4121 Default::default(),
4122 None
4123 )
4124 .unwrap(),
4125 cx
4126 )
4127 .await
4128 .unwrap()
4129 .is_empty(),
4130 "If no inclusions match, no files should be returned"
4131 );
4132
4133 assert_eq!(
4134 search(
4135 &project,
4136 SearchQuery::text(
4137 search_query,
4138 false,
4139 true,
4140 false,
4141 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4142 Default::default(),
4143 None
4144 )
4145 .unwrap(),
4146 cx
4147 )
4148 .await
4149 .unwrap(),
4150 HashMap::from_iter([
4151 ("dir/one.rs".to_string(), vec![8..12]),
4152 ("dir/two.rs".to_string(), vec![8..12]),
4153 ]),
4154 "Rust only search should give only Rust files"
4155 );
4156
4157 assert_eq!(
4158 search(
4159 &project,
4160 SearchQuery::text(
4161 search_query,
4162 false,
4163 true,
4164 false,
4165
4166 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4167
4168 Default::default(),
4169 None,
4170 ).unwrap(),
4171 cx
4172 )
4173 .await
4174 .unwrap(),
4175 HashMap::from_iter([
4176 ("dir/one.ts".to_string(), vec![14..18]),
4177 ("dir/two.ts".to_string(), vec![14..18]),
4178 ]),
4179 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4180 );
4181
4182 assert_eq!(
4183 search(
4184 &project,
4185 SearchQuery::text(
4186 search_query,
4187 false,
4188 true,
4189 false,
4190
4191 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4192
4193 Default::default(),
4194 None,
4195 ).unwrap(),
4196 cx
4197 )
4198 .await
4199 .unwrap(),
4200 HashMap::from_iter([
4201 ("dir/two.ts".to_string(), vec![14..18]),
4202 ("dir/one.rs".to_string(), vec![8..12]),
4203 ("dir/one.ts".to_string(), vec![14..18]),
4204 ("dir/two.rs".to_string(), vec![8..12]),
4205 ]),
4206 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4207 );
4208}
4209
4210#[gpui::test]
4211async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4212 init_test(cx);
4213
4214 let search_query = "file";
4215
4216 let fs = FakeFs::new(cx.executor());
4217 fs.insert_tree(
4218 "/dir",
4219 json!({
4220 "one.rs": r#"// Rust file one"#,
4221 "one.ts": r#"// TypeScript file one"#,
4222 "two.rs": r#"// Rust file two"#,
4223 "two.ts": r#"// TypeScript file two"#,
4224 }),
4225 )
4226 .await;
4227 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4228
4229 assert_eq!(
4230 search(
4231 &project,
4232 SearchQuery::text(
4233 search_query,
4234 false,
4235 true,
4236 false,
4237 Default::default(),
4238 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4239 None,
4240 )
4241 .unwrap(),
4242 cx
4243 )
4244 .await
4245 .unwrap(),
4246 HashMap::from_iter([
4247 ("dir/one.rs".to_string(), vec![8..12]),
4248 ("dir/one.ts".to_string(), vec![14..18]),
4249 ("dir/two.rs".to_string(), vec![8..12]),
4250 ("dir/two.ts".to_string(), vec![14..18]),
4251 ]),
4252 "If no exclusions match, all files should be returned"
4253 );
4254
4255 assert_eq!(
4256 search(
4257 &project,
4258 SearchQuery::text(
4259 search_query,
4260 false,
4261 true,
4262 false,
4263 Default::default(),
4264 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4265 None,
4266 )
4267 .unwrap(),
4268 cx
4269 )
4270 .await
4271 .unwrap(),
4272 HashMap::from_iter([
4273 ("dir/one.ts".to_string(), vec![14..18]),
4274 ("dir/two.ts".to_string(), vec![14..18]),
4275 ]),
4276 "Rust exclusion search should give only TypeScript files"
4277 );
4278
4279 assert_eq!(
4280 search(
4281 &project,
4282 SearchQuery::text(
4283 search_query,
4284 false,
4285 true,
4286 false,
4287 Default::default(),
4288 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4289 None,
4290 ).unwrap(),
4291 cx
4292 )
4293 .await
4294 .unwrap(),
4295 HashMap::from_iter([
4296 ("dir/one.rs".to_string(), vec![8..12]),
4297 ("dir/two.rs".to_string(), vec![8..12]),
4298 ]),
4299 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4300 );
4301
4302 assert!(
4303 search(
4304 &project,
4305 SearchQuery::text(
4306 search_query,
4307 false,
4308 true,
4309 false,
4310 Default::default(),
4311
4312 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4313 None,
4314
4315 ).unwrap(),
4316 cx
4317 )
4318 .await
4319 .unwrap().is_empty(),
4320 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4321 );
4322}
4323
4324#[gpui::test]
4325async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4326 init_test(cx);
4327
4328 let search_query = "file";
4329
4330 let fs = FakeFs::new(cx.executor());
4331 fs.insert_tree(
4332 "/dir",
4333 json!({
4334 "one.rs": r#"// Rust file one"#,
4335 "one.ts": r#"// TypeScript file one"#,
4336 "two.rs": r#"// Rust file two"#,
4337 "two.ts": r#"// TypeScript file two"#,
4338 }),
4339 )
4340 .await;
4341 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4342
4343 assert!(
4344 search(
4345 &project,
4346 SearchQuery::text(
4347 search_query,
4348 false,
4349 true,
4350 false,
4351 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4352 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4353 None,
4354 )
4355 .unwrap(),
4356 cx
4357 )
4358 .await
4359 .unwrap()
4360 .is_empty(),
4361 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4362 );
4363
4364 assert!(
4365 search(
4366 &project,
4367 SearchQuery::text(
4368 search_query,
4369 false,
4370 true,
4371 false,
4372 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4373 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4374 None,
4375 ).unwrap(),
4376 cx
4377 )
4378 .await
4379 .unwrap()
4380 .is_empty(),
4381 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4382 );
4383
4384 assert!(
4385 search(
4386 &project,
4387 SearchQuery::text(
4388 search_query,
4389 false,
4390 true,
4391 false,
4392 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4393 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4394 None,
4395 )
4396 .unwrap(),
4397 cx
4398 )
4399 .await
4400 .unwrap()
4401 .is_empty(),
4402 "Non-matching inclusions and exclusions should not change that."
4403 );
4404
4405 assert_eq!(
4406 search(
4407 &project,
4408 SearchQuery::text(
4409 search_query,
4410 false,
4411 true,
4412 false,
4413 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4414 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4415 None,
4416 )
4417 .unwrap(),
4418 cx
4419 )
4420 .await
4421 .unwrap(),
4422 HashMap::from_iter([
4423 ("dir/one.ts".to_string(), vec![14..18]),
4424 ("dir/two.ts".to_string(), vec![14..18]),
4425 ]),
4426 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4427 );
4428}
4429
4430#[gpui::test]
4431async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4432 init_test(cx);
4433
4434 let fs = FakeFs::new(cx.executor());
4435 fs.insert_tree(
4436 "/worktree-a",
4437 json!({
4438 "haystack.rs": r#"// NEEDLE"#,
4439 "haystack.ts": r#"// NEEDLE"#,
4440 }),
4441 )
4442 .await;
4443 fs.insert_tree(
4444 "/worktree-b",
4445 json!({
4446 "haystack.rs": r#"// NEEDLE"#,
4447 "haystack.ts": r#"// NEEDLE"#,
4448 }),
4449 )
4450 .await;
4451
4452 let project = Project::test(
4453 fs.clone(),
4454 ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
4455 cx,
4456 )
4457 .await;
4458
4459 assert_eq!(
4460 search(
4461 &project,
4462 SearchQuery::text(
4463 "NEEDLE",
4464 false,
4465 true,
4466 false,
4467 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4468 Default::default(),
4469 None,
4470 )
4471 .unwrap(),
4472 cx
4473 )
4474 .await
4475 .unwrap(),
4476 HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
4477 "should only return results from included worktree"
4478 );
4479 assert_eq!(
4480 search(
4481 &project,
4482 SearchQuery::text(
4483 "NEEDLE",
4484 false,
4485 true,
4486 false,
4487 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4488 Default::default(),
4489 None,
4490 )
4491 .unwrap(),
4492 cx
4493 )
4494 .await
4495 .unwrap(),
4496 HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
4497 "should only return results from included worktree"
4498 );
4499
4500 assert_eq!(
4501 search(
4502 &project,
4503 SearchQuery::text(
4504 "NEEDLE",
4505 false,
4506 true,
4507 false,
4508 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4509 Default::default(),
4510 None,
4511 )
4512 .unwrap(),
4513 cx
4514 )
4515 .await
4516 .unwrap(),
4517 HashMap::from_iter([
4518 ("worktree-a/haystack.ts".to_string(), vec![3..9]),
4519 ("worktree-b/haystack.ts".to_string(), vec![3..9])
4520 ]),
4521 "should return results from both worktrees"
4522 );
4523}
4524
// Verifies that project search honors gitignore: ignored directories are
// skipped by default, can be searched when the include-ignored flag is set,
// and are still subject to inclusion/exclusion path filters.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
    // Default search: ignored directories (target/, node_modules/) are
    // skipped, so only the top-level package.json matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Same query with the fourth flag flipped to `true` — per the assertion
    // message below, this enables searching ignored files as well.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Path filters still apply on top of the include-ignored flag.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4641
4642#[gpui::test]
4643async fn test_search_ordering(cx: &mut gpui::TestAppContext) {
4644 init_test(cx);
4645
4646 let fs = FakeFs::new(cx.background_executor.clone());
4647 fs.insert_tree(
4648 "/dir",
4649 json!({
4650 ".git": {},
4651 ".gitignore": "**/target\n/node_modules\n",
4652 "aaa.txt": "key:value",
4653 "bbb": {
4654 "index.txt": "index_key:index_value"
4655 },
4656 "node_modules": {
4657 "10 eleven": "key",
4658 "1 two": "key"
4659 },
4660 }),
4661 )
4662 .await;
4663 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4664
4665 let mut search = project.update(cx, |project, cx| {
4666 project.search(
4667 SearchQuery::text(
4668 "key",
4669 false,
4670 false,
4671 true,
4672 Default::default(),
4673 Default::default(),
4674 None,
4675 )
4676 .unwrap(),
4677 cx,
4678 )
4679 });
4680
4681 fn file_name(search_result: Option<SearchResult>, cx: &mut gpui::TestAppContext) -> String {
4682 match search_result.unwrap() {
4683 SearchResult::Buffer { buffer, .. } => buffer.read_with(cx, |buffer, _| {
4684 buffer.file().unwrap().path().to_string_lossy().to_string()
4685 }),
4686 _ => panic!("Expected buffer"),
4687 }
4688 }
4689
4690 assert_eq!(file_name(search.next().await, cx), "bbb/index.txt");
4691 assert_eq!(file_name(search.next().await, cx), "node_modules/1 two");
4692 assert_eq!(file_name(search.next().await, cx), "node_modules/10 eleven");
4693 assert_eq!(file_name(search.next().await, cx), "aaa.txt");
4694 assert!(search.next().await.is_none())
4695}
4696
4697#[gpui::test]
4698async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4699 init_test(cx);
4700
4701 let fs = FakeFs::new(cx.executor().clone());
4702 fs.insert_tree(
4703 "/one/two",
4704 json!({
4705 "three": {
4706 "a.txt": "",
4707 "four": {}
4708 },
4709 "c.rs": ""
4710 }),
4711 )
4712 .await;
4713
4714 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
4715 project
4716 .update(cx, |project, cx| {
4717 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4718 project.create_entry((id, "b.."), true, cx)
4719 })
4720 .await
4721 .unwrap()
4722 .to_included()
4723 .unwrap();
4724
4725 // Can't create paths outside the project
4726 let result = project
4727 .update(cx, |project, cx| {
4728 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4729 project.create_entry((id, "../../boop"), true, cx)
4730 })
4731 .await;
4732 assert!(result.is_err());
4733
4734 // Can't create paths with '..'
4735 let result = project
4736 .update(cx, |project, cx| {
4737 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4738 project.create_entry((id, "four/../beep"), true, cx)
4739 })
4740 .await;
4741 assert!(result.is_err());
4742
4743 assert_eq!(
4744 fs.paths(true),
4745 vec![
4746 PathBuf::from("/"),
4747 PathBuf::from("/one"),
4748 PathBuf::from("/one/two"),
4749 PathBuf::from("/one/two/c.rs"),
4750 PathBuf::from("/one/two/three"),
4751 PathBuf::from("/one/two/three/a.txt"),
4752 PathBuf::from("/one/two/three/b.."),
4753 PathBuf::from("/one/two/three/four"),
4754 ]
4755 );
4756
4757 // And we cannot open buffers with '..'
4758 let result = project
4759 .update(cx, |project, cx| {
4760 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4761 project.open_buffer((id, "../c.rs"), cx)
4762 })
4763 .await;
4764 assert!(result.is_err())
4765}
4766
4767#[gpui::test]
4768async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
4769 init_test(cx);
4770
4771 let fs = FakeFs::new(cx.executor());
4772 fs.insert_tree(
4773 "/dir",
4774 json!({
4775 "a.tsx": "a",
4776 }),
4777 )
4778 .await;
4779
4780 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4781
4782 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4783 language_registry.add(tsx_lang());
4784 let language_server_names = [
4785 "TypeScriptServer",
4786 "TailwindServer",
4787 "ESLintServer",
4788 "NoHoverCapabilitiesServer",
4789 ];
4790 let mut language_servers = [
4791 language_registry.register_fake_lsp(
4792 "tsx",
4793 FakeLspAdapter {
4794 name: language_server_names[0],
4795 capabilities: lsp::ServerCapabilities {
4796 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4797 ..lsp::ServerCapabilities::default()
4798 },
4799 ..FakeLspAdapter::default()
4800 },
4801 ),
4802 language_registry.register_fake_lsp(
4803 "tsx",
4804 FakeLspAdapter {
4805 name: language_server_names[1],
4806 capabilities: lsp::ServerCapabilities {
4807 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4808 ..lsp::ServerCapabilities::default()
4809 },
4810 ..FakeLspAdapter::default()
4811 },
4812 ),
4813 language_registry.register_fake_lsp(
4814 "tsx",
4815 FakeLspAdapter {
4816 name: language_server_names[2],
4817 capabilities: lsp::ServerCapabilities {
4818 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4819 ..lsp::ServerCapabilities::default()
4820 },
4821 ..FakeLspAdapter::default()
4822 },
4823 ),
4824 language_registry.register_fake_lsp(
4825 "tsx",
4826 FakeLspAdapter {
4827 name: language_server_names[3],
4828 capabilities: lsp::ServerCapabilities {
4829 hover_provider: None,
4830 ..lsp::ServerCapabilities::default()
4831 },
4832 ..FakeLspAdapter::default()
4833 },
4834 ),
4835 ];
4836
4837 let buffer = project
4838 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4839 .await
4840 .unwrap();
4841 cx.executor().run_until_parked();
4842
4843 let mut servers_with_hover_requests = HashMap::default();
4844 for i in 0..language_server_names.len() {
4845 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
4846 panic!(
4847 "Failed to get language server #{i} with name {}",
4848 &language_server_names[i]
4849 )
4850 });
4851 let new_server_name = new_server.server.name();
4852 assert!(
4853 !servers_with_hover_requests.contains_key(new_server_name),
4854 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4855 );
4856 let new_server_name = new_server_name.to_string();
4857 match new_server_name.as_str() {
4858 "TailwindServer" | "TypeScriptServer" => {
4859 servers_with_hover_requests.insert(
4860 new_server_name.clone(),
4861 new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
4862 let name = new_server_name.clone();
4863 async move {
4864 Ok(Some(lsp::Hover {
4865 contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
4866 format!("{name} hover"),
4867 )),
4868 range: None,
4869 }))
4870 }
4871 }),
4872 );
4873 }
4874 "ESLintServer" => {
4875 servers_with_hover_requests.insert(
4876 new_server_name,
4877 new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4878 |_, _| async move { Ok(None) },
4879 ),
4880 );
4881 }
4882 "NoHoverCapabilitiesServer" => {
4883 let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4884 |_, _| async move {
4885 panic!(
4886 "Should not call for hovers server with no corresponding capabilities"
4887 )
4888 },
4889 );
4890 }
4891 unexpected => panic!("Unexpected server name: {unexpected}"),
4892 }
4893 }
4894
4895 let hover_task = project.update(cx, |project, cx| {
4896 project.hover(&buffer, Point::new(0, 0), cx)
4897 });
4898 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
4899 |mut hover_request| async move {
4900 hover_request
4901 .next()
4902 .await
4903 .expect("All hover requests should have been triggered")
4904 },
4905 ))
4906 .await;
4907 assert_eq!(
4908 vec!["TailwindServer hover", "TypeScriptServer hover"],
4909 hover_task
4910 .await
4911 .into_iter()
4912 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4913 .sorted()
4914 .collect::<Vec<_>>(),
4915 "Should receive hover responses from all related servers with hover capabilities"
4916 );
4917}
4918
4919#[gpui::test]
4920async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
4921 init_test(cx);
4922
4923 let fs = FakeFs::new(cx.executor());
4924 fs.insert_tree(
4925 "/dir",
4926 json!({
4927 "a.ts": "a",
4928 }),
4929 )
4930 .await;
4931
4932 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4933
4934 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4935 language_registry.add(typescript_lang());
4936 let mut fake_language_servers = language_registry.register_fake_lsp(
4937 "TypeScript",
4938 FakeLspAdapter {
4939 capabilities: lsp::ServerCapabilities {
4940 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4941 ..lsp::ServerCapabilities::default()
4942 },
4943 ..FakeLspAdapter::default()
4944 },
4945 );
4946
4947 let buffer = project
4948 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4949 .await
4950 .unwrap();
4951 cx.executor().run_until_parked();
4952
4953 let fake_server = fake_language_servers
4954 .next()
4955 .await
4956 .expect("failed to get the language server");
4957
4958 let mut request_handled =
4959 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
4960 Ok(Some(lsp::Hover {
4961 contents: lsp::HoverContents::Array(vec![
4962 lsp::MarkedString::String("".to_string()),
4963 lsp::MarkedString::String(" ".to_string()),
4964 lsp::MarkedString::String("\n\n\n".to_string()),
4965 ]),
4966 range: None,
4967 }))
4968 });
4969
4970 let hover_task = project.update(cx, |project, cx| {
4971 project.hover(&buffer, Point::new(0, 0), cx)
4972 });
4973 let () = request_handled
4974 .next()
4975 .await
4976 .expect("All hover requests should have been triggered");
4977 assert_eq!(
4978 Vec::<String>::new(),
4979 hover_task
4980 .await
4981 .into_iter()
4982 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4983 .sorted()
4984 .collect::<Vec<_>>(),
4985 "Empty hover parts should be ignored"
4986 );
4987}
4988
4989#[gpui::test]
4990async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
4991 init_test(cx);
4992
4993 let fs = FakeFs::new(cx.executor());
4994 fs.insert_tree(
4995 "/dir",
4996 json!({
4997 "a.tsx": "a",
4998 }),
4999 )
5000 .await;
5001
5002 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5003
5004 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5005 language_registry.add(tsx_lang());
5006 let language_server_names = [
5007 "TypeScriptServer",
5008 "TailwindServer",
5009 "ESLintServer",
5010 "NoActionsCapabilitiesServer",
5011 ];
5012
5013 let mut language_server_rxs = [
5014 language_registry.register_fake_lsp(
5015 "tsx",
5016 FakeLspAdapter {
5017 name: language_server_names[0],
5018 capabilities: lsp::ServerCapabilities {
5019 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5020 ..lsp::ServerCapabilities::default()
5021 },
5022 ..FakeLspAdapter::default()
5023 },
5024 ),
5025 language_registry.register_fake_lsp(
5026 "tsx",
5027 FakeLspAdapter {
5028 name: language_server_names[1],
5029 capabilities: lsp::ServerCapabilities {
5030 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5031 ..lsp::ServerCapabilities::default()
5032 },
5033 ..FakeLspAdapter::default()
5034 },
5035 ),
5036 language_registry.register_fake_lsp(
5037 "tsx",
5038 FakeLspAdapter {
5039 name: language_server_names[2],
5040 capabilities: lsp::ServerCapabilities {
5041 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5042 ..lsp::ServerCapabilities::default()
5043 },
5044 ..FakeLspAdapter::default()
5045 },
5046 ),
5047 language_registry.register_fake_lsp(
5048 "tsx",
5049 FakeLspAdapter {
5050 name: language_server_names[3],
5051 capabilities: lsp::ServerCapabilities {
5052 code_action_provider: None,
5053 ..lsp::ServerCapabilities::default()
5054 },
5055 ..FakeLspAdapter::default()
5056 },
5057 ),
5058 ];
5059
5060 let buffer = project
5061 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
5062 .await
5063 .unwrap();
5064 cx.executor().run_until_parked();
5065
5066 let mut servers_with_actions_requests = HashMap::default();
5067 for i in 0..language_server_names.len() {
5068 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5069 panic!(
5070 "Failed to get language server #{i} with name {}",
5071 &language_server_names[i]
5072 )
5073 });
5074 let new_server_name = new_server.server.name();
5075
5076 assert!(
5077 !servers_with_actions_requests.contains_key(new_server_name),
5078 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5079 );
5080 let new_server_name = new_server_name.to_string();
5081 match new_server_name.as_str() {
5082 "TailwindServer" | "TypeScriptServer" => {
5083 servers_with_actions_requests.insert(
5084 new_server_name.clone(),
5085 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5086 move |_, _| {
5087 let name = new_server_name.clone();
5088 async move {
5089 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5090 lsp::CodeAction {
5091 title: format!("{name} code action"),
5092 ..lsp::CodeAction::default()
5093 },
5094 )]))
5095 }
5096 },
5097 ),
5098 );
5099 }
5100 "ESLintServer" => {
5101 servers_with_actions_requests.insert(
5102 new_server_name,
5103 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5104 |_, _| async move { Ok(None) },
5105 ),
5106 );
5107 }
5108 "NoActionsCapabilitiesServer" => {
5109 let _never_handled = new_server
5110 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5111 panic!(
5112 "Should not call for code actions server with no corresponding capabilities"
5113 )
5114 });
5115 }
5116 unexpected => panic!("Unexpected server name: {unexpected}"),
5117 }
5118 }
5119
5120 let code_actions_task = project.update(cx, |project, cx| {
5121 project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
5122 });
5123
5124 // cx.run_until_parked();
5125 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5126 |mut code_actions_request| async move {
5127 code_actions_request
5128 .next()
5129 .await
5130 .expect("All code actions requests should have been triggered")
5131 },
5132 ))
5133 .await;
5134 assert_eq!(
5135 vec!["TailwindServer code action", "TypeScriptServer code action"],
5136 code_actions_task
5137 .await
5138 .unwrap()
5139 .into_iter()
5140 .map(|code_action| code_action.lsp_action.title)
5141 .sorted()
5142 .collect::<Vec<_>>(),
5143 "Should receive code actions responses from all related servers with hover capabilities"
5144 );
5145}
5146
5147#[gpui::test]
5148async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5149 init_test(cx);
5150
5151 let fs = FakeFs::new(cx.executor());
5152 fs.insert_tree(
5153 "/dir",
5154 json!({
5155 "a.rs": "let a = 1;",
5156 "b.rs": "let b = 2;",
5157 "c.rs": "let c = 2;",
5158 }),
5159 )
5160 .await;
5161
5162 let project = Project::test(
5163 fs,
5164 [
5165 "/dir/a.rs".as_ref(),
5166 "/dir/b.rs".as_ref(),
5167 "/dir/c.rs".as_ref(),
5168 ],
5169 cx,
5170 )
5171 .await;
5172
5173 // check the initial state and get the worktrees
5174 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5175 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5176 assert_eq!(worktrees.len(), 3);
5177
5178 let worktree_a = worktrees[0].read(cx);
5179 let worktree_b = worktrees[1].read(cx);
5180 let worktree_c = worktrees[2].read(cx);
5181
5182 // check they start in the right order
5183 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5184 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5185 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5186
5187 (
5188 worktrees[0].clone(),
5189 worktrees[1].clone(),
5190 worktrees[2].clone(),
5191 )
5192 });
5193
5194 // move first worktree to after the second
5195 // [a, b, c] -> [b, a, c]
5196 project
5197 .update(cx, |project, cx| {
5198 let first = worktree_a.read(cx);
5199 let second = worktree_b.read(cx);
5200 project.move_worktree(first.id(), second.id(), cx)
5201 })
5202 .expect("moving first after second");
5203
5204 // check the state after moving
5205 project.update(cx, |project, cx| {
5206 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5207 assert_eq!(worktrees.len(), 3);
5208
5209 let first = worktrees[0].read(cx);
5210 let second = worktrees[1].read(cx);
5211 let third = worktrees[2].read(cx);
5212
5213 // check they are now in the right order
5214 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5215 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5216 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5217 });
5218
5219 // move the second worktree to before the first
5220 // [b, a, c] -> [a, b, c]
5221 project
5222 .update(cx, |project, cx| {
5223 let second = worktree_a.read(cx);
5224 let first = worktree_b.read(cx);
5225 project.move_worktree(first.id(), second.id(), cx)
5226 })
5227 .expect("moving second before first");
5228
5229 // check the state after moving
5230 project.update(cx, |project, cx| {
5231 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5232 assert_eq!(worktrees.len(), 3);
5233
5234 let first = worktrees[0].read(cx);
5235 let second = worktrees[1].read(cx);
5236 let third = worktrees[2].read(cx);
5237
5238 // check they are now in the right order
5239 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5240 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5241 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5242 });
5243
5244 // move the second worktree to after the third
5245 // [a, b, c] -> [a, c, b]
5246 project
5247 .update(cx, |project, cx| {
5248 let second = worktree_b.read(cx);
5249 let third = worktree_c.read(cx);
5250 project.move_worktree(second.id(), third.id(), cx)
5251 })
5252 .expect("moving second after third");
5253
5254 // check the state after moving
5255 project.update(cx, |project, cx| {
5256 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5257 assert_eq!(worktrees.len(), 3);
5258
5259 let first = worktrees[0].read(cx);
5260 let second = worktrees[1].read(cx);
5261 let third = worktrees[2].read(cx);
5262
5263 // check they are now in the right order
5264 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5265 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5266 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5267 });
5268
5269 // move the third worktree to before the second
5270 // [a, c, b] -> [a, b, c]
5271 project
5272 .update(cx, |project, cx| {
5273 let third = worktree_c.read(cx);
5274 let second = worktree_b.read(cx);
5275 project.move_worktree(third.id(), second.id(), cx)
5276 })
5277 .expect("moving third before second");
5278
5279 // check the state after moving
5280 project.update(cx, |project, cx| {
5281 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5282 assert_eq!(worktrees.len(), 3);
5283
5284 let first = worktrees[0].read(cx);
5285 let second = worktrees[1].read(cx);
5286 let third = worktrees[2].read(cx);
5287
5288 // check they are now in the right order
5289 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5290 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5291 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5292 });
5293
5294 // move the first worktree to after the third
5295 // [a, b, c] -> [b, c, a]
5296 project
5297 .update(cx, |project, cx| {
5298 let first = worktree_a.read(cx);
5299 let third = worktree_c.read(cx);
5300 project.move_worktree(first.id(), third.id(), cx)
5301 })
5302 .expect("moving first after third");
5303
5304 // check the state after moving
5305 project.update(cx, |project, cx| {
5306 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5307 assert_eq!(worktrees.len(), 3);
5308
5309 let first = worktrees[0].read(cx);
5310 let second = worktrees[1].read(cx);
5311 let third = worktrees[2].read(cx);
5312
5313 // check they are now in the right order
5314 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5315 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5316 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5317 });
5318
5319 // move the third worktree to before the first
5320 // [b, c, a] -> [a, b, c]
5321 project
5322 .update(cx, |project, cx| {
5323 let third = worktree_a.read(cx);
5324 let first = worktree_b.read(cx);
5325 project.move_worktree(third.id(), first.id(), cx)
5326 })
5327 .expect("moving third before first");
5328
5329 // check the state after moving
5330 project.update(cx, |project, cx| {
5331 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5332 assert_eq!(worktrees.len(), 3);
5333
5334 let first = worktrees[0].read(cx);
5335 let second = worktrees[1].read(cx);
5336 let third = worktrees[2].read(cx);
5337
5338 // check they are now in the right order
5339 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5340 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5341 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5342 });
5343}
5344
5345async fn search(
5346 project: &Model<Project>,
5347 query: SearchQuery,
5348 cx: &mut gpui::TestAppContext,
5349) -> Result<HashMap<String, Vec<Range<usize>>>> {
5350 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
5351 let mut results = HashMap::default();
5352 while let Some(search_result) = search_rx.next().await {
5353 match search_result {
5354 SearchResult::Buffer { buffer, ranges } => {
5355 results.entry(buffer).or_insert(ranges);
5356 }
5357 SearchResult::LimitReached => {}
5358 }
5359 }
5360 Ok(results
5361 .into_iter()
5362 .map(|(buffer, ranges)| {
5363 buffer.update(cx, |buffer, cx| {
5364 let path = buffer
5365 .file()
5366 .unwrap()
5367 .full_path(cx)
5368 .to_string_lossy()
5369 .to_string();
5370 let ranges = ranges
5371 .into_iter()
5372 .map(|range| range.to_offset(buffer))
5373 .collect::<Vec<_>>();
5374 (path, ranges)
5375 })
5376 })
5377 .collect())
5378}
5379
/// Shared setup for every test in this module: installs the test settings
/// store, release channel, language support, and project settings into a
/// fresh app context.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    // Enable logging only when RUST_LOG is set, so normal runs stay quiet.
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::try_init().ok();
    }

    cx.update(|cx| {
        // The global settings store is registered first; the init calls below
        // presumably read from it — keep this ordering.
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
5393
5394fn json_lang() -> Arc<Language> {
5395 Arc::new(Language::new(
5396 LanguageConfig {
5397 name: "JSON".into(),
5398 matcher: LanguageMatcher {
5399 path_suffixes: vec!["json".to_string()],
5400 ..Default::default()
5401 },
5402 ..Default::default()
5403 },
5404 None,
5405 ))
5406}
5407
5408fn js_lang() -> Arc<Language> {
5409 Arc::new(Language::new(
5410 LanguageConfig {
5411 name: "JavaScript".into(),
5412 matcher: LanguageMatcher {
5413 path_suffixes: vec!["js".to_string()],
5414 ..Default::default()
5415 },
5416 ..Default::default()
5417 },
5418 None,
5419 ))
5420}
5421
5422fn rust_lang() -> Arc<Language> {
5423 Arc::new(Language::new(
5424 LanguageConfig {
5425 name: "Rust".into(),
5426 matcher: LanguageMatcher {
5427 path_suffixes: vec!["rs".to_string()],
5428 ..Default::default()
5429 },
5430 ..Default::default()
5431 },
5432 Some(tree_sitter_rust::LANGUAGE.into()),
5433 ))
5434}
5435
5436fn typescript_lang() -> Arc<Language> {
5437 Arc::new(Language::new(
5438 LanguageConfig {
5439 name: "TypeScript".into(),
5440 matcher: LanguageMatcher {
5441 path_suffixes: vec!["ts".to_string()],
5442 ..Default::default()
5443 },
5444 ..Default::default()
5445 },
5446 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
5447 ))
5448}
5449
5450fn tsx_lang() -> Arc<Language> {
5451 Arc::new(Language::new(
5452 LanguageConfig {
5453 name: "tsx".into(),
5454 matcher: LanguageMatcher {
5455 path_suffixes: vec!["tsx".to_string()],
5456 ..Default::default()
5457 },
5458 ..Default::default()
5459 },
5460 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
5461 ))
5462}
5463
5464fn get_all_tasks(
5465 project: &Model<Project>,
5466 worktree_id: Option<WorktreeId>,
5467 task_context: &TaskContext,
5468 cx: &mut AppContext,
5469) -> Vec<(TaskSourceKind, ResolvedTask)> {
5470 let (mut old, new) = project.update(cx, |project, cx| {
5471 project
5472 .task_store
5473 .read(cx)
5474 .task_inventory()
5475 .unwrap()
5476 .read(cx)
5477 .used_and_current_resolved_tasks(worktree_id, None, task_context, cx)
5478 });
5479 old.extend(new);
5480 old
5481}