1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::{AppContext, SemanticVersion, UpdateGlobal};
5use http_client::Url;
6use language::{
7 language_settings::{
8 language_settings, AllLanguageSettings, LanguageSettingsContent, SoftWrap,
9 },
10 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticSet, FakeLspAdapter,
11 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
12};
13use lsp::{DiagnosticSeverity, NumberOrString};
14use parking_lot::Mutex;
15use pretty_assertions::assert_eq;
16use serde_json::json;
17#[cfg(not(windows))]
18use std::os;
19
20use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
21use task::{ResolvedTask, TaskContext};
22use unindent::Unindent as _;
23use util::{assert_set_eq, paths::PathMatcher, test::temp_tree, TryFutureExt as _};
24
25#[gpui::test]
26async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
27 cx.executor().allow_parking();
28
29 let (tx, mut rx) = futures::channel::mpsc::unbounded();
30 let _thread = std::thread::spawn(move || {
31 std::fs::metadata("/tmp").unwrap();
32 std::thread::sleep(Duration::from_millis(1000));
33 tx.unbounded_send(1).unwrap();
34 });
35 rx.next().await.unwrap();
36}
37
38#[gpui::test]
39async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
40 cx.executor().allow_parking();
41
42 let io_task = smol::unblock(move || {
43 println!("sleeping on thread {:?}", std::thread::current().id());
44 std::thread::sleep(Duration::from_millis(10));
45 1
46 });
47
48 let task = cx.foreground_executor().spawn(async move {
49 io_task.await;
50 });
51
52 task.await;
53}
54
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    // Verifies that a project opened through a symlinked root scans the
    // target directory, and that directory symlinks inside the tree resolve
    // to the same inodes as their targets. Unix-only: uses `os::unix::fs::symlink`.
    init_test(cx);
    // Uses the real filesystem (temp_tree + RealFs), so parking is required.
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // A symlink to the root directory itself, plus a symlink inside the tree
    // pointing at a sibling directory ("finnochio" -> "fennel").
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project through the symlinked root path.
    let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // Presumably the four regular files (apple, date, endive, grape) plus
        // "grape" seen again through the "finnochio" symlink — TODO confirm
        // how worktree file_count treats symlinked directories.
        assert_eq!(tree.file_count(), 5);
        // The symlinked directory resolves to the same inode as its target.
        assert_eq!(
            tree.inode_for_path("fennel/grape"),
            tree.inode_for_path("finnochio/grape")
        );
    });
}
95
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // Verifies how `.editorconfig` files interact with `.zed/settings.json`:
    // editorconfig properties override project settings for matching globs,
    // nested `.editorconfig` files override ancestors, and non-matching files
    // fall back to the project settings.
    init_test(cx);

    let dir = temp_tree(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 80
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into a FakeFs so the test project can use it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings:
        // indent_size=3, indent_style=tab, insert_final_newline=true,
        // trim_trailing_whitespace=true, max_line_length=80.
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 80);

        // "max_line_length" also sets "soft_wrap"
        assert_eq!(settings_a.soft_wrap, SoftWrap::PreferredLineLength);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_b.soft_wrap, SoftWrap::EditorWidth);

        // README.json should not be affected by .editorconfig's glob "*.rs";
        // it keeps the .zed/settings.json tab_size of 8.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
196
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Verifies per-directory `.zed/settings.json` overrides, and the
    // aggregation and ordering of tasks from worktree-local `tasks.json`
    // files and global file-based tasks.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // Identifies tasks coming from the top-level `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolve per file: `a/a.rs` sees the root
            // `.zed/settings.json`, `b/b.rs` sees `b/.zed/settings.json`.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Tasks from the nested `b/.zed` directory are listed before the root ones.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root `.zed` task as recently scheduled, and register a new
    // global (file-based) task with an environment variable.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task now sorts first; the global file-based task
    // (carrying its env) is appended after the worktree-local ones.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
390
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end test of language server lifecycle management: servers are
    // started lazily when a matching buffer is opened; open/change/save/close
    // notifications are routed to the right server(s); renaming a file across
    // language boundaries closes it on one server and opens it on the other;
    // and restarting servers re-opens all their documents.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Two fake servers with distinct completion trigger characters, so we can
    // observe which server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    // The document version is incremented to 1.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    // (The TOML edit produces no notification; only the Rust edit does.)
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    // A same-extension rename surfaces as close(old path) + open(new path).
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic on the buffer so we can verify below that it gets
    // cleared when the buffer's language changes.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    // (The edit below is version 1 relative to the fresh open at version 0.)
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is unspecified, hence the set comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
773
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // Verifies `workspace/didChangeWatchedFiles` support: registering file
    // watchers causes ignored directories matching the watch globs to be
    // scanned, and subsequent FS mutations are reported to the server only
    // when they match one of its registered glob patterns.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for counting the directory reads triggered by registration below.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Accumulate incoming file events, sorted by URI for deterministic asserts.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registration alone produces no file events, but does read directories
    // while expanding the ignored path covered by the "target/y" watcher.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    // (d.txt and target/x/... do not match any watcher and are absent.)
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
967
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics published for files that are each their own
    // single-file worktree land in the correct buffer and are surfaced with
    // the right severity in the buffer's highlighted chunks.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Each file path becomes its own single-file worktree.
    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Publish one ERROR on a.rs and one WARNING on b.rs, both covering the
    // variable name (columns 4..5).
    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer shows only its own diagnostic, at the expected range.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1059
1060#[gpui::test]
1061async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1062 init_test(cx);
1063
1064 let fs = FakeFs::new(cx.executor());
1065 fs.insert_tree(
1066 "/root",
1067 json!({
1068 "dir": {
1069 ".git": {
1070 "HEAD": "ref: refs/heads/main",
1071 },
1072 ".gitignore": "b.rs",
1073 "a.rs": "let a = 1;",
1074 "b.rs": "let b = 2;",
1075 },
1076 "other.rs": "let b = c;"
1077 }),
1078 )
1079 .await;
1080
1081 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1082 let (worktree, _) = project
1083 .update(cx, |project, cx| {
1084 project.find_or_create_worktree("/root/dir", true, cx)
1085 })
1086 .await
1087 .unwrap();
1088 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1089
1090 let (worktree, _) = project
1091 .update(cx, |project, cx| {
1092 project.find_or_create_worktree("/root/other.rs", false, cx)
1093 })
1094 .await
1095 .unwrap();
1096 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1097
1098 let server_id = LanguageServerId(0);
1099 project.update(cx, |project, cx| {
1100 project
1101 .update_diagnostics(
1102 server_id,
1103 lsp::PublishDiagnosticsParams {
1104 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1105 version: None,
1106 diagnostics: vec![lsp::Diagnostic {
1107 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1108 severity: Some(lsp::DiagnosticSeverity::ERROR),
1109 message: "unused variable 'b'".to_string(),
1110 ..Default::default()
1111 }],
1112 },
1113 &[],
1114 cx,
1115 )
1116 .unwrap();
1117 project
1118 .update_diagnostics(
1119 server_id,
1120 lsp::PublishDiagnosticsParams {
1121 uri: Url::from_file_path("/root/other.rs").unwrap(),
1122 version: None,
1123 diagnostics: vec![lsp::Diagnostic {
1124 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1125 severity: Some(lsp::DiagnosticSeverity::ERROR),
1126 message: "unknown variable 'c'".to_string(),
1127 ..Default::default()
1128 }],
1129 },
1130 &[],
1131 cx,
1132 )
1133 .unwrap();
1134 });
1135
1136 let main_ignored_buffer = project
1137 .update(cx, |project, cx| {
1138 project.open_buffer((main_worktree_id, "b.rs"), cx)
1139 })
1140 .await
1141 .unwrap();
1142 main_ignored_buffer.update(cx, |buffer, _| {
1143 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1144 assert_eq!(
1145 chunks
1146 .iter()
1147 .map(|(s, d)| (s.as_str(), *d))
1148 .collect::<Vec<_>>(),
1149 &[
1150 ("let ", None),
1151 ("b", Some(DiagnosticSeverity::ERROR)),
1152 (" = 2;", None),
1153 ],
1154 "Gigitnored buffers should still get in-buffer diagnostics",
1155 );
1156 });
1157 let other_buffer = project
1158 .update(cx, |project, cx| {
1159 project.open_buffer((other_worktree_id, ""), cx)
1160 })
1161 .await
1162 .unwrap();
1163 other_buffer.update(cx, |buffer, _| {
1164 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1165 assert_eq!(
1166 chunks
1167 .iter()
1168 .map(|(s, d)| (s.as_str(), *d))
1169 .collect::<Vec<_>>(),
1170 &[
1171 ("let b = ", None),
1172 ("c", Some(DiagnosticSeverity::ERROR)),
1173 (";", None),
1174 ],
1175 "Buffers from hidden projects should still get in-buffer diagnostics"
1176 );
1177 });
1178
1179 project.update(cx, |project, cx| {
1180 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1181 assert_eq!(
1182 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1183 vec![(
1184 ProjectPath {
1185 worktree_id: main_worktree_id,
1186 path: Arc::from(Path::new("b.rs")),
1187 },
1188 server_id,
1189 DiagnosticSummary {
1190 error_count: 1,
1191 warning_count: 0,
1192 }
1193 )]
1194 );
1195 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1196 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1197 });
1198}
1199
// Verifies the project event stream while a server reports disk-based
// diagnostics: `DiskBasedDiagnosticsStarted` when the progress token begins,
// `DiagnosticsUpdated` per publish, `DiskBasedDiagnosticsFinished` when it
// ends — and no duplicate event for a repeated identical empty publish.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Progress token the fake adapter treats as disk-based diagnostic work.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = cx.events(&project);

    // First observed event: the server was registered for this worktree.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name().into(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the disk-based token emits a "started" event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics emits an "updated" event for the file's path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending progress emits a "finished" event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The previously published diagnostic is attached to the opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // A second identical empty publish must not produce another event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1332
// Restarting a language server while its disk-based diagnostics are still in
// progress must not leave the project stuck in a "diagnosing" state: the new
// server instance's progress lifecycle alone determines the running set.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // Note: the replacement server gets a fresh id (1).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name().into(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server should be reported as running diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1417
1418#[gpui::test]
1419async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1420 init_test(cx);
1421
1422 let fs = FakeFs::new(cx.executor());
1423 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1424
1425 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1426
1427 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1428 language_registry.add(rust_lang());
1429 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1430
1431 let buffer = project
1432 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1433 .await
1434 .unwrap();
1435
1436 // Publish diagnostics
1437 let fake_server = fake_servers.next().await.unwrap();
1438 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1439 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1440 version: None,
1441 diagnostics: vec![lsp::Diagnostic {
1442 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1443 severity: Some(lsp::DiagnosticSeverity::ERROR),
1444 message: "the message".to_string(),
1445 ..Default::default()
1446 }],
1447 });
1448
1449 cx.executor().run_until_parked();
1450 buffer.update(cx, |buffer, _| {
1451 assert_eq!(
1452 buffer
1453 .snapshot()
1454 .diagnostics_in_range::<_, usize>(0..1, false)
1455 .map(|entry| entry.diagnostic.message.clone())
1456 .collect::<Vec<_>>(),
1457 ["the message".to_string()]
1458 );
1459 });
1460 project.update(cx, |project, cx| {
1461 assert_eq!(
1462 project.diagnostic_summary(false, cx),
1463 DiagnosticSummary {
1464 error_count: 1,
1465 warning_count: 0,
1466 }
1467 );
1468 });
1469
1470 project.update(cx, |project, cx| {
1471 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1472 });
1473
1474 // The diagnostics are cleared.
1475 cx.executor().run_until_parked();
1476 buffer.update(cx, |buffer, _| {
1477 assert_eq!(
1478 buffer
1479 .snapshot()
1480 .diagnostics_in_range::<_, usize>(0..1, false)
1481 .map(|entry| entry.diagnostic.message.clone())
1482 .collect::<Vec<_>>(),
1483 Vec::<String>::new(),
1484 );
1485 });
1486 project.update(cx, |project, cx| {
1487 assert_eq!(
1488 project.diagnostic_summary(false, cx),
1489 DiagnosticSummary {
1490 error_count: 0,
1491 warning_count: 0,
1492 }
1493 );
1494 });
1495}
1496
1497#[gpui::test]
1498async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1499 init_test(cx);
1500
1501 let fs = FakeFs::new(cx.executor());
1502 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1503
1504 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1505 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1506
1507 language_registry.add(rust_lang());
1508 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1509
1510 let buffer = project
1511 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1512 .await
1513 .unwrap();
1514
1515 // Before restarting the server, report diagnostics with an unknown buffer version.
1516 let fake_server = fake_servers.next().await.unwrap();
1517 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1518 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1519 version: Some(10000),
1520 diagnostics: Vec::new(),
1521 });
1522 cx.executor().run_until_parked();
1523
1524 project.update(cx, |project, cx| {
1525 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1526 });
1527 let mut fake_server = fake_servers.next().await.unwrap();
1528 let notification = fake_server
1529 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1530 .await
1531 .text_document;
1532 assert_eq!(notification.version, 0);
1533}
1534
1535#[gpui::test]
1536async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
1537 init_test(cx);
1538
1539 let progress_token = "the-progress-token";
1540
1541 let fs = FakeFs::new(cx.executor());
1542 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1543
1544 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1545
1546 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1547 language_registry.add(rust_lang());
1548 let mut fake_servers = language_registry.register_fake_lsp(
1549 "Rust",
1550 FakeLspAdapter {
1551 name: "the-language-server",
1552 disk_based_diagnostics_sources: vec!["disk".into()],
1553 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1554 ..Default::default()
1555 },
1556 );
1557
1558 let buffer = project
1559 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1560 .await
1561 .unwrap();
1562
1563 // Simulate diagnostics starting to update.
1564 let mut fake_server = fake_servers.next().await.unwrap();
1565 fake_server
1566 .start_progress_with(
1567 "another-token",
1568 lsp::WorkDoneProgressBegin {
1569 cancellable: Some(false),
1570 ..Default::default()
1571 },
1572 )
1573 .await;
1574 fake_server
1575 .start_progress_with(
1576 progress_token,
1577 lsp::WorkDoneProgressBegin {
1578 cancellable: Some(true),
1579 ..Default::default()
1580 },
1581 )
1582 .await;
1583 cx.executor().run_until_parked();
1584
1585 project.update(cx, |project, cx| {
1586 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
1587 });
1588
1589 let cancel_notification = fake_server
1590 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
1591 .await;
1592 assert_eq!(
1593 cancel_notification.token,
1594 NumberOrString::String(progress_token.into())
1595 );
1596}
1597
1598#[gpui::test]
1599async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1600 init_test(cx);
1601
1602 let fs = FakeFs::new(cx.executor());
1603 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1604 .await;
1605
1606 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1607 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1608
1609 let mut fake_rust_servers = language_registry.register_fake_lsp(
1610 "Rust",
1611 FakeLspAdapter {
1612 name: "rust-lsp",
1613 ..Default::default()
1614 },
1615 );
1616 let mut fake_js_servers = language_registry.register_fake_lsp(
1617 "JavaScript",
1618 FakeLspAdapter {
1619 name: "js-lsp",
1620 ..Default::default()
1621 },
1622 );
1623 language_registry.add(rust_lang());
1624 language_registry.add(js_lang());
1625
1626 let _rs_buffer = project
1627 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1628 .await
1629 .unwrap();
1630 let _js_buffer = project
1631 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1632 .await
1633 .unwrap();
1634
1635 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1636 assert_eq!(
1637 fake_rust_server_1
1638 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1639 .await
1640 .text_document
1641 .uri
1642 .as_str(),
1643 "file:///dir/a.rs"
1644 );
1645
1646 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1647 assert_eq!(
1648 fake_js_server
1649 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1650 .await
1651 .text_document
1652 .uri
1653 .as_str(),
1654 "file:///dir/b.js"
1655 );
1656
1657 // Disable Rust language server, ensuring only that server gets stopped.
1658 cx.update(|cx| {
1659 SettingsStore::update_global(cx, |settings, cx| {
1660 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1661 settings.languages.insert(
1662 "Rust".into(),
1663 LanguageSettingsContent {
1664 enable_language_server: Some(false),
1665 ..Default::default()
1666 },
1667 );
1668 });
1669 })
1670 });
1671 fake_rust_server_1
1672 .receive_notification::<lsp::notification::Exit>()
1673 .await;
1674
1675 // Enable Rust and disable JavaScript language servers, ensuring that the
1676 // former gets started again and that the latter stops.
1677 cx.update(|cx| {
1678 SettingsStore::update_global(cx, |settings, cx| {
1679 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1680 settings.languages.insert(
1681 LanguageName::new("Rust"),
1682 LanguageSettingsContent {
1683 enable_language_server: Some(true),
1684 ..Default::default()
1685 },
1686 );
1687 settings.languages.insert(
1688 LanguageName::new("JavaScript"),
1689 LanguageSettingsContent {
1690 enable_language_server: Some(false),
1691 ..Default::default()
1692 },
1693 );
1694 });
1695 })
1696 });
1697 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1698 assert_eq!(
1699 fake_rust_server_2
1700 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1701 .await
1702 .text_document
1703 .uri
1704 .as_str(),
1705 "file:///dir/a.rs"
1706 );
1707 fake_js_server
1708 .receive_notification::<lsp::notification::Exit>()
1709 .await;
1710}
1711
// Diagnostics published against an older buffer version must be translated
// through the edits made since that version, so their ranges track the
// current buffer content. Also covers overlapping diagnostics and versions
// arriving out of order.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (Two blank lines were inserted at the top, so line N became line N+2.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1991
1992#[gpui::test]
1993async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1994 init_test(cx);
1995
1996 let text = concat!(
1997 "let one = ;\n", //
1998 "let two = \n",
1999 "let three = 3;\n",
2000 );
2001
2002 let fs = FakeFs::new(cx.executor());
2003 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2004
2005 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2006 let buffer = project
2007 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2008 .await
2009 .unwrap();
2010
2011 project.update(cx, |project, cx| {
2012 project.lsp_store.update(cx, |lsp_store, cx| {
2013 lsp_store
2014 .update_buffer_diagnostics(
2015 &buffer,
2016 LanguageServerId(0),
2017 None,
2018 vec![
2019 DiagnosticEntry {
2020 range: Unclipped(PointUtf16::new(0, 10))
2021 ..Unclipped(PointUtf16::new(0, 10)),
2022 diagnostic: Diagnostic {
2023 severity: DiagnosticSeverity::ERROR,
2024 message: "syntax error 1".to_string(),
2025 ..Default::default()
2026 },
2027 },
2028 DiagnosticEntry {
2029 range: Unclipped(PointUtf16::new(1, 10))
2030 ..Unclipped(PointUtf16::new(1, 10)),
2031 diagnostic: Diagnostic {
2032 severity: DiagnosticSeverity::ERROR,
2033 message: "syntax error 2".to_string(),
2034 ..Default::default()
2035 },
2036 },
2037 ],
2038 cx,
2039 )
2040 .unwrap();
2041 })
2042 });
2043
2044 // An empty range is extended forward to include the following character.
2045 // At the end of a line, an empty range is extended backward to include
2046 // the preceding character.
2047 buffer.update(cx, |buffer, _| {
2048 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2049 assert_eq!(
2050 chunks
2051 .iter()
2052 .map(|(s, d)| (s.as_str(), *d))
2053 .collect::<Vec<_>>(),
2054 &[
2055 ("let one = ", None),
2056 (";", Some(DiagnosticSeverity::ERROR)),
2057 ("\nlet two =", None),
2058 (" ", Some(DiagnosticSeverity::ERROR)),
2059 ("\nlet three = 3;\n", None)
2060 ]
2061 );
2062 });
2063}
2064
2065#[gpui::test]
2066async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2067 init_test(cx);
2068
2069 let fs = FakeFs::new(cx.executor());
2070 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2071 .await;
2072
2073 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2074
2075 project.update(cx, |project, cx| {
2076 project
2077 .update_diagnostic_entries(
2078 LanguageServerId(0),
2079 Path::new("/dir/a.rs").to_owned(),
2080 None,
2081 vec![DiagnosticEntry {
2082 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2083 diagnostic: Diagnostic {
2084 severity: DiagnosticSeverity::ERROR,
2085 is_primary: true,
2086 message: "syntax error a1".to_string(),
2087 ..Default::default()
2088 },
2089 }],
2090 cx,
2091 )
2092 .unwrap();
2093 project
2094 .update_diagnostic_entries(
2095 LanguageServerId(1),
2096 Path::new("/dir/a.rs").to_owned(),
2097 None,
2098 vec![DiagnosticEntry {
2099 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2100 diagnostic: Diagnostic {
2101 severity: DiagnosticSeverity::ERROR,
2102 is_primary: true,
2103 message: "syntax error b1".to_string(),
2104 ..Default::default()
2105 },
2106 }],
2107 cx,
2108 )
2109 .unwrap();
2110
2111 assert_eq!(
2112 project.diagnostic_summary(false, cx),
2113 DiagnosticSummary {
2114 error_count: 2,
2115 warning_count: 0,
2116 }
2117 );
2118 });
2119}
2120
// LSP edits computed against an older document version must be re-anchored
// through the buffer edits made since that version, so applying them to the
// current buffer produces the intended result.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the version the server will (stalely) compute its edits against.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Translate edits expressed against the old version into current-buffer
    // ranges. The positions below refer to the ORIGINAL text.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the interleaved local edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2273
// Rust-analyzer expresses some small changes (e.g. the merge-imports code
// action) as a very large diff: replace a span, reinsert the remainder of
// the file, then delete the original tail. Verify that `edits_from_lsp`
// minimizes such a diff down to just the regions that actually changed.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four LSP edits above must be minimized down to two buffer
        // edits: the import rewrite and the removal of the now-duplicated
        // second import line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2382
// Verify that `edits_from_lsp` tolerates malformed server edits: edits that
// arrive out of order, with inverted ranges, or with positions past the end
// of the buffer must be normalized/clipped rather than causing a failure,
// and must still minimize down to the same two buffer edits as the
// well-formed case above.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start position comes after end position.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) lies past the end of the buffer.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the edits minimize to the same two
        // well-ordered buffer edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2487
2488fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2489 buffer: &Buffer,
2490 range: Range<T>,
2491) -> Vec<(String, Option<DiagnosticSeverity>)> {
2492 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2493 for chunk in buffer.snapshot().chunks(range, true) {
2494 if chunks.last().map_or(false, |prev_chunk| {
2495 prev_chunk.1 == chunk.diagnostic_severity
2496 }) {
2497 chunks.last_mut().unwrap().0.push_str(chunk.text);
2498 } else {
2499 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2500 }
2501 }
2502 chunks
2503}
2504
// Go-to-definition into a file outside the project's worktree should open
// the target in a new, *invisible* worktree, reuse the already-running
// language server, and release that worktree once the last handle to the
// definition result is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is outside the worktree.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server resolves the definition to a location in a.rs,
    // which lies outside the project.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // The definition target was opened in an invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        // Dropping the definition releases the invisible worktree for a.rs.
        drop(definition);
    });
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Lists each worktree's absolute root path along with its visibility.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2598
// When a server's completion items carry no `textEdit`, the replaced range
// must be inferred from the text around the cursor: the partial word being
// typed, excluding adjacent punctuation such as a closing quote.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing in the middle of an identifier ("fqn").
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item has `insert_text` but no `text_edit`, so the client must
    // compute the replacement range itself.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The replaced range covers the partial word "fqn".
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal, one position before the
    // closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The replaced range covers "cmp" but not the closing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2690
// Completion text containing carriage returns ("\r" and "\r\n") must be
// normalized to "\n" line endings before being applied to the buffer.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert text mixes "\r" and "\r\n" line endings.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both kinds of carriage return were normalized to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2751
// Exercises the command-based code-action flow end to end: the action's
// edits remain absent even after resolving it, so applying the action must
// execute its command instead, and the actual edits arrive via a
// `workspace/applyEdit` request from the server. The resulting transaction
// must cover the buffer and be undoable.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2884
2885#[gpui::test(iterations = 10)]
2886async fn test_save_file(cx: &mut gpui::TestAppContext) {
2887 init_test(cx);
2888
2889 let fs = FakeFs::new(cx.executor());
2890 fs.insert_tree(
2891 "/dir",
2892 json!({
2893 "file1": "the old contents",
2894 }),
2895 )
2896 .await;
2897
2898 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2899 let buffer = project
2900 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2901 .await
2902 .unwrap();
2903 buffer.update(cx, |buffer, cx| {
2904 assert_eq!(buffer.text(), "the old contents");
2905 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2906 });
2907
2908 project
2909 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2910 .await
2911 .unwrap();
2912
2913 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2914 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2915}
2916
2917#[gpui::test(iterations = 30)]
2918async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2919 init_test(cx);
2920
2921 let fs = FakeFs::new(cx.executor().clone());
2922 fs.insert_tree(
2923 "/dir",
2924 json!({
2925 "file1": "the original contents",
2926 }),
2927 )
2928 .await;
2929
2930 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2931 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2932 let buffer = project
2933 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2934 .await
2935 .unwrap();
2936
2937 // Simulate buffer diffs being slow, so that they don't complete before
2938 // the next file change occurs.
2939 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2940
2941 // Change the buffer's file on disk, and then wait for the file change
2942 // to be detected by the worktree, so that the buffer starts reloading.
2943 fs.save(
2944 "/dir/file1".as_ref(),
2945 &"the first contents".into(),
2946 Default::default(),
2947 )
2948 .await
2949 .unwrap();
2950 worktree.next_event(cx).await;
2951
2952 // Change the buffer's file again. Depending on the random seed, the
2953 // previous file change may still be in progress.
2954 fs.save(
2955 "/dir/file1".as_ref(),
2956 &"the second contents".into(),
2957 Default::default(),
2958 )
2959 .await
2960 .unwrap();
2961 worktree.next_event(cx).await;
2962
2963 cx.executor().run_until_parked();
2964 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2965 buffer.read_with(cx, |buffer, _| {
2966 assert_eq!(buffer.text(), on_disk_text);
2967 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2968 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2969 });
2970}
2971
2972#[gpui::test(iterations = 30)]
2973async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2974 init_test(cx);
2975
2976 let fs = FakeFs::new(cx.executor().clone());
2977 fs.insert_tree(
2978 "/dir",
2979 json!({
2980 "file1": "the original contents",
2981 }),
2982 )
2983 .await;
2984
2985 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2986 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2987 let buffer = project
2988 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2989 .await
2990 .unwrap();
2991
2992 // Simulate buffer diffs being slow, so that they don't complete before
2993 // the next file change occurs.
2994 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2995
2996 // Change the buffer's file on disk, and then wait for the file change
2997 // to be detected by the worktree, so that the buffer starts reloading.
2998 fs.save(
2999 "/dir/file1".as_ref(),
3000 &"the first contents".into(),
3001 Default::default(),
3002 )
3003 .await
3004 .unwrap();
3005 worktree.next_event(cx).await;
3006
3007 cx.executor()
3008 .spawn(cx.executor().simulate_random_delay())
3009 .await;
3010
3011 // Perform a noop edit, causing the buffer's version to increase.
3012 buffer.update(cx, |buffer, cx| {
3013 buffer.edit([(0..0, " ")], None, cx);
3014 buffer.undo(cx);
3015 });
3016
3017 cx.executor().run_until_parked();
3018 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3019 buffer.read_with(cx, |buffer, _| {
3020 let buffer_text = buffer.text();
3021 if buffer_text == on_disk_text {
3022 assert!(
3023 !buffer.is_dirty() && !buffer.has_conflict(),
3024 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3025 );
3026 }
3027 // If the file change occurred while the buffer was processing the first
3028 // change, the buffer will be in a conflicting state.
3029 else {
3030 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3031 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3032 }
3033 });
3034}
3035
3036#[gpui::test]
3037async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3038 init_test(cx);
3039
3040 let fs = FakeFs::new(cx.executor());
3041 fs.insert_tree(
3042 "/dir",
3043 json!({
3044 "file1": "the old contents",
3045 }),
3046 )
3047 .await;
3048
3049 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
3050 let buffer = project
3051 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3052 .await
3053 .unwrap();
3054 buffer.update(cx, |buffer, cx| {
3055 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3056 });
3057
3058 project
3059 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3060 .await
3061 .unwrap();
3062
3063 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3064 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3065}
3066
// `save_buffer_as` should write an untitled buffer to the given project
// path, clear its dirty state, re-assign its language based on the new file
// extension, and register the buffer so that reopening the path yields the
// same buffer entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts out dirty and as plain text.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    // After saving, the buffer is clean and its language was re-detected
    // from the ".rs" extension.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the newly saved path must return the very same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3118
// After renaming and deleting files/directories on the real filesystem, the
// local worktree must preserve entry ids across renames, keep open buffers
// pointing at their moved paths (and mark deleted files' buffers as
// deleted), and a remote worktree replica fed the observed updates must
// converge to the same set of paths.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up a worktree entry id by relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits, to replay on the remote.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids must survive renames, including renames of ancestor dirs.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers must track their files' new paths; the deleted file's
    // buffer keeps its old path but is marked deleted.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3266
// Renaming a directory must preserve the entry ids of the directory and of
// the files inside it, and must leave buffers for those files clean.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up a worktree entry id by relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory containing the open buffer's file.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids survive the rename, and the buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3318
3319#[gpui::test]
3320async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3321 init_test(cx);
3322
3323 let fs = FakeFs::new(cx.executor());
3324 fs.insert_tree(
3325 "/dir",
3326 json!({
3327 "a.txt": "a-contents",
3328 "b.txt": "b-contents",
3329 }),
3330 )
3331 .await;
3332
3333 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3334
3335 // Spawn multiple tasks to open paths, repeating some paths.
3336 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3337 (
3338 p.open_local_buffer("/dir/a.txt", cx),
3339 p.open_local_buffer("/dir/b.txt", cx),
3340 p.open_local_buffer("/dir/a.txt", cx),
3341 )
3342 });
3343
3344 let buffer_a_1 = buffer_a_1.await.unwrap();
3345 let buffer_a_2 = buffer_a_2.await.unwrap();
3346 let buffer_b = buffer_b.await.unwrap();
3347 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3348 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3349
3350 // There is only one buffer per path.
3351 let buffer_a_id = buffer_a_1.entity_id();
3352 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3353
3354 // Open the same path again while it is still open.
3355 drop(buffer_a_1);
3356 let buffer_a_3 = project
3357 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3358 .await
3359 .unwrap();
3360
3361 // There's still only one buffer per path.
3362 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3363}
3364
// Exercises the buffer dirty-state machine: editing marks a buffer dirty,
// saving clears it, restoring the saved text clears it again, and deleting
// the file on disk marks a clean buffer dirty (emitting the expected events).
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    // Collected buffer events, shared with the subscription closure below.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        // Record all events except Operation, which is noise for this test.
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only one DirtyChanged — the second edit doesn't re-fire it
        // because the buffer was already dirty.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer before deleting its file.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3511
// Verifies how a buffer reacts to its file changing on disk: an unmodified
// buffer is reloaded (with anchors remapped through the diff), while a
// modified buffer keeps its edits and is flagged as conflicted instead.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the first three rows, so we can
    // check how anchors survive the reload below.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Anchors follow the text they were attached to through the diff.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3592
// Verifies line-ending handling: buffers normalize text to "\n" internally
// while remembering the file's on-disk line ending, track line-ending changes
// made on disk, and write the remembered line ending back out on save.
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();

    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    // The CRLF file's text is normalized to "\n", but the Windows line
    // ending is remembered.
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        "/dir/file1".as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load("/dir/file2".as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
3654
// Verifies that LSP diagnostics published with `relatedInformation` are
// grouped: each primary diagnostic and its supporting hints share a group_id,
// and `diagnostic_group` returns the members of one group in position order.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload containing two logical groups:
    // - "error 1" (warning) plus one hint that cross-reference each other.
    // - "error 2" (error) plus two hints, all linked via relatedInformation.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Feed the diagnostics into the project as if they came from server 0.
    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics over the whole buffer, ordered by position. Hints carry
    // the group_id of the primary diagnostic they support (is_primary: false).
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 is the "error 2" group: both hints plus the primary error.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 is the "error 1" group: the primary warning and its hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3896
3897#[gpui::test]
3898async fn test_rename(cx: &mut gpui::TestAppContext) {
3899 // hi
3900 init_test(cx);
3901
3902 let fs = FakeFs::new(cx.executor());
3903 fs.insert_tree(
3904 "/dir",
3905 json!({
3906 "one.rs": "const ONE: usize = 1;",
3907 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3908 }),
3909 )
3910 .await;
3911
3912 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3913
3914 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3915 language_registry.add(rust_lang());
3916 let mut fake_servers = language_registry.register_fake_lsp(
3917 "Rust",
3918 FakeLspAdapter {
3919 capabilities: lsp::ServerCapabilities {
3920 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3921 prepare_provider: Some(true),
3922 work_done_progress_options: Default::default(),
3923 })),
3924 ..Default::default()
3925 },
3926 ..Default::default()
3927 },
3928 );
3929
3930 let buffer = project
3931 .update(cx, |project, cx| {
3932 project.open_local_buffer("/dir/one.rs", cx)
3933 })
3934 .await
3935 .unwrap();
3936
3937 let fake_server = fake_servers.next().await.unwrap();
3938
3939 let response = project.update(cx, |project, cx| {
3940 project.prepare_rename(buffer.clone(), 7, cx)
3941 });
3942 fake_server
3943 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3944 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3945 assert_eq!(params.position, lsp::Position::new(0, 7));
3946 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3947 lsp::Position::new(0, 6),
3948 lsp::Position::new(0, 9),
3949 ))))
3950 })
3951 .next()
3952 .await
3953 .unwrap();
3954 let range = response.await.unwrap().unwrap();
3955 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
3956 assert_eq!(range, 6..9);
3957
3958 let response = project.update(cx, |project, cx| {
3959 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
3960 });
3961 fake_server
3962 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3963 assert_eq!(
3964 params.text_document_position.text_document.uri.as_str(),
3965 "file:///dir/one.rs"
3966 );
3967 assert_eq!(
3968 params.text_document_position.position,
3969 lsp::Position::new(0, 7)
3970 );
3971 assert_eq!(params.new_name, "THREE");
3972 Ok(Some(lsp::WorkspaceEdit {
3973 changes: Some(
3974 [
3975 (
3976 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3977 vec![lsp::TextEdit::new(
3978 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3979 "THREE".to_string(),
3980 )],
3981 ),
3982 (
3983 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3984 vec![
3985 lsp::TextEdit::new(
3986 lsp::Range::new(
3987 lsp::Position::new(0, 24),
3988 lsp::Position::new(0, 27),
3989 ),
3990 "THREE".to_string(),
3991 ),
3992 lsp::TextEdit::new(
3993 lsp::Range::new(
3994 lsp::Position::new(0, 35),
3995 lsp::Position::new(0, 38),
3996 ),
3997 "THREE".to_string(),
3998 ),
3999 ],
4000 ),
4001 ]
4002 .into_iter()
4003 .collect(),
4004 ),
4005 ..Default::default()
4006 }))
4007 })
4008 .next()
4009 .await
4010 .unwrap();
4011 let mut transaction = response.await.unwrap().0;
4012 assert_eq!(transaction.len(), 2);
4013 assert_eq!(
4014 transaction
4015 .remove_entry(&buffer)
4016 .unwrap()
4017 .0
4018 .update(cx, |buffer, _| buffer.text()),
4019 "const THREE: usize = 1;"
4020 );
4021 assert_eq!(
4022 transaction
4023 .into_keys()
4024 .next()
4025 .unwrap()
4026 .update(cx, |buffer, _| buffer.text()),
4027 "const TWO: usize = one::THREE + one::THREE;"
4028 );
4029}
4030
// Verifies project-wide text search, both against on-disk contents and
// against unsaved in-memory buffer edits.
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    // NOTE(review): the three bool flags are SearchQuery options (presumably
    // regex/whole-word/case related) — confirm against SearchQuery::text.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/two.rs".to_string(), vec![6..9]),
            ("dir/three.rs".to_string(), vec![37..40])
        ])
    );

    // Edit a buffer without saving; the search must see the dirty in-memory
    // contents, not the stale on-disk file.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/four.rs", cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/two.rs".to_string(), vec![6..9]),
            ("dir/three.rs".to_string(), vec![37..40]),
            ("dir/four.rs".to_string(), vec![25..28, 36..39])
        ])
    );
}
4105
4106#[gpui::test]
4107async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4108 init_test(cx);
4109
4110 let search_query = "file";
4111
4112 let fs = FakeFs::new(cx.executor());
4113 fs.insert_tree(
4114 "/dir",
4115 json!({
4116 "one.rs": r#"// Rust file one"#,
4117 "one.ts": r#"// TypeScript file one"#,
4118 "two.rs": r#"// Rust file two"#,
4119 "two.ts": r#"// TypeScript file two"#,
4120 }),
4121 )
4122 .await;
4123 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4124
4125 assert!(
4126 search(
4127 &project,
4128 SearchQuery::text(
4129 search_query,
4130 false,
4131 true,
4132 false,
4133 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4134 Default::default(),
4135 None
4136 )
4137 .unwrap(),
4138 cx
4139 )
4140 .await
4141 .unwrap()
4142 .is_empty(),
4143 "If no inclusions match, no files should be returned"
4144 );
4145
4146 assert_eq!(
4147 search(
4148 &project,
4149 SearchQuery::text(
4150 search_query,
4151 false,
4152 true,
4153 false,
4154 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4155 Default::default(),
4156 None
4157 )
4158 .unwrap(),
4159 cx
4160 )
4161 .await
4162 .unwrap(),
4163 HashMap::from_iter([
4164 ("dir/one.rs".to_string(), vec![8..12]),
4165 ("dir/two.rs".to_string(), vec![8..12]),
4166 ]),
4167 "Rust only search should give only Rust files"
4168 );
4169
4170 assert_eq!(
4171 search(
4172 &project,
4173 SearchQuery::text(
4174 search_query,
4175 false,
4176 true,
4177 false,
4178
4179 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4180
4181 Default::default(),
4182 None,
4183 ).unwrap(),
4184 cx
4185 )
4186 .await
4187 .unwrap(),
4188 HashMap::from_iter([
4189 ("dir/one.ts".to_string(), vec![14..18]),
4190 ("dir/two.ts".to_string(), vec![14..18]),
4191 ]),
4192 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4193 );
4194
4195 assert_eq!(
4196 search(
4197 &project,
4198 SearchQuery::text(
4199 search_query,
4200 false,
4201 true,
4202 false,
4203
4204 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4205
4206 Default::default(),
4207 None,
4208 ).unwrap(),
4209 cx
4210 )
4211 .await
4212 .unwrap(),
4213 HashMap::from_iter([
4214 ("dir/two.ts".to_string(), vec![14..18]),
4215 ("dir/one.rs".to_string(), vec![8..12]),
4216 ("dir/one.ts".to_string(), vec![14..18]),
4217 ("dir/two.rs".to_string(), vec![8..12]),
4218 ]),
4219 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4220 );
4221}
4222
4223#[gpui::test]
4224async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4225 init_test(cx);
4226
4227 let search_query = "file";
4228
4229 let fs = FakeFs::new(cx.executor());
4230 fs.insert_tree(
4231 "/dir",
4232 json!({
4233 "one.rs": r#"// Rust file one"#,
4234 "one.ts": r#"// TypeScript file one"#,
4235 "two.rs": r#"// Rust file two"#,
4236 "two.ts": r#"// TypeScript file two"#,
4237 }),
4238 )
4239 .await;
4240 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4241
4242 assert_eq!(
4243 search(
4244 &project,
4245 SearchQuery::text(
4246 search_query,
4247 false,
4248 true,
4249 false,
4250 Default::default(),
4251 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4252 None,
4253 )
4254 .unwrap(),
4255 cx
4256 )
4257 .await
4258 .unwrap(),
4259 HashMap::from_iter([
4260 ("dir/one.rs".to_string(), vec![8..12]),
4261 ("dir/one.ts".to_string(), vec![14..18]),
4262 ("dir/two.rs".to_string(), vec![8..12]),
4263 ("dir/two.ts".to_string(), vec![14..18]),
4264 ]),
4265 "If no exclusions match, all files should be returned"
4266 );
4267
4268 assert_eq!(
4269 search(
4270 &project,
4271 SearchQuery::text(
4272 search_query,
4273 false,
4274 true,
4275 false,
4276 Default::default(),
4277 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4278 None,
4279 )
4280 .unwrap(),
4281 cx
4282 )
4283 .await
4284 .unwrap(),
4285 HashMap::from_iter([
4286 ("dir/one.ts".to_string(), vec![14..18]),
4287 ("dir/two.ts".to_string(), vec![14..18]),
4288 ]),
4289 "Rust exclusion search should give only TypeScript files"
4290 );
4291
4292 assert_eq!(
4293 search(
4294 &project,
4295 SearchQuery::text(
4296 search_query,
4297 false,
4298 true,
4299 false,
4300 Default::default(),
4301 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4302 None,
4303 ).unwrap(),
4304 cx
4305 )
4306 .await
4307 .unwrap(),
4308 HashMap::from_iter([
4309 ("dir/one.rs".to_string(), vec![8..12]),
4310 ("dir/two.rs".to_string(), vec![8..12]),
4311 ]),
4312 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4313 );
4314
4315 assert!(
4316 search(
4317 &project,
4318 SearchQuery::text(
4319 search_query,
4320 false,
4321 true,
4322 false,
4323 Default::default(),
4324
4325 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4326 None,
4327
4328 ).unwrap(),
4329 cx
4330 )
4331 .await
4332 .unwrap().is_empty(),
4333 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4334 );
4335}
4336
4337#[gpui::test]
4338async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4339 init_test(cx);
4340
4341 let search_query = "file";
4342
4343 let fs = FakeFs::new(cx.executor());
4344 fs.insert_tree(
4345 "/dir",
4346 json!({
4347 "one.rs": r#"// Rust file one"#,
4348 "one.ts": r#"// TypeScript file one"#,
4349 "two.rs": r#"// Rust file two"#,
4350 "two.ts": r#"// TypeScript file two"#,
4351 }),
4352 )
4353 .await;
4354 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4355
4356 assert!(
4357 search(
4358 &project,
4359 SearchQuery::text(
4360 search_query,
4361 false,
4362 true,
4363 false,
4364 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4365 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4366 None,
4367 )
4368 .unwrap(),
4369 cx
4370 )
4371 .await
4372 .unwrap()
4373 .is_empty(),
4374 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4375 );
4376
4377 assert!(
4378 search(
4379 &project,
4380 SearchQuery::text(
4381 search_query,
4382 false,
4383 true,
4384 false,
4385 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4386 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4387 None,
4388 ).unwrap(),
4389 cx
4390 )
4391 .await
4392 .unwrap()
4393 .is_empty(),
4394 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4395 );
4396
4397 assert!(
4398 search(
4399 &project,
4400 SearchQuery::text(
4401 search_query,
4402 false,
4403 true,
4404 false,
4405 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4406 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4407 None,
4408 )
4409 .unwrap(),
4410 cx
4411 )
4412 .await
4413 .unwrap()
4414 .is_empty(),
4415 "Non-matching inclusions and exclusions should not change that."
4416 );
4417
4418 assert_eq!(
4419 search(
4420 &project,
4421 SearchQuery::text(
4422 search_query,
4423 false,
4424 true,
4425 false,
4426 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4427 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4428 None,
4429 )
4430 .unwrap(),
4431 cx
4432 )
4433 .await
4434 .unwrap(),
4435 HashMap::from_iter([
4436 ("dir/one.ts".to_string(), vec![14..18]),
4437 ("dir/two.ts".to_string(), vec![14..18]),
4438 ]),
4439 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4440 );
4441}
4442
// Verifies that inclusion globs can be scoped to a single worktree (by
// prefixing the worktree name) or apply across all worktrees when unprefixed.
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/worktree-a",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        "/worktree-b",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    // A single project spanning both worktrees.
    let project = Project::test(
        fs.clone(),
        ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
        cx,
    )
    .await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // An unprefixed glob matches paths in every worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("worktree-a/haystack.ts".to_string(), vec![3..9]),
            ("worktree-b/haystack.ts".to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
4537
// Verifies search behavior around gitignored directories: ignored files are
// skipped by default, included when the include-ignored flag is set, and
// inclusion/exclusion matchers still apply to ignored files.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // NOTE(review): sibling tests use FakeFs::new(cx.executor()) — presumably
    // equivalent to cloning background_executor directly; confirm.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
    // Default search (fourth flag false): ignored dirs are not scanned.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Fresh project with the include-ignored flag (fourth argument) set to
    // true: every file, ignored or not, is searched.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include/exclude matchers apply on top of the include-ignored flag.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4654
/// Verifies the order in which project search streams its buffer results.
#[gpui::test]
async fn test_search_ordering(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "aaa.txt": "key:value",
            "bbb": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "10 eleven": "key",
                "1 two": "key"
            },
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Search everything, including gitignored files (4th argument is true).
    let mut search = project.update(cx, |project, cx| {
        project.search(
            SearchQuery::text(
                "key",
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx,
        )
    });

    // Extracts the worktree-relative path from a streamed buffer result.
    fn file_name(search_result: Option<SearchResult>, cx: &mut gpui::TestAppContext) -> String {
        match search_result.unwrap() {
            SearchResult::Buffer { buffer, .. } => buffer.read_with(cx, |buffer, _| {
                buffer.file().unwrap().path().to_string_lossy().to_string()
            }),
            _ => panic!("Expected buffer"),
        }
    }

    // Expected stream order: "bbb/index.txt" first, the node_modules entries
    // with "1 two" before "10 eleven", and "aaa.txt" last.
    // NOTE(review): the reason "aaa.txt" sorts after the others is not visible
    // here — confirm against the search result-ordering implementation.
    assert_eq!(file_name(search.next().await, cx), "bbb/index.txt");
    assert_eq!(file_name(search.next().await, cx), "node_modules/1 two");
    assert_eq!(file_name(search.next().await, cx), "node_modules/10 eleven");
    assert_eq!(file_name(search.next().await, cx), "aaa.txt");
    assert!(search.next().await.is_none())
}
4709
/// Verifies entry creation inside a worktree: names containing dots are
/// allowed, but any path escaping the worktree (or containing `..`) is
/// rejected, both for creating entries and for opening buffers.
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // Only "/one/two/three" is part of the project; "/one/two/c.rs" is outside.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // A file name that is just dots ("b..") is still a valid single component.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only "b.." was created; the rejected paths left no trace on disk.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
4779
/// Ensures hover requests are fanned out only to language servers that
/// advertise hover capabilities, and that their non-empty responses are
/// merged into the final hover result.
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Four fake servers for the same language; the first three declare hover
    // support, the last one declares none.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all registered servers.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Wire up per-server hover handlers:
    // * TypeScript/Tailwind answer with a named hover,
    // * ESLint answers with no hover,
    // * the capability-less server panics if it is (incorrectly) queried.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        let new_server_name = new_server_name.to_string();
        match new_server_name.as_str() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Every server with hover capabilities must have received a request.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // ESLint's empty answer is dropped; only the two named hovers remain.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
4931
4932#[gpui::test]
4933async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
4934 init_test(cx);
4935
4936 let fs = FakeFs::new(cx.executor());
4937 fs.insert_tree(
4938 "/dir",
4939 json!({
4940 "a.ts": "a",
4941 }),
4942 )
4943 .await;
4944
4945 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4946
4947 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4948 language_registry.add(typescript_lang());
4949 let mut fake_language_servers = language_registry.register_fake_lsp(
4950 "TypeScript",
4951 FakeLspAdapter {
4952 capabilities: lsp::ServerCapabilities {
4953 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4954 ..lsp::ServerCapabilities::default()
4955 },
4956 ..FakeLspAdapter::default()
4957 },
4958 );
4959
4960 let buffer = project
4961 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4962 .await
4963 .unwrap();
4964 cx.executor().run_until_parked();
4965
4966 let fake_server = fake_language_servers
4967 .next()
4968 .await
4969 .expect("failed to get the language server");
4970
4971 let mut request_handled =
4972 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
4973 Ok(Some(lsp::Hover {
4974 contents: lsp::HoverContents::Array(vec![
4975 lsp::MarkedString::String("".to_string()),
4976 lsp::MarkedString::String(" ".to_string()),
4977 lsp::MarkedString::String("\n\n\n".to_string()),
4978 ]),
4979 range: None,
4980 }))
4981 });
4982
4983 let hover_task = project.update(cx, |project, cx| {
4984 project.hover(&buffer, Point::new(0, 0), cx)
4985 });
4986 let () = request_handled
4987 .next()
4988 .await
4989 .expect("All hover requests should have been triggered");
4990 assert_eq!(
4991 Vec::<String>::new(),
4992 hover_task
4993 .await
4994 .into_iter()
4995 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4996 .sorted()
4997 .collect::<Vec<_>>(),
4998 "Empty hover parts should be ignored"
4999 );
5000}
5001
5002#[gpui::test]
5003async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5004 init_test(cx);
5005
5006 let fs = FakeFs::new(cx.executor());
5007 fs.insert_tree(
5008 "/dir",
5009 json!({
5010 "a.tsx": "a",
5011 }),
5012 )
5013 .await;
5014
5015 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5016
5017 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5018 language_registry.add(tsx_lang());
5019 let language_server_names = [
5020 "TypeScriptServer",
5021 "TailwindServer",
5022 "ESLintServer",
5023 "NoActionsCapabilitiesServer",
5024 ];
5025
5026 let mut language_server_rxs = [
5027 language_registry.register_fake_lsp(
5028 "tsx",
5029 FakeLspAdapter {
5030 name: language_server_names[0],
5031 capabilities: lsp::ServerCapabilities {
5032 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5033 ..lsp::ServerCapabilities::default()
5034 },
5035 ..FakeLspAdapter::default()
5036 },
5037 ),
5038 language_registry.register_fake_lsp(
5039 "tsx",
5040 FakeLspAdapter {
5041 name: language_server_names[1],
5042 capabilities: lsp::ServerCapabilities {
5043 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5044 ..lsp::ServerCapabilities::default()
5045 },
5046 ..FakeLspAdapter::default()
5047 },
5048 ),
5049 language_registry.register_fake_lsp(
5050 "tsx",
5051 FakeLspAdapter {
5052 name: language_server_names[2],
5053 capabilities: lsp::ServerCapabilities {
5054 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5055 ..lsp::ServerCapabilities::default()
5056 },
5057 ..FakeLspAdapter::default()
5058 },
5059 ),
5060 language_registry.register_fake_lsp(
5061 "tsx",
5062 FakeLspAdapter {
5063 name: language_server_names[3],
5064 capabilities: lsp::ServerCapabilities {
5065 code_action_provider: None,
5066 ..lsp::ServerCapabilities::default()
5067 },
5068 ..FakeLspAdapter::default()
5069 },
5070 ),
5071 ];
5072
5073 let buffer = project
5074 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
5075 .await
5076 .unwrap();
5077 cx.executor().run_until_parked();
5078
5079 let mut servers_with_actions_requests = HashMap::default();
5080 for i in 0..language_server_names.len() {
5081 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5082 panic!(
5083 "Failed to get language server #{i} with name {}",
5084 &language_server_names[i]
5085 )
5086 });
5087 let new_server_name = new_server.server.name();
5088
5089 assert!(
5090 !servers_with_actions_requests.contains_key(new_server_name),
5091 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5092 );
5093 let new_server_name = new_server_name.to_string();
5094 match new_server_name.as_str() {
5095 "TailwindServer" | "TypeScriptServer" => {
5096 servers_with_actions_requests.insert(
5097 new_server_name.clone(),
5098 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5099 move |_, _| {
5100 let name = new_server_name.clone();
5101 async move {
5102 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5103 lsp::CodeAction {
5104 title: format!("{name} code action"),
5105 ..lsp::CodeAction::default()
5106 },
5107 )]))
5108 }
5109 },
5110 ),
5111 );
5112 }
5113 "ESLintServer" => {
5114 servers_with_actions_requests.insert(
5115 new_server_name,
5116 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5117 |_, _| async move { Ok(None) },
5118 ),
5119 );
5120 }
5121 "NoActionsCapabilitiesServer" => {
5122 let _never_handled = new_server
5123 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5124 panic!(
5125 "Should not call for code actions server with no corresponding capabilities"
5126 )
5127 });
5128 }
5129 unexpected => panic!("Unexpected server name: {unexpected}"),
5130 }
5131 }
5132
5133 let code_actions_task = project.update(cx, |project, cx| {
5134 project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
5135 });
5136
5137 // cx.run_until_parked();
5138 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5139 |mut code_actions_request| async move {
5140 code_actions_request
5141 .next()
5142 .await
5143 .expect("All code actions requests should have been triggered")
5144 },
5145 ))
5146 .await;
5147 assert_eq!(
5148 vec!["TailwindServer code action", "TypeScriptServer code action"],
5149 code_actions_task
5150 .await
5151 .unwrap()
5152 .into_iter()
5153 .map(|code_action| code_action.lsp_action.title)
5154 .sorted()
5155 .collect::<Vec<_>>(),
5156 "Should receive code actions responses from all related servers with hover capabilities"
5157 );
5158}
5159
5160#[gpui::test]
5161async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5162 init_test(cx);
5163
5164 let fs = FakeFs::new(cx.executor());
5165 fs.insert_tree(
5166 "/dir",
5167 json!({
5168 "a.rs": "let a = 1;",
5169 "b.rs": "let b = 2;",
5170 "c.rs": "let c = 2;",
5171 }),
5172 )
5173 .await;
5174
5175 let project = Project::test(
5176 fs,
5177 [
5178 "/dir/a.rs".as_ref(),
5179 "/dir/b.rs".as_ref(),
5180 "/dir/c.rs".as_ref(),
5181 ],
5182 cx,
5183 )
5184 .await;
5185
5186 // check the initial state and get the worktrees
5187 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5188 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5189 assert_eq!(worktrees.len(), 3);
5190
5191 let worktree_a = worktrees[0].read(cx);
5192 let worktree_b = worktrees[1].read(cx);
5193 let worktree_c = worktrees[2].read(cx);
5194
5195 // check they start in the right order
5196 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5197 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5198 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5199
5200 (
5201 worktrees[0].clone(),
5202 worktrees[1].clone(),
5203 worktrees[2].clone(),
5204 )
5205 });
5206
5207 // move first worktree to after the second
5208 // [a, b, c] -> [b, a, c]
5209 project
5210 .update(cx, |project, cx| {
5211 let first = worktree_a.read(cx);
5212 let second = worktree_b.read(cx);
5213 project.move_worktree(first.id(), second.id(), cx)
5214 })
5215 .expect("moving first after second");
5216
5217 // check the state after moving
5218 project.update(cx, |project, cx| {
5219 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5220 assert_eq!(worktrees.len(), 3);
5221
5222 let first = worktrees[0].read(cx);
5223 let second = worktrees[1].read(cx);
5224 let third = worktrees[2].read(cx);
5225
5226 // check they are now in the right order
5227 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5228 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5229 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5230 });
5231
5232 // move the second worktree to before the first
5233 // [b, a, c] -> [a, b, c]
5234 project
5235 .update(cx, |project, cx| {
5236 let second = worktree_a.read(cx);
5237 let first = worktree_b.read(cx);
5238 project.move_worktree(first.id(), second.id(), cx)
5239 })
5240 .expect("moving second before first");
5241
5242 // check the state after moving
5243 project.update(cx, |project, cx| {
5244 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5245 assert_eq!(worktrees.len(), 3);
5246
5247 let first = worktrees[0].read(cx);
5248 let second = worktrees[1].read(cx);
5249 let third = worktrees[2].read(cx);
5250
5251 // check they are now in the right order
5252 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5253 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5254 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5255 });
5256
5257 // move the second worktree to after the third
5258 // [a, b, c] -> [a, c, b]
5259 project
5260 .update(cx, |project, cx| {
5261 let second = worktree_b.read(cx);
5262 let third = worktree_c.read(cx);
5263 project.move_worktree(second.id(), third.id(), cx)
5264 })
5265 .expect("moving second after third");
5266
5267 // check the state after moving
5268 project.update(cx, |project, cx| {
5269 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5270 assert_eq!(worktrees.len(), 3);
5271
5272 let first = worktrees[0].read(cx);
5273 let second = worktrees[1].read(cx);
5274 let third = worktrees[2].read(cx);
5275
5276 // check they are now in the right order
5277 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5278 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5279 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5280 });
5281
5282 // move the third worktree to before the second
5283 // [a, c, b] -> [a, b, c]
5284 project
5285 .update(cx, |project, cx| {
5286 let third = worktree_c.read(cx);
5287 let second = worktree_b.read(cx);
5288 project.move_worktree(third.id(), second.id(), cx)
5289 })
5290 .expect("moving third before second");
5291
5292 // check the state after moving
5293 project.update(cx, |project, cx| {
5294 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5295 assert_eq!(worktrees.len(), 3);
5296
5297 let first = worktrees[0].read(cx);
5298 let second = worktrees[1].read(cx);
5299 let third = worktrees[2].read(cx);
5300
5301 // check they are now in the right order
5302 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5303 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5304 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5305 });
5306
5307 // move the first worktree to after the third
5308 // [a, b, c] -> [b, c, a]
5309 project
5310 .update(cx, |project, cx| {
5311 let first = worktree_a.read(cx);
5312 let third = worktree_c.read(cx);
5313 project.move_worktree(first.id(), third.id(), cx)
5314 })
5315 .expect("moving first after third");
5316
5317 // check the state after moving
5318 project.update(cx, |project, cx| {
5319 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5320 assert_eq!(worktrees.len(), 3);
5321
5322 let first = worktrees[0].read(cx);
5323 let second = worktrees[1].read(cx);
5324 let third = worktrees[2].read(cx);
5325
5326 // check they are now in the right order
5327 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5328 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5329 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5330 });
5331
5332 // move the third worktree to before the first
5333 // [b, c, a] -> [a, b, c]
5334 project
5335 .update(cx, |project, cx| {
5336 let third = worktree_a.read(cx);
5337 let first = worktree_b.read(cx);
5338 project.move_worktree(third.id(), first.id(), cx)
5339 })
5340 .expect("moving third before first");
5341
5342 // check the state after moving
5343 project.update(cx, |project, cx| {
5344 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5345 assert_eq!(worktrees.len(), 3);
5346
5347 let first = worktrees[0].read(cx);
5348 let second = worktrees[1].read(cx);
5349 let third = worktrees[2].read(cx);
5350
5351 // check they are now in the right order
5352 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5353 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5354 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5355 });
5356}
5357
5358async fn search(
5359 project: &Model<Project>,
5360 query: SearchQuery,
5361 cx: &mut gpui::TestAppContext,
5362) -> Result<HashMap<String, Vec<Range<usize>>>> {
5363 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
5364 let mut results = HashMap::default();
5365 while let Some(search_result) = search_rx.next().await {
5366 match search_result {
5367 SearchResult::Buffer { buffer, ranges } => {
5368 results.entry(buffer).or_insert(ranges);
5369 }
5370 SearchResult::LimitReached => {}
5371 }
5372 }
5373 Ok(results
5374 .into_iter()
5375 .map(|(buffer, ranges)| {
5376 buffer.update(cx, |buffer, cx| {
5377 let path = buffer
5378 .file()
5379 .unwrap()
5380 .full_path(cx)
5381 .to_string_lossy()
5382 .to_string();
5383 let ranges = ranges
5384 .into_iter()
5385 .map(|range| range.to_offset(buffer))
5386 .collect::<Vec<_>>();
5387 (path, ranges)
5388 })
5389 })
5390 .collect())
5391}
5392
5393pub fn init_test(cx: &mut gpui::TestAppContext) {
5394 if std::env::var("RUST_LOG").is_ok() {
5395 env_logger::try_init().ok();
5396 }
5397
5398 cx.update(|cx| {
5399 let settings_store = SettingsStore::test(cx);
5400 cx.set_global(settings_store);
5401 release_channel::init(SemanticVersion::default(), cx);
5402 language::init(cx);
5403 Project::init_settings(cx);
5404 });
5405}
5406
5407fn json_lang() -> Arc<Language> {
5408 Arc::new(Language::new(
5409 LanguageConfig {
5410 name: "JSON".into(),
5411 matcher: LanguageMatcher {
5412 path_suffixes: vec!["json".to_string()],
5413 ..Default::default()
5414 },
5415 ..Default::default()
5416 },
5417 None,
5418 ))
5419}
5420
5421fn js_lang() -> Arc<Language> {
5422 Arc::new(Language::new(
5423 LanguageConfig {
5424 name: "JavaScript".into(),
5425 matcher: LanguageMatcher {
5426 path_suffixes: vec!["js".to_string()],
5427 ..Default::default()
5428 },
5429 ..Default::default()
5430 },
5431 None,
5432 ))
5433}
5434
5435fn rust_lang() -> Arc<Language> {
5436 Arc::new(Language::new(
5437 LanguageConfig {
5438 name: "Rust".into(),
5439 matcher: LanguageMatcher {
5440 path_suffixes: vec!["rs".to_string()],
5441 ..Default::default()
5442 },
5443 ..Default::default()
5444 },
5445 Some(tree_sitter_rust::LANGUAGE.into()),
5446 ))
5447}
5448
5449fn typescript_lang() -> Arc<Language> {
5450 Arc::new(Language::new(
5451 LanguageConfig {
5452 name: "TypeScript".into(),
5453 matcher: LanguageMatcher {
5454 path_suffixes: vec!["ts".to_string()],
5455 ..Default::default()
5456 },
5457 ..Default::default()
5458 },
5459 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
5460 ))
5461}
5462
5463fn tsx_lang() -> Arc<Language> {
5464 Arc::new(Language::new(
5465 LanguageConfig {
5466 name: "tsx".into(),
5467 matcher: LanguageMatcher {
5468 path_suffixes: vec!["tsx".to_string()],
5469 ..Default::default()
5470 },
5471 ..Default::default()
5472 },
5473 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
5474 ))
5475}
5476
5477fn get_all_tasks(
5478 project: &Model<Project>,
5479 worktree_id: Option<WorktreeId>,
5480 task_context: &TaskContext,
5481 cx: &mut AppContext,
5482) -> Vec<(TaskSourceKind, ResolvedTask)> {
5483 let (mut old, new) = project.update(cx, |project, cx| {
5484 project
5485 .task_store
5486 .read(cx)
5487 .task_inventory()
5488 .unwrap()
5489 .read(cx)
5490 .used_and_current_resolved_tasks(worktree_id, None, task_context, cx)
5491 });
5492 old.extend(new);
5493 old
5494}