1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::{AppContext, SemanticVersion, UpdateGlobal};
5use http_client::Url;
6use language::{
7 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
8 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticSet, DiskState, FakeLspAdapter,
9 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
10};
11use lsp::{DiagnosticSeverity, NumberOrString};
12use parking_lot::Mutex;
13use pretty_assertions::{assert_eq, assert_matches};
14use serde_json::json;
15#[cfg(not(windows))]
16use std::os;
17
18use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
19use task::{ResolvedTask, TaskContext};
20use unindent::Unindent as _;
21use util::{assert_set_eq, paths::PathMatcher, test::temp_tree, TryFutureExt as _};
22
23#[gpui::test]
24async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
25 cx.executor().allow_parking();
26
27 let (tx, mut rx) = futures::channel::mpsc::unbounded();
28 let _thread = std::thread::spawn(move || {
29 std::fs::metadata("/tmp").unwrap();
30 std::thread::sleep(Duration::from_millis(1000));
31 tx.unbounded_send(1).unwrap();
32 });
33 rx.next().await.unwrap();
34}
35
36#[gpui::test]
37async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
38 cx.executor().allow_parking();
39
40 let io_task = smol::unblock(move || {
41 println!("sleeping on thread {:?}", std::thread::current().id());
42 std::thread::sleep(Duration::from_millis(10));
43 1
44 });
45
46 let task = cx.foreground_executor().spawn(async move {
47 io_task.await;
48 });
49
50 task.await;
51}
52
53#[cfg(not(windows))]
54#[gpui::test]
55async fn test_symlinks(cx: &mut gpui::TestAppContext) {
56 init_test(cx);
57 cx.executor().allow_parking();
58
59 let dir = temp_tree(json!({
60 "root": {
61 "apple": "",
62 "banana": {
63 "carrot": {
64 "date": "",
65 "endive": "",
66 }
67 },
68 "fennel": {
69 "grape": "",
70 }
71 }
72 }));
73
74 let root_link_path = dir.path().join("root_link");
75 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
76 os::unix::fs::symlink(
77 dir.path().join("root/fennel"),
78 dir.path().join("root/finnochio"),
79 )
80 .unwrap();
81
82 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
83
84 project.update(cx, |project, cx| {
85 let tree = project.worktrees(cx).next().unwrap().read(cx);
86 assert_eq!(tree.file_count(), 5);
87 assert_eq!(
88 tree.inode_for_path("fennel/grape"),
89 tree.inode_for_path("finnochio/grape")
90 );
91 });
92}
93
94#[gpui::test]
95async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
96 init_test(cx);
97
98 let dir = temp_tree(json!({
99 ".editorconfig": r#"
100 root = true
101 [*.rs]
102 indent_style = tab
103 indent_size = 3
104 end_of_line = lf
105 insert_final_newline = true
106 trim_trailing_whitespace = true
107 [*.js]
108 tab_width = 10
109 "#,
110 ".zed": {
111 "settings.json": r#"{
112 "tab_size": 8,
113 "hard_tabs": false,
114 "ensure_final_newline_on_save": false,
115 "remove_trailing_whitespace_on_save": false,
116 "soft_wrap": "editor_width"
117 }"#,
118 },
119 "a.rs": "fn a() {\n A\n}",
120 "b": {
121 ".editorconfig": r#"
122 [*.rs]
123 indent_size = 2
124 "#,
125 "b.rs": "fn b() {\n B\n}",
126 },
127 "c.js": "def c\n C\nend",
128 "README.json": "tabs are better\n",
129 }));
130
131 let path = dir.path();
132 let fs = FakeFs::new(cx.executor());
133 fs.insert_tree_from_real_fs(path, path).await;
134 let project = Project::test(fs, [path], cx).await;
135
136 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
137 language_registry.add(js_lang());
138 language_registry.add(json_lang());
139 language_registry.add(rust_lang());
140
141 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
142
143 cx.executor().run_until_parked();
144
145 cx.update(|cx| {
146 let tree = worktree.read(cx);
147 let settings_for = |path: &str| {
148 let file_entry = tree.entry_for_path(path).unwrap().clone();
149 let file = File::for_entry(file_entry, worktree.clone());
150 let file_language = project
151 .read(cx)
152 .languages()
153 .language_for_file_path(file.path.as_ref());
154 let file_language = cx
155 .background_executor()
156 .block(file_language)
157 .expect("Failed to get file language");
158 let file = file as _;
159 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
160 };
161
162 let settings_a = settings_for("a.rs");
163 let settings_b = settings_for("b/b.rs");
164 let settings_c = settings_for("c.js");
165 let settings_readme = settings_for("README.json");
166
167 // .editorconfig overrides .zed/settings
168 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
169 assert_eq!(settings_a.hard_tabs, true);
170 assert_eq!(settings_a.ensure_final_newline_on_save, true);
171 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
172
173 // .editorconfig in b/ overrides .editorconfig in root
174 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
175
176 // "indent_size" is not set, so "tab_width" is used
177 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
178
179 // README.md should not be affected by .editorconfig's globe "*.rs"
180 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
181 });
182}
183
184#[gpui::test]
185async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
186 init_test(cx);
187 TaskStore::init(None);
188
189 let fs = FakeFs::new(cx.executor());
190 fs.insert_tree(
191 "/the-root",
192 json!({
193 ".zed": {
194 "settings.json": r#"{ "tab_size": 8 }"#,
195 "tasks.json": r#"[{
196 "label": "cargo check all",
197 "command": "cargo",
198 "args": ["check", "--all"]
199 },]"#,
200 },
201 "a": {
202 "a.rs": "fn a() {\n A\n}"
203 },
204 "b": {
205 ".zed": {
206 "settings.json": r#"{ "tab_size": 2 }"#,
207 "tasks.json": r#"[{
208 "label": "cargo check",
209 "command": "cargo",
210 "args": ["check"]
211 },]"#,
212 },
213 "b.rs": "fn b() {\n B\n}"
214 }
215 }),
216 )
217 .await;
218
219 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
220 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
221 let task_context = TaskContext::default();
222
223 cx.executor().run_until_parked();
224 let worktree_id = cx.update(|cx| {
225 project.update(cx, |project, cx| {
226 project.worktrees(cx).next().unwrap().read(cx).id()
227 })
228 });
229 let topmost_local_task_source_kind = TaskSourceKind::Worktree {
230 id: worktree_id,
231 directory_in_worktree: PathBuf::from(".zed"),
232 id_base: "local worktree tasks from directory \".zed\"".into(),
233 };
234
235 let all_tasks = cx
236 .update(|cx| {
237 let tree = worktree.read(cx);
238
239 let file_a = File::for_entry(
240 tree.entry_for_path("a/a.rs").unwrap().clone(),
241 worktree.clone(),
242 ) as _;
243 let settings_a = language_settings(None, Some(&file_a), cx);
244 let file_b = File::for_entry(
245 tree.entry_for_path("b/b.rs").unwrap().clone(),
246 worktree.clone(),
247 ) as _;
248 let settings_b = language_settings(None, Some(&file_b), cx);
249
250 assert_eq!(settings_a.tab_size.get(), 8);
251 assert_eq!(settings_b.tab_size.get(), 2);
252
253 get_all_tasks(&project, Some(worktree_id), &task_context, cx)
254 })
255 .into_iter()
256 .map(|(source_kind, task)| {
257 let resolved = task.resolved.unwrap();
258 (
259 source_kind,
260 task.resolved_label,
261 resolved.args,
262 resolved.env,
263 )
264 })
265 .collect::<Vec<_>>();
266 assert_eq!(
267 all_tasks,
268 vec![
269 (
270 TaskSourceKind::Worktree {
271 id: worktree_id,
272 directory_in_worktree: PathBuf::from("b/.zed"),
273 id_base: "local worktree tasks from directory \"b/.zed\"".into(),
274 },
275 "cargo check".to_string(),
276 vec!["check".to_string()],
277 HashMap::default(),
278 ),
279 (
280 topmost_local_task_source_kind.clone(),
281 "cargo check all".to_string(),
282 vec!["check".to_string(), "--all".to_string()],
283 HashMap::default(),
284 ),
285 ]
286 );
287
288 let (_, resolved_task) = cx
289 .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
290 .into_iter()
291 .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
292 .expect("should have one global task");
293 project.update(cx, |project, cx| {
294 let task_inventory = project
295 .task_store
296 .read(cx)
297 .task_inventory()
298 .cloned()
299 .unwrap();
300 task_inventory.update(cx, |inventory, _| {
301 inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
302 inventory
303 .update_file_based_tasks(
304 None,
305 Some(
306 &json!([{
307 "label": "cargo check unstable",
308 "command": "cargo",
309 "args": [
310 "check",
311 "--all",
312 "--all-targets"
313 ],
314 "env": {
315 "RUSTFLAGS": "-Zunstable-options"
316 }
317 }])
318 .to_string(),
319 ),
320 )
321 .unwrap();
322 });
323 });
324 cx.run_until_parked();
325
326 let all_tasks = cx
327 .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
328 .into_iter()
329 .map(|(source_kind, task)| {
330 let resolved = task.resolved.unwrap();
331 (
332 source_kind,
333 task.resolved_label,
334 resolved.args,
335 resolved.env,
336 )
337 })
338 .collect::<Vec<_>>();
339 assert_eq!(
340 all_tasks,
341 vec![
342 (
343 topmost_local_task_source_kind.clone(),
344 "cargo check all".to_string(),
345 vec!["check".to_string(), "--all".to_string()],
346 HashMap::default(),
347 ),
348 (
349 TaskSourceKind::Worktree {
350 id: worktree_id,
351 directory_in_worktree: PathBuf::from("b/.zed"),
352 id_base: "local worktree tasks from directory \"b/.zed\"".into(),
353 },
354 "cargo check".to_string(),
355 vec!["check".to_string()],
356 HashMap::default(),
357 ),
358 (
359 TaskSourceKind::AbsPath {
360 abs_path: paths::tasks_file().clone(),
361 id_base: "global tasks.json".into(),
362 },
363 "cargo check unstable".to_string(),
364 vec![
365 "check".to_string(),
366 "--all".to_string(),
367 "--all-targets".to_string(),
368 ],
369 HashMap::from_iter(Some((
370 "RUSTFLAGS".to_string(),
371 "-Zunstable-options".to_string()
372 ))),
373 ),
374 ]
375 );
376}
377
378#[gpui::test]
379async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
380 init_test(cx);
381
382 let fs = FakeFs::new(cx.executor());
383 fs.insert_tree(
384 "/the-root",
385 json!({
386 "test.rs": "const A: i32 = 1;",
387 "test2.rs": "",
388 "Cargo.toml": "a = 1",
389 "package.json": "{\"a\": 1}",
390 }),
391 )
392 .await;
393
394 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
395 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
396
397 let mut fake_rust_servers = language_registry.register_fake_lsp(
398 "Rust",
399 FakeLspAdapter {
400 name: "the-rust-language-server",
401 capabilities: lsp::ServerCapabilities {
402 completion_provider: Some(lsp::CompletionOptions {
403 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
404 ..Default::default()
405 }),
406 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
407 lsp::TextDocumentSyncOptions {
408 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
409 ..Default::default()
410 },
411 )),
412 ..Default::default()
413 },
414 ..Default::default()
415 },
416 );
417 let mut fake_json_servers = language_registry.register_fake_lsp(
418 "JSON",
419 FakeLspAdapter {
420 name: "the-json-language-server",
421 capabilities: lsp::ServerCapabilities {
422 completion_provider: Some(lsp::CompletionOptions {
423 trigger_characters: Some(vec![":".to_string()]),
424 ..Default::default()
425 }),
426 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
427 lsp::TextDocumentSyncOptions {
428 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
429 ..Default::default()
430 },
431 )),
432 ..Default::default()
433 },
434 ..Default::default()
435 },
436 );
437
438 // Open a buffer without an associated language server.
439 let toml_buffer = project
440 .update(cx, |project, cx| {
441 project.open_local_buffer("/the-root/Cargo.toml", cx)
442 })
443 .await
444 .unwrap();
445
446 // Open a buffer with an associated language server before the language for it has been loaded.
447 let rust_buffer = project
448 .update(cx, |project, cx| {
449 project.open_local_buffer("/the-root/test.rs", cx)
450 })
451 .await
452 .unwrap();
453 rust_buffer.update(cx, |buffer, _| {
454 assert_eq!(buffer.language().map(|l| l.name()), None);
455 });
456
457 // Now we add the languages to the project, and ensure they get assigned to all
458 // the relevant open buffers.
459 language_registry.add(json_lang());
460 language_registry.add(rust_lang());
461 cx.executor().run_until_parked();
462 rust_buffer.update(cx, |buffer, _| {
463 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
464 });
465
466 // A server is started up, and it is notified about Rust files.
467 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
468 assert_eq!(
469 fake_rust_server
470 .receive_notification::<lsp::notification::DidOpenTextDocument>()
471 .await
472 .text_document,
473 lsp::TextDocumentItem {
474 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
475 version: 0,
476 text: "const A: i32 = 1;".to_string(),
477 language_id: "rust".to_string(),
478 }
479 );
480
481 // The buffer is configured based on the language server's capabilities.
482 rust_buffer.update(cx, |buffer, _| {
483 assert_eq!(
484 buffer
485 .completion_triggers()
486 .into_iter()
487 .cloned()
488 .collect::<Vec<_>>(),
489 &[".".to_string(), "::".to_string()]
490 );
491 });
492 toml_buffer.update(cx, |buffer, _| {
493 assert!(buffer.completion_triggers().is_empty());
494 });
495
496 // Edit a buffer. The changes are reported to the language server.
497 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
498 assert_eq!(
499 fake_rust_server
500 .receive_notification::<lsp::notification::DidChangeTextDocument>()
501 .await
502 .text_document,
503 lsp::VersionedTextDocumentIdentifier::new(
504 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
505 1
506 )
507 );
508
509 // Open a third buffer with a different associated language server.
510 let json_buffer = project
511 .update(cx, |project, cx| {
512 project.open_local_buffer("/the-root/package.json", cx)
513 })
514 .await
515 .unwrap();
516
517 // A json language server is started up and is only notified about the json buffer.
518 let mut fake_json_server = fake_json_servers.next().await.unwrap();
519 assert_eq!(
520 fake_json_server
521 .receive_notification::<lsp::notification::DidOpenTextDocument>()
522 .await
523 .text_document,
524 lsp::TextDocumentItem {
525 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
526 version: 0,
527 text: "{\"a\": 1}".to_string(),
528 language_id: "json".to_string(),
529 }
530 );
531
532 // This buffer is configured based on the second language server's
533 // capabilities.
534 json_buffer.update(cx, |buffer, _| {
535 assert_eq!(
536 buffer
537 .completion_triggers()
538 .into_iter()
539 .cloned()
540 .collect::<Vec<_>>(),
541 &[":".to_string()]
542 );
543 });
544
545 // When opening another buffer whose language server is already running,
546 // it is also configured based on the existing language server's capabilities.
547 let rust_buffer2 = project
548 .update(cx, |project, cx| {
549 project.open_local_buffer("/the-root/test2.rs", cx)
550 })
551 .await
552 .unwrap();
553 rust_buffer2.update(cx, |buffer, _| {
554 assert_eq!(
555 buffer
556 .completion_triggers()
557 .into_iter()
558 .cloned()
559 .collect::<Vec<_>>(),
560 &[".".to_string(), "::".to_string()]
561 );
562 });
563
564 // Changes are reported only to servers matching the buffer's language.
565 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
566 rust_buffer2.update(cx, |buffer, cx| {
567 buffer.edit([(0..0, "let x = 1;")], None, cx)
568 });
569 assert_eq!(
570 fake_rust_server
571 .receive_notification::<lsp::notification::DidChangeTextDocument>()
572 .await
573 .text_document,
574 lsp::VersionedTextDocumentIdentifier::new(
575 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
576 1
577 )
578 );
579
580 // Save notifications are reported to all servers.
581 project
582 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
583 .await
584 .unwrap();
585 assert_eq!(
586 fake_rust_server
587 .receive_notification::<lsp::notification::DidSaveTextDocument>()
588 .await
589 .text_document,
590 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
591 );
592 assert_eq!(
593 fake_json_server
594 .receive_notification::<lsp::notification::DidSaveTextDocument>()
595 .await
596 .text_document,
597 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
598 );
599
600 // Renames are reported only to servers matching the buffer's language.
601 fs.rename(
602 Path::new("/the-root/test2.rs"),
603 Path::new("/the-root/test3.rs"),
604 Default::default(),
605 )
606 .await
607 .unwrap();
608 assert_eq!(
609 fake_rust_server
610 .receive_notification::<lsp::notification::DidCloseTextDocument>()
611 .await
612 .text_document,
613 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
614 );
615 assert_eq!(
616 fake_rust_server
617 .receive_notification::<lsp::notification::DidOpenTextDocument>()
618 .await
619 .text_document,
620 lsp::TextDocumentItem {
621 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
622 version: 0,
623 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
624 language_id: "rust".to_string(),
625 },
626 );
627
628 rust_buffer2.update(cx, |buffer, cx| {
629 buffer.update_diagnostics(
630 LanguageServerId(0),
631 DiagnosticSet::from_sorted_entries(
632 vec![DiagnosticEntry {
633 diagnostic: Default::default(),
634 range: Anchor::MIN..Anchor::MAX,
635 }],
636 &buffer.snapshot(),
637 ),
638 cx,
639 );
640 assert_eq!(
641 buffer
642 .snapshot()
643 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
644 .count(),
645 1
646 );
647 });
648
649 // When the rename changes the extension of the file, the buffer gets closed on the old
650 // language server and gets opened on the new one.
651 fs.rename(
652 Path::new("/the-root/test3.rs"),
653 Path::new("/the-root/test3.json"),
654 Default::default(),
655 )
656 .await
657 .unwrap();
658 assert_eq!(
659 fake_rust_server
660 .receive_notification::<lsp::notification::DidCloseTextDocument>()
661 .await
662 .text_document,
663 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
664 );
665 assert_eq!(
666 fake_json_server
667 .receive_notification::<lsp::notification::DidOpenTextDocument>()
668 .await
669 .text_document,
670 lsp::TextDocumentItem {
671 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
672 version: 0,
673 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
674 language_id: "json".to_string(),
675 },
676 );
677
678 // We clear the diagnostics, since the language has changed.
679 rust_buffer2.update(cx, |buffer, _| {
680 assert_eq!(
681 buffer
682 .snapshot()
683 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
684 .count(),
685 0
686 );
687 });
688
689 // The renamed file's version resets after changing language server.
690 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
691 assert_eq!(
692 fake_json_server
693 .receive_notification::<lsp::notification::DidChangeTextDocument>()
694 .await
695 .text_document,
696 lsp::VersionedTextDocumentIdentifier::new(
697 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
698 1
699 )
700 );
701
702 // Restart language servers
703 project.update(cx, |project, cx| {
704 project.restart_language_servers_for_buffers(
705 vec![rust_buffer.clone(), json_buffer.clone()],
706 cx,
707 );
708 });
709
710 let mut rust_shutdown_requests = fake_rust_server
711 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
712 let mut json_shutdown_requests = fake_json_server
713 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
714 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
715
716 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
717 let mut fake_json_server = fake_json_servers.next().await.unwrap();
718
719 // Ensure rust document is reopened in new rust language server
720 assert_eq!(
721 fake_rust_server
722 .receive_notification::<lsp::notification::DidOpenTextDocument>()
723 .await
724 .text_document,
725 lsp::TextDocumentItem {
726 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
727 version: 0,
728 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
729 language_id: "rust".to_string(),
730 }
731 );
732
733 // Ensure json documents are reopened in new json language server
734 assert_set_eq!(
735 [
736 fake_json_server
737 .receive_notification::<lsp::notification::DidOpenTextDocument>()
738 .await
739 .text_document,
740 fake_json_server
741 .receive_notification::<lsp::notification::DidOpenTextDocument>()
742 .await
743 .text_document,
744 ],
745 [
746 lsp::TextDocumentItem {
747 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
748 version: 0,
749 text: json_buffer.update(cx, |buffer, _| buffer.text()),
750 language_id: "json".to_string(),
751 },
752 lsp::TextDocumentItem {
753 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
754 version: 0,
755 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
756 language_id: "json".to_string(),
757 }
758 ]
759 );
760
761 // Close notifications are reported only to servers matching the buffer's language.
762 cx.update(|_| drop(json_buffer));
763 let close_message = lsp::DidCloseTextDocumentParams {
764 text_document: lsp::TextDocumentIdentifier::new(
765 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
766 ),
767 };
768 assert_eq!(
769 fake_json_server
770 .receive_notification::<lsp::notification::DidCloseTextDocument>()
771 .await,
772 close_message,
773 );
774}
775
776#[gpui::test]
777async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
778 init_test(cx);
779
780 let fs = FakeFs::new(cx.executor());
781 fs.insert_tree(
782 "/the-root",
783 json!({
784 ".gitignore": "target\n",
785 "src": {
786 "a.rs": "",
787 "b.rs": "",
788 },
789 "target": {
790 "x": {
791 "out": {
792 "x.rs": ""
793 }
794 },
795 "y": {
796 "out": {
797 "y.rs": "",
798 }
799 },
800 "z": {
801 "out": {
802 "z.rs": ""
803 }
804 }
805 }
806 }),
807 )
808 .await;
809
810 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
811 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
812 language_registry.add(rust_lang());
813 let mut fake_servers = language_registry.register_fake_lsp(
814 "Rust",
815 FakeLspAdapter {
816 name: "the-language-server",
817 ..Default::default()
818 },
819 );
820
821 cx.executor().run_until_parked();
822
823 // Start the language server by opening a buffer with a compatible file extension.
824 let _buffer = project
825 .update(cx, |project, cx| {
826 project.open_local_buffer("/the-root/src/a.rs", cx)
827 })
828 .await
829 .unwrap();
830
831 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
832 project.update(cx, |project, cx| {
833 let worktree = project.worktrees(cx).next().unwrap();
834 assert_eq!(
835 worktree
836 .read(cx)
837 .snapshot()
838 .entries(true, 0)
839 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
840 .collect::<Vec<_>>(),
841 &[
842 (Path::new(""), false),
843 (Path::new(".gitignore"), false),
844 (Path::new("src"), false),
845 (Path::new("src/a.rs"), false),
846 (Path::new("src/b.rs"), false),
847 (Path::new("target"), true),
848 ]
849 );
850 });
851
852 let prev_read_dir_count = fs.read_dir_call_count();
853
854 // Keep track of the FS events reported to the language server.
855 let fake_server = fake_servers.next().await.unwrap();
856 let file_changes = Arc::new(Mutex::new(Vec::new()));
857 fake_server
858 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
859 registrations: vec![lsp::Registration {
860 id: Default::default(),
861 method: "workspace/didChangeWatchedFiles".to_string(),
862 register_options: serde_json::to_value(
863 lsp::DidChangeWatchedFilesRegistrationOptions {
864 watchers: vec![
865 lsp::FileSystemWatcher {
866 glob_pattern: lsp::GlobPattern::String(
867 "/the-root/Cargo.toml".to_string(),
868 ),
869 kind: None,
870 },
871 lsp::FileSystemWatcher {
872 glob_pattern: lsp::GlobPattern::String(
873 "/the-root/src/*.{rs,c}".to_string(),
874 ),
875 kind: None,
876 },
877 lsp::FileSystemWatcher {
878 glob_pattern: lsp::GlobPattern::String(
879 "/the-root/target/y/**/*.rs".to_string(),
880 ),
881 kind: None,
882 },
883 ],
884 },
885 )
886 .ok(),
887 }],
888 })
889 .await
890 .unwrap();
891 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
892 let file_changes = file_changes.clone();
893 move |params, _| {
894 let mut file_changes = file_changes.lock();
895 file_changes.extend(params.changes);
896 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
897 }
898 });
899
900 cx.executor().run_until_parked();
901 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
902 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
903
904 // Now the language server has asked us to watch an ignored directory path,
905 // so we recursively load it.
906 project.update(cx, |project, cx| {
907 let worktree = project.worktrees(cx).next().unwrap();
908 assert_eq!(
909 worktree
910 .read(cx)
911 .snapshot()
912 .entries(true, 0)
913 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
914 .collect::<Vec<_>>(),
915 &[
916 (Path::new(""), false),
917 (Path::new(".gitignore"), false),
918 (Path::new("src"), false),
919 (Path::new("src/a.rs"), false),
920 (Path::new("src/b.rs"), false),
921 (Path::new("target"), true),
922 (Path::new("target/x"), true),
923 (Path::new("target/y"), true),
924 (Path::new("target/y/out"), true),
925 (Path::new("target/y/out/y.rs"), true),
926 (Path::new("target/z"), true),
927 ]
928 );
929 });
930
931 // Perform some file system mutations, two of which match the watched patterns,
932 // and one of which does not.
933 fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
934 .await
935 .unwrap();
936 fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
937 .await
938 .unwrap();
939 fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
940 .await
941 .unwrap();
942 fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
943 .await
944 .unwrap();
945 fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
946 .await
947 .unwrap();
948
949 // The language server receives events for the FS mutations that match its watch patterns.
950 cx.executor().run_until_parked();
951 assert_eq!(
952 &*file_changes.lock(),
953 &[
954 lsp::FileEvent {
955 uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
956 typ: lsp::FileChangeType::DELETED,
957 },
958 lsp::FileEvent {
959 uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
960 typ: lsp::FileChangeType::CREATED,
961 },
962 lsp::FileEvent {
963 uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
964 typ: lsp::FileChangeType::CREATED,
965 },
966 ]
967 );
968}
969
970#[gpui::test]
971async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
972 init_test(cx);
973
974 let fs = FakeFs::new(cx.executor());
975 fs.insert_tree(
976 "/dir",
977 json!({
978 "a.rs": "let a = 1;",
979 "b.rs": "let b = 2;"
980 }),
981 )
982 .await;
983
984 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
985
986 let buffer_a = project
987 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
988 .await
989 .unwrap();
990 let buffer_b = project
991 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
992 .await
993 .unwrap();
994
995 project.update(cx, |project, cx| {
996 project
997 .update_diagnostics(
998 LanguageServerId(0),
999 lsp::PublishDiagnosticsParams {
1000 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1001 version: None,
1002 diagnostics: vec![lsp::Diagnostic {
1003 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1004 severity: Some(lsp::DiagnosticSeverity::ERROR),
1005 message: "error 1".to_string(),
1006 ..Default::default()
1007 }],
1008 },
1009 &[],
1010 cx,
1011 )
1012 .unwrap();
1013 project
1014 .update_diagnostics(
1015 LanguageServerId(0),
1016 lsp::PublishDiagnosticsParams {
1017 uri: Url::from_file_path("/dir/b.rs").unwrap(),
1018 version: None,
1019 diagnostics: vec![lsp::Diagnostic {
1020 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1021 severity: Some(DiagnosticSeverity::WARNING),
1022 message: "error 2".to_string(),
1023 ..Default::default()
1024 }],
1025 },
1026 &[],
1027 cx,
1028 )
1029 .unwrap();
1030 });
1031
1032 buffer_a.update(cx, |buffer, _| {
1033 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1034 assert_eq!(
1035 chunks
1036 .iter()
1037 .map(|(s, d)| (s.as_str(), *d))
1038 .collect::<Vec<_>>(),
1039 &[
1040 ("let ", None),
1041 ("a", Some(DiagnosticSeverity::ERROR)),
1042 (" = 1;", None),
1043 ]
1044 );
1045 });
1046 buffer_b.update(cx, |buffer, _| {
1047 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1048 assert_eq!(
1049 chunks
1050 .iter()
1051 .map(|(s, d)| (s.as_str(), *d))
1052 .collect::<Vec<_>>(),
1053 &[
1054 ("let ", None),
1055 ("b", Some(DiagnosticSeverity::WARNING)),
1056 (" = 2;", None),
1057 ]
1058 );
1059 });
1060}
1061
1062#[gpui::test]
1063async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1064 init_test(cx);
1065
1066 let fs = FakeFs::new(cx.executor());
1067 fs.insert_tree(
1068 "/root",
1069 json!({
1070 "dir": {
1071 ".git": {
1072 "HEAD": "ref: refs/heads/main",
1073 },
1074 ".gitignore": "b.rs",
1075 "a.rs": "let a = 1;",
1076 "b.rs": "let b = 2;",
1077 },
1078 "other.rs": "let b = c;"
1079 }),
1080 )
1081 .await;
1082
1083 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1084 let (worktree, _) = project
1085 .update(cx, |project, cx| {
1086 project.find_or_create_worktree("/root/dir", true, cx)
1087 })
1088 .await
1089 .unwrap();
1090 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1091
1092 let (worktree, _) = project
1093 .update(cx, |project, cx| {
1094 project.find_or_create_worktree("/root/other.rs", false, cx)
1095 })
1096 .await
1097 .unwrap();
1098 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1099
1100 let server_id = LanguageServerId(0);
1101 project.update(cx, |project, cx| {
1102 project
1103 .update_diagnostics(
1104 server_id,
1105 lsp::PublishDiagnosticsParams {
1106 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1107 version: None,
1108 diagnostics: vec![lsp::Diagnostic {
1109 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1110 severity: Some(lsp::DiagnosticSeverity::ERROR),
1111 message: "unused variable 'b'".to_string(),
1112 ..Default::default()
1113 }],
1114 },
1115 &[],
1116 cx,
1117 )
1118 .unwrap();
1119 project
1120 .update_diagnostics(
1121 server_id,
1122 lsp::PublishDiagnosticsParams {
1123 uri: Url::from_file_path("/root/other.rs").unwrap(),
1124 version: None,
1125 diagnostics: vec![lsp::Diagnostic {
1126 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1127 severity: Some(lsp::DiagnosticSeverity::ERROR),
1128 message: "unknown variable 'c'".to_string(),
1129 ..Default::default()
1130 }],
1131 },
1132 &[],
1133 cx,
1134 )
1135 .unwrap();
1136 });
1137
1138 let main_ignored_buffer = project
1139 .update(cx, |project, cx| {
1140 project.open_buffer((main_worktree_id, "b.rs"), cx)
1141 })
1142 .await
1143 .unwrap();
1144 main_ignored_buffer.update(cx, |buffer, _| {
1145 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1146 assert_eq!(
1147 chunks
1148 .iter()
1149 .map(|(s, d)| (s.as_str(), *d))
1150 .collect::<Vec<_>>(),
1151 &[
1152 ("let ", None),
1153 ("b", Some(DiagnosticSeverity::ERROR)),
1154 (" = 2;", None),
1155 ],
1156 "Gigitnored buffers should still get in-buffer diagnostics",
1157 );
1158 });
1159 let other_buffer = project
1160 .update(cx, |project, cx| {
1161 project.open_buffer((other_worktree_id, ""), cx)
1162 })
1163 .await
1164 .unwrap();
1165 other_buffer.update(cx, |buffer, _| {
1166 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1167 assert_eq!(
1168 chunks
1169 .iter()
1170 .map(|(s, d)| (s.as_str(), *d))
1171 .collect::<Vec<_>>(),
1172 &[
1173 ("let b = ", None),
1174 ("c", Some(DiagnosticSeverity::ERROR)),
1175 (";", None),
1176 ],
1177 "Buffers from hidden projects should still get in-buffer diagnostics"
1178 );
1179 });
1180
1181 project.update(cx, |project, cx| {
1182 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1183 assert_eq!(
1184 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1185 vec![(
1186 ProjectPath {
1187 worktree_id: main_worktree_id,
1188 path: Arc::from(Path::new("b.rs")),
1189 },
1190 server_id,
1191 DiagnosticSummary {
1192 error_count: 1,
1193 warning_count: 0,
1194 }
1195 )]
1196 );
1197 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1198 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1199 });
1200}
1201
1202#[gpui::test]
1203async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
1204 init_test(cx);
1205
1206 let progress_token = "the-progress-token";
1207
1208 let fs = FakeFs::new(cx.executor());
1209 fs.insert_tree(
1210 "/dir",
1211 json!({
1212 "a.rs": "fn a() { A }",
1213 "b.rs": "const y: i32 = 1",
1214 }),
1215 )
1216 .await;
1217
1218 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1219 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1220
1221 language_registry.add(rust_lang());
1222 let mut fake_servers = language_registry.register_fake_lsp(
1223 "Rust",
1224 FakeLspAdapter {
1225 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1226 disk_based_diagnostics_sources: vec!["disk".into()],
1227 ..Default::default()
1228 },
1229 );
1230
1231 let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
1232
1233 // Cause worktree to start the fake language server
1234 let _buffer = project
1235 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1236 .await
1237 .unwrap();
1238
1239 let mut events = cx.events(&project);
1240
1241 let fake_server = fake_servers.next().await.unwrap();
1242 assert_eq!(
1243 events.next().await.unwrap(),
1244 Event::LanguageServerAdded(
1245 LanguageServerId(0),
1246 fake_server.server.name(),
1247 Some(worktree_id)
1248 ),
1249 );
1250
1251 fake_server
1252 .start_progress(format!("{}/0", progress_token))
1253 .await;
1254 assert_eq!(
1255 events.next().await.unwrap(),
1256 Event::DiskBasedDiagnosticsStarted {
1257 language_server_id: LanguageServerId(0),
1258 }
1259 );
1260
1261 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1262 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1263 version: None,
1264 diagnostics: vec![lsp::Diagnostic {
1265 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1266 severity: Some(lsp::DiagnosticSeverity::ERROR),
1267 message: "undefined variable 'A'".to_string(),
1268 ..Default::default()
1269 }],
1270 });
1271 assert_eq!(
1272 events.next().await.unwrap(),
1273 Event::DiagnosticsUpdated {
1274 language_server_id: LanguageServerId(0),
1275 path: (worktree_id, Path::new("a.rs")).into()
1276 }
1277 );
1278
1279 fake_server.end_progress(format!("{}/0", progress_token));
1280 assert_eq!(
1281 events.next().await.unwrap(),
1282 Event::DiskBasedDiagnosticsFinished {
1283 language_server_id: LanguageServerId(0)
1284 }
1285 );
1286
1287 let buffer = project
1288 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
1289 .await
1290 .unwrap();
1291
1292 buffer.update(cx, |buffer, _| {
1293 let snapshot = buffer.snapshot();
1294 let diagnostics = snapshot
1295 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1296 .collect::<Vec<_>>();
1297 assert_eq!(
1298 diagnostics,
1299 &[DiagnosticEntry {
1300 range: Point::new(0, 9)..Point::new(0, 10),
1301 diagnostic: Diagnostic {
1302 severity: lsp::DiagnosticSeverity::ERROR,
1303 message: "undefined variable 'A'".to_string(),
1304 group_id: 0,
1305 is_primary: true,
1306 ..Default::default()
1307 }
1308 }]
1309 )
1310 });
1311
1312 // Ensure publishing empty diagnostics twice only results in one update event.
1313 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1314 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1315 version: None,
1316 diagnostics: Default::default(),
1317 });
1318 assert_eq!(
1319 events.next().await.unwrap(),
1320 Event::DiagnosticsUpdated {
1321 language_server_id: LanguageServerId(0),
1322 path: (worktree_id, Path::new("a.rs")).into()
1323 }
1324 );
1325
1326 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1327 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1328 version: None,
1329 diagnostics: Default::default(),
1330 });
1331 cx.executor().run_until_parked();
1332 assert_eq!(futures::poll!(events.next()), Poll::Pending);
1333}
1334
1335#[gpui::test]
1336async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
1337 init_test(cx);
1338
1339 let progress_token = "the-progress-token";
1340
1341 let fs = FakeFs::new(cx.executor());
1342 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1343
1344 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1345
1346 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1347 language_registry.add(rust_lang());
1348 let mut fake_servers = language_registry.register_fake_lsp(
1349 "Rust",
1350 FakeLspAdapter {
1351 name: "the-language-server",
1352 disk_based_diagnostics_sources: vec!["disk".into()],
1353 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1354 ..Default::default()
1355 },
1356 );
1357
1358 let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
1359
1360 let buffer = project
1361 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1362 .await
1363 .unwrap();
1364
1365 // Simulate diagnostics starting to update.
1366 let fake_server = fake_servers.next().await.unwrap();
1367 fake_server.start_progress(progress_token).await;
1368
1369 // Restart the server before the diagnostics finish updating.
1370 project.update(cx, |project, cx| {
1371 project.restart_language_servers_for_buffers([buffer], cx);
1372 });
1373 let mut events = cx.events(&project);
1374
1375 // Simulate the newly started server sending more diagnostics.
1376 let fake_server = fake_servers.next().await.unwrap();
1377 assert_eq!(
1378 events.next().await.unwrap(),
1379 Event::LanguageServerAdded(
1380 LanguageServerId(1),
1381 fake_server.server.name(),
1382 Some(worktree_id)
1383 )
1384 );
1385 fake_server.start_progress(progress_token).await;
1386 assert_eq!(
1387 events.next().await.unwrap(),
1388 Event::DiskBasedDiagnosticsStarted {
1389 language_server_id: LanguageServerId(1)
1390 }
1391 );
1392 project.update(cx, |project, cx| {
1393 assert_eq!(
1394 project
1395 .language_servers_running_disk_based_diagnostics(cx)
1396 .collect::<Vec<_>>(),
1397 [LanguageServerId(1)]
1398 );
1399 });
1400
1401 // All diagnostics are considered done, despite the old server's diagnostic
1402 // task never completing.
1403 fake_server.end_progress(progress_token);
1404 assert_eq!(
1405 events.next().await.unwrap(),
1406 Event::DiskBasedDiagnosticsFinished {
1407 language_server_id: LanguageServerId(1)
1408 }
1409 );
1410 project.update(cx, |project, cx| {
1411 assert_eq!(
1412 project
1413 .language_servers_running_disk_based_diagnostics(cx)
1414 .collect::<Vec<_>>(),
1415 [] as [language::LanguageServerId; 0]
1416 );
1417 });
1418}
1419
1420#[gpui::test]
1421async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1422 init_test(cx);
1423
1424 let fs = FakeFs::new(cx.executor());
1425 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1426
1427 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1428
1429 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1430 language_registry.add(rust_lang());
1431 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1432
1433 let buffer = project
1434 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1435 .await
1436 .unwrap();
1437
1438 // Publish diagnostics
1439 let fake_server = fake_servers.next().await.unwrap();
1440 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1441 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1442 version: None,
1443 diagnostics: vec![lsp::Diagnostic {
1444 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1445 severity: Some(lsp::DiagnosticSeverity::ERROR),
1446 message: "the message".to_string(),
1447 ..Default::default()
1448 }],
1449 });
1450
1451 cx.executor().run_until_parked();
1452 buffer.update(cx, |buffer, _| {
1453 assert_eq!(
1454 buffer
1455 .snapshot()
1456 .diagnostics_in_range::<_, usize>(0..1, false)
1457 .map(|entry| entry.diagnostic.message.clone())
1458 .collect::<Vec<_>>(),
1459 ["the message".to_string()]
1460 );
1461 });
1462 project.update(cx, |project, cx| {
1463 assert_eq!(
1464 project.diagnostic_summary(false, cx),
1465 DiagnosticSummary {
1466 error_count: 1,
1467 warning_count: 0,
1468 }
1469 );
1470 });
1471
1472 project.update(cx, |project, cx| {
1473 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1474 });
1475
1476 // The diagnostics are cleared.
1477 cx.executor().run_until_parked();
1478 buffer.update(cx, |buffer, _| {
1479 assert_eq!(
1480 buffer
1481 .snapshot()
1482 .diagnostics_in_range::<_, usize>(0..1, false)
1483 .map(|entry| entry.diagnostic.message.clone())
1484 .collect::<Vec<_>>(),
1485 Vec::<String>::new(),
1486 );
1487 });
1488 project.update(cx, |project, cx| {
1489 assert_eq!(
1490 project.diagnostic_summary(false, cx),
1491 DiagnosticSummary {
1492 error_count: 0,
1493 warning_count: 0,
1494 }
1495 );
1496 });
1497}
1498
1499#[gpui::test]
1500async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1501 init_test(cx);
1502
1503 let fs = FakeFs::new(cx.executor());
1504 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1505
1506 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1507 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1508
1509 language_registry.add(rust_lang());
1510 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1511
1512 let buffer = project
1513 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1514 .await
1515 .unwrap();
1516
1517 // Before restarting the server, report diagnostics with an unknown buffer version.
1518 let fake_server = fake_servers.next().await.unwrap();
1519 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1520 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1521 version: Some(10000),
1522 diagnostics: Vec::new(),
1523 });
1524 cx.executor().run_until_parked();
1525
1526 project.update(cx, |project, cx| {
1527 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1528 });
1529 let mut fake_server = fake_servers.next().await.unwrap();
1530 let notification = fake_server
1531 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1532 .await
1533 .text_document;
1534 assert_eq!(notification.version, 0);
1535}
1536
1537#[gpui::test]
1538async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
1539 init_test(cx);
1540
1541 let progress_token = "the-progress-token";
1542
1543 let fs = FakeFs::new(cx.executor());
1544 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1545
1546 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1547
1548 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1549 language_registry.add(rust_lang());
1550 let mut fake_servers = language_registry.register_fake_lsp(
1551 "Rust",
1552 FakeLspAdapter {
1553 name: "the-language-server",
1554 disk_based_diagnostics_sources: vec!["disk".into()],
1555 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1556 ..Default::default()
1557 },
1558 );
1559
1560 let buffer = project
1561 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1562 .await
1563 .unwrap();
1564
1565 // Simulate diagnostics starting to update.
1566 let mut fake_server = fake_servers.next().await.unwrap();
1567 fake_server
1568 .start_progress_with(
1569 "another-token",
1570 lsp::WorkDoneProgressBegin {
1571 cancellable: Some(false),
1572 ..Default::default()
1573 },
1574 )
1575 .await;
1576 fake_server
1577 .start_progress_with(
1578 progress_token,
1579 lsp::WorkDoneProgressBegin {
1580 cancellable: Some(true),
1581 ..Default::default()
1582 },
1583 )
1584 .await;
1585 cx.executor().run_until_parked();
1586
1587 project.update(cx, |project, cx| {
1588 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
1589 });
1590
1591 let cancel_notification = fake_server
1592 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
1593 .await;
1594 assert_eq!(
1595 cancel_notification.token,
1596 NumberOrString::String(progress_token.into())
1597 );
1598}
1599
1600#[gpui::test]
1601async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1602 init_test(cx);
1603
1604 let fs = FakeFs::new(cx.executor());
1605 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1606 .await;
1607
1608 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1609 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1610
1611 let mut fake_rust_servers = language_registry.register_fake_lsp(
1612 "Rust",
1613 FakeLspAdapter {
1614 name: "rust-lsp",
1615 ..Default::default()
1616 },
1617 );
1618 let mut fake_js_servers = language_registry.register_fake_lsp(
1619 "JavaScript",
1620 FakeLspAdapter {
1621 name: "js-lsp",
1622 ..Default::default()
1623 },
1624 );
1625 language_registry.add(rust_lang());
1626 language_registry.add(js_lang());
1627
1628 let _rs_buffer = project
1629 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1630 .await
1631 .unwrap();
1632 let _js_buffer = project
1633 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1634 .await
1635 .unwrap();
1636
1637 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1638 assert_eq!(
1639 fake_rust_server_1
1640 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1641 .await
1642 .text_document
1643 .uri
1644 .as_str(),
1645 "file:///dir/a.rs"
1646 );
1647
1648 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1649 assert_eq!(
1650 fake_js_server
1651 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1652 .await
1653 .text_document
1654 .uri
1655 .as_str(),
1656 "file:///dir/b.js"
1657 );
1658
1659 // Disable Rust language server, ensuring only that server gets stopped.
1660 cx.update(|cx| {
1661 SettingsStore::update_global(cx, |settings, cx| {
1662 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1663 settings.languages.insert(
1664 "Rust".into(),
1665 LanguageSettingsContent {
1666 enable_language_server: Some(false),
1667 ..Default::default()
1668 },
1669 );
1670 });
1671 })
1672 });
1673 fake_rust_server_1
1674 .receive_notification::<lsp::notification::Exit>()
1675 .await;
1676
1677 // Enable Rust and disable JavaScript language servers, ensuring that the
1678 // former gets started again and that the latter stops.
1679 cx.update(|cx| {
1680 SettingsStore::update_global(cx, |settings, cx| {
1681 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1682 settings.languages.insert(
1683 LanguageName::new("Rust"),
1684 LanguageSettingsContent {
1685 enable_language_server: Some(true),
1686 ..Default::default()
1687 },
1688 );
1689 settings.languages.insert(
1690 LanguageName::new("JavaScript"),
1691 LanguageSettingsContent {
1692 enable_language_server: Some(false),
1693 ..Default::default()
1694 },
1695 );
1696 });
1697 })
1698 });
1699 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1700 assert_eq!(
1701 fake_rust_server_2
1702 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1703 .await
1704 .text_document
1705 .uri
1706 .as_str(),
1707 "file:///dir/a.rs"
1708 );
1709 fake_js_server
1710 .receive_notification::<lsp::notification::Exit>()
1711 .await;
1712}
1713
1714#[gpui::test(iterations = 3)]
1715async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1716 init_test(cx);
1717
1718 let text = "
1719 fn a() { A }
1720 fn b() { BB }
1721 fn c() { CCC }
1722 "
1723 .unindent();
1724
1725 let fs = FakeFs::new(cx.executor());
1726 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1727
1728 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1729 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1730
1731 language_registry.add(rust_lang());
1732 let mut fake_servers = language_registry.register_fake_lsp(
1733 "Rust",
1734 FakeLspAdapter {
1735 disk_based_diagnostics_sources: vec!["disk".into()],
1736 ..Default::default()
1737 },
1738 );
1739
1740 let buffer = project
1741 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1742 .await
1743 .unwrap();
1744
1745 let mut fake_server = fake_servers.next().await.unwrap();
1746 let open_notification = fake_server
1747 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1748 .await;
1749
1750 // Edit the buffer, moving the content down
1751 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1752 let change_notification_1 = fake_server
1753 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1754 .await;
1755 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1756
1757 // Report some diagnostics for the initial version of the buffer
1758 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1759 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1760 version: Some(open_notification.text_document.version),
1761 diagnostics: vec![
1762 lsp::Diagnostic {
1763 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1764 severity: Some(DiagnosticSeverity::ERROR),
1765 message: "undefined variable 'A'".to_string(),
1766 source: Some("disk".to_string()),
1767 ..Default::default()
1768 },
1769 lsp::Diagnostic {
1770 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1771 severity: Some(DiagnosticSeverity::ERROR),
1772 message: "undefined variable 'BB'".to_string(),
1773 source: Some("disk".to_string()),
1774 ..Default::default()
1775 },
1776 lsp::Diagnostic {
1777 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1778 severity: Some(DiagnosticSeverity::ERROR),
1779 source: Some("disk".to_string()),
1780 message: "undefined variable 'CCC'".to_string(),
1781 ..Default::default()
1782 },
1783 ],
1784 });
1785
1786 // The diagnostics have moved down since they were created.
1787 cx.executor().run_until_parked();
1788 buffer.update(cx, |buffer, _| {
1789 assert_eq!(
1790 buffer
1791 .snapshot()
1792 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1793 .collect::<Vec<_>>(),
1794 &[
1795 DiagnosticEntry {
1796 range: Point::new(3, 9)..Point::new(3, 11),
1797 diagnostic: Diagnostic {
1798 source: Some("disk".into()),
1799 severity: DiagnosticSeverity::ERROR,
1800 message: "undefined variable 'BB'".to_string(),
1801 is_disk_based: true,
1802 group_id: 1,
1803 is_primary: true,
1804 ..Default::default()
1805 },
1806 },
1807 DiagnosticEntry {
1808 range: Point::new(4, 9)..Point::new(4, 12),
1809 diagnostic: Diagnostic {
1810 source: Some("disk".into()),
1811 severity: DiagnosticSeverity::ERROR,
1812 message: "undefined variable 'CCC'".to_string(),
1813 is_disk_based: true,
1814 group_id: 2,
1815 is_primary: true,
1816 ..Default::default()
1817 }
1818 }
1819 ]
1820 );
1821 assert_eq!(
1822 chunks_with_diagnostics(buffer, 0..buffer.len()),
1823 [
1824 ("\n\nfn a() { ".to_string(), None),
1825 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1826 (" }\nfn b() { ".to_string(), None),
1827 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1828 (" }\nfn c() { ".to_string(), None),
1829 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1830 (" }\n".to_string(), None),
1831 ]
1832 );
1833 assert_eq!(
1834 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1835 [
1836 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1837 (" }\nfn c() { ".to_string(), None),
1838 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1839 ]
1840 );
1841 });
1842
1843 // Ensure overlapping diagnostics are highlighted correctly.
1844 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1845 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1846 version: Some(open_notification.text_document.version),
1847 diagnostics: vec![
1848 lsp::Diagnostic {
1849 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1850 severity: Some(DiagnosticSeverity::ERROR),
1851 message: "undefined variable 'A'".to_string(),
1852 source: Some("disk".to_string()),
1853 ..Default::default()
1854 },
1855 lsp::Diagnostic {
1856 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1857 severity: Some(DiagnosticSeverity::WARNING),
1858 message: "unreachable statement".to_string(),
1859 source: Some("disk".to_string()),
1860 ..Default::default()
1861 },
1862 ],
1863 });
1864
1865 cx.executor().run_until_parked();
1866 buffer.update(cx, |buffer, _| {
1867 assert_eq!(
1868 buffer
1869 .snapshot()
1870 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1871 .collect::<Vec<_>>(),
1872 &[
1873 DiagnosticEntry {
1874 range: Point::new(2, 9)..Point::new(2, 12),
1875 diagnostic: Diagnostic {
1876 source: Some("disk".into()),
1877 severity: DiagnosticSeverity::WARNING,
1878 message: "unreachable statement".to_string(),
1879 is_disk_based: true,
1880 group_id: 4,
1881 is_primary: true,
1882 ..Default::default()
1883 }
1884 },
1885 DiagnosticEntry {
1886 range: Point::new(2, 9)..Point::new(2, 10),
1887 diagnostic: Diagnostic {
1888 source: Some("disk".into()),
1889 severity: DiagnosticSeverity::ERROR,
1890 message: "undefined variable 'A'".to_string(),
1891 is_disk_based: true,
1892 group_id: 3,
1893 is_primary: true,
1894 ..Default::default()
1895 },
1896 }
1897 ]
1898 );
1899 assert_eq!(
1900 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1901 [
1902 ("fn a() { ".to_string(), None),
1903 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1904 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1905 ("\n".to_string(), None),
1906 ]
1907 );
1908 assert_eq!(
1909 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1910 [
1911 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1912 ("\n".to_string(), None),
1913 ]
1914 );
1915 });
1916
1917 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1918 // changes since the last save.
1919 buffer.update(cx, |buffer, cx| {
1920 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1921 buffer.edit(
1922 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1923 None,
1924 cx,
1925 );
1926 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1927 });
1928 let change_notification_2 = fake_server
1929 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1930 .await;
1931 assert!(
1932 change_notification_2.text_document.version > change_notification_1.text_document.version
1933 );
1934
1935 // Handle out-of-order diagnostics
1936 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1937 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1938 version: Some(change_notification_2.text_document.version),
1939 diagnostics: vec![
1940 lsp::Diagnostic {
1941 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1942 severity: Some(DiagnosticSeverity::ERROR),
1943 message: "undefined variable 'BB'".to_string(),
1944 source: Some("disk".to_string()),
1945 ..Default::default()
1946 },
1947 lsp::Diagnostic {
1948 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1949 severity: Some(DiagnosticSeverity::WARNING),
1950 message: "undefined variable 'A'".to_string(),
1951 source: Some("disk".to_string()),
1952 ..Default::default()
1953 },
1954 ],
1955 });
1956
1957 cx.executor().run_until_parked();
1958 buffer.update(cx, |buffer, _| {
1959 assert_eq!(
1960 buffer
1961 .snapshot()
1962 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1963 .collect::<Vec<_>>(),
1964 &[
1965 DiagnosticEntry {
1966 range: Point::new(2, 21)..Point::new(2, 22),
1967 diagnostic: Diagnostic {
1968 source: Some("disk".into()),
1969 severity: DiagnosticSeverity::WARNING,
1970 message: "undefined variable 'A'".to_string(),
1971 is_disk_based: true,
1972 group_id: 6,
1973 is_primary: true,
1974 ..Default::default()
1975 }
1976 },
1977 DiagnosticEntry {
1978 range: Point::new(3, 9)..Point::new(3, 14),
1979 diagnostic: Diagnostic {
1980 source: Some("disk".into()),
1981 severity: DiagnosticSeverity::ERROR,
1982 message: "undefined variable 'BB'".to_string(),
1983 is_disk_based: true,
1984 group_id: 5,
1985 is_primary: true,
1986 ..Default::default()
1987 },
1988 }
1989 ]
1990 );
1991 });
1992}
1993
1994#[gpui::test]
1995async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1996 init_test(cx);
1997
1998 let text = concat!(
1999 "let one = ;\n", //
2000 "let two = \n",
2001 "let three = 3;\n",
2002 );
2003
2004 let fs = FakeFs::new(cx.executor());
2005 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2006
2007 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2008 let buffer = project
2009 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2010 .await
2011 .unwrap();
2012
2013 project.update(cx, |project, cx| {
2014 project.lsp_store.update(cx, |lsp_store, cx| {
2015 lsp_store
2016 .update_buffer_diagnostics(
2017 &buffer,
2018 LanguageServerId(0),
2019 None,
2020 vec![
2021 DiagnosticEntry {
2022 range: Unclipped(PointUtf16::new(0, 10))
2023 ..Unclipped(PointUtf16::new(0, 10)),
2024 diagnostic: Diagnostic {
2025 severity: DiagnosticSeverity::ERROR,
2026 message: "syntax error 1".to_string(),
2027 ..Default::default()
2028 },
2029 },
2030 DiagnosticEntry {
2031 range: Unclipped(PointUtf16::new(1, 10))
2032 ..Unclipped(PointUtf16::new(1, 10)),
2033 diagnostic: Diagnostic {
2034 severity: DiagnosticSeverity::ERROR,
2035 message: "syntax error 2".to_string(),
2036 ..Default::default()
2037 },
2038 },
2039 ],
2040 cx,
2041 )
2042 .unwrap();
2043 })
2044 });
2045
2046 // An empty range is extended forward to include the following character.
2047 // At the end of a line, an empty range is extended backward to include
2048 // the preceding character.
2049 buffer.update(cx, |buffer, _| {
2050 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2051 assert_eq!(
2052 chunks
2053 .iter()
2054 .map(|(s, d)| (s.as_str(), *d))
2055 .collect::<Vec<_>>(),
2056 &[
2057 ("let one = ", None),
2058 (";", Some(DiagnosticSeverity::ERROR)),
2059 ("\nlet two =", None),
2060 (" ", Some(DiagnosticSeverity::ERROR)),
2061 ("\nlet three = 3;\n", None)
2062 ]
2063 );
2064 });
2065}
2066
2067#[gpui::test]
2068async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2069 init_test(cx);
2070
2071 let fs = FakeFs::new(cx.executor());
2072 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2073 .await;
2074
2075 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2076
2077 project.update(cx, |project, cx| {
2078 project
2079 .update_diagnostic_entries(
2080 LanguageServerId(0),
2081 Path::new("/dir/a.rs").to_owned(),
2082 None,
2083 vec![DiagnosticEntry {
2084 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2085 diagnostic: Diagnostic {
2086 severity: DiagnosticSeverity::ERROR,
2087 is_primary: true,
2088 message: "syntax error a1".to_string(),
2089 ..Default::default()
2090 },
2091 }],
2092 cx,
2093 )
2094 .unwrap();
2095 project
2096 .update_diagnostic_entries(
2097 LanguageServerId(1),
2098 Path::new("/dir/a.rs").to_owned(),
2099 None,
2100 vec![DiagnosticEntry {
2101 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2102 diagnostic: Diagnostic {
2103 severity: DiagnosticSeverity::ERROR,
2104 is_primary: true,
2105 message: "syntax error b1".to_string(),
2106 ..Default::default()
2107 },
2108 }],
2109 cx,
2110 )
2111 .unwrap();
2112
2113 assert_eq!(
2114 project.diagnostic_summary(false, cx),
2115 DiagnosticSummary {
2116 error_count: 2,
2117 warning_count: 0,
2118 }
2119 );
2120 });
2121}
2122
2123#[gpui::test]
2124async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
2125 init_test(cx);
2126
2127 let text = "
2128 fn a() {
2129 f1();
2130 }
2131 fn b() {
2132 f2();
2133 }
2134 fn c() {
2135 f3();
2136 }
2137 "
2138 .unindent();
2139
2140 let fs = FakeFs::new(cx.executor());
2141 fs.insert_tree(
2142 "/dir",
2143 json!({
2144 "a.rs": text.clone(),
2145 }),
2146 )
2147 .await;
2148
2149 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2150 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2151
2152 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2153 language_registry.add(rust_lang());
2154 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2155
2156 let buffer = project
2157 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2158 .await
2159 .unwrap();
2160
2161 let mut fake_server = fake_servers.next().await.unwrap();
2162 let lsp_document_version = fake_server
2163 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2164 .await
2165 .text_document
2166 .version;
2167
2168 // Simulate editing the buffer after the language server computes some edits.
2169 buffer.update(cx, |buffer, cx| {
2170 buffer.edit(
2171 [(
2172 Point::new(0, 0)..Point::new(0, 0),
2173 "// above first function\n",
2174 )],
2175 None,
2176 cx,
2177 );
2178 buffer.edit(
2179 [(
2180 Point::new(2, 0)..Point::new(2, 0),
2181 " // inside first function\n",
2182 )],
2183 None,
2184 cx,
2185 );
2186 buffer.edit(
2187 [(
2188 Point::new(6, 4)..Point::new(6, 4),
2189 "// inside second function ",
2190 )],
2191 None,
2192 cx,
2193 );
2194
2195 assert_eq!(
2196 buffer.text(),
2197 "
2198 // above first function
2199 fn a() {
2200 // inside first function
2201 f1();
2202 }
2203 fn b() {
2204 // inside second function f2();
2205 }
2206 fn c() {
2207 f3();
2208 }
2209 "
2210 .unindent()
2211 );
2212 });
2213
2214 let edits = lsp_store
2215 .update(cx, |lsp_store, cx| {
2216 lsp_store.edits_from_lsp(
2217 &buffer,
2218 vec![
2219 // replace body of first function
2220 lsp::TextEdit {
2221 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
2222 new_text: "
2223 fn a() {
2224 f10();
2225 }
2226 "
2227 .unindent(),
2228 },
2229 // edit inside second function
2230 lsp::TextEdit {
2231 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
2232 new_text: "00".into(),
2233 },
2234 // edit inside third function via two distinct edits
2235 lsp::TextEdit {
2236 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
2237 new_text: "4000".into(),
2238 },
2239 lsp::TextEdit {
2240 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
2241 new_text: "".into(),
2242 },
2243 ],
2244 LanguageServerId(0),
2245 Some(lsp_document_version),
2246 cx,
2247 )
2248 })
2249 .await
2250 .unwrap();
2251
2252 buffer.update(cx, |buffer, cx| {
2253 for (range, new_text) in edits {
2254 buffer.edit([(range, new_text)], None, cx);
2255 }
2256 assert_eq!(
2257 buffer.text(),
2258 "
2259 // above first function
2260 fn a() {
2261 // inside first function
2262 f10();
2263 }
2264 fn b() {
2265 // inside second function f200();
2266 }
2267 fn c() {
2268 f4000();
2269 }
2270 "
2271 .unindent()
2272 );
2273 });
2274}
2275
2276#[gpui::test]
2277async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
2278 init_test(cx);
2279
2280 let text = "
2281 use a::b;
2282 use a::c;
2283
2284 fn f() {
2285 b();
2286 c();
2287 }
2288 "
2289 .unindent();
2290
2291 let fs = FakeFs::new(cx.executor());
2292 fs.insert_tree(
2293 "/dir",
2294 json!({
2295 "a.rs": text.clone(),
2296 }),
2297 )
2298 .await;
2299
2300 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2301 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2302 let buffer = project
2303 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2304 .await
2305 .unwrap();
2306
2307 // Simulate the language server sending us a small edit in the form of a very large diff.
2308 // Rust-analyzer does this when performing a merge-imports code action.
2309 let edits = lsp_store
2310 .update(cx, |lsp_store, cx| {
2311 lsp_store.edits_from_lsp(
2312 &buffer,
2313 [
2314 // Replace the first use statement without editing the semicolon.
2315 lsp::TextEdit {
2316 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
2317 new_text: "a::{b, c}".into(),
2318 },
2319 // Reinsert the remainder of the file between the semicolon and the final
2320 // newline of the file.
2321 lsp::TextEdit {
2322 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2323 new_text: "\n\n".into(),
2324 },
2325 lsp::TextEdit {
2326 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2327 new_text: "
2328 fn f() {
2329 b();
2330 c();
2331 }"
2332 .unindent(),
2333 },
2334 // Delete everything after the first newline of the file.
2335 lsp::TextEdit {
2336 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
2337 new_text: "".into(),
2338 },
2339 ],
2340 LanguageServerId(0),
2341 None,
2342 cx,
2343 )
2344 })
2345 .await
2346 .unwrap();
2347
2348 buffer.update(cx, |buffer, cx| {
2349 let edits = edits
2350 .into_iter()
2351 .map(|(range, text)| {
2352 (
2353 range.start.to_point(buffer)..range.end.to_point(buffer),
2354 text,
2355 )
2356 })
2357 .collect::<Vec<_>>();
2358
2359 assert_eq!(
2360 edits,
2361 [
2362 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2363 (Point::new(1, 0)..Point::new(2, 0), "".into())
2364 ]
2365 );
2366
2367 for (range, new_text) in edits {
2368 buffer.edit([(range, new_text)], None, cx);
2369 }
2370 assert_eq!(
2371 buffer.text(),
2372 "
2373 use a::{b, c};
2374
2375 fn f() {
2376 b();
2377 c();
2378 }
2379 "
2380 .unindent()
2381 );
2382 });
2383}
2384
2385#[gpui::test]
2386async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
2387 init_test(cx);
2388
2389 let text = "
2390 use a::b;
2391 use a::c;
2392
2393 fn f() {
2394 b();
2395 c();
2396 }
2397 "
2398 .unindent();
2399
2400 let fs = FakeFs::new(cx.executor());
2401 fs.insert_tree(
2402 "/dir",
2403 json!({
2404 "a.rs": text.clone(),
2405 }),
2406 )
2407 .await;
2408
2409 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2410 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2411 let buffer = project
2412 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2413 .await
2414 .unwrap();
2415
2416 // Simulate the language server sending us edits in a non-ordered fashion,
2417 // with ranges sometimes being inverted or pointing to invalid locations.
2418 let edits = lsp_store
2419 .update(cx, |lsp_store, cx| {
2420 lsp_store.edits_from_lsp(
2421 &buffer,
2422 [
2423 lsp::TextEdit {
2424 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2425 new_text: "\n\n".into(),
2426 },
2427 lsp::TextEdit {
2428 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
2429 new_text: "a::{b, c}".into(),
2430 },
2431 lsp::TextEdit {
2432 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
2433 new_text: "".into(),
2434 },
2435 lsp::TextEdit {
2436 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2437 new_text: "
2438 fn f() {
2439 b();
2440 c();
2441 }"
2442 .unindent(),
2443 },
2444 ],
2445 LanguageServerId(0),
2446 None,
2447 cx,
2448 )
2449 })
2450 .await
2451 .unwrap();
2452
2453 buffer.update(cx, |buffer, cx| {
2454 let edits = edits
2455 .into_iter()
2456 .map(|(range, text)| {
2457 (
2458 range.start.to_point(buffer)..range.end.to_point(buffer),
2459 text,
2460 )
2461 })
2462 .collect::<Vec<_>>();
2463
2464 assert_eq!(
2465 edits,
2466 [
2467 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2468 (Point::new(1, 0)..Point::new(2, 0), "".into())
2469 ]
2470 );
2471
2472 for (range, new_text) in edits {
2473 buffer.edit([(range, new_text)], None, cx);
2474 }
2475 assert_eq!(
2476 buffer.text(),
2477 "
2478 use a::{b, c};
2479
2480 fn f() {
2481 b();
2482 c();
2483 }
2484 "
2485 .unindent()
2486 );
2487 });
2488}
2489
2490fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2491 buffer: &Buffer,
2492 range: Range<T>,
2493) -> Vec<(String, Option<DiagnosticSeverity>)> {
2494 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2495 for chunk in buffer.snapshot().chunks(range, true) {
2496 if chunks.last().map_or(false, |prev_chunk| {
2497 prev_chunk.1 == chunk.diagnostic_severity
2498 }) {
2499 chunks.last_mut().unwrap().0.push_str(chunk.text);
2500 } else {
2501 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2502 }
2503 }
2504 chunks
2505}
2506
2507#[gpui::test(iterations = 10)]
2508async fn test_definition(cx: &mut gpui::TestAppContext) {
2509 init_test(cx);
2510
2511 let fs = FakeFs::new(cx.executor());
2512 fs.insert_tree(
2513 "/dir",
2514 json!({
2515 "a.rs": "const fn a() { A }",
2516 "b.rs": "const y: i32 = crate::a()",
2517 }),
2518 )
2519 .await;
2520
2521 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2522
2523 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2524 language_registry.add(rust_lang());
2525 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2526
2527 let buffer = project
2528 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2529 .await
2530 .unwrap();
2531
2532 let fake_server = fake_servers.next().await.unwrap();
2533 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2534 let params = params.text_document_position_params;
2535 assert_eq!(
2536 params.text_document.uri.to_file_path().unwrap(),
2537 Path::new("/dir/b.rs"),
2538 );
2539 assert_eq!(params.position, lsp::Position::new(0, 22));
2540
2541 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2542 lsp::Location::new(
2543 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2544 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2545 ),
2546 )))
2547 });
2548
2549 let mut definitions = project
2550 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2551 .await
2552 .unwrap();
2553
2554 // Assert no new language server started
2555 cx.executor().run_until_parked();
2556 assert!(fake_servers.try_next().is_err());
2557
2558 assert_eq!(definitions.len(), 1);
2559 let definition = definitions.pop().unwrap();
2560 cx.update(|cx| {
2561 let target_buffer = definition.target.buffer.read(cx);
2562 assert_eq!(
2563 target_buffer
2564 .file()
2565 .unwrap()
2566 .as_local()
2567 .unwrap()
2568 .abs_path(cx),
2569 Path::new("/dir/a.rs"),
2570 );
2571 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
2572 assert_eq!(
2573 list_worktrees(&project, cx),
2574 [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
2575 );
2576
2577 drop(definition);
2578 });
2579 cx.update(|cx| {
2580 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2581 });
2582
2583 fn list_worktrees<'a>(
2584 project: &'a Model<Project>,
2585 cx: &'a AppContext,
2586 ) -> Vec<(&'a Path, bool)> {
2587 project
2588 .read(cx)
2589 .worktrees(cx)
2590 .map(|worktree| {
2591 let worktree = worktree.read(cx);
2592 (
2593 worktree.as_local().unwrap().abs_path().as_ref(),
2594 worktree.is_visible(),
2595 )
2596 })
2597 .collect::<Vec<_>>()
2598 }
2599}
2600
2601#[gpui::test]
2602async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2603 init_test(cx);
2604
2605 let fs = FakeFs::new(cx.executor());
2606 fs.insert_tree(
2607 "/dir",
2608 json!({
2609 "a.ts": "",
2610 }),
2611 )
2612 .await;
2613
2614 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2615
2616 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2617 language_registry.add(typescript_lang());
2618 let mut fake_language_servers = language_registry.register_fake_lsp(
2619 "TypeScript",
2620 FakeLspAdapter {
2621 capabilities: lsp::ServerCapabilities {
2622 completion_provider: Some(lsp::CompletionOptions {
2623 trigger_characters: Some(vec![":".to_string()]),
2624 ..Default::default()
2625 }),
2626 ..Default::default()
2627 },
2628 ..Default::default()
2629 },
2630 );
2631
2632 let buffer = project
2633 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2634 .await
2635 .unwrap();
2636
2637 let fake_server = fake_language_servers.next().await.unwrap();
2638
2639 let text = "let a = b.fqn";
2640 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2641 let completions = project.update(cx, |project, cx| {
2642 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
2643 });
2644
2645 fake_server
2646 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2647 Ok(Some(lsp::CompletionResponse::Array(vec![
2648 lsp::CompletionItem {
2649 label: "fullyQualifiedName?".into(),
2650 insert_text: Some("fullyQualifiedName".into()),
2651 ..Default::default()
2652 },
2653 ])))
2654 })
2655 .next()
2656 .await;
2657 let completions = completions.await.unwrap();
2658 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2659 assert_eq!(completions.len(), 1);
2660 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2661 assert_eq!(
2662 completions[0].old_range.to_offset(&snapshot),
2663 text.len() - 3..text.len()
2664 );
2665
2666 let text = "let a = \"atoms/cmp\"";
2667 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2668 let completions = project.update(cx, |project, cx| {
2669 project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
2670 });
2671
2672 fake_server
2673 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2674 Ok(Some(lsp::CompletionResponse::Array(vec![
2675 lsp::CompletionItem {
2676 label: "component".into(),
2677 ..Default::default()
2678 },
2679 ])))
2680 })
2681 .next()
2682 .await;
2683 let completions = completions.await.unwrap();
2684 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2685 assert_eq!(completions.len(), 1);
2686 assert_eq!(completions[0].new_text, "component");
2687 assert_eq!(
2688 completions[0].old_range.to_offset(&snapshot),
2689 text.len() - 4..text.len() - 1
2690 );
2691}
2692
2693#[gpui::test]
2694async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2695 init_test(cx);
2696
2697 let fs = FakeFs::new(cx.executor());
2698 fs.insert_tree(
2699 "/dir",
2700 json!({
2701 "a.ts": "",
2702 }),
2703 )
2704 .await;
2705
2706 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2707
2708 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2709 language_registry.add(typescript_lang());
2710 let mut fake_language_servers = language_registry.register_fake_lsp(
2711 "TypeScript",
2712 FakeLspAdapter {
2713 capabilities: lsp::ServerCapabilities {
2714 completion_provider: Some(lsp::CompletionOptions {
2715 trigger_characters: Some(vec![":".to_string()]),
2716 ..Default::default()
2717 }),
2718 ..Default::default()
2719 },
2720 ..Default::default()
2721 },
2722 );
2723
2724 let buffer = project
2725 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2726 .await
2727 .unwrap();
2728
2729 let fake_server = fake_language_servers.next().await.unwrap();
2730
2731 let text = "let a = b.fqn";
2732 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2733 let completions = project.update(cx, |project, cx| {
2734 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
2735 });
2736
2737 fake_server
2738 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2739 Ok(Some(lsp::CompletionResponse::Array(vec![
2740 lsp::CompletionItem {
2741 label: "fullyQualifiedName?".into(),
2742 insert_text: Some("fully\rQualified\r\nName".into()),
2743 ..Default::default()
2744 },
2745 ])))
2746 })
2747 .next()
2748 .await;
2749 let completions = completions.await.unwrap();
2750 assert_eq!(completions.len(), 1);
2751 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2752}
2753
2754#[gpui::test(iterations = 10)]
2755async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2756 init_test(cx);
2757
2758 let fs = FakeFs::new(cx.executor());
2759 fs.insert_tree(
2760 "/dir",
2761 json!({
2762 "a.ts": "a",
2763 }),
2764 )
2765 .await;
2766
2767 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2768
2769 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2770 language_registry.add(typescript_lang());
2771 let mut fake_language_servers = language_registry.register_fake_lsp(
2772 "TypeScript",
2773 FakeLspAdapter {
2774 capabilities: lsp::ServerCapabilities {
2775 code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
2776 lsp::CodeActionOptions {
2777 resolve_provider: Some(true),
2778 ..lsp::CodeActionOptions::default()
2779 },
2780 )),
2781 ..lsp::ServerCapabilities::default()
2782 },
2783 ..FakeLspAdapter::default()
2784 },
2785 );
2786
2787 let buffer = project
2788 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2789 .await
2790 .unwrap();
2791
2792 let fake_server = fake_language_servers.next().await.unwrap();
2793
2794 // Language server returns code actions that contain commands, and not edits.
2795 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2796 fake_server
2797 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2798 Ok(Some(vec![
2799 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2800 title: "The code action".into(),
2801 data: Some(serde_json::json!({
2802 "command": "_the/command",
2803 })),
2804 ..lsp::CodeAction::default()
2805 }),
2806 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2807 title: "two".into(),
2808 ..lsp::CodeAction::default()
2809 }),
2810 ]))
2811 })
2812 .next()
2813 .await;
2814
2815 let action = actions.await.unwrap()[0].clone();
2816 let apply = project.update(cx, |project, cx| {
2817 project.apply_code_action(buffer.clone(), action, true, cx)
2818 });
2819
2820 // Resolving the code action does not populate its edits. In absence of
2821 // edits, we must execute the given command.
2822 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2823 |mut action, _| async move {
2824 if action.data.is_some() {
2825 action.command = Some(lsp::Command {
2826 title: "The command".into(),
2827 command: "_the/command".into(),
2828 arguments: Some(vec![json!("the-argument")]),
2829 });
2830 }
2831 Ok(action)
2832 },
2833 );
2834
2835 // While executing the command, the language server sends the editor
2836 // a `workspaceEdit` request.
2837 fake_server
2838 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2839 let fake = fake_server.clone();
2840 move |params, _| {
2841 assert_eq!(params.command, "_the/command");
2842 let fake = fake.clone();
2843 async move {
2844 fake.server
2845 .request::<lsp::request::ApplyWorkspaceEdit>(
2846 lsp::ApplyWorkspaceEditParams {
2847 label: None,
2848 edit: lsp::WorkspaceEdit {
2849 changes: Some(
2850 [(
2851 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2852 vec![lsp::TextEdit {
2853 range: lsp::Range::new(
2854 lsp::Position::new(0, 0),
2855 lsp::Position::new(0, 0),
2856 ),
2857 new_text: "X".into(),
2858 }],
2859 )]
2860 .into_iter()
2861 .collect(),
2862 ),
2863 ..Default::default()
2864 },
2865 },
2866 )
2867 .await
2868 .unwrap();
2869 Ok(Some(json!(null)))
2870 }
2871 }
2872 })
2873 .next()
2874 .await;
2875
2876 // Applying the code action returns a project transaction containing the edits
2877 // sent by the language server in its `workspaceEdit` request.
2878 let transaction = apply.await.unwrap();
2879 assert!(transaction.0.contains_key(&buffer));
2880 buffer.update(cx, |buffer, cx| {
2881 assert_eq!(buffer.text(), "Xa");
2882 buffer.undo(cx);
2883 assert_eq!(buffer.text(), "a");
2884 });
2885}
2886
2887#[gpui::test(iterations = 10)]
2888async fn test_save_file(cx: &mut gpui::TestAppContext) {
2889 init_test(cx);
2890
2891 let fs = FakeFs::new(cx.executor());
2892 fs.insert_tree(
2893 "/dir",
2894 json!({
2895 "file1": "the old contents",
2896 }),
2897 )
2898 .await;
2899
2900 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2901 let buffer = project
2902 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2903 .await
2904 .unwrap();
2905 buffer.update(cx, |buffer, cx| {
2906 assert_eq!(buffer.text(), "the old contents");
2907 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2908 });
2909
2910 project
2911 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2912 .await
2913 .unwrap();
2914
2915 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2916 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2917}
2918
2919#[gpui::test(iterations = 30)]
2920async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2921 init_test(cx);
2922
2923 let fs = FakeFs::new(cx.executor().clone());
2924 fs.insert_tree(
2925 "/dir",
2926 json!({
2927 "file1": "the original contents",
2928 }),
2929 )
2930 .await;
2931
2932 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2933 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2934 let buffer = project
2935 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2936 .await
2937 .unwrap();
2938
2939 // Simulate buffer diffs being slow, so that they don't complete before
2940 // the next file change occurs.
2941 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2942
2943 // Change the buffer's file on disk, and then wait for the file change
2944 // to be detected by the worktree, so that the buffer starts reloading.
2945 fs.save(
2946 "/dir/file1".as_ref(),
2947 &"the first contents".into(),
2948 Default::default(),
2949 )
2950 .await
2951 .unwrap();
2952 worktree.next_event(cx).await;
2953
2954 // Change the buffer's file again. Depending on the random seed, the
2955 // previous file change may still be in progress.
2956 fs.save(
2957 "/dir/file1".as_ref(),
2958 &"the second contents".into(),
2959 Default::default(),
2960 )
2961 .await
2962 .unwrap();
2963 worktree.next_event(cx).await;
2964
2965 cx.executor().run_until_parked();
2966 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2967 buffer.read_with(cx, |buffer, _| {
2968 assert_eq!(buffer.text(), on_disk_text);
2969 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2970 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2971 });
2972}
2973
2974#[gpui::test(iterations = 30)]
2975async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2976 init_test(cx);
2977
2978 let fs = FakeFs::new(cx.executor().clone());
2979 fs.insert_tree(
2980 "/dir",
2981 json!({
2982 "file1": "the original contents",
2983 }),
2984 )
2985 .await;
2986
2987 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2988 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2989 let buffer = project
2990 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2991 .await
2992 .unwrap();
2993
2994 // Simulate buffer diffs being slow, so that they don't complete before
2995 // the next file change occurs.
2996 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2997
2998 // Change the buffer's file on disk, and then wait for the file change
2999 // to be detected by the worktree, so that the buffer starts reloading.
3000 fs.save(
3001 "/dir/file1".as_ref(),
3002 &"the first contents".into(),
3003 Default::default(),
3004 )
3005 .await
3006 .unwrap();
3007 worktree.next_event(cx).await;
3008
3009 cx.executor()
3010 .spawn(cx.executor().simulate_random_delay())
3011 .await;
3012
3013 // Perform a noop edit, causing the buffer's version to increase.
3014 buffer.update(cx, |buffer, cx| {
3015 buffer.edit([(0..0, " ")], None, cx);
3016 buffer.undo(cx);
3017 });
3018
3019 cx.executor().run_until_parked();
3020 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3021 buffer.read_with(cx, |buffer, _| {
3022 let buffer_text = buffer.text();
3023 if buffer_text == on_disk_text {
3024 assert!(
3025 !buffer.is_dirty() && !buffer.has_conflict(),
3026 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3027 );
3028 }
3029 // If the file change occurred while the buffer was processing the first
3030 // change, the buffer will be in a conflicting state.
3031 else {
3032 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3033 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3034 }
3035 });
3036}
3037
3038#[gpui::test]
3039async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3040 init_test(cx);
3041
3042 let fs = FakeFs::new(cx.executor());
3043 fs.insert_tree(
3044 "/dir",
3045 json!({
3046 "file1": "the old contents",
3047 }),
3048 )
3049 .await;
3050
3051 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
3052 let buffer = project
3053 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3054 .await
3055 .unwrap();
3056 buffer.update(cx, |buffer, cx| {
3057 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3058 });
3059
3060 project
3061 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3062 .await
3063 .unwrap();
3064
3065 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3066 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3067}
3068
3069#[gpui::test]
3070async fn test_save_as(cx: &mut gpui::TestAppContext) {
3071 init_test(cx);
3072
3073 let fs = FakeFs::new(cx.executor());
3074 fs.insert_tree("/dir", json!({})).await;
3075
3076 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3077
3078 let languages = project.update(cx, |project, _| project.languages().clone());
3079 languages.add(rust_lang());
3080
3081 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3082 buffer.update(cx, |buffer, cx| {
3083 buffer.edit([(0..0, "abc")], None, cx);
3084 assert!(buffer.is_dirty());
3085 assert!(!buffer.has_conflict());
3086 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3087 });
3088 project
3089 .update(cx, |project, cx| {
3090 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3091 let path = ProjectPath {
3092 worktree_id,
3093 path: Arc::from(Path::new("file1.rs")),
3094 };
3095 project.save_buffer_as(buffer.clone(), path, cx)
3096 })
3097 .await
3098 .unwrap();
3099 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3100
3101 cx.executor().run_until_parked();
3102 buffer.update(cx, |buffer, cx| {
3103 assert_eq!(
3104 buffer.file().unwrap().full_path(cx),
3105 Path::new("dir/file1.rs")
3106 );
3107 assert!(!buffer.is_dirty());
3108 assert!(!buffer.has_conflict());
3109 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3110 });
3111
3112 let opened_buffer = project
3113 .update(cx, |project, cx| {
3114 project.open_local_buffer("/dir/file1.rs", cx)
3115 })
3116 .await
3117 .unwrap();
3118 assert_eq!(opened_buffer, buffer);
3119}
3120
3121#[gpui::test(retries = 5)]
3122async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
3123 use worktree::WorktreeModelHandle as _;
3124
3125 init_test(cx);
3126 cx.executor().allow_parking();
3127
3128 let dir = temp_tree(json!({
3129 "a": {
3130 "file1": "",
3131 "file2": "",
3132 "file3": "",
3133 },
3134 "b": {
3135 "c": {
3136 "file4": "",
3137 "file5": "",
3138 }
3139 }
3140 }));
3141
3142 let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;
3143
3144 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3145 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
3146 async move { buffer.await.unwrap() }
3147 };
3148 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3149 project.update(cx, |project, cx| {
3150 let tree = project.worktrees(cx).next().unwrap();
3151 tree.read(cx)
3152 .entry_for_path(path)
3153 .unwrap_or_else(|| panic!("no entry for path {}", path))
3154 .id
3155 })
3156 };
3157
3158 let buffer2 = buffer_for_path("a/file2", cx).await;
3159 let buffer3 = buffer_for_path("a/file3", cx).await;
3160 let buffer4 = buffer_for_path("b/c/file4", cx).await;
3161 let buffer5 = buffer_for_path("b/c/file5", cx).await;
3162
3163 let file2_id = id_for_path("a/file2", cx);
3164 let file3_id = id_for_path("a/file3", cx);
3165 let file4_id = id_for_path("b/c/file4", cx);
3166
3167 // Create a remote copy of this worktree.
3168 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3169 let metadata = tree.update(cx, |tree, _| tree.metadata_proto());
3170
3171 let updates = Arc::new(Mutex::new(Vec::new()));
3172 tree.update(cx, |tree, cx| {
3173 let updates = updates.clone();
3174 tree.observe_updates(0, cx, move |update| {
3175 updates.lock().push(update);
3176 async { true }
3177 });
3178 });
3179
3180 let remote =
3181 cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));
3182
3183 cx.executor().run_until_parked();
3184
3185 cx.update(|cx| {
3186 assert!(!buffer2.read(cx).is_dirty());
3187 assert!(!buffer3.read(cx).is_dirty());
3188 assert!(!buffer4.read(cx).is_dirty());
3189 assert!(!buffer5.read(cx).is_dirty());
3190 });
3191
3192 // Rename and delete files and directories.
3193 tree.flush_fs_events(cx).await;
3194 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3195 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3196 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3197 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3198 tree.flush_fs_events(cx).await;
3199
3200 let expected_paths = vec![
3201 "a",
3202 "a/file1",
3203 "a/file2.new",
3204 "b",
3205 "d",
3206 "d/file3",
3207 "d/file4",
3208 ];
3209
3210 cx.update(|app| {
3211 assert_eq!(
3212 tree.read(app)
3213 .paths()
3214 .map(|p| p.to_str().unwrap())
3215 .collect::<Vec<_>>(),
3216 expected_paths
3217 );
3218 });
3219
3220 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
3221 assert_eq!(id_for_path("d/file3", cx), file3_id);
3222 assert_eq!(id_for_path("d/file4", cx), file4_id);
3223
3224 cx.update(|cx| {
3225 assert_eq!(
3226 buffer2.read(cx).file().unwrap().path().as_ref(),
3227 Path::new("a/file2.new")
3228 );
3229 assert_eq!(
3230 buffer3.read(cx).file().unwrap().path().as_ref(),
3231 Path::new("d/file3")
3232 );
3233 assert_eq!(
3234 buffer4.read(cx).file().unwrap().path().as_ref(),
3235 Path::new("d/file4")
3236 );
3237 assert_eq!(
3238 buffer5.read(cx).file().unwrap().path().as_ref(),
3239 Path::new("b/c/file5")
3240 );
3241
3242 assert_matches!(
3243 buffer2.read(cx).file().unwrap().disk_state(),
3244 DiskState::Present { .. }
3245 );
3246 assert_matches!(
3247 buffer3.read(cx).file().unwrap().disk_state(),
3248 DiskState::Present { .. }
3249 );
3250 assert_matches!(
3251 buffer4.read(cx).file().unwrap().disk_state(),
3252 DiskState::Present { .. }
3253 );
3254 assert_eq!(
3255 buffer5.read(cx).file().unwrap().disk_state(),
3256 DiskState::Deleted
3257 );
3258 });
3259
3260 // Update the remote worktree. Check that it becomes consistent with the
3261 // local worktree.
3262 cx.executor().run_until_parked();
3263
3264 remote.update(cx, |remote, _| {
3265 for update in updates.lock().drain(..) {
3266 remote.as_remote_mut().unwrap().update_from_remote(update);
3267 }
3268 });
3269 cx.executor().run_until_parked();
3270 remote.update(cx, |remote, _| {
3271 assert_eq!(
3272 remote
3273 .paths()
3274 .map(|p| p.to_str().unwrap())
3275 .collect::<Vec<_>>(),
3276 expected_paths
3277 );
3278 });
3279}
3280
3281#[gpui::test(iterations = 10)]
3282async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3283 init_test(cx);
3284
3285 let fs = FakeFs::new(cx.executor());
3286 fs.insert_tree(
3287 "/dir",
3288 json!({
3289 "a": {
3290 "file1": "",
3291 }
3292 }),
3293 )
3294 .await;
3295
3296 let project = Project::test(fs, [Path::new("/dir")], cx).await;
3297 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3298 let tree_id = tree.update(cx, |tree, _| tree.id());
3299
3300 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3301 project.update(cx, |project, cx| {
3302 let tree = project.worktrees(cx).next().unwrap();
3303 tree.read(cx)
3304 .entry_for_path(path)
3305 .unwrap_or_else(|| panic!("no entry for path {}", path))
3306 .id
3307 })
3308 };
3309
3310 let dir_id = id_for_path("a", cx);
3311 let file_id = id_for_path("a/file1", cx);
3312 let buffer = project
3313 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3314 .await
3315 .unwrap();
3316 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3317
3318 project
3319 .update(cx, |project, cx| {
3320 project.rename_entry(dir_id, Path::new("b"), cx)
3321 })
3322 .unwrap()
3323 .await
3324 .to_included()
3325 .unwrap();
3326 cx.executor().run_until_parked();
3327
3328 assert_eq!(id_for_path("b", cx), dir_id);
3329 assert_eq!(id_for_path("b/file1", cx), file_id);
3330 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3331}
3332
3333#[gpui::test]
3334async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3335 init_test(cx);
3336
3337 let fs = FakeFs::new(cx.executor());
3338 fs.insert_tree(
3339 "/dir",
3340 json!({
3341 "a.txt": "a-contents",
3342 "b.txt": "b-contents",
3343 }),
3344 )
3345 .await;
3346
3347 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3348
3349 // Spawn multiple tasks to open paths, repeating some paths.
3350 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3351 (
3352 p.open_local_buffer("/dir/a.txt", cx),
3353 p.open_local_buffer("/dir/b.txt", cx),
3354 p.open_local_buffer("/dir/a.txt", cx),
3355 )
3356 });
3357
3358 let buffer_a_1 = buffer_a_1.await.unwrap();
3359 let buffer_a_2 = buffer_a_2.await.unwrap();
3360 let buffer_b = buffer_b.await.unwrap();
3361 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3362 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3363
3364 // There is only one buffer per path.
3365 let buffer_a_id = buffer_a_1.entity_id();
3366 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3367
3368 // Open the same path again while it is still open.
3369 drop(buffer_a_1);
3370 let buffer_a_3 = project
3371 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3372 .await
3373 .unwrap();
3374
3375 // There's still only one buffer per path.
3376 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3377}
3378
3379#[gpui::test]
3380async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3381 init_test(cx);
3382
3383 let fs = FakeFs::new(cx.executor());
3384 fs.insert_tree(
3385 "/dir",
3386 json!({
3387 "file1": "abc",
3388 "file2": "def",
3389 "file3": "ghi",
3390 }),
3391 )
3392 .await;
3393
3394 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3395
3396 let buffer1 = project
3397 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3398 .await
3399 .unwrap();
3400 let events = Arc::new(Mutex::new(Vec::new()));
3401
3402 // initially, the buffer isn't dirty.
3403 buffer1.update(cx, |buffer, cx| {
3404 cx.subscribe(&buffer1, {
3405 let events = events.clone();
3406 move |_, _, event, _| match event {
3407 BufferEvent::Operation { .. } => {}
3408 _ => events.lock().push(event.clone()),
3409 }
3410 })
3411 .detach();
3412
3413 assert!(!buffer.is_dirty());
3414 assert!(events.lock().is_empty());
3415
3416 buffer.edit([(1..2, "")], None, cx);
3417 });
3418
3419 // after the first edit, the buffer is dirty, and emits a dirtied event.
3420 buffer1.update(cx, |buffer, cx| {
3421 assert!(buffer.text() == "ac");
3422 assert!(buffer.is_dirty());
3423 assert_eq!(
3424 *events.lock(),
3425 &[
3426 language::BufferEvent::Edited,
3427 language::BufferEvent::DirtyChanged
3428 ]
3429 );
3430 events.lock().clear();
3431 buffer.did_save(
3432 buffer.version(),
3433 buffer.file().unwrap().disk_state().mtime(),
3434 cx,
3435 );
3436 });
3437
3438 // after saving, the buffer is not dirty, and emits a saved event.
3439 buffer1.update(cx, |buffer, cx| {
3440 assert!(!buffer.is_dirty());
3441 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
3442 events.lock().clear();
3443
3444 buffer.edit([(1..1, "B")], None, cx);
3445 buffer.edit([(2..2, "D")], None, cx);
3446 });
3447
3448 // after editing again, the buffer is dirty, and emits another dirty event.
3449 buffer1.update(cx, |buffer, cx| {
3450 assert!(buffer.text() == "aBDc");
3451 assert!(buffer.is_dirty());
3452 assert_eq!(
3453 *events.lock(),
3454 &[
3455 language::BufferEvent::Edited,
3456 language::BufferEvent::DirtyChanged,
3457 language::BufferEvent::Edited,
3458 ],
3459 );
3460 events.lock().clear();
3461
3462 // After restoring the buffer to its previously-saved state,
3463 // the buffer is not considered dirty anymore.
3464 buffer.edit([(1..3, "")], None, cx);
3465 assert!(buffer.text() == "ac");
3466 assert!(!buffer.is_dirty());
3467 });
3468
3469 assert_eq!(
3470 *events.lock(),
3471 &[
3472 language::BufferEvent::Edited,
3473 language::BufferEvent::DirtyChanged
3474 ]
3475 );
3476
3477 // When a file is deleted, the buffer is considered dirty.
3478 let events = Arc::new(Mutex::new(Vec::new()));
3479 let buffer2 = project
3480 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3481 .await
3482 .unwrap();
3483 buffer2.update(cx, |_, cx| {
3484 cx.subscribe(&buffer2, {
3485 let events = events.clone();
3486 move |_, _, event, _| events.lock().push(event.clone())
3487 })
3488 .detach();
3489 });
3490
3491 fs.remove_file("/dir/file2".as_ref(), Default::default())
3492 .await
3493 .unwrap();
3494 cx.executor().run_until_parked();
3495 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3496 assert_eq!(
3497 *events.lock(),
3498 &[
3499 language::BufferEvent::DirtyChanged,
3500 language::BufferEvent::FileHandleChanged
3501 ]
3502 );
3503
3504 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3505 let events = Arc::new(Mutex::new(Vec::new()));
3506 let buffer3 = project
3507 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3508 .await
3509 .unwrap();
3510 buffer3.update(cx, |_, cx| {
3511 cx.subscribe(&buffer3, {
3512 let events = events.clone();
3513 move |_, _, event, _| events.lock().push(event.clone())
3514 })
3515 .detach();
3516 });
3517
3518 buffer3.update(cx, |buffer, cx| {
3519 buffer.edit([(0..0, "x")], None, cx);
3520 });
3521 events.lock().clear();
3522 fs.remove_file("/dir/file3".as_ref(), Default::default())
3523 .await
3524 .unwrap();
3525 cx.executor().run_until_parked();
3526 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
3527 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3528}
3529
3530#[gpui::test]
3531async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3532 init_test(cx);
3533
3534 let initial_contents = "aaa\nbbbbb\nc\n";
3535 let fs = FakeFs::new(cx.executor());
3536 fs.insert_tree(
3537 "/dir",
3538 json!({
3539 "the-file": initial_contents,
3540 }),
3541 )
3542 .await;
3543 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3544 let buffer = project
3545 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3546 .await
3547 .unwrap();
3548
3549 let anchors = (0..3)
3550 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3551 .collect::<Vec<_>>();
3552
3553 // Change the file on disk, adding two new lines of text, and removing
3554 // one line.
3555 buffer.update(cx, |buffer, _| {
3556 assert!(!buffer.is_dirty());
3557 assert!(!buffer.has_conflict());
3558 });
3559 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3560 fs.save(
3561 "/dir/the-file".as_ref(),
3562 &new_contents.into(),
3563 LineEnding::Unix,
3564 )
3565 .await
3566 .unwrap();
3567
3568 // Because the buffer was not modified, it is reloaded from disk. Its
3569 // contents are edited according to the diff between the old and new
3570 // file contents.
3571 cx.executor().run_until_parked();
3572 buffer.update(cx, |buffer, _| {
3573 assert_eq!(buffer.text(), new_contents);
3574 assert!(!buffer.is_dirty());
3575 assert!(!buffer.has_conflict());
3576
3577 let anchor_positions = anchors
3578 .iter()
3579 .map(|anchor| anchor.to_point(&*buffer))
3580 .collect::<Vec<_>>();
3581 assert_eq!(
3582 anchor_positions,
3583 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3584 );
3585 });
3586
3587 // Modify the buffer
3588 buffer.update(cx, |buffer, cx| {
3589 buffer.edit([(0..0, " ")], None, cx);
3590 assert!(buffer.is_dirty());
3591 assert!(!buffer.has_conflict());
3592 });
3593
3594 // Change the file on disk again, adding blank lines to the beginning.
3595 fs.save(
3596 "/dir/the-file".as_ref(),
3597 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3598 LineEnding::Unix,
3599 )
3600 .await
3601 .unwrap();
3602
3603 // Because the buffer is modified, it doesn't reload from disk, but is
3604 // marked as having a conflict.
3605 cx.executor().run_until_parked();
3606 buffer.update(cx, |buffer, _| {
3607 assert!(buffer.has_conflict());
3608 });
3609}
3610
3611#[gpui::test]
3612async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3613 init_test(cx);
3614
3615 let fs = FakeFs::new(cx.executor());
3616 fs.insert_tree(
3617 "/dir",
3618 json!({
3619 "file1": "a\nb\nc\n",
3620 "file2": "one\r\ntwo\r\nthree\r\n",
3621 }),
3622 )
3623 .await;
3624
3625 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3626 let buffer1 = project
3627 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3628 .await
3629 .unwrap();
3630 let buffer2 = project
3631 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3632 .await
3633 .unwrap();
3634
3635 buffer1.update(cx, |buffer, _| {
3636 assert_eq!(buffer.text(), "a\nb\nc\n");
3637 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3638 });
3639 buffer2.update(cx, |buffer, _| {
3640 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3641 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3642 });
3643
3644 // Change a file's line endings on disk from unix to windows. The buffer's
3645 // state updates correctly.
3646 fs.save(
3647 "/dir/file1".as_ref(),
3648 &"aaa\nb\nc\n".into(),
3649 LineEnding::Windows,
3650 )
3651 .await
3652 .unwrap();
3653 cx.executor().run_until_parked();
3654 buffer1.update(cx, |buffer, _| {
3655 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3656 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3657 });
3658
3659 // Save a file with windows line endings. The file is written correctly.
3660 buffer2.update(cx, |buffer, cx| {
3661 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3662 });
3663 project
3664 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3665 .await
3666 .unwrap();
3667 assert_eq!(
3668 fs.load("/dir/file2".as_ref()).await.unwrap(),
3669 "one\r\ntwo\r\nthree\r\nfour\r\n",
3670 );
3671}
3672
3673#[gpui::test]
3674async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3675 init_test(cx);
3676
3677 let fs = FakeFs::new(cx.executor());
3678 fs.insert_tree(
3679 "/the-dir",
3680 json!({
3681 "a.rs": "
3682 fn foo(mut v: Vec<usize>) {
3683 for x in &v {
3684 v.push(1);
3685 }
3686 }
3687 "
3688 .unindent(),
3689 }),
3690 )
3691 .await;
3692
3693 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3694 let buffer = project
3695 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3696 .await
3697 .unwrap();
3698
3699 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3700 let message = lsp::PublishDiagnosticsParams {
3701 uri: buffer_uri.clone(),
3702 diagnostics: vec![
3703 lsp::Diagnostic {
3704 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3705 severity: Some(DiagnosticSeverity::WARNING),
3706 message: "error 1".to_string(),
3707 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3708 location: lsp::Location {
3709 uri: buffer_uri.clone(),
3710 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3711 },
3712 message: "error 1 hint 1".to_string(),
3713 }]),
3714 ..Default::default()
3715 },
3716 lsp::Diagnostic {
3717 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3718 severity: Some(DiagnosticSeverity::HINT),
3719 message: "error 1 hint 1".to_string(),
3720 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3721 location: lsp::Location {
3722 uri: buffer_uri.clone(),
3723 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3724 },
3725 message: "original diagnostic".to_string(),
3726 }]),
3727 ..Default::default()
3728 },
3729 lsp::Diagnostic {
3730 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3731 severity: Some(DiagnosticSeverity::ERROR),
3732 message: "error 2".to_string(),
3733 related_information: Some(vec![
3734 lsp::DiagnosticRelatedInformation {
3735 location: lsp::Location {
3736 uri: buffer_uri.clone(),
3737 range: lsp::Range::new(
3738 lsp::Position::new(1, 13),
3739 lsp::Position::new(1, 15),
3740 ),
3741 },
3742 message: "error 2 hint 1".to_string(),
3743 },
3744 lsp::DiagnosticRelatedInformation {
3745 location: lsp::Location {
3746 uri: buffer_uri.clone(),
3747 range: lsp::Range::new(
3748 lsp::Position::new(1, 13),
3749 lsp::Position::new(1, 15),
3750 ),
3751 },
3752 message: "error 2 hint 2".to_string(),
3753 },
3754 ]),
3755 ..Default::default()
3756 },
3757 lsp::Diagnostic {
3758 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3759 severity: Some(DiagnosticSeverity::HINT),
3760 message: "error 2 hint 1".to_string(),
3761 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3762 location: lsp::Location {
3763 uri: buffer_uri.clone(),
3764 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3765 },
3766 message: "original diagnostic".to_string(),
3767 }]),
3768 ..Default::default()
3769 },
3770 lsp::Diagnostic {
3771 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3772 severity: Some(DiagnosticSeverity::HINT),
3773 message: "error 2 hint 2".to_string(),
3774 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3775 location: lsp::Location {
3776 uri: buffer_uri,
3777 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3778 },
3779 message: "original diagnostic".to_string(),
3780 }]),
3781 ..Default::default()
3782 },
3783 ],
3784 version: None,
3785 };
3786
3787 project
3788 .update(cx, |p, cx| {
3789 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3790 })
3791 .unwrap();
3792 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
3793
3794 assert_eq!(
3795 buffer
3796 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3797 .collect::<Vec<_>>(),
3798 &[
3799 DiagnosticEntry {
3800 range: Point::new(1, 8)..Point::new(1, 9),
3801 diagnostic: Diagnostic {
3802 severity: DiagnosticSeverity::WARNING,
3803 message: "error 1".to_string(),
3804 group_id: 1,
3805 is_primary: true,
3806 ..Default::default()
3807 }
3808 },
3809 DiagnosticEntry {
3810 range: Point::new(1, 8)..Point::new(1, 9),
3811 diagnostic: Diagnostic {
3812 severity: DiagnosticSeverity::HINT,
3813 message: "error 1 hint 1".to_string(),
3814 group_id: 1,
3815 is_primary: false,
3816 ..Default::default()
3817 }
3818 },
3819 DiagnosticEntry {
3820 range: Point::new(1, 13)..Point::new(1, 15),
3821 diagnostic: Diagnostic {
3822 severity: DiagnosticSeverity::HINT,
3823 message: "error 2 hint 1".to_string(),
3824 group_id: 0,
3825 is_primary: false,
3826 ..Default::default()
3827 }
3828 },
3829 DiagnosticEntry {
3830 range: Point::new(1, 13)..Point::new(1, 15),
3831 diagnostic: Diagnostic {
3832 severity: DiagnosticSeverity::HINT,
3833 message: "error 2 hint 2".to_string(),
3834 group_id: 0,
3835 is_primary: false,
3836 ..Default::default()
3837 }
3838 },
3839 DiagnosticEntry {
3840 range: Point::new(2, 8)..Point::new(2, 17),
3841 diagnostic: Diagnostic {
3842 severity: DiagnosticSeverity::ERROR,
3843 message: "error 2".to_string(),
3844 group_id: 0,
3845 is_primary: true,
3846 ..Default::default()
3847 }
3848 }
3849 ]
3850 );
3851
3852 assert_eq!(
3853 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3854 &[
3855 DiagnosticEntry {
3856 range: Point::new(1, 13)..Point::new(1, 15),
3857 diagnostic: Diagnostic {
3858 severity: DiagnosticSeverity::HINT,
3859 message: "error 2 hint 1".to_string(),
3860 group_id: 0,
3861 is_primary: false,
3862 ..Default::default()
3863 }
3864 },
3865 DiagnosticEntry {
3866 range: Point::new(1, 13)..Point::new(1, 15),
3867 diagnostic: Diagnostic {
3868 severity: DiagnosticSeverity::HINT,
3869 message: "error 2 hint 2".to_string(),
3870 group_id: 0,
3871 is_primary: false,
3872 ..Default::default()
3873 }
3874 },
3875 DiagnosticEntry {
3876 range: Point::new(2, 8)..Point::new(2, 17),
3877 diagnostic: Diagnostic {
3878 severity: DiagnosticSeverity::ERROR,
3879 message: "error 2".to_string(),
3880 group_id: 0,
3881 is_primary: true,
3882 ..Default::default()
3883 }
3884 }
3885 ]
3886 );
3887
3888 assert_eq!(
3889 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3890 &[
3891 DiagnosticEntry {
3892 range: Point::new(1, 8)..Point::new(1, 9),
3893 diagnostic: Diagnostic {
3894 severity: DiagnosticSeverity::WARNING,
3895 message: "error 1".to_string(),
3896 group_id: 1,
3897 is_primary: true,
3898 ..Default::default()
3899 }
3900 },
3901 DiagnosticEntry {
3902 range: Point::new(1, 8)..Point::new(1, 9),
3903 diagnostic: Diagnostic {
3904 severity: DiagnosticSeverity::HINT,
3905 message: "error 1 hint 1".to_string(),
3906 group_id: 1,
3907 is_primary: false,
3908 ..Default::default()
3909 }
3910 },
3911 ]
3912 );
3913}
3914
3915#[gpui::test]
3916async fn test_rename(cx: &mut gpui::TestAppContext) {
3917 // hi
3918 init_test(cx);
3919
3920 let fs = FakeFs::new(cx.executor());
3921 fs.insert_tree(
3922 "/dir",
3923 json!({
3924 "one.rs": "const ONE: usize = 1;",
3925 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3926 }),
3927 )
3928 .await;
3929
3930 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3931
3932 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3933 language_registry.add(rust_lang());
3934 let mut fake_servers = language_registry.register_fake_lsp(
3935 "Rust",
3936 FakeLspAdapter {
3937 capabilities: lsp::ServerCapabilities {
3938 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3939 prepare_provider: Some(true),
3940 work_done_progress_options: Default::default(),
3941 })),
3942 ..Default::default()
3943 },
3944 ..Default::default()
3945 },
3946 );
3947
3948 let buffer = project
3949 .update(cx, |project, cx| {
3950 project.open_local_buffer("/dir/one.rs", cx)
3951 })
3952 .await
3953 .unwrap();
3954
3955 let fake_server = fake_servers.next().await.unwrap();
3956
3957 let response = project.update(cx, |project, cx| {
3958 project.prepare_rename(buffer.clone(), 7, cx)
3959 });
3960 fake_server
3961 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3962 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3963 assert_eq!(params.position, lsp::Position::new(0, 7));
3964 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3965 lsp::Position::new(0, 6),
3966 lsp::Position::new(0, 9),
3967 ))))
3968 })
3969 .next()
3970 .await
3971 .unwrap();
3972 let range = response.await.unwrap().unwrap();
3973 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
3974 assert_eq!(range, 6..9);
3975
3976 let response = project.update(cx, |project, cx| {
3977 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
3978 });
3979 fake_server
3980 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3981 assert_eq!(
3982 params.text_document_position.text_document.uri.as_str(),
3983 "file:///dir/one.rs"
3984 );
3985 assert_eq!(
3986 params.text_document_position.position,
3987 lsp::Position::new(0, 7)
3988 );
3989 assert_eq!(params.new_name, "THREE");
3990 Ok(Some(lsp::WorkspaceEdit {
3991 changes: Some(
3992 [
3993 (
3994 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3995 vec![lsp::TextEdit::new(
3996 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3997 "THREE".to_string(),
3998 )],
3999 ),
4000 (
4001 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
4002 vec![
4003 lsp::TextEdit::new(
4004 lsp::Range::new(
4005 lsp::Position::new(0, 24),
4006 lsp::Position::new(0, 27),
4007 ),
4008 "THREE".to_string(),
4009 ),
4010 lsp::TextEdit::new(
4011 lsp::Range::new(
4012 lsp::Position::new(0, 35),
4013 lsp::Position::new(0, 38),
4014 ),
4015 "THREE".to_string(),
4016 ),
4017 ],
4018 ),
4019 ]
4020 .into_iter()
4021 .collect(),
4022 ),
4023 ..Default::default()
4024 }))
4025 })
4026 .next()
4027 .await
4028 .unwrap();
4029 let mut transaction = response.await.unwrap().0;
4030 assert_eq!(transaction.len(), 2);
4031 assert_eq!(
4032 transaction
4033 .remove_entry(&buffer)
4034 .unwrap()
4035 .0
4036 .update(cx, |buffer, _| buffer.text()),
4037 "const THREE: usize = 1;"
4038 );
4039 assert_eq!(
4040 transaction
4041 .into_keys()
4042 .next()
4043 .unwrap()
4044 .update(cx, |buffer, _| buffer.text()),
4045 "const TWO: usize = one::THREE + one::THREE;"
4046 );
4047}
4048
4049#[gpui::test]
4050async fn test_search(cx: &mut gpui::TestAppContext) {
4051 init_test(cx);
4052
4053 let fs = FakeFs::new(cx.executor());
4054 fs.insert_tree(
4055 "/dir",
4056 json!({
4057 "one.rs": "const ONE: usize = 1;",
4058 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4059 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4060 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4061 }),
4062 )
4063 .await;
4064 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4065 assert_eq!(
4066 search(
4067 &project,
4068 SearchQuery::text(
4069 "TWO",
4070 false,
4071 true,
4072 false,
4073 Default::default(),
4074 Default::default(),
4075 None
4076 )
4077 .unwrap(),
4078 cx
4079 )
4080 .await
4081 .unwrap(),
4082 HashMap::from_iter([
4083 ("dir/two.rs".to_string(), vec![6..9]),
4084 ("dir/three.rs".to_string(), vec![37..40])
4085 ])
4086 );
4087
4088 let buffer_4 = project
4089 .update(cx, |project, cx| {
4090 project.open_local_buffer("/dir/four.rs", cx)
4091 })
4092 .await
4093 .unwrap();
4094 buffer_4.update(cx, |buffer, cx| {
4095 let text = "two::TWO";
4096 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4097 });
4098
4099 assert_eq!(
4100 search(
4101 &project,
4102 SearchQuery::text(
4103 "TWO",
4104 false,
4105 true,
4106 false,
4107 Default::default(),
4108 Default::default(),
4109 None,
4110 )
4111 .unwrap(),
4112 cx
4113 )
4114 .await
4115 .unwrap(),
4116 HashMap::from_iter([
4117 ("dir/two.rs".to_string(), vec![6..9]),
4118 ("dir/three.rs".to_string(), vec![37..40]),
4119 ("dir/four.rs".to_string(), vec![25..28, 36..39])
4120 ])
4121 );
4122}
4123
4124#[gpui::test]
4125async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4126 init_test(cx);
4127
4128 let search_query = "file";
4129
4130 let fs = FakeFs::new(cx.executor());
4131 fs.insert_tree(
4132 "/dir",
4133 json!({
4134 "one.rs": r#"// Rust file one"#,
4135 "one.ts": r#"// TypeScript file one"#,
4136 "two.rs": r#"// Rust file two"#,
4137 "two.ts": r#"// TypeScript file two"#,
4138 }),
4139 )
4140 .await;
4141 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4142
4143 assert!(
4144 search(
4145 &project,
4146 SearchQuery::text(
4147 search_query,
4148 false,
4149 true,
4150 false,
4151 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4152 Default::default(),
4153 None
4154 )
4155 .unwrap(),
4156 cx
4157 )
4158 .await
4159 .unwrap()
4160 .is_empty(),
4161 "If no inclusions match, no files should be returned"
4162 );
4163
4164 assert_eq!(
4165 search(
4166 &project,
4167 SearchQuery::text(
4168 search_query,
4169 false,
4170 true,
4171 false,
4172 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4173 Default::default(),
4174 None
4175 )
4176 .unwrap(),
4177 cx
4178 )
4179 .await
4180 .unwrap(),
4181 HashMap::from_iter([
4182 ("dir/one.rs".to_string(), vec![8..12]),
4183 ("dir/two.rs".to_string(), vec![8..12]),
4184 ]),
4185 "Rust only search should give only Rust files"
4186 );
4187
4188 assert_eq!(
4189 search(
4190 &project,
4191 SearchQuery::text(
4192 search_query,
4193 false,
4194 true,
4195 false,
4196
4197 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4198
4199 Default::default(),
4200 None,
4201 ).unwrap(),
4202 cx
4203 )
4204 .await
4205 .unwrap(),
4206 HashMap::from_iter([
4207 ("dir/one.ts".to_string(), vec![14..18]),
4208 ("dir/two.ts".to_string(), vec![14..18]),
4209 ]),
4210 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4211 );
4212
4213 assert_eq!(
4214 search(
4215 &project,
4216 SearchQuery::text(
4217 search_query,
4218 false,
4219 true,
4220 false,
4221
4222 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4223
4224 Default::default(),
4225 None,
4226 ).unwrap(),
4227 cx
4228 )
4229 .await
4230 .unwrap(),
4231 HashMap::from_iter([
4232 ("dir/two.ts".to_string(), vec![14..18]),
4233 ("dir/one.rs".to_string(), vec![8..12]),
4234 ("dir/one.ts".to_string(), vec![14..18]),
4235 ("dir/two.rs".to_string(), vec![8..12]),
4236 ]),
4237 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4238 );
4239}
4240
4241#[gpui::test]
4242async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4243 init_test(cx);
4244
4245 let search_query = "file";
4246
4247 let fs = FakeFs::new(cx.executor());
4248 fs.insert_tree(
4249 "/dir",
4250 json!({
4251 "one.rs": r#"// Rust file one"#,
4252 "one.ts": r#"// TypeScript file one"#,
4253 "two.rs": r#"// Rust file two"#,
4254 "two.ts": r#"// TypeScript file two"#,
4255 }),
4256 )
4257 .await;
4258 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4259
4260 assert_eq!(
4261 search(
4262 &project,
4263 SearchQuery::text(
4264 search_query,
4265 false,
4266 true,
4267 false,
4268 Default::default(),
4269 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4270 None,
4271 )
4272 .unwrap(),
4273 cx
4274 )
4275 .await
4276 .unwrap(),
4277 HashMap::from_iter([
4278 ("dir/one.rs".to_string(), vec![8..12]),
4279 ("dir/one.ts".to_string(), vec![14..18]),
4280 ("dir/two.rs".to_string(), vec![8..12]),
4281 ("dir/two.ts".to_string(), vec![14..18]),
4282 ]),
4283 "If no exclusions match, all files should be returned"
4284 );
4285
4286 assert_eq!(
4287 search(
4288 &project,
4289 SearchQuery::text(
4290 search_query,
4291 false,
4292 true,
4293 false,
4294 Default::default(),
4295 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4296 None,
4297 )
4298 .unwrap(),
4299 cx
4300 )
4301 .await
4302 .unwrap(),
4303 HashMap::from_iter([
4304 ("dir/one.ts".to_string(), vec![14..18]),
4305 ("dir/two.ts".to_string(), vec![14..18]),
4306 ]),
4307 "Rust exclusion search should give only TypeScript files"
4308 );
4309
4310 assert_eq!(
4311 search(
4312 &project,
4313 SearchQuery::text(
4314 search_query,
4315 false,
4316 true,
4317 false,
4318 Default::default(),
4319 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4320 None,
4321 ).unwrap(),
4322 cx
4323 )
4324 .await
4325 .unwrap(),
4326 HashMap::from_iter([
4327 ("dir/one.rs".to_string(), vec![8..12]),
4328 ("dir/two.rs".to_string(), vec![8..12]),
4329 ]),
4330 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4331 );
4332
4333 assert!(
4334 search(
4335 &project,
4336 SearchQuery::text(
4337 search_query,
4338 false,
4339 true,
4340 false,
4341 Default::default(),
4342
4343 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4344 None,
4345
4346 ).unwrap(),
4347 cx
4348 )
4349 .await
4350 .unwrap().is_empty(),
4351 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4352 );
4353}
4354
4355#[gpui::test]
4356async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4357 init_test(cx);
4358
4359 let search_query = "file";
4360
4361 let fs = FakeFs::new(cx.executor());
4362 fs.insert_tree(
4363 "/dir",
4364 json!({
4365 "one.rs": r#"// Rust file one"#,
4366 "one.ts": r#"// TypeScript file one"#,
4367 "two.rs": r#"// Rust file two"#,
4368 "two.ts": r#"// TypeScript file two"#,
4369 }),
4370 )
4371 .await;
4372 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4373
4374 assert!(
4375 search(
4376 &project,
4377 SearchQuery::text(
4378 search_query,
4379 false,
4380 true,
4381 false,
4382 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4383 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4384 None,
4385 )
4386 .unwrap(),
4387 cx
4388 )
4389 .await
4390 .unwrap()
4391 .is_empty(),
4392 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4393 );
4394
4395 assert!(
4396 search(
4397 &project,
4398 SearchQuery::text(
4399 search_query,
4400 false,
4401 true,
4402 false,
4403 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4404 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4405 None,
4406 ).unwrap(),
4407 cx
4408 )
4409 .await
4410 .unwrap()
4411 .is_empty(),
4412 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4413 );
4414
4415 assert!(
4416 search(
4417 &project,
4418 SearchQuery::text(
4419 search_query,
4420 false,
4421 true,
4422 false,
4423 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4424 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4425 None,
4426 )
4427 .unwrap(),
4428 cx
4429 )
4430 .await
4431 .unwrap()
4432 .is_empty(),
4433 "Non-matching inclusions and exclusions should not change that."
4434 );
4435
4436 assert_eq!(
4437 search(
4438 &project,
4439 SearchQuery::text(
4440 search_query,
4441 false,
4442 true,
4443 false,
4444 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4445 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4446 None,
4447 )
4448 .unwrap(),
4449 cx
4450 )
4451 .await
4452 .unwrap(),
4453 HashMap::from_iter([
4454 ("dir/one.ts".to_string(), vec![14..18]),
4455 ("dir/two.ts".to_string(), vec![14..18]),
4456 ]),
4457 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4458 );
4459}
4460
4461#[gpui::test]
4462async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4463 init_test(cx);
4464
4465 let fs = FakeFs::new(cx.executor());
4466 fs.insert_tree(
4467 "/worktree-a",
4468 json!({
4469 "haystack.rs": r#"// NEEDLE"#,
4470 "haystack.ts": r#"// NEEDLE"#,
4471 }),
4472 )
4473 .await;
4474 fs.insert_tree(
4475 "/worktree-b",
4476 json!({
4477 "haystack.rs": r#"// NEEDLE"#,
4478 "haystack.ts": r#"// NEEDLE"#,
4479 }),
4480 )
4481 .await;
4482
4483 let project = Project::test(
4484 fs.clone(),
4485 ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
4486 cx,
4487 )
4488 .await;
4489
4490 assert_eq!(
4491 search(
4492 &project,
4493 SearchQuery::text(
4494 "NEEDLE",
4495 false,
4496 true,
4497 false,
4498 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4499 Default::default(),
4500 None,
4501 )
4502 .unwrap(),
4503 cx
4504 )
4505 .await
4506 .unwrap(),
4507 HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
4508 "should only return results from included worktree"
4509 );
4510 assert_eq!(
4511 search(
4512 &project,
4513 SearchQuery::text(
4514 "NEEDLE",
4515 false,
4516 true,
4517 false,
4518 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4519 Default::default(),
4520 None,
4521 )
4522 .unwrap(),
4523 cx
4524 )
4525 .await
4526 .unwrap(),
4527 HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
4528 "should only return results from included worktree"
4529 );
4530
4531 assert_eq!(
4532 search(
4533 &project,
4534 SearchQuery::text(
4535 "NEEDLE",
4536 false,
4537 true,
4538 false,
4539 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4540 Default::default(),
4541 None,
4542 )
4543 .unwrap(),
4544 cx
4545 )
4546 .await
4547 .unwrap(),
4548 HashMap::from_iter([
4549 ("worktree-a/haystack.ts".to_string(), vec![3..9]),
4550 ("worktree-b/haystack.ts".to_string(), vec![3..9])
4551 ]),
4552 "should return results from both worktrees"
4553 );
4554}
4555
4556#[gpui::test]
4557async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
4558 init_test(cx);
4559
4560 let fs = FakeFs::new(cx.background_executor.clone());
4561 fs.insert_tree(
4562 "/dir",
4563 json!({
4564 ".git": {},
4565 ".gitignore": "**/target\n/node_modules\n",
4566 "target": {
4567 "index.txt": "index_key:index_value"
4568 },
4569 "node_modules": {
4570 "eslint": {
4571 "index.ts": "const eslint_key = 'eslint value'",
4572 "package.json": r#"{ "some_key": "some value" }"#,
4573 },
4574 "prettier": {
4575 "index.ts": "const prettier_key = 'prettier value'",
4576 "package.json": r#"{ "other_key": "other value" }"#,
4577 },
4578 },
4579 "package.json": r#"{ "main_key": "main value" }"#,
4580 }),
4581 )
4582 .await;
4583 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4584
4585 let query = "key";
4586 assert_eq!(
4587 search(
4588 &project,
4589 SearchQuery::text(
4590 query,
4591 false,
4592 false,
4593 false,
4594 Default::default(),
4595 Default::default(),
4596 None,
4597 )
4598 .unwrap(),
4599 cx
4600 )
4601 .await
4602 .unwrap(),
4603 HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
4604 "Only one non-ignored file should have the query"
4605 );
4606
4607 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4608 assert_eq!(
4609 search(
4610 &project,
4611 SearchQuery::text(
4612 query,
4613 false,
4614 false,
4615 true,
4616 Default::default(),
4617 Default::default(),
4618 None,
4619 )
4620 .unwrap(),
4621 cx
4622 )
4623 .await
4624 .unwrap(),
4625 HashMap::from_iter([
4626 ("dir/package.json".to_string(), vec![8..11]),
4627 ("dir/target/index.txt".to_string(), vec![6..9]),
4628 (
4629 "dir/node_modules/prettier/package.json".to_string(),
4630 vec![9..12]
4631 ),
4632 (
4633 "dir/node_modules/prettier/index.ts".to_string(),
4634 vec![15..18]
4635 ),
4636 ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
4637 (
4638 "dir/node_modules/eslint/package.json".to_string(),
4639 vec![8..11]
4640 ),
4641 ]),
4642 "Unrestricted search with ignored directories should find every file with the query"
4643 );
4644
4645 let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
4646 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
4647 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4648 assert_eq!(
4649 search(
4650 &project,
4651 SearchQuery::text(
4652 query,
4653 false,
4654 false,
4655 true,
4656 files_to_include,
4657 files_to_exclude,
4658 None,
4659 )
4660 .unwrap(),
4661 cx
4662 )
4663 .await
4664 .unwrap(),
4665 HashMap::from_iter([(
4666 "dir/node_modules/prettier/package.json".to_string(),
4667 vec![9..12]
4668 )]),
4669 "With search including ignored prettier directory and excluding TS files, only one file should be found"
4670 );
4671}
4672
4673#[gpui::test]
4674async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4675 init_test(cx);
4676
4677 let fs = FakeFs::new(cx.executor().clone());
4678 fs.insert_tree(
4679 "/one/two",
4680 json!({
4681 "three": {
4682 "a.txt": "",
4683 "four": {}
4684 },
4685 "c.rs": ""
4686 }),
4687 )
4688 .await;
4689
4690 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
4691 project
4692 .update(cx, |project, cx| {
4693 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4694 project.create_entry((id, "b.."), true, cx)
4695 })
4696 .await
4697 .unwrap()
4698 .to_included()
4699 .unwrap();
4700
4701 // Can't create paths outside the project
4702 let result = project
4703 .update(cx, |project, cx| {
4704 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4705 project.create_entry((id, "../../boop"), true, cx)
4706 })
4707 .await;
4708 assert!(result.is_err());
4709
4710 // Can't create paths with '..'
4711 let result = project
4712 .update(cx, |project, cx| {
4713 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4714 project.create_entry((id, "four/../beep"), true, cx)
4715 })
4716 .await;
4717 assert!(result.is_err());
4718
4719 assert_eq!(
4720 fs.paths(true),
4721 vec![
4722 PathBuf::from("/"),
4723 PathBuf::from("/one"),
4724 PathBuf::from("/one/two"),
4725 PathBuf::from("/one/two/c.rs"),
4726 PathBuf::from("/one/two/three"),
4727 PathBuf::from("/one/two/three/a.txt"),
4728 PathBuf::from("/one/two/three/b.."),
4729 PathBuf::from("/one/two/three/four"),
4730 ]
4731 );
4732
4733 // And we cannot open buffers with '..'
4734 let result = project
4735 .update(cx, |project, cx| {
4736 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4737 project.open_buffer((id, "../c.rs"), cx)
4738 })
4739 .await;
4740 assert!(result.is_err())
4741}
4742
4743#[gpui::test]
4744async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
4745 init_test(cx);
4746
4747 let fs = FakeFs::new(cx.executor());
4748 fs.insert_tree(
4749 "/dir",
4750 json!({
4751 "a.tsx": "a",
4752 }),
4753 )
4754 .await;
4755
4756 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4757
4758 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4759 language_registry.add(tsx_lang());
4760 let language_server_names = [
4761 "TypeScriptServer",
4762 "TailwindServer",
4763 "ESLintServer",
4764 "NoHoverCapabilitiesServer",
4765 ];
4766 let mut language_servers = [
4767 language_registry.register_fake_lsp(
4768 "tsx",
4769 FakeLspAdapter {
4770 name: language_server_names[0],
4771 capabilities: lsp::ServerCapabilities {
4772 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4773 ..lsp::ServerCapabilities::default()
4774 },
4775 ..FakeLspAdapter::default()
4776 },
4777 ),
4778 language_registry.register_fake_lsp(
4779 "tsx",
4780 FakeLspAdapter {
4781 name: language_server_names[1],
4782 capabilities: lsp::ServerCapabilities {
4783 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4784 ..lsp::ServerCapabilities::default()
4785 },
4786 ..FakeLspAdapter::default()
4787 },
4788 ),
4789 language_registry.register_fake_lsp(
4790 "tsx",
4791 FakeLspAdapter {
4792 name: language_server_names[2],
4793 capabilities: lsp::ServerCapabilities {
4794 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4795 ..lsp::ServerCapabilities::default()
4796 },
4797 ..FakeLspAdapter::default()
4798 },
4799 ),
4800 language_registry.register_fake_lsp(
4801 "tsx",
4802 FakeLspAdapter {
4803 name: language_server_names[3],
4804 capabilities: lsp::ServerCapabilities {
4805 hover_provider: None,
4806 ..lsp::ServerCapabilities::default()
4807 },
4808 ..FakeLspAdapter::default()
4809 },
4810 ),
4811 ];
4812
4813 let buffer = project
4814 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4815 .await
4816 .unwrap();
4817 cx.executor().run_until_parked();
4818
4819 let mut servers_with_hover_requests = HashMap::default();
4820 for i in 0..language_server_names.len() {
4821 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
4822 panic!(
4823 "Failed to get language server #{i} with name {}",
4824 &language_server_names[i]
4825 )
4826 });
4827 let new_server_name = new_server.server.name();
4828 assert!(
4829 !servers_with_hover_requests.contains_key(&new_server_name),
4830 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4831 );
4832 match new_server_name.as_ref() {
4833 "TailwindServer" | "TypeScriptServer" => {
4834 servers_with_hover_requests.insert(
4835 new_server_name.clone(),
4836 new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
4837 let name = new_server_name.clone();
4838 async move {
4839 Ok(Some(lsp::Hover {
4840 contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
4841 format!("{name} hover"),
4842 )),
4843 range: None,
4844 }))
4845 }
4846 }),
4847 );
4848 }
4849 "ESLintServer" => {
4850 servers_with_hover_requests.insert(
4851 new_server_name,
4852 new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4853 |_, _| async move { Ok(None) },
4854 ),
4855 );
4856 }
4857 "NoHoverCapabilitiesServer" => {
4858 let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4859 |_, _| async move {
4860 panic!(
4861 "Should not call for hovers server with no corresponding capabilities"
4862 )
4863 },
4864 );
4865 }
4866 unexpected => panic!("Unexpected server name: {unexpected}"),
4867 }
4868 }
4869
4870 let hover_task = project.update(cx, |project, cx| {
4871 project.hover(&buffer, Point::new(0, 0), cx)
4872 });
4873 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
4874 |mut hover_request| async move {
4875 hover_request
4876 .next()
4877 .await
4878 .expect("All hover requests should have been triggered")
4879 },
4880 ))
4881 .await;
4882 assert_eq!(
4883 vec!["TailwindServer hover", "TypeScriptServer hover"],
4884 hover_task
4885 .await
4886 .into_iter()
4887 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4888 .sorted()
4889 .collect::<Vec<_>>(),
4890 "Should receive hover responses from all related servers with hover capabilities"
4891 );
4892}
4893
4894#[gpui::test]
4895async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
4896 init_test(cx);
4897
4898 let fs = FakeFs::new(cx.executor());
4899 fs.insert_tree(
4900 "/dir",
4901 json!({
4902 "a.ts": "a",
4903 }),
4904 )
4905 .await;
4906
4907 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4908
4909 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4910 language_registry.add(typescript_lang());
4911 let mut fake_language_servers = language_registry.register_fake_lsp(
4912 "TypeScript",
4913 FakeLspAdapter {
4914 capabilities: lsp::ServerCapabilities {
4915 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4916 ..lsp::ServerCapabilities::default()
4917 },
4918 ..FakeLspAdapter::default()
4919 },
4920 );
4921
4922 let buffer = project
4923 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4924 .await
4925 .unwrap();
4926 cx.executor().run_until_parked();
4927
4928 let fake_server = fake_language_servers
4929 .next()
4930 .await
4931 .expect("failed to get the language server");
4932
4933 let mut request_handled =
4934 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
4935 Ok(Some(lsp::Hover {
4936 contents: lsp::HoverContents::Array(vec![
4937 lsp::MarkedString::String("".to_string()),
4938 lsp::MarkedString::String(" ".to_string()),
4939 lsp::MarkedString::String("\n\n\n".to_string()),
4940 ]),
4941 range: None,
4942 }))
4943 });
4944
4945 let hover_task = project.update(cx, |project, cx| {
4946 project.hover(&buffer, Point::new(0, 0), cx)
4947 });
4948 let () = request_handled
4949 .next()
4950 .await
4951 .expect("All hover requests should have been triggered");
4952 assert_eq!(
4953 Vec::<String>::new(),
4954 hover_task
4955 .await
4956 .into_iter()
4957 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4958 .sorted()
4959 .collect::<Vec<_>>(),
4960 "Empty hover parts should be ignored"
4961 );
4962}
4963
4964#[gpui::test]
4965async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
4966 init_test(cx);
4967
4968 let fs = FakeFs::new(cx.executor());
4969 fs.insert_tree(
4970 "/dir",
4971 json!({
4972 "a.tsx": "a",
4973 }),
4974 )
4975 .await;
4976
4977 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4978
4979 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4980 language_registry.add(tsx_lang());
4981 let language_server_names = [
4982 "TypeScriptServer",
4983 "TailwindServer",
4984 "ESLintServer",
4985 "NoActionsCapabilitiesServer",
4986 ];
4987
4988 let mut language_server_rxs = [
4989 language_registry.register_fake_lsp(
4990 "tsx",
4991 FakeLspAdapter {
4992 name: language_server_names[0],
4993 capabilities: lsp::ServerCapabilities {
4994 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4995 ..lsp::ServerCapabilities::default()
4996 },
4997 ..FakeLspAdapter::default()
4998 },
4999 ),
5000 language_registry.register_fake_lsp(
5001 "tsx",
5002 FakeLspAdapter {
5003 name: language_server_names[1],
5004 capabilities: lsp::ServerCapabilities {
5005 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5006 ..lsp::ServerCapabilities::default()
5007 },
5008 ..FakeLspAdapter::default()
5009 },
5010 ),
5011 language_registry.register_fake_lsp(
5012 "tsx",
5013 FakeLspAdapter {
5014 name: language_server_names[2],
5015 capabilities: lsp::ServerCapabilities {
5016 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5017 ..lsp::ServerCapabilities::default()
5018 },
5019 ..FakeLspAdapter::default()
5020 },
5021 ),
5022 language_registry.register_fake_lsp(
5023 "tsx",
5024 FakeLspAdapter {
5025 name: language_server_names[3],
5026 capabilities: lsp::ServerCapabilities {
5027 code_action_provider: None,
5028 ..lsp::ServerCapabilities::default()
5029 },
5030 ..FakeLspAdapter::default()
5031 },
5032 ),
5033 ];
5034
5035 let buffer = project
5036 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
5037 .await
5038 .unwrap();
5039 cx.executor().run_until_parked();
5040
5041 let mut servers_with_actions_requests = HashMap::default();
5042 for i in 0..language_server_names.len() {
5043 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5044 panic!(
5045 "Failed to get language server #{i} with name {}",
5046 &language_server_names[i]
5047 )
5048 });
5049 let new_server_name = new_server.server.name();
5050
5051 assert!(
5052 !servers_with_actions_requests.contains_key(&new_server_name),
5053 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5054 );
5055 match new_server_name.0.as_ref() {
5056 "TailwindServer" | "TypeScriptServer" => {
5057 servers_with_actions_requests.insert(
5058 new_server_name.clone(),
5059 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5060 move |_, _| {
5061 let name = new_server_name.clone();
5062 async move {
5063 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5064 lsp::CodeAction {
5065 title: format!("{name} code action"),
5066 ..lsp::CodeAction::default()
5067 },
5068 )]))
5069 }
5070 },
5071 ),
5072 );
5073 }
5074 "ESLintServer" => {
5075 servers_with_actions_requests.insert(
5076 new_server_name,
5077 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5078 |_, _| async move { Ok(None) },
5079 ),
5080 );
5081 }
5082 "NoActionsCapabilitiesServer" => {
5083 let _never_handled = new_server
5084 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5085 panic!(
5086 "Should not call for code actions server with no corresponding capabilities"
5087 )
5088 });
5089 }
5090 unexpected => panic!("Unexpected server name: {unexpected}"),
5091 }
5092 }
5093
5094 let code_actions_task = project.update(cx, |project, cx| {
5095 project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
5096 });
5097
5098 // cx.run_until_parked();
5099 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5100 |mut code_actions_request| async move {
5101 code_actions_request
5102 .next()
5103 .await
5104 .expect("All code actions requests should have been triggered")
5105 },
5106 ))
5107 .await;
5108 assert_eq!(
5109 vec!["TailwindServer code action", "TypeScriptServer code action"],
5110 code_actions_task
5111 .await
5112 .unwrap()
5113 .into_iter()
5114 .map(|code_action| code_action.lsp_action.title)
5115 .sorted()
5116 .collect::<Vec<_>>(),
5117 "Should receive code actions responses from all related servers with hover capabilities"
5118 );
5119}
5120
5121#[gpui::test]
5122async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5123 init_test(cx);
5124
5125 let fs = FakeFs::new(cx.executor());
5126 fs.insert_tree(
5127 "/dir",
5128 json!({
5129 "a.rs": "let a = 1;",
5130 "b.rs": "let b = 2;",
5131 "c.rs": "let c = 2;",
5132 }),
5133 )
5134 .await;
5135
5136 let project = Project::test(
5137 fs,
5138 [
5139 "/dir/a.rs".as_ref(),
5140 "/dir/b.rs".as_ref(),
5141 "/dir/c.rs".as_ref(),
5142 ],
5143 cx,
5144 )
5145 .await;
5146
5147 // check the initial state and get the worktrees
5148 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5149 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5150 assert_eq!(worktrees.len(), 3);
5151
5152 let worktree_a = worktrees[0].read(cx);
5153 let worktree_b = worktrees[1].read(cx);
5154 let worktree_c = worktrees[2].read(cx);
5155
5156 // check they start in the right order
5157 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5158 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5159 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5160
5161 (
5162 worktrees[0].clone(),
5163 worktrees[1].clone(),
5164 worktrees[2].clone(),
5165 )
5166 });
5167
5168 // move first worktree to after the second
5169 // [a, b, c] -> [b, a, c]
5170 project
5171 .update(cx, |project, cx| {
5172 let first = worktree_a.read(cx);
5173 let second = worktree_b.read(cx);
5174 project.move_worktree(first.id(), second.id(), cx)
5175 })
5176 .expect("moving first after second");
5177
5178 // check the state after moving
5179 project.update(cx, |project, cx| {
5180 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5181 assert_eq!(worktrees.len(), 3);
5182
5183 let first = worktrees[0].read(cx);
5184 let second = worktrees[1].read(cx);
5185 let third = worktrees[2].read(cx);
5186
5187 // check they are now in the right order
5188 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5189 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5190 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5191 });
5192
5193 // move the second worktree to before the first
5194 // [b, a, c] -> [a, b, c]
5195 project
5196 .update(cx, |project, cx| {
5197 let second = worktree_a.read(cx);
5198 let first = worktree_b.read(cx);
5199 project.move_worktree(first.id(), second.id(), cx)
5200 })
5201 .expect("moving second before first");
5202
5203 // check the state after moving
5204 project.update(cx, |project, cx| {
5205 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5206 assert_eq!(worktrees.len(), 3);
5207
5208 let first = worktrees[0].read(cx);
5209 let second = worktrees[1].read(cx);
5210 let third = worktrees[2].read(cx);
5211
5212 // check they are now in the right order
5213 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5214 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5215 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5216 });
5217
5218 // move the second worktree to after the third
5219 // [a, b, c] -> [a, c, b]
5220 project
5221 .update(cx, |project, cx| {
5222 let second = worktree_b.read(cx);
5223 let third = worktree_c.read(cx);
5224 project.move_worktree(second.id(), third.id(), cx)
5225 })
5226 .expect("moving second after third");
5227
5228 // check the state after moving
5229 project.update(cx, |project, cx| {
5230 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5231 assert_eq!(worktrees.len(), 3);
5232
5233 let first = worktrees[0].read(cx);
5234 let second = worktrees[1].read(cx);
5235 let third = worktrees[2].read(cx);
5236
5237 // check they are now in the right order
5238 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5239 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5240 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5241 });
5242
5243 // move the third worktree to before the second
5244 // [a, c, b] -> [a, b, c]
5245 project
5246 .update(cx, |project, cx| {
5247 let third = worktree_c.read(cx);
5248 let second = worktree_b.read(cx);
5249 project.move_worktree(third.id(), second.id(), cx)
5250 })
5251 .expect("moving third before second");
5252
5253 // check the state after moving
5254 project.update(cx, |project, cx| {
5255 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5256 assert_eq!(worktrees.len(), 3);
5257
5258 let first = worktrees[0].read(cx);
5259 let second = worktrees[1].read(cx);
5260 let third = worktrees[2].read(cx);
5261
5262 // check they are now in the right order
5263 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5264 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5265 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5266 });
5267
5268 // move the first worktree to after the third
5269 // [a, b, c] -> [b, c, a]
5270 project
5271 .update(cx, |project, cx| {
5272 let first = worktree_a.read(cx);
5273 let third = worktree_c.read(cx);
5274 project.move_worktree(first.id(), third.id(), cx)
5275 })
5276 .expect("moving first after third");
5277
5278 // check the state after moving
5279 project.update(cx, |project, cx| {
5280 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5281 assert_eq!(worktrees.len(), 3);
5282
5283 let first = worktrees[0].read(cx);
5284 let second = worktrees[1].read(cx);
5285 let third = worktrees[2].read(cx);
5286
5287 // check they are now in the right order
5288 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5289 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5290 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5291 });
5292
5293 // move the third worktree to before the first
5294 // [b, c, a] -> [a, b, c]
5295 project
5296 .update(cx, |project, cx| {
5297 let third = worktree_a.read(cx);
5298 let first = worktree_b.read(cx);
5299 project.move_worktree(third.id(), first.id(), cx)
5300 })
5301 .expect("moving third before first");
5302
5303 // check the state after moving
5304 project.update(cx, |project, cx| {
5305 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5306 assert_eq!(worktrees.len(), 3);
5307
5308 let first = worktrees[0].read(cx);
5309 let second = worktrees[1].read(cx);
5310 let third = worktrees[2].read(cx);
5311
5312 // check they are now in the right order
5313 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5314 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5315 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5316 });
5317}
5318
5319async fn search(
5320 project: &Model<Project>,
5321 query: SearchQuery,
5322 cx: &mut gpui::TestAppContext,
5323) -> Result<HashMap<String, Vec<Range<usize>>>> {
5324 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
5325 let mut results = HashMap::default();
5326 while let Some(search_result) = search_rx.next().await {
5327 match search_result {
5328 SearchResult::Buffer { buffer, ranges } => {
5329 results.entry(buffer).or_insert(ranges);
5330 }
5331 SearchResult::LimitReached => {}
5332 }
5333 }
5334 Ok(results
5335 .into_iter()
5336 .map(|(buffer, ranges)| {
5337 buffer.update(cx, |buffer, cx| {
5338 let path = buffer
5339 .file()
5340 .unwrap()
5341 .full_path(cx)
5342 .to_string_lossy()
5343 .to_string();
5344 let ranges = ranges
5345 .into_iter()
5346 .map(|range| range.to_offset(buffer))
5347 .collect::<Vec<_>>();
5348 (path, ranges)
5349 })
5350 })
5351 .collect())
5352}
5353
5354pub fn init_test(cx: &mut gpui::TestAppContext) {
5355 if std::env::var("RUST_LOG").is_ok() {
5356 env_logger::try_init().ok();
5357 }
5358
5359 cx.update(|cx| {
5360 let settings_store = SettingsStore::test(cx);
5361 cx.set_global(settings_store);
5362 release_channel::init(SemanticVersion::default(), cx);
5363 language::init(cx);
5364 Project::init_settings(cx);
5365 });
5366}
5367
5368fn json_lang() -> Arc<Language> {
5369 Arc::new(Language::new(
5370 LanguageConfig {
5371 name: "JSON".into(),
5372 matcher: LanguageMatcher {
5373 path_suffixes: vec!["json".to_string()],
5374 ..Default::default()
5375 },
5376 ..Default::default()
5377 },
5378 None,
5379 ))
5380}
5381
5382fn js_lang() -> Arc<Language> {
5383 Arc::new(Language::new(
5384 LanguageConfig {
5385 name: "JavaScript".into(),
5386 matcher: LanguageMatcher {
5387 path_suffixes: vec!["js".to_string()],
5388 ..Default::default()
5389 },
5390 ..Default::default()
5391 },
5392 None,
5393 ))
5394}
5395
5396fn rust_lang() -> Arc<Language> {
5397 Arc::new(Language::new(
5398 LanguageConfig {
5399 name: "Rust".into(),
5400 matcher: LanguageMatcher {
5401 path_suffixes: vec!["rs".to_string()],
5402 ..Default::default()
5403 },
5404 ..Default::default()
5405 },
5406 Some(tree_sitter_rust::LANGUAGE.into()),
5407 ))
5408}
5409
5410fn typescript_lang() -> Arc<Language> {
5411 Arc::new(Language::new(
5412 LanguageConfig {
5413 name: "TypeScript".into(),
5414 matcher: LanguageMatcher {
5415 path_suffixes: vec!["ts".to_string()],
5416 ..Default::default()
5417 },
5418 ..Default::default()
5419 },
5420 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
5421 ))
5422}
5423
5424fn tsx_lang() -> Arc<Language> {
5425 Arc::new(Language::new(
5426 LanguageConfig {
5427 name: "tsx".into(),
5428 matcher: LanguageMatcher {
5429 path_suffixes: vec!["tsx".to_string()],
5430 ..Default::default()
5431 },
5432 ..Default::default()
5433 },
5434 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
5435 ))
5436}
5437
5438fn get_all_tasks(
5439 project: &Model<Project>,
5440 worktree_id: Option<WorktreeId>,
5441 task_context: &TaskContext,
5442 cx: &mut AppContext,
5443) -> Vec<(TaskSourceKind, ResolvedTask)> {
5444 let (mut old, new) = project.update(cx, |project, cx| {
5445 project
5446 .task_store
5447 .read(cx)
5448 .task_inventory()
5449 .unwrap()
5450 .read(cx)
5451 .used_and_current_resolved_tasks(worktree_id, None, task_context, cx)
5452 });
5453 old.extend(new);
5454 old
5455}