1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::{AppContext, SemanticVersion, UpdateGlobal};
5use http_client::Url;
6use language::{
7 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
8 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticSet, FakeLspAdapter,
9 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
10};
11use lsp::{DiagnosticSeverity, NumberOrString};
12use parking_lot::Mutex;
13use pretty_assertions::assert_eq;
14use serde_json::json;
15#[cfg(not(windows))]
16use std::os;
17
18use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
19use task::{ResolvedTask, TaskContext};
20use unindent::Unindent as _;
21use util::{assert_set_eq, paths::PathMatcher, test::temp_tree, TryFutureExt as _};
22
23#[gpui::test]
24async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
25 cx.executor().allow_parking();
26
27 let (tx, mut rx) = futures::channel::mpsc::unbounded();
28 let _thread = std::thread::spawn(move || {
29 std::fs::metadata("/tmp").unwrap();
30 std::thread::sleep(Duration::from_millis(1000));
31 tx.unbounded_send(1).unwrap();
32 });
33 rx.next().await.unwrap();
34}
35
36#[gpui::test]
37async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
38 cx.executor().allow_parking();
39
40 let io_task = smol::unblock(move || {
41 println!("sleeping on thread {:?}", std::thread::current().id());
42 std::thread::sleep(Duration::from_millis(10));
43 1
44 });
45
46 let task = cx.foreground_executor().spawn(async move {
47 io_task.await;
48 });
49
50 task.await;
51}
52
53#[cfg(not(windows))]
54#[gpui::test]
55async fn test_symlinks(cx: &mut gpui::TestAppContext) {
56 init_test(cx);
57 cx.executor().allow_parking();
58
59 let dir = temp_tree(json!({
60 "root": {
61 "apple": "",
62 "banana": {
63 "carrot": {
64 "date": "",
65 "endive": "",
66 }
67 },
68 "fennel": {
69 "grape": "",
70 }
71 }
72 }));
73
74 let root_link_path = dir.path().join("root_link");
75 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
76 os::unix::fs::symlink(
77 dir.path().join("root/fennel"),
78 dir.path().join("root/finnochio"),
79 )
80 .unwrap();
81
82 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
83
84 project.update(cx, |project, cx| {
85 let tree = project.worktrees(cx).next().unwrap().read(cx);
86 assert_eq!(tree.file_count(), 5);
87 assert_eq!(
88 tree.inode_for_path("fennel/grape"),
89 tree.inode_for_path("finnochio/grape")
90 );
91 });
92}
93
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Layout: a root `.editorconfig` (root = true) alongside Zed project
    // settings in `.zed/settings.json`, plus a nested `.editorconfig` in
    // `b/` that partially overrides the root one.
    let dir = temp_tree(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n    A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n    B\n}",
        },
        "c.js": "def c\n  C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into a FakeFs so the project scans it
    // deterministically under the test executor.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let the worktree scan and settings observers settle before querying.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative
        // path, going through language detection for the file's extension.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            // Language lookup is async; block on it from the test context.
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by .editorconfig's glob "*.rs",
        // so the .zed/settings tab_size of 8 applies.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
183
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Layout: worktree-level `.zed/{settings,tasks}.json` at the root and a
    // nested `b/.zed` directory, so both settings and tasks have two scopes.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n    A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n    B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    // Let the worktree scan and the local settings/tasks files load.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // Source kind for tasks declared in the root-level `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolve per-file: `a/a.rs` sees the root `.zed`
            // settings, `b/b.rs` sees the nested `b/.zed` override.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files are picked up; the more deeply nested one
    // comes first in the returned order.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root-level task as recently scheduled, and add a global
    // tasks.json entry through the file-based task source.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            // Scheduling bumps this task's recency, which should move it to
            // the front of subsequent task listings.
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled root task now sorts first, followed by the
    // nested worktree task, then the newly-added global task (which also
    // carries its configured env).
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
377
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Layout: two Rust files, a JSON file, and a TOML file with no
    // registered language server, so we can observe which servers get which
    // notifications.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake servers for Rust and JSON with distinct completion
    // trigger characters, so we can tell which server configured a buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, hence no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    // The TOML edit below must produce no notification; only the Rust edit
    // reaches the Rust server.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    // The rename is modeled as a close of the old URI followed by an open of
    // the new one.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it gets cleared when the buffer's
    // language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers should receive a shutdown request before new ones
    // are spawned.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server.
    // The two open notifications may arrive in either order, hence the set
    // comparison.
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
775
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Layout: a gitignored `target` directory alongside sources, so we can
    // verify that ignored paths are only scanned once a language server
    // registers a watcher for them.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                // `target` itself is listed but its contents are unscanned.
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for counting the extra directory reads triggered by the
    // watcher registration below.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            // This pattern targets an ignored directory, which
                            // should force it to be loaded.
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            // Sort by URI so assertions below are order-independent.
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering watchers alone produces no change events, but scanning the
    // newly-watched ignored directory costs exactly 4 directory reads.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                // Only `target/y` is expanded; `x` and `z` stay unscanned.
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
969
970#[gpui::test]
971async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
972 init_test(cx);
973
974 let fs = FakeFs::new(cx.executor());
975 fs.insert_tree(
976 "/dir",
977 json!({
978 "a.rs": "let a = 1;",
979 "b.rs": "let b = 2;"
980 }),
981 )
982 .await;
983
984 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
985
986 let buffer_a = project
987 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
988 .await
989 .unwrap();
990 let buffer_b = project
991 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
992 .await
993 .unwrap();
994
995 project.update(cx, |project, cx| {
996 project
997 .update_diagnostics(
998 LanguageServerId(0),
999 lsp::PublishDiagnosticsParams {
1000 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1001 version: None,
1002 diagnostics: vec![lsp::Diagnostic {
1003 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1004 severity: Some(lsp::DiagnosticSeverity::ERROR),
1005 message: "error 1".to_string(),
1006 ..Default::default()
1007 }],
1008 },
1009 &[],
1010 cx,
1011 )
1012 .unwrap();
1013 project
1014 .update_diagnostics(
1015 LanguageServerId(0),
1016 lsp::PublishDiagnosticsParams {
1017 uri: Url::from_file_path("/dir/b.rs").unwrap(),
1018 version: None,
1019 diagnostics: vec![lsp::Diagnostic {
1020 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1021 severity: Some(DiagnosticSeverity::WARNING),
1022 message: "error 2".to_string(),
1023 ..Default::default()
1024 }],
1025 },
1026 &[],
1027 cx,
1028 )
1029 .unwrap();
1030 });
1031
1032 buffer_a.update(cx, |buffer, _| {
1033 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1034 assert_eq!(
1035 chunks
1036 .iter()
1037 .map(|(s, d)| (s.as_str(), *d))
1038 .collect::<Vec<_>>(),
1039 &[
1040 ("let ", None),
1041 ("a", Some(DiagnosticSeverity::ERROR)),
1042 (" = 1;", None),
1043 ]
1044 );
1045 });
1046 buffer_b.update(cx, |buffer, _| {
1047 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1048 assert_eq!(
1049 chunks
1050 .iter()
1051 .map(|(s, d)| (s.as_str(), *d))
1052 .collect::<Vec<_>>(),
1053 &[
1054 ("let ", None),
1055 ("b", Some(DiagnosticSeverity::WARNING)),
1056 (" = 2;", None),
1057 ]
1058 );
1059 });
1060}
1061
1062#[gpui::test]
1063async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1064 init_test(cx);
1065
1066 let fs = FakeFs::new(cx.executor());
1067 fs.insert_tree(
1068 "/root",
1069 json!({
1070 "dir": {
1071 ".git": {
1072 "HEAD": "ref: refs/heads/main",
1073 },
1074 ".gitignore": "b.rs",
1075 "a.rs": "let a = 1;",
1076 "b.rs": "let b = 2;",
1077 },
1078 "other.rs": "let b = c;"
1079 }),
1080 )
1081 .await;
1082
1083 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1084 let (worktree, _) = project
1085 .update(cx, |project, cx| {
1086 project.find_or_create_worktree("/root/dir", true, cx)
1087 })
1088 .await
1089 .unwrap();
1090 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1091
1092 let (worktree, _) = project
1093 .update(cx, |project, cx| {
1094 project.find_or_create_worktree("/root/other.rs", false, cx)
1095 })
1096 .await
1097 .unwrap();
1098 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1099
1100 let server_id = LanguageServerId(0);
1101 project.update(cx, |project, cx| {
1102 project
1103 .update_diagnostics(
1104 server_id,
1105 lsp::PublishDiagnosticsParams {
1106 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1107 version: None,
1108 diagnostics: vec![lsp::Diagnostic {
1109 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1110 severity: Some(lsp::DiagnosticSeverity::ERROR),
1111 message: "unused variable 'b'".to_string(),
1112 ..Default::default()
1113 }],
1114 },
1115 &[],
1116 cx,
1117 )
1118 .unwrap();
1119 project
1120 .update_diagnostics(
1121 server_id,
1122 lsp::PublishDiagnosticsParams {
1123 uri: Url::from_file_path("/root/other.rs").unwrap(),
1124 version: None,
1125 diagnostics: vec![lsp::Diagnostic {
1126 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1127 severity: Some(lsp::DiagnosticSeverity::ERROR),
1128 message: "unknown variable 'c'".to_string(),
1129 ..Default::default()
1130 }],
1131 },
1132 &[],
1133 cx,
1134 )
1135 .unwrap();
1136 });
1137
1138 let main_ignored_buffer = project
1139 .update(cx, |project, cx| {
1140 project.open_buffer((main_worktree_id, "b.rs"), cx)
1141 })
1142 .await
1143 .unwrap();
1144 main_ignored_buffer.update(cx, |buffer, _| {
1145 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1146 assert_eq!(
1147 chunks
1148 .iter()
1149 .map(|(s, d)| (s.as_str(), *d))
1150 .collect::<Vec<_>>(),
1151 &[
1152 ("let ", None),
1153 ("b", Some(DiagnosticSeverity::ERROR)),
1154 (" = 2;", None),
1155 ],
1156 "Gigitnored buffers should still get in-buffer diagnostics",
1157 );
1158 });
1159 let other_buffer = project
1160 .update(cx, |project, cx| {
1161 project.open_buffer((other_worktree_id, ""), cx)
1162 })
1163 .await
1164 .unwrap();
1165 other_buffer.update(cx, |buffer, _| {
1166 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1167 assert_eq!(
1168 chunks
1169 .iter()
1170 .map(|(s, d)| (s.as_str(), *d))
1171 .collect::<Vec<_>>(),
1172 &[
1173 ("let b = ", None),
1174 ("c", Some(DiagnosticSeverity::ERROR)),
1175 (";", None),
1176 ],
1177 "Buffers from hidden projects should still get in-buffer diagnostics"
1178 );
1179 });
1180
1181 project.update(cx, |project, cx| {
1182 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1183 assert_eq!(
1184 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1185 vec![(
1186 ProjectPath {
1187 worktree_id: main_worktree_id,
1188 path: Arc::from(Path::new("b.rs")),
1189 },
1190 server_id,
1191 DiagnosticSummary {
1192 error_count: 1,
1193 warning_count: 0,
1194 }
1195 )]
1196 );
1197 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1198 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1199 });
1200}
1201
// End-to-end check of the event stream produced while a language server runs
// a disk-based diagnostics pass: the server is added, the progress token
// starts, diagnostics arrive, and the token finishes. Also verifies that
// publishing empty diagnostics a second time produces no extra event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Token the fake adapter is configured to treat as the disk-based
    // diagnostics progress token (see `disk_based_diagnostics_progress_token`).
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Subscribe to project events before driving the server so none are missed.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning work under the configured token emits a "started" event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic for a.rs emits a per-path "updated" event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the token emits a matching "finished" event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The published diagnostic should be materialized in the buffer snapshot
    // at the reported range, as the primary member of its group.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // The second identical empty publish must not produce another event.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1334
// Verifies that restarting a language server while its disk-based diagnostics
// progress is still open does not leave the project stuck in a "diagnostics
// running" state: only the new server instance's progress is tracked, and
// ending it marks all diagnostics as finished.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // Note: the replacement instance gets a fresh id (1).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server should be reported as running diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1419
1420#[gpui::test]
1421async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1422 init_test(cx);
1423
1424 let fs = FakeFs::new(cx.executor());
1425 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1426
1427 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1428
1429 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1430 language_registry.add(rust_lang());
1431 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1432
1433 let buffer = project
1434 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1435 .await
1436 .unwrap();
1437
1438 // Publish diagnostics
1439 let fake_server = fake_servers.next().await.unwrap();
1440 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1441 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1442 version: None,
1443 diagnostics: vec![lsp::Diagnostic {
1444 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1445 severity: Some(lsp::DiagnosticSeverity::ERROR),
1446 message: "the message".to_string(),
1447 ..Default::default()
1448 }],
1449 });
1450
1451 cx.executor().run_until_parked();
1452 buffer.update(cx, |buffer, _| {
1453 assert_eq!(
1454 buffer
1455 .snapshot()
1456 .diagnostics_in_range::<_, usize>(0..1, false)
1457 .map(|entry| entry.diagnostic.message.clone())
1458 .collect::<Vec<_>>(),
1459 ["the message".to_string()]
1460 );
1461 });
1462 project.update(cx, |project, cx| {
1463 assert_eq!(
1464 project.diagnostic_summary(false, cx),
1465 DiagnosticSummary {
1466 error_count: 1,
1467 warning_count: 0,
1468 }
1469 );
1470 });
1471
1472 project.update(cx, |project, cx| {
1473 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1474 });
1475
1476 // The diagnostics are cleared.
1477 cx.executor().run_until_parked();
1478 buffer.update(cx, |buffer, _| {
1479 assert_eq!(
1480 buffer
1481 .snapshot()
1482 .diagnostics_in_range::<_, usize>(0..1, false)
1483 .map(|entry| entry.diagnostic.message.clone())
1484 .collect::<Vec<_>>(),
1485 Vec::<String>::new(),
1486 );
1487 });
1488 project.update(cx, |project, cx| {
1489 assert_eq!(
1490 project.diagnostic_summary(false, cx),
1491 DiagnosticSummary {
1492 error_count: 0,
1493 warning_count: 0,
1494 }
1495 );
1496 });
1497}
1498
1499#[gpui::test]
1500async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1501 init_test(cx);
1502
1503 let fs = FakeFs::new(cx.executor());
1504 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1505
1506 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1507 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1508
1509 language_registry.add(rust_lang());
1510 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1511
1512 let buffer = project
1513 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1514 .await
1515 .unwrap();
1516
1517 // Before restarting the server, report diagnostics with an unknown buffer version.
1518 let fake_server = fake_servers.next().await.unwrap();
1519 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1520 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1521 version: Some(10000),
1522 diagnostics: Vec::new(),
1523 });
1524 cx.executor().run_until_parked();
1525
1526 project.update(cx, |project, cx| {
1527 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1528 });
1529 let mut fake_server = fake_servers.next().await.unwrap();
1530 let notification = fake_server
1531 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1532 .await
1533 .text_document;
1534 assert_eq!(notification.version, 0);
1535}
1536
1537#[gpui::test]
1538async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
1539 init_test(cx);
1540
1541 let progress_token = "the-progress-token";
1542
1543 let fs = FakeFs::new(cx.executor());
1544 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1545
1546 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1547
1548 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1549 language_registry.add(rust_lang());
1550 let mut fake_servers = language_registry.register_fake_lsp(
1551 "Rust",
1552 FakeLspAdapter {
1553 name: "the-language-server",
1554 disk_based_diagnostics_sources: vec!["disk".into()],
1555 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1556 ..Default::default()
1557 },
1558 );
1559
1560 let buffer = project
1561 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1562 .await
1563 .unwrap();
1564
1565 // Simulate diagnostics starting to update.
1566 let mut fake_server = fake_servers.next().await.unwrap();
1567 fake_server
1568 .start_progress_with(
1569 "another-token",
1570 lsp::WorkDoneProgressBegin {
1571 cancellable: Some(false),
1572 ..Default::default()
1573 },
1574 )
1575 .await;
1576 fake_server
1577 .start_progress_with(
1578 progress_token,
1579 lsp::WorkDoneProgressBegin {
1580 cancellable: Some(true),
1581 ..Default::default()
1582 },
1583 )
1584 .await;
1585 cx.executor().run_until_parked();
1586
1587 project.update(cx, |project, cx| {
1588 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
1589 });
1590
1591 let cancel_notification = fake_server
1592 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
1593 .await;
1594 assert_eq!(
1595 cancel_notification.token,
1596 NumberOrString::String(progress_token.into())
1597 );
1598}
1599
// Verifies that toggling `enable_language_server` per language starts and
// stops exactly the affected server: disabling Rust exits only the Rust
// server; re-enabling Rust while disabling JavaScript restarts the former
// and exits the latter.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The re-enabled Rust server is a fresh instance that re-opens the buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1713
// Verifies that diagnostics published against an older document version are
// transformed through the edits made since that version: ranges move with the
// text, overlapping diagnostics highlight correctly, and out-of-order ranges
// are handled. Group ids are asserted as monotonically increasing across
// publishes — presumably assigned per publish batch; confirm against the
// diagnostics implementation if these assertions drift.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (The two inserted newlines shift every row by 2.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A sub-range query clips the highlighted chunks at its boundaries.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider warning sorts before the nested error at the same start.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Within the overlap, the more severe (error) styling wins.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Entries come back sorted by position, with ranges reflecting the
        // intervening edits ("    " indent, "(x: usize)", and "xxx").
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1993
1994#[gpui::test]
1995async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1996 init_test(cx);
1997
1998 let text = concat!(
1999 "let one = ;\n", //
2000 "let two = \n",
2001 "let three = 3;\n",
2002 );
2003
2004 let fs = FakeFs::new(cx.executor());
2005 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2006
2007 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2008 let buffer = project
2009 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2010 .await
2011 .unwrap();
2012
2013 project.update(cx, |project, cx| {
2014 project.lsp_store.update(cx, |lsp_store, cx| {
2015 lsp_store
2016 .update_buffer_diagnostics(
2017 &buffer,
2018 LanguageServerId(0),
2019 None,
2020 vec![
2021 DiagnosticEntry {
2022 range: Unclipped(PointUtf16::new(0, 10))
2023 ..Unclipped(PointUtf16::new(0, 10)),
2024 diagnostic: Diagnostic {
2025 severity: DiagnosticSeverity::ERROR,
2026 message: "syntax error 1".to_string(),
2027 ..Default::default()
2028 },
2029 },
2030 DiagnosticEntry {
2031 range: Unclipped(PointUtf16::new(1, 10))
2032 ..Unclipped(PointUtf16::new(1, 10)),
2033 diagnostic: Diagnostic {
2034 severity: DiagnosticSeverity::ERROR,
2035 message: "syntax error 2".to_string(),
2036 ..Default::default()
2037 },
2038 },
2039 ],
2040 cx,
2041 )
2042 .unwrap();
2043 })
2044 });
2045
2046 // An empty range is extended forward to include the following character.
2047 // At the end of a line, an empty range is extended backward to include
2048 // the preceding character.
2049 buffer.update(cx, |buffer, _| {
2050 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2051 assert_eq!(
2052 chunks
2053 .iter()
2054 .map(|(s, d)| (s.as_str(), *d))
2055 .collect::<Vec<_>>(),
2056 &[
2057 ("let one = ", None),
2058 (";", Some(DiagnosticSeverity::ERROR)),
2059 ("\nlet two =", None),
2060 (" ", Some(DiagnosticSeverity::ERROR)),
2061 ("\nlet three = 3;\n", None)
2062 ]
2063 );
2064 });
2065}
2066
2067#[gpui::test]
2068async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2069 init_test(cx);
2070
2071 let fs = FakeFs::new(cx.executor());
2072 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2073 .await;
2074
2075 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2076
2077 project.update(cx, |project, cx| {
2078 project
2079 .update_diagnostic_entries(
2080 LanguageServerId(0),
2081 Path::new("/dir/a.rs").to_owned(),
2082 None,
2083 vec![DiagnosticEntry {
2084 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2085 diagnostic: Diagnostic {
2086 severity: DiagnosticSeverity::ERROR,
2087 is_primary: true,
2088 message: "syntax error a1".to_string(),
2089 ..Default::default()
2090 },
2091 }],
2092 cx,
2093 )
2094 .unwrap();
2095 project
2096 .update_diagnostic_entries(
2097 LanguageServerId(1),
2098 Path::new("/dir/a.rs").to_owned(),
2099 None,
2100 vec![DiagnosticEntry {
2101 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2102 diagnostic: Diagnostic {
2103 severity: DiagnosticSeverity::ERROR,
2104 is_primary: true,
2105 message: "syntax error b1".to_string(),
2106 ..Default::default()
2107 },
2108 }],
2109 cx,
2110 )
2111 .unwrap();
2112
2113 assert_eq!(
2114 project.diagnostic_summary(false, cx),
2115 DiagnosticSummary {
2116 error_count: 2,
2117 warning_count: 0,
2118 }
2119 );
2120 });
2121}
2122
// Verifies that LSP text edits expressed against an out-of-date document
// version are translated through the buffer edits made since that version,
// so they land in the right places in the current text.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the edits
    // below will be expressed against this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Translate the stale edits into the current buffer coordinate space.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits should preserve the user's comments while
    // performing the server's intended changes (f1->f10, f2->f200, f3->f4000).
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2275
/// Verifies that `edits_from_lsp` can take a very large diff -- in which the
/// language server re-sends most of the file -- and minimize it down to the
/// small set of edits that actually change the buffer's contents.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchor ranges to points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four LSP edits above should be minimized down to just two buffer
        // edits: the import rewrite, and the removal of one now-redundant line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        // Applying the minimized edits yields the expected merged-imports text.
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2384
/// Verifies that `edits_from_lsp` tolerates malformed server responses:
/// edits arriving out of order, with inverted (end-before-start) ranges, or
/// with ranges pointing past the end of the buffer.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // An inverted range: the start position comes after the end.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // A range whose end is far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchor ranges to points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the disorder and invalid ranges, the edits are normalized to
        // the same two minimized edits as in the well-formed case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2489
2490fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2491 buffer: &Buffer,
2492 range: Range<T>,
2493) -> Vec<(String, Option<DiagnosticSeverity>)> {
2494 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2495 for chunk in buffer.snapshot().chunks(range, true) {
2496 if chunks.last().map_or(false, |prev_chunk| {
2497 prev_chunk.1 == chunk.diagnostic_severity
2498 }) {
2499 chunks.last_mut().unwrap().0.push_str(chunk.text);
2500 } else {
2501 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2502 }
2503 }
2504 chunks
2505}
2506
/// Exercises go-to-definition when the target file lies outside the
/// project's visible worktrees: the target is opened in a new invisible
/// worktree, which is released once the last handle to the definition is
/// dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project; `a.rs` is outside it.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server reports the definition as living in `a.rs`.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // `a.rs` was added as an invisible worktree to host the target buffer.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        // Dropping the last handle to the definition should release the
        // invisible worktree that was created for it.
        drop(definition);
    });
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: returns each worktree's absolute path and visibility flag.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2600
/// When the server returns completion items without an explicit text edit,
/// the replacement range must be inferred from the text preceding the
/// completion position (the partial word, or a fragment inside a string).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: request completions at the end of the partial word `fqn`.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The response has an `insert_text` but no text edit, so no range.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred replacement range covers the 3-character word `fqn`.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: request completions just before the closing quote of a string.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers `cmp` and excludes the closing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2692
/// Completion text containing carriage returns (both `\r` and `\r\n`) must be
/// normalized to `\n` line endings before being applied to the buffer.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert text mixes bare `\r` and `\r\n` line endings.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both styles of carriage return were normalized to `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2753
/// Exercises the full command-based code-action flow: the server returns an
/// action with no edits, resolving it yields only a command, executing that
/// command makes the server send a `workspace/applyEdit` request back, and
/// those edits end up in the project transaction returned to the caller.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The server advertises lazy code-action resolution.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying resolve `data`).
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The applied edit is undoable as a single transaction step.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2886
2887#[gpui::test(iterations = 10)]
2888async fn test_save_file(cx: &mut gpui::TestAppContext) {
2889 init_test(cx);
2890
2891 let fs = FakeFs::new(cx.executor());
2892 fs.insert_tree(
2893 "/dir",
2894 json!({
2895 "file1": "the old contents",
2896 }),
2897 )
2898 .await;
2899
2900 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2901 let buffer = project
2902 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2903 .await
2904 .unwrap();
2905 buffer.update(cx, |buffer, cx| {
2906 assert_eq!(buffer.text(), "the old contents");
2907 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2908 });
2909
2910 project
2911 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2912 .await
2913 .unwrap();
2914
2915 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2916 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2917}
2918
2919#[gpui::test(iterations = 30)]
2920async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2921 init_test(cx);
2922
2923 let fs = FakeFs::new(cx.executor().clone());
2924 fs.insert_tree(
2925 "/dir",
2926 json!({
2927 "file1": "the original contents",
2928 }),
2929 )
2930 .await;
2931
2932 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2933 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2934 let buffer = project
2935 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2936 .await
2937 .unwrap();
2938
2939 // Simulate buffer diffs being slow, so that they don't complete before
2940 // the next file change occurs.
2941 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2942
2943 // Change the buffer's file on disk, and then wait for the file change
2944 // to be detected by the worktree, so that the buffer starts reloading.
2945 fs.save(
2946 "/dir/file1".as_ref(),
2947 &"the first contents".into(),
2948 Default::default(),
2949 )
2950 .await
2951 .unwrap();
2952 worktree.next_event(cx).await;
2953
2954 // Change the buffer's file again. Depending on the random seed, the
2955 // previous file change may still be in progress.
2956 fs.save(
2957 "/dir/file1".as_ref(),
2958 &"the second contents".into(),
2959 Default::default(),
2960 )
2961 .await
2962 .unwrap();
2963 worktree.next_event(cx).await;
2964
2965 cx.executor().run_until_parked();
2966 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2967 buffer.read_with(cx, |buffer, _| {
2968 assert_eq!(buffer.text(), on_disk_text);
2969 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2970 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2971 });
2972}
2973
2974#[gpui::test(iterations = 30)]
2975async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2976 init_test(cx);
2977
2978 let fs = FakeFs::new(cx.executor().clone());
2979 fs.insert_tree(
2980 "/dir",
2981 json!({
2982 "file1": "the original contents",
2983 }),
2984 )
2985 .await;
2986
2987 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2988 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2989 let buffer = project
2990 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2991 .await
2992 .unwrap();
2993
2994 // Simulate buffer diffs being slow, so that they don't complete before
2995 // the next file change occurs.
2996 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2997
2998 // Change the buffer's file on disk, and then wait for the file change
2999 // to be detected by the worktree, so that the buffer starts reloading.
3000 fs.save(
3001 "/dir/file1".as_ref(),
3002 &"the first contents".into(),
3003 Default::default(),
3004 )
3005 .await
3006 .unwrap();
3007 worktree.next_event(cx).await;
3008
3009 cx.executor()
3010 .spawn(cx.executor().simulate_random_delay())
3011 .await;
3012
3013 // Perform a noop edit, causing the buffer's version to increase.
3014 buffer.update(cx, |buffer, cx| {
3015 buffer.edit([(0..0, " ")], None, cx);
3016 buffer.undo(cx);
3017 });
3018
3019 cx.executor().run_until_parked();
3020 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3021 buffer.read_with(cx, |buffer, _| {
3022 let buffer_text = buffer.text();
3023 if buffer_text == on_disk_text {
3024 assert!(
3025 !buffer.is_dirty() && !buffer.has_conflict(),
3026 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3027 );
3028 }
3029 // If the file change occurred while the buffer was processing the first
3030 // change, the buffer will be in a conflicting state.
3031 else {
3032 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3033 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3034 }
3035 });
3036}
3037
3038#[gpui::test]
3039async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3040 init_test(cx);
3041
3042 let fs = FakeFs::new(cx.executor());
3043 fs.insert_tree(
3044 "/dir",
3045 json!({
3046 "file1": "the old contents",
3047 }),
3048 )
3049 .await;
3050
3051 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
3052 let buffer = project
3053 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3054 .await
3055 .unwrap();
3056 buffer.update(cx, |buffer, cx| {
3057 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3058 });
3059
3060 project
3061 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3062 .await
3063 .unwrap();
3064
3065 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3066 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3067}
3068
3069#[gpui::test]
3070async fn test_save_as(cx: &mut gpui::TestAppContext) {
3071 init_test(cx);
3072
3073 let fs = FakeFs::new(cx.executor());
3074 fs.insert_tree("/dir", json!({})).await;
3075
3076 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3077
3078 let languages = project.update(cx, |project, _| project.languages().clone());
3079 languages.add(rust_lang());
3080
3081 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3082 buffer.update(cx, |buffer, cx| {
3083 buffer.edit([(0..0, "abc")], None, cx);
3084 assert!(buffer.is_dirty());
3085 assert!(!buffer.has_conflict());
3086 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3087 });
3088 project
3089 .update(cx, |project, cx| {
3090 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3091 let path = ProjectPath {
3092 worktree_id,
3093 path: Arc::from(Path::new("file1.rs")),
3094 };
3095 project.save_buffer_as(buffer.clone(), path, cx)
3096 })
3097 .await
3098 .unwrap();
3099 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3100
3101 cx.executor().run_until_parked();
3102 buffer.update(cx, |buffer, cx| {
3103 assert_eq!(
3104 buffer.file().unwrap().full_path(cx),
3105 Path::new("dir/file1.rs")
3106 );
3107 assert!(!buffer.is_dirty());
3108 assert!(!buffer.has_conflict());
3109 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3110 });
3111
3112 let opened_buffer = project
3113 .update(cx, |project, cx| {
3114 project.open_local_buffer("/dir/file1.rs", cx)
3115 })
3116 .await
3117 .unwrap();
3118 assert_eq!(opened_buffer, buffer);
3119}
3120
/// Renames and deletes files and directories on a real file system, and
/// verifies that (1) entry ids and open buffers track the moved paths, and
/// (2) a remote replica of the worktree converges to the same state when the
/// observed update stream is replayed into it.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    // Record entry ids before any renames, to check stability later.
    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree emits, for later replay.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // After the moves: `b/c` became `d`, `file3` moved into it, `file5` was
    // deleted, and `file2` was renamed in place.
    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids survived the renames.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // The deleted file's buffer keeps its last known path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3268
/// Renaming a directory preserves the entry ids of the directory and of the
/// files inside it, and buffers open on those files stay valid and clean.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up the worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    // Record ids before the rename, to check stability afterwards.
    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory `a` to `b`.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Both the directory and the file kept their entry ids, and the buffer
    // is still clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3320
3321#[gpui::test]
3322async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3323 init_test(cx);
3324
3325 let fs = FakeFs::new(cx.executor());
3326 fs.insert_tree(
3327 "/dir",
3328 json!({
3329 "a.txt": "a-contents",
3330 "b.txt": "b-contents",
3331 }),
3332 )
3333 .await;
3334
3335 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3336
3337 // Spawn multiple tasks to open paths, repeating some paths.
3338 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3339 (
3340 p.open_local_buffer("/dir/a.txt", cx),
3341 p.open_local_buffer("/dir/b.txt", cx),
3342 p.open_local_buffer("/dir/a.txt", cx),
3343 )
3344 });
3345
3346 let buffer_a_1 = buffer_a_1.await.unwrap();
3347 let buffer_a_2 = buffer_a_2.await.unwrap();
3348 let buffer_b = buffer_b.await.unwrap();
3349 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3350 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3351
3352 // There is only one buffer per path.
3353 let buffer_a_id = buffer_a_1.entity_id();
3354 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3355
3356 // Open the same path again while it is still open.
3357 drop(buffer_a_1);
3358 let buffer_a_3 = project
3359 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3360 .await
3361 .unwrap();
3362
3363 // There's still only one buffer per path.
3364 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3365}
3366
#[gpui::test]
// Exercises the buffer dirty-tracking state machine: editing marks a buffer
// dirty, saving clears it, restoring saved text clears it, and deleting the
// backing file marks it dirty — each transition emitting the expected events
// in the expected order (Operation events are filtered out of the log).
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    // Shared log of non-Operation buffer events, in emission order.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version and mtime.
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Only the first post-save edit flips the dirty bit, so the second
        // edit produces an Edited event with no DirtyChanged.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3513
#[gpui::test]
// Verifies how open buffers react to their backing file changing on disk:
// a clean buffer is reloaded (with anchors remapped through the diff),
// while a dirty buffer keeps its edits and is flagged as conflicted.
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the three content rows, so we
    // can check that anchors survive the on-disk reload.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors followed the lines they were attached to: "aaa" moved
        // to row 1, "bbbbb" to row 3, and the deleted "c" line's anchor
        // landed at the end of the preceding content.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3594
3595#[gpui::test]
3596async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3597 init_test(cx);
3598
3599 let fs = FakeFs::new(cx.executor());
3600 fs.insert_tree(
3601 "/dir",
3602 json!({
3603 "file1": "a\nb\nc\n",
3604 "file2": "one\r\ntwo\r\nthree\r\n",
3605 }),
3606 )
3607 .await;
3608
3609 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3610 let buffer1 = project
3611 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3612 .await
3613 .unwrap();
3614 let buffer2 = project
3615 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3616 .await
3617 .unwrap();
3618
3619 buffer1.update(cx, |buffer, _| {
3620 assert_eq!(buffer.text(), "a\nb\nc\n");
3621 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3622 });
3623 buffer2.update(cx, |buffer, _| {
3624 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3625 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3626 });
3627
3628 // Change a file's line endings on disk from unix to windows. The buffer's
3629 // state updates correctly.
3630 fs.save(
3631 "/dir/file1".as_ref(),
3632 &"aaa\nb\nc\n".into(),
3633 LineEnding::Windows,
3634 )
3635 .await
3636 .unwrap();
3637 cx.executor().run_until_parked();
3638 buffer1.update(cx, |buffer, _| {
3639 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3640 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3641 });
3642
3643 // Save a file with windows line endings. The file is written correctly.
3644 buffer2.update(cx, |buffer, cx| {
3645 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3646 });
3647 project
3648 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3649 .await
3650 .unwrap();
3651 assert_eq!(
3652 fs.load("/dir/file2".as_ref()).await.unwrap(),
3653 "one\r\ntwo\r\nthree\r\nfour\r\n",
3654 );
3655}
3656
#[gpui::test]
// Verifies grouping of LSP diagnostics: a primary diagnostic and the hints
// referenced via its `related_information` share one group id, supporting
// hints are marked non-primary, and `diagnostic_group` returns a group's
// entries ordered by position.
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload containing two diagnostic groups:
    // "error 1" (a warning with one hint) and "error 2" (an error with two
    // hints). Each hint also appears as its own HINT-severity diagnostic
    // whose related information points back at the original diagnostic.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Ingest the diagnostics as if they came from language server 0.
    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, in buffer order: "error 2"'s hints and primary get
    // group 0, "error 1" and its hint get group 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 ("error 2") in isolation: hints first (earlier position),
    // then the primary error.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 ("error 1") in isolation: the primary warning and its hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3898
#[gpui::test]
// Exercises the full LSP rename flow against a fake server: prepare_rename
// resolves the symbol's range, and perform_rename applies a multi-file
// WorkspaceEdit, producing one transaction entry per affected buffer.
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Register a fake Rust language server that advertises rename support
    // (including prepareRename).
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // prepare_rename at offset 7 (inside "ONE") should resolve to the
    // symbol's full range, 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // perform_rename: the fake server answers with a WorkspaceEdit touching
    // both files — the definition in one.rs and two references in two.rs.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction covers both buffers, and each buffer's text
    // reflects the applied edits.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4032
4033#[gpui::test]
4034async fn test_search(cx: &mut gpui::TestAppContext) {
4035 init_test(cx);
4036
4037 let fs = FakeFs::new(cx.executor());
4038 fs.insert_tree(
4039 "/dir",
4040 json!({
4041 "one.rs": "const ONE: usize = 1;",
4042 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4043 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4044 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4045 }),
4046 )
4047 .await;
4048 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4049 assert_eq!(
4050 search(
4051 &project,
4052 SearchQuery::text(
4053 "TWO",
4054 false,
4055 true,
4056 false,
4057 Default::default(),
4058 Default::default(),
4059 None
4060 )
4061 .unwrap(),
4062 cx
4063 )
4064 .await
4065 .unwrap(),
4066 HashMap::from_iter([
4067 ("dir/two.rs".to_string(), vec![6..9]),
4068 ("dir/three.rs".to_string(), vec![37..40])
4069 ])
4070 );
4071
4072 let buffer_4 = project
4073 .update(cx, |project, cx| {
4074 project.open_local_buffer("/dir/four.rs", cx)
4075 })
4076 .await
4077 .unwrap();
4078 buffer_4.update(cx, |buffer, cx| {
4079 let text = "two::TWO";
4080 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4081 });
4082
4083 assert_eq!(
4084 search(
4085 &project,
4086 SearchQuery::text(
4087 "TWO",
4088 false,
4089 true,
4090 false,
4091 Default::default(),
4092 Default::default(),
4093 None,
4094 )
4095 .unwrap(),
4096 cx
4097 )
4098 .await
4099 .unwrap(),
4100 HashMap::from_iter([
4101 ("dir/two.rs".to_string(), vec![6..9]),
4102 ("dir/three.rs".to_string(), vec![37..40]),
4103 ("dir/four.rs".to_string(), vec![25..28, 36..39])
4104 ])
4105 );
4106}
4107
4108#[gpui::test]
4109async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4110 init_test(cx);
4111
4112 let search_query = "file";
4113
4114 let fs = FakeFs::new(cx.executor());
4115 fs.insert_tree(
4116 "/dir",
4117 json!({
4118 "one.rs": r#"// Rust file one"#,
4119 "one.ts": r#"// TypeScript file one"#,
4120 "two.rs": r#"// Rust file two"#,
4121 "two.ts": r#"// TypeScript file two"#,
4122 }),
4123 )
4124 .await;
4125 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4126
4127 assert!(
4128 search(
4129 &project,
4130 SearchQuery::text(
4131 search_query,
4132 false,
4133 true,
4134 false,
4135 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4136 Default::default(),
4137 None
4138 )
4139 .unwrap(),
4140 cx
4141 )
4142 .await
4143 .unwrap()
4144 .is_empty(),
4145 "If no inclusions match, no files should be returned"
4146 );
4147
4148 assert_eq!(
4149 search(
4150 &project,
4151 SearchQuery::text(
4152 search_query,
4153 false,
4154 true,
4155 false,
4156 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4157 Default::default(),
4158 None
4159 )
4160 .unwrap(),
4161 cx
4162 )
4163 .await
4164 .unwrap(),
4165 HashMap::from_iter([
4166 ("dir/one.rs".to_string(), vec![8..12]),
4167 ("dir/two.rs".to_string(), vec![8..12]),
4168 ]),
4169 "Rust only search should give only Rust files"
4170 );
4171
4172 assert_eq!(
4173 search(
4174 &project,
4175 SearchQuery::text(
4176 search_query,
4177 false,
4178 true,
4179 false,
4180
4181 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4182
4183 Default::default(),
4184 None,
4185 ).unwrap(),
4186 cx
4187 )
4188 .await
4189 .unwrap(),
4190 HashMap::from_iter([
4191 ("dir/one.ts".to_string(), vec![14..18]),
4192 ("dir/two.ts".to_string(), vec![14..18]),
4193 ]),
4194 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4195 );
4196
4197 assert_eq!(
4198 search(
4199 &project,
4200 SearchQuery::text(
4201 search_query,
4202 false,
4203 true,
4204 false,
4205
4206 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4207
4208 Default::default(),
4209 None,
4210 ).unwrap(),
4211 cx
4212 )
4213 .await
4214 .unwrap(),
4215 HashMap::from_iter([
4216 ("dir/two.ts".to_string(), vec![14..18]),
4217 ("dir/one.rs".to_string(), vec![8..12]),
4218 ("dir/one.ts".to_string(), vec![14..18]),
4219 ("dir/two.rs".to_string(), vec![8..12]),
4220 ]),
4221 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4222 );
4223}
4224
4225#[gpui::test]
4226async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4227 init_test(cx);
4228
4229 let search_query = "file";
4230
4231 let fs = FakeFs::new(cx.executor());
4232 fs.insert_tree(
4233 "/dir",
4234 json!({
4235 "one.rs": r#"// Rust file one"#,
4236 "one.ts": r#"// TypeScript file one"#,
4237 "two.rs": r#"// Rust file two"#,
4238 "two.ts": r#"// TypeScript file two"#,
4239 }),
4240 )
4241 .await;
4242 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4243
4244 assert_eq!(
4245 search(
4246 &project,
4247 SearchQuery::text(
4248 search_query,
4249 false,
4250 true,
4251 false,
4252 Default::default(),
4253 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4254 None,
4255 )
4256 .unwrap(),
4257 cx
4258 )
4259 .await
4260 .unwrap(),
4261 HashMap::from_iter([
4262 ("dir/one.rs".to_string(), vec![8..12]),
4263 ("dir/one.ts".to_string(), vec![14..18]),
4264 ("dir/two.rs".to_string(), vec![8..12]),
4265 ("dir/two.ts".to_string(), vec![14..18]),
4266 ]),
4267 "If no exclusions match, all files should be returned"
4268 );
4269
4270 assert_eq!(
4271 search(
4272 &project,
4273 SearchQuery::text(
4274 search_query,
4275 false,
4276 true,
4277 false,
4278 Default::default(),
4279 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4280 None,
4281 )
4282 .unwrap(),
4283 cx
4284 )
4285 .await
4286 .unwrap(),
4287 HashMap::from_iter([
4288 ("dir/one.ts".to_string(), vec![14..18]),
4289 ("dir/two.ts".to_string(), vec![14..18]),
4290 ]),
4291 "Rust exclusion search should give only TypeScript files"
4292 );
4293
4294 assert_eq!(
4295 search(
4296 &project,
4297 SearchQuery::text(
4298 search_query,
4299 false,
4300 true,
4301 false,
4302 Default::default(),
4303 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4304 None,
4305 ).unwrap(),
4306 cx
4307 )
4308 .await
4309 .unwrap(),
4310 HashMap::from_iter([
4311 ("dir/one.rs".to_string(), vec![8..12]),
4312 ("dir/two.rs".to_string(), vec![8..12]),
4313 ]),
4314 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4315 );
4316
4317 assert!(
4318 search(
4319 &project,
4320 SearchQuery::text(
4321 search_query,
4322 false,
4323 true,
4324 false,
4325 Default::default(),
4326
4327 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4328 None,
4329
4330 ).unwrap(),
4331 cx
4332 )
4333 .await
4334 .unwrap().is_empty(),
4335 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4336 );
4337}
4338
4339#[gpui::test]
4340async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4341 init_test(cx);
4342
4343 let search_query = "file";
4344
4345 let fs = FakeFs::new(cx.executor());
4346 fs.insert_tree(
4347 "/dir",
4348 json!({
4349 "one.rs": r#"// Rust file one"#,
4350 "one.ts": r#"// TypeScript file one"#,
4351 "two.rs": r#"// Rust file two"#,
4352 "two.ts": r#"// TypeScript file two"#,
4353 }),
4354 )
4355 .await;
4356 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4357
4358 assert!(
4359 search(
4360 &project,
4361 SearchQuery::text(
4362 search_query,
4363 false,
4364 true,
4365 false,
4366 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4367 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4368 None,
4369 )
4370 .unwrap(),
4371 cx
4372 )
4373 .await
4374 .unwrap()
4375 .is_empty(),
4376 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4377 );
4378
4379 assert!(
4380 search(
4381 &project,
4382 SearchQuery::text(
4383 search_query,
4384 false,
4385 true,
4386 false,
4387 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4388 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4389 None,
4390 ).unwrap(),
4391 cx
4392 )
4393 .await
4394 .unwrap()
4395 .is_empty(),
4396 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4397 );
4398
4399 assert!(
4400 search(
4401 &project,
4402 SearchQuery::text(
4403 search_query,
4404 false,
4405 true,
4406 false,
4407 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4408 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4409 None,
4410 )
4411 .unwrap(),
4412 cx
4413 )
4414 .await
4415 .unwrap()
4416 .is_empty(),
4417 "Non-matching inclusions and exclusions should not change that."
4418 );
4419
4420 assert_eq!(
4421 search(
4422 &project,
4423 SearchQuery::text(
4424 search_query,
4425 false,
4426 true,
4427 false,
4428 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4429 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4430 None,
4431 )
4432 .unwrap(),
4433 cx
4434 )
4435 .await
4436 .unwrap(),
4437 HashMap::from_iter([
4438 ("dir/one.ts".to_string(), vec![14..18]),
4439 ("dir/two.ts".to_string(), vec![14..18]),
4440 ]),
4441 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4442 );
4443}
4444
#[gpui::test]
// With multiple worktrees, inclusion patterns can be scoped to a single
// worktree by prefixing the worktree name, while unprefixed patterns apply
// across all worktrees.
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/worktree-a",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        "/worktree-b",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    // One project spanning both worktrees.
    let project = Project::test(
        fs.clone(),
        ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
        cx,
    )
    .await;

    // An inclusion prefixed with "worktree-a/" matches only within that tree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // An unprefixed inclusion matches files in every worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("worktree-a/haystack.ts".to_string(), vec![3..9]),
            ("worktree-b/haystack.ts".to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
4539
#[gpui::test]
// Gitignored directories are skipped by default, searched when the
// include-ignored flag is set, and still subject to explicit
// inclusion/exclusion matchers.
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    // "target" and "node_modules" are gitignored; only the root
    // package.json is tracked content containing the query.
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
    // Default search (include_ignored = false) skips ignored directories.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): a fresh project is constructed for each query,
    // presumably so each search starts from a clean worktree scan — confirm.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    // With include_ignored = true, ignored files are searched too.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusion/exclusion matchers still apply within ignored directories.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4656
4657#[gpui::test]
4658async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4659 init_test(cx);
4660
4661 let fs = FakeFs::new(cx.executor().clone());
4662 fs.insert_tree(
4663 "/one/two",
4664 json!({
4665 "three": {
4666 "a.txt": "",
4667 "four": {}
4668 },
4669 "c.rs": ""
4670 }),
4671 )
4672 .await;
4673
4674 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
4675 project
4676 .update(cx, |project, cx| {
4677 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4678 project.create_entry((id, "b.."), true, cx)
4679 })
4680 .await
4681 .unwrap()
4682 .to_included()
4683 .unwrap();
4684
4685 // Can't create paths outside the project
4686 let result = project
4687 .update(cx, |project, cx| {
4688 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4689 project.create_entry((id, "../../boop"), true, cx)
4690 })
4691 .await;
4692 assert!(result.is_err());
4693
4694 // Can't create paths with '..'
4695 let result = project
4696 .update(cx, |project, cx| {
4697 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4698 project.create_entry((id, "four/../beep"), true, cx)
4699 })
4700 .await;
4701 assert!(result.is_err());
4702
4703 assert_eq!(
4704 fs.paths(true),
4705 vec![
4706 PathBuf::from("/"),
4707 PathBuf::from("/one"),
4708 PathBuf::from("/one/two"),
4709 PathBuf::from("/one/two/c.rs"),
4710 PathBuf::from("/one/two/three"),
4711 PathBuf::from("/one/two/three/a.txt"),
4712 PathBuf::from("/one/two/three/b.."),
4713 PathBuf::from("/one/two/three/four"),
4714 ]
4715 );
4716
4717 // And we cannot open buffers with '..'
4718 let result = project
4719 .update(cx, |project, cx| {
4720 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4721 project.open_buffer((id, "../c.rs"), cx)
4722 })
4723 .await;
4724 assert!(result.is_err())
4725}
4726
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    // Register four fake language servers for the same "tsx" language:
    // three advertise hover capability, the fourth advertises none.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all four servers for its language.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Wait for each server to come up and install its hover handler:
    // two return content, one returns None, and the capability-less server
    // must never be queried at all.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue a single hover; the handlers above observe which servers were asked.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Every capable server (including the one answering None) must have been queried.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned content contribute hover results.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
4877
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    // One fake TypeScript server that advertises hover capability.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server answers hovers with only whitespace-or-empty strings,
    // which the project should filter out entirely.
    let mut request_handled =
        fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String("  ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        });

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // The request must have reached the server even though the result is discarded.
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
4947
#[gpui::test]
async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    // Register four fake language servers for "tsx": three advertise code
    // action capability, the fourth advertises none.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoActionsCapabilitiesServer",
    ];

    let mut language_server_rxs = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    code_action_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all four servers.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Wait for each server to come up and install its code-action handler:
    // two return an action, one returns None, and the capability-less server
    // must never be queried at all.
    let mut servers_with_actions_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();

        assert!(
            !servers_with_actions_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.0.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_actions_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
                                    lsp::CodeAction {
                                        title: format!("{name} code action"),
                                        ..lsp::CodeAction::default()
                                    },
                                )]))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                servers_with_actions_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoActionsCapabilitiesServer" => {
                let _never_handled = new_server
                    .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for code actions server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Request code actions for the whole buffer; the handlers above observe
    // which servers were asked.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
    });

    // Every capable server (including the one answering None) must have been queried.
    let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
        |mut code_actions_request| async move {
            code_actions_request
                .next()
                .await
                .expect("All code actions requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned an action contribute results.
    assert_eq!(
        vec!["TailwindServer code action", "TypeScriptServer code action"],
        code_actions_task
            .await
            .unwrap()
            .into_iter()
            .map(|code_action| code_action.lsp_action.title)
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive code actions responses from all related servers with hover capabilities"
    );
}
5104
5105#[gpui::test]
5106async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5107 init_test(cx);
5108
5109 let fs = FakeFs::new(cx.executor());
5110 fs.insert_tree(
5111 "/dir",
5112 json!({
5113 "a.rs": "let a = 1;",
5114 "b.rs": "let b = 2;",
5115 "c.rs": "let c = 2;",
5116 }),
5117 )
5118 .await;
5119
5120 let project = Project::test(
5121 fs,
5122 [
5123 "/dir/a.rs".as_ref(),
5124 "/dir/b.rs".as_ref(),
5125 "/dir/c.rs".as_ref(),
5126 ],
5127 cx,
5128 )
5129 .await;
5130
5131 // check the initial state and get the worktrees
5132 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5133 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5134 assert_eq!(worktrees.len(), 3);
5135
5136 let worktree_a = worktrees[0].read(cx);
5137 let worktree_b = worktrees[1].read(cx);
5138 let worktree_c = worktrees[2].read(cx);
5139
5140 // check they start in the right order
5141 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5142 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5143 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5144
5145 (
5146 worktrees[0].clone(),
5147 worktrees[1].clone(),
5148 worktrees[2].clone(),
5149 )
5150 });
5151
5152 // move first worktree to after the second
5153 // [a, b, c] -> [b, a, c]
5154 project
5155 .update(cx, |project, cx| {
5156 let first = worktree_a.read(cx);
5157 let second = worktree_b.read(cx);
5158 project.move_worktree(first.id(), second.id(), cx)
5159 })
5160 .expect("moving first after second");
5161
5162 // check the state after moving
5163 project.update(cx, |project, cx| {
5164 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5165 assert_eq!(worktrees.len(), 3);
5166
5167 let first = worktrees[0].read(cx);
5168 let second = worktrees[1].read(cx);
5169 let third = worktrees[2].read(cx);
5170
5171 // check they are now in the right order
5172 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5173 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5174 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5175 });
5176
5177 // move the second worktree to before the first
5178 // [b, a, c] -> [a, b, c]
5179 project
5180 .update(cx, |project, cx| {
5181 let second = worktree_a.read(cx);
5182 let first = worktree_b.read(cx);
5183 project.move_worktree(first.id(), second.id(), cx)
5184 })
5185 .expect("moving second before first");
5186
5187 // check the state after moving
5188 project.update(cx, |project, cx| {
5189 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5190 assert_eq!(worktrees.len(), 3);
5191
5192 let first = worktrees[0].read(cx);
5193 let second = worktrees[1].read(cx);
5194 let third = worktrees[2].read(cx);
5195
5196 // check they are now in the right order
5197 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5198 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5199 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5200 });
5201
5202 // move the second worktree to after the third
5203 // [a, b, c] -> [a, c, b]
5204 project
5205 .update(cx, |project, cx| {
5206 let second = worktree_b.read(cx);
5207 let third = worktree_c.read(cx);
5208 project.move_worktree(second.id(), third.id(), cx)
5209 })
5210 .expect("moving second after third");
5211
5212 // check the state after moving
5213 project.update(cx, |project, cx| {
5214 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5215 assert_eq!(worktrees.len(), 3);
5216
5217 let first = worktrees[0].read(cx);
5218 let second = worktrees[1].read(cx);
5219 let third = worktrees[2].read(cx);
5220
5221 // check they are now in the right order
5222 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5223 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5224 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5225 });
5226
5227 // move the third worktree to before the second
5228 // [a, c, b] -> [a, b, c]
5229 project
5230 .update(cx, |project, cx| {
5231 let third = worktree_c.read(cx);
5232 let second = worktree_b.read(cx);
5233 project.move_worktree(third.id(), second.id(), cx)
5234 })
5235 .expect("moving third before second");
5236
5237 // check the state after moving
5238 project.update(cx, |project, cx| {
5239 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5240 assert_eq!(worktrees.len(), 3);
5241
5242 let first = worktrees[0].read(cx);
5243 let second = worktrees[1].read(cx);
5244 let third = worktrees[2].read(cx);
5245
5246 // check they are now in the right order
5247 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5248 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5249 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5250 });
5251
5252 // move the first worktree to after the third
5253 // [a, b, c] -> [b, c, a]
5254 project
5255 .update(cx, |project, cx| {
5256 let first = worktree_a.read(cx);
5257 let third = worktree_c.read(cx);
5258 project.move_worktree(first.id(), third.id(), cx)
5259 })
5260 .expect("moving first after third");
5261
5262 // check the state after moving
5263 project.update(cx, |project, cx| {
5264 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5265 assert_eq!(worktrees.len(), 3);
5266
5267 let first = worktrees[0].read(cx);
5268 let second = worktrees[1].read(cx);
5269 let third = worktrees[2].read(cx);
5270
5271 // check they are now in the right order
5272 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5273 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5274 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5275 });
5276
5277 // move the third worktree to before the first
5278 // [b, c, a] -> [a, b, c]
5279 project
5280 .update(cx, |project, cx| {
5281 let third = worktree_a.read(cx);
5282 let first = worktree_b.read(cx);
5283 project.move_worktree(third.id(), first.id(), cx)
5284 })
5285 .expect("moving third before first");
5286
5287 // check the state after moving
5288 project.update(cx, |project, cx| {
5289 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5290 assert_eq!(worktrees.len(), 3);
5291
5292 let first = worktrees[0].read(cx);
5293 let second = worktrees[1].read(cx);
5294 let third = worktrees[2].read(cx);
5295
5296 // check they are now in the right order
5297 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5298 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5299 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5300 });
5301}
5302
5303async fn search(
5304 project: &Model<Project>,
5305 query: SearchQuery,
5306 cx: &mut gpui::TestAppContext,
5307) -> Result<HashMap<String, Vec<Range<usize>>>> {
5308 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
5309 let mut results = HashMap::default();
5310 while let Some(search_result) = search_rx.next().await {
5311 match search_result {
5312 SearchResult::Buffer { buffer, ranges } => {
5313 results.entry(buffer).or_insert(ranges);
5314 }
5315 SearchResult::LimitReached => {}
5316 }
5317 }
5318 Ok(results
5319 .into_iter()
5320 .map(|(buffer, ranges)| {
5321 buffer.update(cx, |buffer, cx| {
5322 let path = buffer
5323 .file()
5324 .unwrap()
5325 .full_path(cx)
5326 .to_string_lossy()
5327 .to_string();
5328 let ranges = ranges
5329 .into_iter()
5330 .map(|range| range.to_offset(buffer))
5331 .collect::<Vec<_>>();
5332 (path, ranges)
5333 })
5334 })
5335 .collect())
5336}
5337
5338pub fn init_test(cx: &mut gpui::TestAppContext) {
5339 if std::env::var("RUST_LOG").is_ok() {
5340 env_logger::try_init().ok();
5341 }
5342
5343 cx.update(|cx| {
5344 let settings_store = SettingsStore::test(cx);
5345 cx.set_global(settings_store);
5346 release_channel::init(SemanticVersion::default(), cx);
5347 language::init(cx);
5348 Project::init_settings(cx);
5349 });
5350}
5351
5352fn json_lang() -> Arc<Language> {
5353 Arc::new(Language::new(
5354 LanguageConfig {
5355 name: "JSON".into(),
5356 matcher: LanguageMatcher {
5357 path_suffixes: vec!["json".to_string()],
5358 ..Default::default()
5359 },
5360 ..Default::default()
5361 },
5362 None,
5363 ))
5364}
5365
5366fn js_lang() -> Arc<Language> {
5367 Arc::new(Language::new(
5368 LanguageConfig {
5369 name: "JavaScript".into(),
5370 matcher: LanguageMatcher {
5371 path_suffixes: vec!["js".to_string()],
5372 ..Default::default()
5373 },
5374 ..Default::default()
5375 },
5376 None,
5377 ))
5378}
5379
5380fn rust_lang() -> Arc<Language> {
5381 Arc::new(Language::new(
5382 LanguageConfig {
5383 name: "Rust".into(),
5384 matcher: LanguageMatcher {
5385 path_suffixes: vec!["rs".to_string()],
5386 ..Default::default()
5387 },
5388 ..Default::default()
5389 },
5390 Some(tree_sitter_rust::LANGUAGE.into()),
5391 ))
5392}
5393
5394fn typescript_lang() -> Arc<Language> {
5395 Arc::new(Language::new(
5396 LanguageConfig {
5397 name: "TypeScript".into(),
5398 matcher: LanguageMatcher {
5399 path_suffixes: vec!["ts".to_string()],
5400 ..Default::default()
5401 },
5402 ..Default::default()
5403 },
5404 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
5405 ))
5406}
5407
5408fn tsx_lang() -> Arc<Language> {
5409 Arc::new(Language::new(
5410 LanguageConfig {
5411 name: "tsx".into(),
5412 matcher: LanguageMatcher {
5413 path_suffixes: vec!["tsx".to_string()],
5414 ..Default::default()
5415 },
5416 ..Default::default()
5417 },
5418 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
5419 ))
5420}
5421
5422fn get_all_tasks(
5423 project: &Model<Project>,
5424 worktree_id: Option<WorktreeId>,
5425 task_context: &TaskContext,
5426 cx: &mut AppContext,
5427) -> Vec<(TaskSourceKind, ResolvedTask)> {
5428 let (mut old, new) = project.update(cx, |project, cx| {
5429 project
5430 .task_store
5431 .read(cx)
5432 .task_inventory()
5433 .unwrap()
5434 .read(cx)
5435 .used_and_current_resolved_tasks(worktree_id, None, task_context, cx)
5436 });
5437 old.extend(new);
5438 old
5439}