1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::AppContext;
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14use std::{os, task::Poll};
15use unindent::Unindent as _;
16use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
17
18#[gpui::test]
19async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
20 cx.executor().allow_parking();
21
22 let (tx, mut rx) = futures::channel::mpsc::unbounded();
23 let _thread = std::thread::spawn(move || {
24 std::fs::metadata("/Users").unwrap();
25 std::thread::sleep(Duration::from_millis(1000));
26 tx.unbounded_send(1).unwrap();
27 });
28 rx.next().await.unwrap();
29}
30
31#[gpui::test]
32async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
33 cx.executor().allow_parking();
34
35 let io_task = smol::unblock(move || {
36 println!("sleeping on thread {:?}", std::thread::current().id());
37 std::thread::sleep(Duration::from_millis(10));
38 1
39 });
40
41 let task = cx.foreground_executor().spawn(async move {
42 io_task.await;
43 });
44
45 task.await;
46}
47
48#[cfg(not(windows))]
49#[gpui::test]
50async fn test_symlinks(cx: &mut gpui::TestAppContext) {
51 init_test(cx);
52 cx.executor().allow_parking();
53
54 let dir = temp_tree(json!({
55 "root": {
56 "apple": "",
57 "banana": {
58 "carrot": {
59 "date": "",
60 "endive": "",
61 }
62 },
63 "fennel": {
64 "grape": "",
65 }
66 }
67 }));
68
69 let root_link_path = dir.path().join("root_link");
70 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
71 os::unix::fs::symlink(
72 &dir.path().join("root/fennel"),
73 &dir.path().join("root/finnochio"),
74 )
75 .unwrap();
76
77 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
78
79 project.update(cx, |project, cx| {
80 let tree = project.worktrees().next().unwrap().read(cx);
81 assert_eq!(tree.file_count(), 5);
82 assert_eq!(
83 tree.inode_for_path("fennel/grape"),
84 tree.inode_for_path("finnochio/grape")
85 );
86 });
87}
88
89#[gpui::test]
90async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
91 init_test(cx);
92
93 let fs = FakeFs::new(cx.executor());
94 fs.insert_tree(
95 "/the-root",
96 json!({
97 ".zed": {
98 "settings.json": r#"{ "tab_size": 8 }"#,
99 "tasks.json": r#"[{
100 "label": "cargo check",
101 "command": "cargo",
102 "args": ["check", "--all"]
103 },]"#,
104 },
105 "a": {
106 "a.rs": "fn a() {\n A\n}"
107 },
108 "b": {
109 ".zed": {
110 "settings.json": r#"{ "tab_size": 2 }"#,
111 "tasks.json": r#"[{
112 "label": "cargo check",
113 "command": "cargo",
114 "args": ["check"]
115 },]"#,
116 },
117 "b.rs": "fn b() {\n B\n}"
118 }
119 }),
120 )
121 .await;
122
123 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
124 let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
125
126 cx.executor().run_until_parked();
127 cx.update(|cx| {
128 let tree = worktree.read(cx);
129
130 let settings_a = language_settings(
131 None,
132 Some(
133 &(File::for_entry(
134 tree.entry_for_path("a/a.rs").unwrap().clone(),
135 worktree.clone(),
136 ) as _),
137 ),
138 cx,
139 );
140 let settings_b = language_settings(
141 None,
142 Some(
143 &(File::for_entry(
144 tree.entry_for_path("b/b.rs").unwrap().clone(),
145 worktree.clone(),
146 ) as _),
147 ),
148 cx,
149 );
150
151 assert_eq!(settings_a.tab_size.get(), 8);
152 assert_eq!(settings_b.tab_size.get(), 2);
153
154 let workree_id = project.update(cx, |project, cx| {
155 project.worktrees().next().unwrap().read(cx).id()
156 });
157 let all_tasks = project
158 .update(cx, |project, cx| {
159 project.task_inventory().update(cx, |inventory, cx| {
160 inventory.list_tasks(None, None, false, cx)
161 })
162 })
163 .into_iter()
164 .map(|(source_kind, task)| (source_kind, task.name().to_string()))
165 .collect::<Vec<_>>();
166 assert_eq!(
167 all_tasks,
168 vec![
169 (
170 TaskSourceKind::Worktree {
171 id: workree_id,
172 abs_path: PathBuf::from("/the-root/.zed/tasks.json")
173 },
174 "cargo check".to_string()
175 ),
176 (
177 TaskSourceKind::Worktree {
178 id: workree_id,
179 abs_path: PathBuf::from("/the-root/b/.zed/tasks.json")
180 },
181 "cargo check".to_string()
182 ),
183 ]
184 );
185 });
186}
187
// End-to-end check of language-server lifecycle management: servers start
// lazily when a matching buffer opens, receive open/change/save/close
// notifications only for buffers of their language, follow files across
// renames (including renames that change the file's language), and reopen
// their documents after a server restart. The notification sequence is
// order-sensitive, so the assertions below must stay in this order.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake Rust and JSON servers with distinct completion trigger
    // characters so we can tell which server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp_adapter(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        // No language assigned yet, because nothing is registered.
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: Default::default()
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        // The TOML buffer has no server, so no completion triggers.
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: Default::default()
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // The rust server only hears about the rust buffer's edit, not the TOML one.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    // The rename appears to the server as a close of the old path...
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),),
    );
    // ...followed by an open of the new path, with the version reset to 0.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // Seed a diagnostic attributed to the rust server so we can verify below
    // that it is cleared when the buffer changes language.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before being replaced.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is unspecified, hence the set comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
558
// Verifies `workspace/didChangeWatchedFiles` handling: ignored directories are
// not scanned until a language server registers a watcher matching them, and
// subsequent FS mutations are forwarded to the server only when they match one
// of its registered glob patterns.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                // `target` is gitignored and not expanded yet.
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for counting the directory reads triggered by the registration below.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // The (fake) server registers three watchers: a single file, a glob inside
    // `src`, and a recursive glob inside the gitignored `target/y` directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Record every DidChangeWatchedFiles notification, sorted by URI for
    // deterministic assertions.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // No spurious events from the registration itself, but the worktree had to
    // read the newly watched ignored directories.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                // Only `target/y` is expanded recursively, because only it
                // matched the registered glob.
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
752
753#[gpui::test]
754async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
755 init_test(cx);
756
757 let fs = FakeFs::new(cx.executor());
758 fs.insert_tree(
759 "/dir",
760 json!({
761 "a.rs": "let a = 1;",
762 "b.rs": "let b = 2;"
763 }),
764 )
765 .await;
766
767 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
768
769 let buffer_a = project
770 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
771 .await
772 .unwrap();
773 let buffer_b = project
774 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
775 .await
776 .unwrap();
777
778 project.update(cx, |project, cx| {
779 project
780 .update_diagnostics(
781 LanguageServerId(0),
782 lsp::PublishDiagnosticsParams {
783 uri: Url::from_file_path("/dir/a.rs").unwrap(),
784 version: None,
785 diagnostics: vec![lsp::Diagnostic {
786 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
787 severity: Some(lsp::DiagnosticSeverity::ERROR),
788 message: "error 1".to_string(),
789 ..Default::default()
790 }],
791 },
792 &[],
793 cx,
794 )
795 .unwrap();
796 project
797 .update_diagnostics(
798 LanguageServerId(0),
799 lsp::PublishDiagnosticsParams {
800 uri: Url::from_file_path("/dir/b.rs").unwrap(),
801 version: None,
802 diagnostics: vec![lsp::Diagnostic {
803 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
804 severity: Some(lsp::DiagnosticSeverity::WARNING),
805 message: "error 2".to_string(),
806 ..Default::default()
807 }],
808 },
809 &[],
810 cx,
811 )
812 .unwrap();
813 });
814
815 buffer_a.update(cx, |buffer, _| {
816 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
817 assert_eq!(
818 chunks
819 .iter()
820 .map(|(s, d)| (s.as_str(), *d))
821 .collect::<Vec<_>>(),
822 &[
823 ("let ", None),
824 ("a", Some(DiagnosticSeverity::ERROR)),
825 (" = 1;", None),
826 ]
827 );
828 });
829 buffer_b.update(cx, |buffer, _| {
830 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
831 assert_eq!(
832 chunks
833 .iter()
834 .map(|(s, d)| (s.as_str(), *d))
835 .collect::<Vec<_>>(),
836 &[
837 ("let ", None),
838 ("b", Some(DiagnosticSeverity::WARNING)),
839 (" = 2;", None),
840 ]
841 );
842 });
843}
844
845#[gpui::test]
846async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
847 init_test(cx);
848
849 let fs = FakeFs::new(cx.executor());
850 fs.insert_tree(
851 "/root",
852 json!({
853 "dir": {
854 ".git": {
855 "HEAD": "ref: refs/heads/main",
856 },
857 ".gitignore": "b.rs",
858 "a.rs": "let a = 1;",
859 "b.rs": "let b = 2;",
860 },
861 "other.rs": "let b = c;"
862 }),
863 )
864 .await;
865
866 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
867 let (worktree, _) = project
868 .update(cx, |project, cx| {
869 project.find_or_create_local_worktree("/root/dir", true, cx)
870 })
871 .await
872 .unwrap();
873 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
874
875 let (worktree, _) = project
876 .update(cx, |project, cx| {
877 project.find_or_create_local_worktree("/root/other.rs", false, cx)
878 })
879 .await
880 .unwrap();
881 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
882
883 let server_id = LanguageServerId(0);
884 project.update(cx, |project, cx| {
885 project
886 .update_diagnostics(
887 server_id,
888 lsp::PublishDiagnosticsParams {
889 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
890 version: None,
891 diagnostics: vec![lsp::Diagnostic {
892 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
893 severity: Some(lsp::DiagnosticSeverity::ERROR),
894 message: "unused variable 'b'".to_string(),
895 ..Default::default()
896 }],
897 },
898 &[],
899 cx,
900 )
901 .unwrap();
902 project
903 .update_diagnostics(
904 server_id,
905 lsp::PublishDiagnosticsParams {
906 uri: Url::from_file_path("/root/other.rs").unwrap(),
907 version: None,
908 diagnostics: vec![lsp::Diagnostic {
909 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
910 severity: Some(lsp::DiagnosticSeverity::ERROR),
911 message: "unknown variable 'c'".to_string(),
912 ..Default::default()
913 }],
914 },
915 &[],
916 cx,
917 )
918 .unwrap();
919 });
920
921 let main_ignored_buffer = project
922 .update(cx, |project, cx| {
923 project.open_buffer((main_worktree_id, "b.rs"), cx)
924 })
925 .await
926 .unwrap();
927 main_ignored_buffer.update(cx, |buffer, _| {
928 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
929 assert_eq!(
930 chunks
931 .iter()
932 .map(|(s, d)| (s.as_str(), *d))
933 .collect::<Vec<_>>(),
934 &[
935 ("let ", None),
936 ("b", Some(DiagnosticSeverity::ERROR)),
937 (" = 2;", None),
938 ],
939 "Gigitnored buffers should still get in-buffer diagnostics",
940 );
941 });
942 let other_buffer = project
943 .update(cx, |project, cx| {
944 project.open_buffer((other_worktree_id, ""), cx)
945 })
946 .await
947 .unwrap();
948 other_buffer.update(cx, |buffer, _| {
949 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
950 assert_eq!(
951 chunks
952 .iter()
953 .map(|(s, d)| (s.as_str(), *d))
954 .collect::<Vec<_>>(),
955 &[
956 ("let b = ", None),
957 ("c", Some(DiagnosticSeverity::ERROR)),
958 (";", None),
959 ],
960 "Buffers from hidden projects should still get in-buffer diagnostics"
961 );
962 });
963
964 project.update(cx, |project, cx| {
965 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
966 assert_eq!(
967 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
968 vec![(
969 ProjectPath {
970 worktree_id: main_worktree_id,
971 path: Arc::from(Path::new("b.rs")),
972 },
973 server_id,
974 DiagnosticSummary {
975 error_count: 1,
976 warning_count: 0,
977 }
978 )]
979 );
980 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
981 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
982 });
983}
984
// Verifies the project event sequence driven by a disk-based diagnostics
// progress token: Started -> DiagnosticsUpdated -> Finished, and that
// re-publishing an already-empty diagnostic set produces no second event.
// The assertions consume `cx.events(&project)` in order, so they are
// order-sensitive.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The adapter declares `progress_token` as its disk-based-diagnostics
    // token, so progress notifications with that token drive the
    // DiskBasedDiagnostics{Started,Finished} events.
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(0)),
    );

    // Starting progress on the token announces that disk-based diagnostics began.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending progress on the token announces completion.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The published diagnostic is visible on the newly opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Second empty publish: no event should be emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1113
// Restarting a language server while its disk-based diagnostics progress is
// still open must not leave the project stuck in the "diagnostics running"
// state: once the replacement server (id 1) finishes its own progress, the
// project reports no servers running disk-based diagnostics, even though the
// old server (id 0) never ended its progress.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The replacement server gets a fresh id (1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            // `[LanguageServerId(0); 0]` is just an empty, typed array.
            [LanguageServerId(0); 0]
        );
    });
}
1192
// Verifies that restarting a language server clears the diagnostics the
// old server instance had published — both the per-buffer entries and
// the project-wide diagnostic summary.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Let the notification propagate, then confirm both the buffer and
    // the project summary reflect the published error.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1272
// Verifies that a `publishDiagnostics` notification carrying a buffer
// version the client never produced (10000 here) is tolerated, and that
// after a server restart the buffer is re-opened with its real version
// (0) — i.e. the bogus version did not leak into version tracking.
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });
    // The restarted server re-opens the buffer with its true version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
1311
// Verifies that toggling `enable_language_server` in per-language user
// settings stops exactly the affected server (it receives an `exit`
// notification) and, when re-enabled, starts a fresh server that
// re-opens the still-open buffers — without disturbing servers for
// other languages.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp_adapter(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    // Opening each buffer starts the corresponding language server, which
    // receives a `didOpen` for that file.
    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The freshly started Rust server re-opens the still-open buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1425
// Exercises how diagnostics are transformed as the buffer changes:
// diagnostics published against an older document version are first
// positioned in that snapshot and then translated through the edits made
// since; overlapping diagnostics highlight correctly; and further local
// edits keep translating disk-based diagnostics.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        // Chunks coalesce runs of text with the same diagnostic severity.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Note: group ids keep incrementing across publishes (3 and 4 here).
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the error wins the chunk's
        // severity; the warning's tail keeps its own.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    // 'A' now sits after the inserted indent and the
                    // `(x: usize)` parameter list: column 21.
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1705
// Verifies how zero-width diagnostic ranges are rendered: an empty range
// in the middle of a line is extended forward over the next character;
// an empty range at end-of-line is extended backward over the previous
// character.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                LanguageServerId(0),
                None,
                vec![
                    // Empty range in the middle of line 0 (at the `;`).
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    // Empty range at the very end of line 1.
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
1774
1775#[gpui::test]
1776async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1777 init_test(cx);
1778
1779 let fs = FakeFs::new(cx.executor());
1780 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1781 .await;
1782
1783 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1784
1785 project.update(cx, |project, cx| {
1786 project
1787 .update_diagnostic_entries(
1788 LanguageServerId(0),
1789 Path::new("/dir/a.rs").to_owned(),
1790 None,
1791 vec![DiagnosticEntry {
1792 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1793 diagnostic: Diagnostic {
1794 severity: DiagnosticSeverity::ERROR,
1795 is_primary: true,
1796 message: "syntax error a1".to_string(),
1797 ..Default::default()
1798 },
1799 }],
1800 cx,
1801 )
1802 .unwrap();
1803 project
1804 .update_diagnostic_entries(
1805 LanguageServerId(1),
1806 Path::new("/dir/a.rs").to_owned(),
1807 None,
1808 vec![DiagnosticEntry {
1809 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1810 diagnostic: Diagnostic {
1811 severity: DiagnosticSeverity::ERROR,
1812 is_primary: true,
1813 message: "syntax error b1".to_string(),
1814 ..Default::default()
1815 },
1816 }],
1817 cx,
1818 )
1819 .unwrap();
1820
1821 assert_eq!(
1822 project.diagnostic_summary(false, cx),
1823 DiagnosticSummary {
1824 error_count: 2,
1825 warning_count: 0,
1826 }
1827 );
1828 });
1829}
1830
// Verifies that `edits_from_lsp` can apply edits the server computed
// against an older document version: the edits are interpreted in that
// past snapshot and then translated through the local edits made since,
// so they land in the right places in the current buffer.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the version the server saw when the document was opened;
    // the edits below will be reported against this version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The edit coordinates below are in terms of the ORIGINAL (opened)
    // document version, not the current buffer contents.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits preserves the user's local edits while
    // realizing the server's intended changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
1983
// Verifies that `edits_from_lsp` minimizes a huge "rewrite most of the
// file" diff (as rust-analyzer emits for a merge-imports code action)
// down to the small set of edits that actually change text.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Only the genuinely-changed spans survive minimization: the
        // rewritten use statement and the removal of the second one.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2091
// Verifies that `edits_from_lsp` tolerates malformed server edits:
// edits arriving out of order, an inverted range (start after end), and
// a range whose end line (99) lies far past the end of the document.
// The final minimized edits and resulting text match the well-formed
// equivalent (see the adjacent-lines test above this one in the file).
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) is after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position beyond the last line of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2195
2196fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2197 buffer: &Buffer,
2198 range: Range<T>,
2199) -> Vec<(String, Option<DiagnosticSeverity>)> {
2200 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2201 for chunk in buffer.snapshot().chunks(range, true) {
2202 if chunks.last().map_or(false, |prev_chunk| {
2203 prev_chunk.1 == chunk.diagnostic_severity
2204 }) {
2205 chunks.last_mut().unwrap().0.push_str(chunk.text);
2206 } else {
2207 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2208 }
2209 }
2210 chunks
2211}
2212
// Verifies go-to-definition into a file outside the project's root:
// the target file is loaded into a new *invisible* worktree, and that
// worktree is released once the last reference to the definition is
// dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside it.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server resolves the definition to a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // The target file was added as an invisible worktree next to the
        // visible one for b.rs.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Lists each worktree's absolute path and whether it is visible.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees()
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2307
// Completion items that lack an explicit `text_edit` range: the replacement
// range must be inferred client-side from the text around the cursor.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing after an identifier prefix (`fqn`). The server's
    // item carries `insert_text` but no edit range.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers the 3-character prefix `fqn`.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal, one character before the
    // closing quote. The item has no `insert_text` either, so the label
    // itself becomes the new text.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers `cmp`, stopping before the closing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2399
2400#[gpui::test]
2401async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2402 init_test(cx);
2403
2404 let fs = FakeFs::new(cx.executor());
2405 fs.insert_tree(
2406 "/dir",
2407 json!({
2408 "a.ts": "",
2409 }),
2410 )
2411 .await;
2412
2413 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2414
2415 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2416 language_registry.add(typescript_lang());
2417 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
2418 "TypeScript",
2419 FakeLspAdapter {
2420 capabilities: lsp::ServerCapabilities {
2421 completion_provider: Some(lsp::CompletionOptions {
2422 trigger_characters: Some(vec![":".to_string()]),
2423 ..Default::default()
2424 }),
2425 ..Default::default()
2426 },
2427 ..Default::default()
2428 },
2429 );
2430
2431 let buffer = project
2432 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2433 .await
2434 .unwrap();
2435
2436 let fake_server = fake_language_servers.next().await.unwrap();
2437
2438 let text = "let a = b.fqn";
2439 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2440 let completions = project.update(cx, |project, cx| {
2441 project.completions(&buffer, text.len(), cx)
2442 });
2443
2444 fake_server
2445 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2446 Ok(Some(lsp::CompletionResponse::Array(vec![
2447 lsp::CompletionItem {
2448 label: "fullyQualifiedName?".into(),
2449 insert_text: Some("fully\rQualified\r\nName".into()),
2450 ..Default::default()
2451 },
2452 ])))
2453 })
2454 .next()
2455 .await;
2456 let completions = completions.await.unwrap();
2457 assert_eq!(completions.len(), 1);
2458 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2459}
2460
// A code action whose resolution yields no edits must be applied by running
// its command; the edits then arrive via the server-initiated
// `workspace/applyEdit` request and are surfaced as a project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers =
        language_registry.register_fake_lsp_adapter("TypeScript", Default::default());

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    command: Some(lsp::Command {
                        title: "The command".into(),
                        command: "_the/command".into(),
                        arguments: Some(vec![json!("the-argument")]),
                    }),
                    ..Default::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..Default::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action — the one that carries a command.
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |action, _| async move { Ok(action) },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Ask the client to prepend "X" to the buffer.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The applied edit is undoable like any local edit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2573
2574#[gpui::test(iterations = 10)]
2575async fn test_save_file(cx: &mut gpui::TestAppContext) {
2576 init_test(cx);
2577
2578 let fs = FakeFs::new(cx.executor());
2579 fs.insert_tree(
2580 "/dir",
2581 json!({
2582 "file1": "the old contents",
2583 }),
2584 )
2585 .await;
2586
2587 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2588 let buffer = project
2589 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2590 .await
2591 .unwrap();
2592 buffer.update(cx, |buffer, cx| {
2593 assert_eq!(buffer.text(), "the old contents");
2594 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2595 });
2596
2597 project
2598 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2599 .await
2600 .unwrap();
2601
2602 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2603 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2604}
2605
2606#[gpui::test(iterations = 30)]
2607async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2608 init_test(cx);
2609
2610 let fs = FakeFs::new(cx.executor().clone());
2611 fs.insert_tree(
2612 "/dir",
2613 json!({
2614 "file1": "the original contents",
2615 }),
2616 )
2617 .await;
2618
2619 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2620 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2621 let buffer = project
2622 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2623 .await
2624 .unwrap();
2625
2626 // Simulate buffer diffs being slow, so that they don't complete before
2627 // the next file change occurs.
2628 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2629
2630 // Change the buffer's file on disk, and then wait for the file change
2631 // to be detected by the worktree, so that the buffer starts reloading.
2632 fs.save(
2633 "/dir/file1".as_ref(),
2634 &"the first contents".into(),
2635 Default::default(),
2636 )
2637 .await
2638 .unwrap();
2639 worktree.next_event(cx);
2640
2641 // Change the buffer's file again. Depending on the random seed, the
2642 // previous file change may still be in progress.
2643 fs.save(
2644 "/dir/file1".as_ref(),
2645 &"the second contents".into(),
2646 Default::default(),
2647 )
2648 .await
2649 .unwrap();
2650 worktree.next_event(cx);
2651
2652 cx.executor().run_until_parked();
2653 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2654 buffer.read_with(cx, |buffer, _| {
2655 assert_eq!(buffer.text(), on_disk_text);
2656 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2657 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2658 });
2659}
2660
2661#[gpui::test(iterations = 30)]
2662async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2663 init_test(cx);
2664
2665 let fs = FakeFs::new(cx.executor().clone());
2666 fs.insert_tree(
2667 "/dir",
2668 json!({
2669 "file1": "the original contents",
2670 }),
2671 )
2672 .await;
2673
2674 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2675 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2676 let buffer = project
2677 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2678 .await
2679 .unwrap();
2680
2681 // Simulate buffer diffs being slow, so that they don't complete before
2682 // the next file change occurs.
2683 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2684
2685 // Change the buffer's file on disk, and then wait for the file change
2686 // to be detected by the worktree, so that the buffer starts reloading.
2687 fs.save(
2688 "/dir/file1".as_ref(),
2689 &"the first contents".into(),
2690 Default::default(),
2691 )
2692 .await
2693 .unwrap();
2694 worktree.next_event(cx);
2695
2696 cx.executor()
2697 .spawn(cx.executor().simulate_random_delay())
2698 .await;
2699
2700 // Perform a noop edit, causing the buffer's version to increase.
2701 buffer.update(cx, |buffer, cx| {
2702 buffer.edit([(0..0, " ")], None, cx);
2703 buffer.undo(cx);
2704 });
2705
2706 cx.executor().run_until_parked();
2707 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2708 buffer.read_with(cx, |buffer, _| {
2709 let buffer_text = buffer.text();
2710 if buffer_text == on_disk_text {
2711 assert!(
2712 !buffer.is_dirty() && !buffer.has_conflict(),
2713 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2714 );
2715 }
2716 // If the file change occurred while the buffer was processing the first
2717 // change, the buffer will be in a conflicting state.
2718 else {
2719 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2720 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2721 }
2722 });
2723}
2724
2725#[gpui::test]
2726async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2727 init_test(cx);
2728
2729 let fs = FakeFs::new(cx.executor());
2730 fs.insert_tree(
2731 "/dir",
2732 json!({
2733 "file1": "the old contents",
2734 }),
2735 )
2736 .await;
2737
2738 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2739 let buffer = project
2740 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2741 .await
2742 .unwrap();
2743 buffer.update(cx, |buffer, cx| {
2744 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2745 });
2746
2747 project
2748 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2749 .await
2750 .unwrap();
2751
2752 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2753 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2754}
2755
// Save-as: an untitled buffer saved to a path becomes clean, joins the
// worktree, picks up the language matching its new extension, and is
// deduplicated with subsequent opens of the same path.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // Create an in-memory buffer with no backing file.
    let buffer = project.update(cx, |project, cx| {
        project.create_buffer("", None, cx).unwrap()
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
    });
    project
        .update(cx, |project, cx| {
            project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    // After save-as, the buffer is clean and has switched to the Rust
    // language based on its new `.rs` extension.
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
    });

    // Opening the newly saved path yields the same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
2804
// Exercises filesystem rescanning against the real filesystem: files are
// renamed, moved, and deleted; entry ids and open buffers must track the
// changes. The resulting updates are then replayed onto a remote worktree
// replica, which must converge to the same set of paths.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
    let rpc = project.update(cx, |p, _| p.client.clone());

    // Helper: open a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: the stable worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());

    let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());

    // Record every update the local worktree emits so it can be replayed on
    // the remote replica below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids are preserved across renames and moves.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths; the deleted file's
    // buffer keeps its old path but is flagged as deleted.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
2953
2954#[gpui::test(iterations = 10)]
2955async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
2956 init_test(cx);
2957
2958 let fs = FakeFs::new(cx.executor());
2959 fs.insert_tree(
2960 "/dir",
2961 json!({
2962 "a": {
2963 "file1": "",
2964 }
2965 }),
2966 )
2967 .await;
2968
2969 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2970 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2971 let tree_id = tree.update(cx, |tree, _| tree.id());
2972
2973 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2974 project.update(cx, |project, cx| {
2975 let tree = project.worktrees().next().unwrap();
2976 tree.read(cx)
2977 .entry_for_path(path)
2978 .unwrap_or_else(|| panic!("no entry for path {}", path))
2979 .id
2980 })
2981 };
2982
2983 let dir_id = id_for_path("a", cx);
2984 let file_id = id_for_path("a/file1", cx);
2985 let buffer = project
2986 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2987 .await
2988 .unwrap();
2989 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
2990
2991 project
2992 .update(cx, |project, cx| {
2993 project.rename_entry(dir_id, Path::new("b"), cx)
2994 })
2995 .unwrap()
2996 .await
2997 .unwrap();
2998 cx.executor().run_until_parked();
2999
3000 assert_eq!(id_for_path("b", cx), dir_id);
3001 assert_eq!(id_for_path("b/file1", cx), file_id);
3002 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3003}
3004
3005#[gpui::test]
3006async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3007 init_test(cx);
3008
3009 let fs = FakeFs::new(cx.executor());
3010 fs.insert_tree(
3011 "/dir",
3012 json!({
3013 "a.txt": "a-contents",
3014 "b.txt": "b-contents",
3015 }),
3016 )
3017 .await;
3018
3019 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3020
3021 // Spawn multiple tasks to open paths, repeating some paths.
3022 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3023 (
3024 p.open_local_buffer("/dir/a.txt", cx),
3025 p.open_local_buffer("/dir/b.txt", cx),
3026 p.open_local_buffer("/dir/a.txt", cx),
3027 )
3028 });
3029
3030 let buffer_a_1 = buffer_a_1.await.unwrap();
3031 let buffer_a_2 = buffer_a_2.await.unwrap();
3032 let buffer_b = buffer_b.await.unwrap();
3033 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3034 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3035
3036 // There is only one buffer per path.
3037 let buffer_a_id = buffer_a_1.entity_id();
3038 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3039
3040 // Open the same path again while it is still open.
3041 drop(buffer_a_1);
3042 let buffer_a_3 = project
3043 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3044 .await
3045 .unwrap();
3046
3047 // There's still only one buffer per path.
3048 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3049}
3050
// Tracks the buffer's dirty flag and emitted event sequence across edits,
// saves, reverts, and on-disk deletions.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    // Collected buffer events (operation events are filtered out below).
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation(_) => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[language::Event::Edited, language::Event::DirtyChanged]
        );
        events.lock().clear();
        // Simulate a save by reporting the current version as persisted.
        buffer.did_save(
            buffer.version(),
            buffer.as_rope().fingerprint(),
            buffer.file().unwrap().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::Event::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::Event::Edited,
                language::Event::DirtyChanged,
                language::Event::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[language::Event::Edited, language::Event::DirtyChanged]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::Event::DirtyChanged,
            language::Event::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3196
// An unmodified buffer silently reloads when its file changes on disk,
// keeping anchors in sensible positions; a modified buffer instead enters a
// conflict state rather than losing edits.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the first three rows, to check
    // below that they survive the reload diff.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3277
3278#[gpui::test]
3279async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3280 init_test(cx);
3281
3282 let fs = FakeFs::new(cx.executor());
3283 fs.insert_tree(
3284 "/dir",
3285 json!({
3286 "file1": "a\nb\nc\n",
3287 "file2": "one\r\ntwo\r\nthree\r\n",
3288 }),
3289 )
3290 .await;
3291
3292 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3293 let buffer1 = project
3294 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3295 .await
3296 .unwrap();
3297 let buffer2 = project
3298 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3299 .await
3300 .unwrap();
3301
3302 buffer1.update(cx, |buffer, _| {
3303 assert_eq!(buffer.text(), "a\nb\nc\n");
3304 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3305 });
3306 buffer2.update(cx, |buffer, _| {
3307 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3308 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3309 });
3310
3311 // Change a file's line endings on disk from unix to windows. The buffer's
3312 // state updates correctly.
3313 fs.save(
3314 "/dir/file1".as_ref(),
3315 &"aaa\nb\nc\n".into(),
3316 LineEnding::Windows,
3317 )
3318 .await
3319 .unwrap();
3320 cx.executor().run_until_parked();
3321 buffer1.update(cx, |buffer, _| {
3322 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3323 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3324 });
3325
3326 // Save a file with windows line endings. The file is written correctly.
3327 buffer2.update(cx, |buffer, cx| {
3328 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3329 });
3330 project
3331 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3332 .await
3333 .unwrap();
3334 assert_eq!(
3335 fs.load("/dir/file2".as_ref()).await.unwrap(),
3336 "one\r\ntwo\r\nthree\r\nfour\r\n",
3337 );
3338}
3339
// Checks diagnostic grouping: when an LSP server publishes a primary
// diagnostic whose `related_information` points at separately-published HINT
// diagnostics, the primary and its hints end up sharing a `group_id`, and
// `diagnostic_group` returns each group's entries together.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Build a publish-diagnostics message containing two primaries
    // ("error 1" and "error 2") plus standalone HINT diagnostics that
    // correspond to the primaries' related information.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary "error 1" (WARNING) with a single related hint.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Standalone HINT matching "error 1"'s related info, pointing
            // back at the original diagnostic.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary "error 2" (ERROR) with two related hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // Standalone HINT for "error 2 hint 1", pointing back at
            // "error 2"'s range.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Standalone HINT for "error 2 hint 2", likewise pointing back
            // at "error 2"'s range.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, in buffer order. "error 2" and its two hints share
    // group 0; "error 1" and its hint share group 1. Exactly one entry per
    // group is the primary.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: "error 2"'s hints followed by the primary "error 2".
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1: the primary "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3581
// Exercises the prepare-rename / perform-rename round trip through a fake
// LSP server: the prepared range comes back as buffer offsets, and the
// rename's WorkspaceEdit is applied to every affected buffer in a single
// transaction.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Advertise rename support, including `prepare_provider`, so the project
    // will send prepare-rename requests to this server.
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    // The fake server becomes available once the Rust buffer is opened.
    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside `ONE`); the fake server answers
    // with the range for positions 6..9 on line 0.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    // The prepared LSP range converts to byte offsets 6..9 in the buffer.
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename to "THREE"; the fake server responds with edits in
    // both `one.rs` (one edit) and `two.rs` (two edits).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction covers both buffers, with all edits already applied.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3714
3715#[gpui::test]
3716async fn test_search(cx: &mut gpui::TestAppContext) {
3717 init_test(cx);
3718
3719 let fs = FakeFs::new(cx.executor());
3720 fs.insert_tree(
3721 "/dir",
3722 json!({
3723 "one.rs": "const ONE: usize = 1;",
3724 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3725 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3726 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3727 }),
3728 )
3729 .await;
3730 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3731 assert_eq!(
3732 search(
3733 &project,
3734 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3735 cx
3736 )
3737 .await
3738 .unwrap(),
3739 HashMap::from_iter([
3740 ("two.rs".to_string(), vec![6..9]),
3741 ("three.rs".to_string(), vec![37..40])
3742 ])
3743 );
3744
3745 let buffer_4 = project
3746 .update(cx, |project, cx| {
3747 project.open_local_buffer("/dir/four.rs", cx)
3748 })
3749 .await
3750 .unwrap();
3751 buffer_4.update(cx, |buffer, cx| {
3752 let text = "two::TWO";
3753 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3754 });
3755
3756 assert_eq!(
3757 search(
3758 &project,
3759 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3760 cx
3761 )
3762 .await
3763 .unwrap(),
3764 HashMap::from_iter([
3765 ("two.rs".to_string(), vec![6..9]),
3766 ("three.rs".to_string(), vec![37..40]),
3767 ("four.rs".to_string(), vec![25..28, 36..39])
3768 ])
3769 );
3770}
3771
3772#[gpui::test]
3773async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3774 init_test(cx);
3775
3776 let search_query = "file";
3777
3778 let fs = FakeFs::new(cx.executor());
3779 fs.insert_tree(
3780 "/dir",
3781 json!({
3782 "one.rs": r#"// Rust file one"#,
3783 "one.ts": r#"// TypeScript file one"#,
3784 "two.rs": r#"// Rust file two"#,
3785 "two.ts": r#"// TypeScript file two"#,
3786 }),
3787 )
3788 .await;
3789 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3790
3791 assert!(
3792 search(
3793 &project,
3794 SearchQuery::text(
3795 search_query,
3796 false,
3797 true,
3798 false,
3799 vec![PathMatcher::new("*.odd").unwrap()],
3800 Vec::new()
3801 )
3802 .unwrap(),
3803 cx
3804 )
3805 .await
3806 .unwrap()
3807 .is_empty(),
3808 "If no inclusions match, no files should be returned"
3809 );
3810
3811 assert_eq!(
3812 search(
3813 &project,
3814 SearchQuery::text(
3815 search_query,
3816 false,
3817 true,
3818 false,
3819 vec![PathMatcher::new("*.rs").unwrap()],
3820 Vec::new()
3821 )
3822 .unwrap(),
3823 cx
3824 )
3825 .await
3826 .unwrap(),
3827 HashMap::from_iter([
3828 ("one.rs".to_string(), vec![8..12]),
3829 ("two.rs".to_string(), vec![8..12]),
3830 ]),
3831 "Rust only search should give only Rust files"
3832 );
3833
3834 assert_eq!(
3835 search(
3836 &project,
3837 SearchQuery::text(
3838 search_query,
3839 false,
3840 true,
3841 false,
3842 vec![
3843 PathMatcher::new("*.ts").unwrap(),
3844 PathMatcher::new("*.odd").unwrap(),
3845 ],
3846 Vec::new()
3847 ).unwrap(),
3848 cx
3849 )
3850 .await
3851 .unwrap(),
3852 HashMap::from_iter([
3853 ("one.ts".to_string(), vec![14..18]),
3854 ("two.ts".to_string(), vec![14..18]),
3855 ]),
3856 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3857 );
3858
3859 assert_eq!(
3860 search(
3861 &project,
3862 SearchQuery::text(
3863 search_query,
3864 false,
3865 true,
3866 false,
3867 vec![
3868 PathMatcher::new("*.rs").unwrap(),
3869 PathMatcher::new("*.ts").unwrap(),
3870 PathMatcher::new("*.odd").unwrap(),
3871 ],
3872 Vec::new()
3873 ).unwrap(),
3874 cx
3875 )
3876 .await
3877 .unwrap(),
3878 HashMap::from_iter([
3879 ("one.rs".to_string(), vec![8..12]),
3880 ("one.ts".to_string(), vec![14..18]),
3881 ("two.rs".to_string(), vec![8..12]),
3882 ("two.ts".to_string(), vec![14..18]),
3883 ]),
3884 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3885 );
3886}
3887
3888#[gpui::test]
3889async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3890 init_test(cx);
3891
3892 let search_query = "file";
3893
3894 let fs = FakeFs::new(cx.executor());
3895 fs.insert_tree(
3896 "/dir",
3897 json!({
3898 "one.rs": r#"// Rust file one"#,
3899 "one.ts": r#"// TypeScript file one"#,
3900 "two.rs": r#"// Rust file two"#,
3901 "two.ts": r#"// TypeScript file two"#,
3902 }),
3903 )
3904 .await;
3905 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3906
3907 assert_eq!(
3908 search(
3909 &project,
3910 SearchQuery::text(
3911 search_query,
3912 false,
3913 true,
3914 false,
3915 Vec::new(),
3916 vec![PathMatcher::new("*.odd").unwrap()],
3917 )
3918 .unwrap(),
3919 cx
3920 )
3921 .await
3922 .unwrap(),
3923 HashMap::from_iter([
3924 ("one.rs".to_string(), vec![8..12]),
3925 ("one.ts".to_string(), vec![14..18]),
3926 ("two.rs".to_string(), vec![8..12]),
3927 ("two.ts".to_string(), vec![14..18]),
3928 ]),
3929 "If no exclusions match, all files should be returned"
3930 );
3931
3932 assert_eq!(
3933 search(
3934 &project,
3935 SearchQuery::text(
3936 search_query,
3937 false,
3938 true,
3939 false,
3940 Vec::new(),
3941 vec![PathMatcher::new("*.rs").unwrap()],
3942 )
3943 .unwrap(),
3944 cx
3945 )
3946 .await
3947 .unwrap(),
3948 HashMap::from_iter([
3949 ("one.ts".to_string(), vec![14..18]),
3950 ("two.ts".to_string(), vec![14..18]),
3951 ]),
3952 "Rust exclusion search should give only TypeScript files"
3953 );
3954
3955 assert_eq!(
3956 search(
3957 &project,
3958 SearchQuery::text(
3959 search_query,
3960 false,
3961 true,
3962 false,
3963 Vec::new(),
3964 vec![
3965 PathMatcher::new("*.ts").unwrap(),
3966 PathMatcher::new("*.odd").unwrap(),
3967 ],
3968 ).unwrap(),
3969 cx
3970 )
3971 .await
3972 .unwrap(),
3973 HashMap::from_iter([
3974 ("one.rs".to_string(), vec![8..12]),
3975 ("two.rs".to_string(), vec![8..12]),
3976 ]),
3977 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
3978 );
3979
3980 assert!(
3981 search(
3982 &project,
3983 SearchQuery::text(
3984 search_query,
3985 false,
3986 true,
3987 false,
3988 Vec::new(),
3989 vec![
3990 PathMatcher::new("*.rs").unwrap(),
3991 PathMatcher::new("*.ts").unwrap(),
3992 PathMatcher::new("*.odd").unwrap(),
3993 ],
3994 ).unwrap(),
3995 cx
3996 )
3997 .await
3998 .unwrap().is_empty(),
3999 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4000 );
4001}
4002
4003#[gpui::test]
4004async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4005 init_test(cx);
4006
4007 let search_query = "file";
4008
4009 let fs = FakeFs::new(cx.executor());
4010 fs.insert_tree(
4011 "/dir",
4012 json!({
4013 "one.rs": r#"// Rust file one"#,
4014 "one.ts": r#"// TypeScript file one"#,
4015 "two.rs": r#"// Rust file two"#,
4016 "two.ts": r#"// TypeScript file two"#,
4017 }),
4018 )
4019 .await;
4020 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4021
4022 assert!(
4023 search(
4024 &project,
4025 SearchQuery::text(
4026 search_query,
4027 false,
4028 true,
4029 false,
4030 vec![PathMatcher::new("*.odd").unwrap()],
4031 vec![PathMatcher::new("*.odd").unwrap()],
4032 )
4033 .unwrap(),
4034 cx
4035 )
4036 .await
4037 .unwrap()
4038 .is_empty(),
4039 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4040 );
4041
4042 assert!(
4043 search(
4044 &project,
4045 SearchQuery::text(
4046 search_query,
4047 false,
4048 true,
4049 false,
4050 vec![PathMatcher::new("*.ts").unwrap()],
4051 vec![PathMatcher::new("*.ts").unwrap()],
4052 ).unwrap(),
4053 cx
4054 )
4055 .await
4056 .unwrap()
4057 .is_empty(),
4058 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4059 );
4060
4061 assert!(
4062 search(
4063 &project,
4064 SearchQuery::text(
4065 search_query,
4066 false,
4067 true,
4068 false,
4069 vec![
4070 PathMatcher::new("*.ts").unwrap(),
4071 PathMatcher::new("*.odd").unwrap()
4072 ],
4073 vec![
4074 PathMatcher::new("*.ts").unwrap(),
4075 PathMatcher::new("*.odd").unwrap()
4076 ],
4077 )
4078 .unwrap(),
4079 cx
4080 )
4081 .await
4082 .unwrap()
4083 .is_empty(),
4084 "Non-matching inclusions and exclusions should not change that."
4085 );
4086
4087 assert_eq!(
4088 search(
4089 &project,
4090 SearchQuery::text(
4091 search_query,
4092 false,
4093 true,
4094 false,
4095 vec![
4096 PathMatcher::new("*.ts").unwrap(),
4097 PathMatcher::new("*.odd").unwrap()
4098 ],
4099 vec![
4100 PathMatcher::new("*.rs").unwrap(),
4101 PathMatcher::new("*.odd").unwrap()
4102 ],
4103 )
4104 .unwrap(),
4105 cx
4106 )
4107 .await
4108 .unwrap(),
4109 HashMap::from_iter([
4110 ("one.ts".to_string(), vec![14..18]),
4111 ("two.ts".to_string(), vec![14..18]),
4112 ]),
4113 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4114 );
4115}
4116
4117#[gpui::test]
4118async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
4119 init_test(cx);
4120
4121 let fs = FakeFs::new(cx.background_executor.clone());
4122 fs.insert_tree(
4123 "/dir",
4124 json!({
4125 ".git": {},
4126 ".gitignore": "**/target\n/node_modules\n",
4127 "target": {
4128 "index.txt": "index_key:index_value"
4129 },
4130 "node_modules": {
4131 "eslint": {
4132 "index.ts": "const eslint_key = 'eslint value'",
4133 "package.json": r#"{ "some_key": "some value" }"#,
4134 },
4135 "prettier": {
4136 "index.ts": "const prettier_key = 'prettier value'",
4137 "package.json": r#"{ "other_key": "other value" }"#,
4138 },
4139 },
4140 "package.json": r#"{ "main_key": "main value" }"#,
4141 }),
4142 )
4143 .await;
4144 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4145
4146 let query = "key";
4147 assert_eq!(
4148 search(
4149 &project,
4150 SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
4151 cx
4152 )
4153 .await
4154 .unwrap(),
4155 HashMap::from_iter([("package.json".to_string(), vec![8..11])]),
4156 "Only one non-ignored file should have the query"
4157 );
4158
4159 assert_eq!(
4160 search(
4161 &project,
4162 SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
4163 cx
4164 )
4165 .await
4166 .unwrap(),
4167 HashMap::from_iter([
4168 ("package.json".to_string(), vec![8..11]),
4169 ("target/index.txt".to_string(), vec![6..9]),
4170 (
4171 "node_modules/prettier/package.json".to_string(),
4172 vec![9..12]
4173 ),
4174 ("node_modules/prettier/index.ts".to_string(), vec![15..18]),
4175 ("node_modules/eslint/index.ts".to_string(), vec![13..16]),
4176 ("node_modules/eslint/package.json".to_string(), vec![8..11]),
4177 ]),
4178 "Unrestricted search with ignored directories should find every file with the query"
4179 );
4180
4181 assert_eq!(
4182 search(
4183 &project,
4184 SearchQuery::text(
4185 query,
4186 false,
4187 false,
4188 true,
4189 vec![PathMatcher::new("node_modules/prettier/**").unwrap()],
4190 vec![PathMatcher::new("*.ts").unwrap()],
4191 )
4192 .unwrap(),
4193 cx
4194 )
4195 .await
4196 .unwrap(),
4197 HashMap::from_iter([(
4198 "node_modules/prettier/package.json".to_string(),
4199 vec![9..12]
4200 )]),
4201 "With search including ignored prettier directory and excluding TS files, only one file should be found"
4202 );
4203}
4204
// `glob_literal_prefix` should return the longest leading path portion of a
// glob that contains no wildcard or brace metacharacters.
#[test]
fn test_glob_literal_prefix() {
    let cases = [
        // A leading wildcard leaves no literal prefix.
        ("**/*.js", ""),
        // The prefix stops at the first component containing a wildcard.
        ("node_modules/**/*.js", "node_modules"),
        // Brace alternations also terminate the prefix.
        ("foo/{bar,baz}.js", "foo"),
        // A fully literal path is its own prefix.
        ("foo/bar/baz.js", "foo/bar/baz.js"),
    ];
    for (glob, expected_prefix) in cases {
        assert_eq!(glob_literal_prefix(glob), expected_prefix);
    }
}
4212
4213#[gpui::test]
4214async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4215 init_test(cx);
4216
4217 let fs = FakeFs::new(cx.executor().clone());
4218 fs.insert_tree(
4219 "/one/two",
4220 json!({
4221 "three": {
4222 "a.txt": "",
4223 "four": {}
4224 },
4225 "c.rs": ""
4226 }),
4227 )
4228 .await;
4229
4230 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
4231 project
4232 .update(cx, |project, cx| {
4233 let id = project.worktrees().next().unwrap().read(cx).id();
4234 project.create_entry((id, "b.."), true, cx)
4235 })
4236 .unwrap()
4237 .await
4238 .unwrap();
4239
4240 // Can't create paths outside the project
4241 let result = project
4242 .update(cx, |project, cx| {
4243 let id = project.worktrees().next().unwrap().read(cx).id();
4244 project.create_entry((id, "../../boop"), true, cx)
4245 })
4246 .await;
4247 assert!(result.is_err());
4248
4249 // Can't create paths with '..'
4250 let result = project
4251 .update(cx, |project, cx| {
4252 let id = project.worktrees().next().unwrap().read(cx).id();
4253 project.create_entry((id, "four/../beep"), true, cx)
4254 })
4255 .await;
4256 assert!(result.is_err());
4257
4258 assert_eq!(
4259 fs.paths(true),
4260 vec![
4261 PathBuf::from("/"),
4262 PathBuf::from("/one"),
4263 PathBuf::from("/one/two"),
4264 PathBuf::from("/one/two/c.rs"),
4265 PathBuf::from("/one/two/three"),
4266 PathBuf::from("/one/two/three/a.txt"),
4267 PathBuf::from("/one/two/three/b.."),
4268 PathBuf::from("/one/two/three/four"),
4269 ]
4270 );
4271
4272 // And we cannot open buffers with '..'
4273 let result = project
4274 .update(cx, |project, cx| {
4275 let id = project.worktrees().next().unwrap().read(cx).id();
4276 project.open_buffer((id, "../c.rs"), cx)
4277 })
4278 .await;
4279 assert!(result.is_err())
4280}
4281
4282async fn search(
4283 project: &Model<Project>,
4284 query: SearchQuery,
4285 cx: &mut gpui::TestAppContext,
4286) -> Result<HashMap<String, Vec<Range<usize>>>> {
4287 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
4288 let mut result = HashMap::default();
4289 while let Some((buffer, range)) = search_rx.next().await {
4290 result.entry(buffer).or_insert(range);
4291 }
4292 Ok(result
4293 .into_iter()
4294 .map(|(buffer, ranges)| {
4295 buffer.update(cx, |buffer, _| {
4296 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
4297 let ranges = ranges
4298 .into_iter()
4299 .map(|range| range.to_offset(buffer))
4300 .collect::<Vec<_>>();
4301 (path, ranges)
4302 })
4303 })
4304 .collect())
4305}
4306
4307fn init_test(cx: &mut gpui::TestAppContext) {
4308 if std::env::var("RUST_LOG").is_ok() {
4309 env_logger::try_init().ok();
4310 }
4311
4312 cx.update(|cx| {
4313 let settings_store = SettingsStore::test(cx);
4314 cx.set_global(settings_store);
4315 release_channel::init("0.0.0", cx);
4316 language::init(cx);
4317 Project::init_settings(cx);
4318 });
4319}
4320
4321fn json_lang() -> Arc<Language> {
4322 Arc::new(Language::new(
4323 LanguageConfig {
4324 name: "JSON".into(),
4325 matcher: LanguageMatcher {
4326 path_suffixes: vec!["json".to_string()],
4327 ..Default::default()
4328 },
4329 ..Default::default()
4330 },
4331 None,
4332 ))
4333}
4334
4335fn js_lang() -> Arc<Language> {
4336 Arc::new(Language::new(
4337 LanguageConfig {
4338 name: Arc::from("JavaScript"),
4339 matcher: LanguageMatcher {
4340 path_suffixes: vec!["js".to_string()],
4341 ..Default::default()
4342 },
4343 ..Default::default()
4344 },
4345 None,
4346 ))
4347}
4348
4349fn rust_lang() -> Arc<Language> {
4350 Arc::new(Language::new(
4351 LanguageConfig {
4352 name: "Rust".into(),
4353 matcher: LanguageMatcher {
4354 path_suffixes: vec!["rs".to_string()],
4355 ..Default::default()
4356 },
4357 ..Default::default()
4358 },
4359 Some(tree_sitter_rust::language()),
4360 ))
4361}
4362
4363fn typescript_lang() -> Arc<Language> {
4364 Arc::new(Language::new(
4365 LanguageConfig {
4366 name: "TypeScript".into(),
4367 matcher: LanguageMatcher {
4368 path_suffixes: vec!["ts".to_string()],
4369 ..Default::default()
4370 },
4371 ..Default::default()
4372 },
4373 Some(tree_sitter_typescript::language_typescript()),
4374 ))
4375}