1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::AppContext;
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14use std::{os, task::Poll};
15use unindent::Unindent as _;
16use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
17
18#[gpui::test]
19async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
20 cx.executor().allow_parking();
21
22 let (tx, mut rx) = futures::channel::mpsc::unbounded();
23 let _thread = std::thread::spawn(move || {
24 std::fs::metadata("/Users").unwrap();
25 std::thread::sleep(Duration::from_millis(1000));
26 tx.unbounded_send(1).unwrap();
27 });
28 rx.next().await.unwrap();
29}
30
31#[gpui::test]
32async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
33 cx.executor().allow_parking();
34
35 let io_task = smol::unblock(move || {
36 println!("sleeping on thread {:?}", std::thread::current().id());
37 std::thread::sleep(Duration::from_millis(10));
38 1
39 });
40
41 let task = cx.foreground_executor().spawn(async move {
42 io_task.await;
43 });
44
45 task.await;
46}
47
48#[cfg(not(windows))]
49#[gpui::test]
50async fn test_symlinks(cx: &mut gpui::TestAppContext) {
51 init_test(cx);
52 cx.executor().allow_parking();
53
54 let dir = temp_tree(json!({
55 "root": {
56 "apple": "",
57 "banana": {
58 "carrot": {
59 "date": "",
60 "endive": "",
61 }
62 },
63 "fennel": {
64 "grape": "",
65 }
66 }
67 }));
68
69 let root_link_path = dir.path().join("root_link");
70 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
71 os::unix::fs::symlink(
72 &dir.path().join("root/fennel"),
73 &dir.path().join("root/finnochio"),
74 )
75 .unwrap();
76
77 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
78
79 project.update(cx, |project, cx| {
80 let tree = project.worktrees().next().unwrap().read(cx);
81 assert_eq!(tree.file_count(), 5);
82 assert_eq!(
83 tree.inode_for_path("fennel/grape"),
84 tree.inode_for_path("finnochio/grape")
85 );
86 });
87}
88
89#[gpui::test]
90async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
91 init_test(cx);
92
93 let fs = FakeFs::new(cx.executor());
94 fs.insert_tree(
95 "/the-root",
96 json!({
97 ".zed": {
98 "settings.json": r#"{ "tab_size": 8 }"#,
99 "tasks.json": r#"[{
100 "label": "cargo check",
101 "command": "cargo",
102 "args": ["check", "--all"]
103 },]"#,
104 },
105 "a": {
106 "a.rs": "fn a() {\n A\n}"
107 },
108 "b": {
109 ".zed": {
110 "settings.json": r#"{ "tab_size": 2 }"#,
111 "tasks.json": r#"[{
112 "label": "cargo check",
113 "command": "cargo",
114 "args": ["check"]
115 },]"#,
116 },
117 "b.rs": "fn b() {\n B\n}"
118 }
119 }),
120 )
121 .await;
122
123 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
124 let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
125
126 cx.executor().run_until_parked();
127 cx.update(|cx| {
128 let tree = worktree.read(cx);
129
130 let settings_a = language_settings(
131 None,
132 Some(
133 &(File::for_entry(
134 tree.entry_for_path("a/a.rs").unwrap().clone(),
135 worktree.clone(),
136 ) as _),
137 ),
138 cx,
139 );
140 let settings_b = language_settings(
141 None,
142 Some(
143 &(File::for_entry(
144 tree.entry_for_path("b/b.rs").unwrap().clone(),
145 worktree.clone(),
146 ) as _),
147 ),
148 cx,
149 );
150
151 assert_eq!(settings_a.tab_size.get(), 8);
152 assert_eq!(settings_b.tab_size.get(), 2);
153
154 let workree_id = project.update(cx, |project, cx| {
155 project.worktrees().next().unwrap().read(cx).id()
156 });
157 let all_tasks = project
158 .update(cx, |project, cx| {
159 project.task_inventory().update(cx, |inventory, cx| {
160 inventory.list_tasks(None, None, false, cx)
161 })
162 })
163 .into_iter()
164 .map(|(source_kind, task)| (source_kind, task.name().to_string()))
165 .collect::<Vec<_>>();
166 assert_eq!(
167 all_tasks,
168 vec![
169 (
170 TaskSourceKind::Worktree {
171 id: workree_id,
172 abs_path: PathBuf::from("/the-root/.zed/tasks.json")
173 },
174 "cargo check".to_string()
175 ),
176 (
177 TaskSourceKind::Worktree {
178 id: workree_id,
179 abs_path: PathBuf::from("/the-root/b/.zed/tasks.json")
180 },
181 "cargo check".to_string()
182 ),
183 ]
184 );
185 });
186}
187
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // Exercises the full language-server lifecycle: servers start lazily when
    // a matching buffer opens, buffers are configured from server
    // capabilities, edits/saves/renames are routed to the right server(s),
    // and restarts re-open all relevant documents.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake Rust and JSON servers with distinct completion trigger
    // characters, so we can tell which server configured each buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp_adapter(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        // No language assigned yet: the registry doesn't know about Rust.
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: Default::default()
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        // No server for TOML, hence no completion triggers.
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: Default::default()
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // Only the rust buffer's change arrives; the toml edit is not forwarded.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    // A rename appears as a close of the old path followed by an open of the new one.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // Seed a diagnostic so we can verify it is cleared when the language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers must receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is not guaranteed, hence set equality).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
558
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // Verifies that `workspace/didChangeWatchedFiles` registrations cause the
    // worktree to (a) load ignored paths covered by a watcher and (b) forward
    // only matching FS events to the language server.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for counting the directory reads triggered by the registration below.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            // This watcher reaches into the gitignored `target` dir.
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Accumulate the server's incoming file events, sorted by URI for
    // deterministic assertions.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering the watchers alone produces no events...
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    // ...but it does cause extra directory scans to load the watched ignored paths.
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, three of which match the watched patterns
    // and two of which do not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
752
753#[gpui::test]
754async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
755 init_test(cx);
756
757 let fs = FakeFs::new(cx.executor());
758 fs.insert_tree(
759 "/dir",
760 json!({
761 "a.rs": "let a = 1;",
762 "b.rs": "let b = 2;"
763 }),
764 )
765 .await;
766
767 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
768
769 let buffer_a = project
770 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
771 .await
772 .unwrap();
773 let buffer_b = project
774 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
775 .await
776 .unwrap();
777
778 project.update(cx, |project, cx| {
779 project
780 .update_diagnostics(
781 LanguageServerId(0),
782 lsp::PublishDiagnosticsParams {
783 uri: Url::from_file_path("/dir/a.rs").unwrap(),
784 version: None,
785 diagnostics: vec![lsp::Diagnostic {
786 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
787 severity: Some(lsp::DiagnosticSeverity::ERROR),
788 message: "error 1".to_string(),
789 ..Default::default()
790 }],
791 },
792 &[],
793 cx,
794 )
795 .unwrap();
796 project
797 .update_diagnostics(
798 LanguageServerId(0),
799 lsp::PublishDiagnosticsParams {
800 uri: Url::from_file_path("/dir/b.rs").unwrap(),
801 version: None,
802 diagnostics: vec![lsp::Diagnostic {
803 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
804 severity: Some(lsp::DiagnosticSeverity::WARNING),
805 message: "error 2".to_string(),
806 ..Default::default()
807 }],
808 },
809 &[],
810 cx,
811 )
812 .unwrap();
813 });
814
815 buffer_a.update(cx, |buffer, _| {
816 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
817 assert_eq!(
818 chunks
819 .iter()
820 .map(|(s, d)| (s.as_str(), *d))
821 .collect::<Vec<_>>(),
822 &[
823 ("let ", None),
824 ("a", Some(DiagnosticSeverity::ERROR)),
825 (" = 1;", None),
826 ]
827 );
828 });
829 buffer_b.update(cx, |buffer, _| {
830 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
831 assert_eq!(
832 chunks
833 .iter()
834 .map(|(s, d)| (s.as_str(), *d))
835 .collect::<Vec<_>>(),
836 &[
837 ("let ", None),
838 ("b", Some(DiagnosticSeverity::WARNING)),
839 (" = 2;", None),
840 ]
841 );
842 });
843}
844
845#[gpui::test]
846async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
847 init_test(cx);
848
849 let fs = FakeFs::new(cx.executor());
850 fs.insert_tree(
851 "/root",
852 json!({
853 "dir": {
854 ".git": {
855 "HEAD": "ref: refs/heads/main",
856 },
857 ".gitignore": "b.rs",
858 "a.rs": "let a = 1;",
859 "b.rs": "let b = 2;",
860 },
861 "other.rs": "let b = c;"
862 }),
863 )
864 .await;
865
866 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
867 let (worktree, _) = project
868 .update(cx, |project, cx| {
869 project.find_or_create_local_worktree("/root/dir", true, cx)
870 })
871 .await
872 .unwrap();
873 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
874
875 let (worktree, _) = project
876 .update(cx, |project, cx| {
877 project.find_or_create_local_worktree("/root/other.rs", false, cx)
878 })
879 .await
880 .unwrap();
881 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
882
883 let server_id = LanguageServerId(0);
884 project.update(cx, |project, cx| {
885 project
886 .update_diagnostics(
887 server_id,
888 lsp::PublishDiagnosticsParams {
889 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
890 version: None,
891 diagnostics: vec![lsp::Diagnostic {
892 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
893 severity: Some(lsp::DiagnosticSeverity::ERROR),
894 message: "unused variable 'b'".to_string(),
895 ..Default::default()
896 }],
897 },
898 &[],
899 cx,
900 )
901 .unwrap();
902 project
903 .update_diagnostics(
904 server_id,
905 lsp::PublishDiagnosticsParams {
906 uri: Url::from_file_path("/root/other.rs").unwrap(),
907 version: None,
908 diagnostics: vec![lsp::Diagnostic {
909 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
910 severity: Some(lsp::DiagnosticSeverity::ERROR),
911 message: "unknown variable 'c'".to_string(),
912 ..Default::default()
913 }],
914 },
915 &[],
916 cx,
917 )
918 .unwrap();
919 });
920
921 let main_ignored_buffer = project
922 .update(cx, |project, cx| {
923 project.open_buffer((main_worktree_id, "b.rs"), cx)
924 })
925 .await
926 .unwrap();
927 main_ignored_buffer.update(cx, |buffer, _| {
928 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
929 assert_eq!(
930 chunks
931 .iter()
932 .map(|(s, d)| (s.as_str(), *d))
933 .collect::<Vec<_>>(),
934 &[
935 ("let ", None),
936 ("b", Some(DiagnosticSeverity::ERROR)),
937 (" = 2;", None),
938 ],
939 "Gigitnored buffers should still get in-buffer diagnostics",
940 );
941 });
942 let other_buffer = project
943 .update(cx, |project, cx| {
944 project.open_buffer((other_worktree_id, ""), cx)
945 })
946 .await
947 .unwrap();
948 other_buffer.update(cx, |buffer, _| {
949 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
950 assert_eq!(
951 chunks
952 .iter()
953 .map(|(s, d)| (s.as_str(), *d))
954 .collect::<Vec<_>>(),
955 &[
956 ("let b = ", None),
957 ("c", Some(DiagnosticSeverity::ERROR)),
958 (";", None),
959 ],
960 "Buffers from hidden projects should still get in-buffer diagnostics"
961 );
962 });
963
964 project.update(cx, |project, cx| {
965 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
966 assert_eq!(
967 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
968 vec![(
969 ProjectPath {
970 worktree_id: main_worktree_id,
971 path: Arc::from(Path::new("b.rs")),
972 },
973 server_id,
974 DiagnosticSummary {
975 error_count: 1,
976 warning_count: 0,
977 }
978 )]
979 );
980 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
981 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
982 });
983}
984
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies the event sequence for disk-based diagnostics: server added,
    // diagnostics started (via the progress token), per-path updates, and
    // diagnostics finished; also that publishing empty diagnostics twice in a
    // row only emits one update event.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(0)),
    );

    // Progress beginning under the configured token signals that disk-based
    // diagnostics have started.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the progress closes out the disk-based diagnostics cycle.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The published diagnostic is present in the newly opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // No second event: the diagnostics did not actually change.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1113
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Verifies that restarting a language server while its disk-based
    // diagnostics are still in progress does not leave the project stuck in a
    // "diagnostics running" state: the new server's progress cycle completes
    // normally even though the old server never ended its progress.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    // Opening the buffer starts the first server (id 0).
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the replacement server (id 1) is reported as running diagnostics.
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, _| {
        // Empty-array literal of the right element type.
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(0); 0]
        );
    });
}
1192
// Verifies that restarting a language server clears the diagnostics the old
// instance had published — both from the buffer's diagnostic set and from the
// project-wide diagnostic summary.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Let the notification propagate, then confirm both the buffer and the
    // project summary reflect the published error.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1272
// Verifies that after a restart, the new language server instance re-opens the
// buffer at a fresh document version (0) — even if the previous instance last
// reported diagnostics against a bogus, unknown buffer version.
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });
    // The replacement server should receive DidOpen with version reset to 0.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
1311
// Verifies that toggling `enable_language_server` per language in the user
// settings starts/stops only the matching server: disabling Rust stops the
// Rust server (JS untouched); re-enabling Rust while disabling JavaScript
// starts a fresh Rust server and stops the JS one.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // One fake adapter per language so each server's lifecycle can be
    // observed independently.
    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp_adapter(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer triggers the corresponding server to start.
    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server is asked to exit in response to the settings change.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A brand-new Rust server instance re-opens the still-open buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    // ...while the JavaScript server is shut down.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1425
// Verifies that LSP diagnostics published against an older buffer version are
// translated through the edits made since that version: ranges are remapped
// to current coordinates, overlapping diagnostics highlight correctly, and
// out-of-order publishes against the latest version still land in the right
// place after further edits.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // Rows 0..2 from the original publish now live at rows 2..4 because of the
    // two inserted newlines at the top.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        // Whole-buffer chunking: each error span is its own chunk.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A sub-range query clips chunks at the query boundaries.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the error severity wins; the
        // warning-only remainder still renders as a warning.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Entries come back sorted by position, even though they were
        // published out of order; ranges reflect the latest edits.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1705
// Verifies how zero-width diagnostic ranges are rendered: an empty range is
// widened to cover an adjacent character so it remains visible in the
// highlighted chunk stream.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Inject two zero-width diagnostics directly (no LSP server involved):
    // one mid-line (col 10 of "let one = ;") and one at end-of-line
    // (col 10 of "let two = ").
    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                LanguageServerId(0),
                None,
                vec![
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
1774
1775#[gpui::test]
1776async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1777 init_test(cx);
1778
1779 let fs = FakeFs::new(cx.executor());
1780 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1781 .await;
1782
1783 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1784
1785 project.update(cx, |project, cx| {
1786 project
1787 .update_diagnostic_entries(
1788 LanguageServerId(0),
1789 Path::new("/dir/a.rs").to_owned(),
1790 None,
1791 vec![DiagnosticEntry {
1792 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1793 diagnostic: Diagnostic {
1794 severity: DiagnosticSeverity::ERROR,
1795 is_primary: true,
1796 message: "syntax error a1".to_string(),
1797 ..Default::default()
1798 },
1799 }],
1800 cx,
1801 )
1802 .unwrap();
1803 project
1804 .update_diagnostic_entries(
1805 LanguageServerId(1),
1806 Path::new("/dir/a.rs").to_owned(),
1807 None,
1808 vec![DiagnosticEntry {
1809 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1810 diagnostic: Diagnostic {
1811 severity: DiagnosticSeverity::ERROR,
1812 is_primary: true,
1813 message: "syntax error b1".to_string(),
1814 ..Default::default()
1815 },
1816 }],
1817 cx,
1818 )
1819 .unwrap();
1820
1821 assert_eq!(
1822 project.diagnostic_summary(false, cx),
1823 DiagnosticSummary {
1824 error_count: 2,
1825 warning_count: 0,
1826 }
1827 );
1828 });
1829}
1830
// Verifies that `Project::edits_from_lsp` correctly interprets edits computed
// by a language server against an OLD document version: the edit ranges are
// rebased across the buffer changes made since that version, so applying the
// resulting edits yields the intended text.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the document version the server saw when the buffer was opened;
    // the LSP edits below will be tagged with this (soon-to-be-stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The LSP edits use coordinates from the ORIGINAL (pre-edit) buffer text.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits should preserve the user's interleaved
    // comments while landing the server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
1983
// Verifies that `Project::edits_from_lsp` collapses a large, redundant diff
// (delete-everything + reinsert-everything, as rust-analyzer emits for
// merge-imports) into the minimal set of real edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchors to points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Only two minimal edits survive: the import rewrite and the removal
        // of the now-duplicated second `use` line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2091
// Verifies that `Project::edits_from_lsp` tolerates malformed server edits:
// unordered edits, an inverted range (start after end), and a range whose end
// is past the end of the document. The result should still be the same
// minimal edit set as the well-formed case.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is far beyond the document end.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchors to points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the normalized result matches the
        // well-formed merge-imports case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2195
2196fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2197 buffer: &Buffer,
2198 range: Range<T>,
2199) -> Vec<(String, Option<DiagnosticSeverity>)> {
2200 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2201 for chunk in buffer.snapshot().chunks(range, true) {
2202 if chunks.last().map_or(false, |prev_chunk| {
2203 prev_chunk.1 == chunk.diagnostic_severity
2204 }) {
2205 chunks.last_mut().unwrap().0.push_str(chunk.text);
2206 } else {
2207 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2208 }
2209 }
2210 chunks
2211}
2212
// Verifies go-to-definition across files: resolving a definition in a file
// outside the project adds it as an invisible worktree, and dropping the
// definition releases that worktree.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs exists on disk but is outside it.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server answers GotoDefinition with a location inside a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was added as an invisible worktree to host the target buffer.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: list each worktree's absolute path and visibility flag.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees()
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2307
// Completions whose items carry no explicit text-edit range: the replaced
// range must be inferred from the text around the cursor, and `insert_text`
// (when present) takes precedence over `label` as the new text.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing after a word ("fqn"); insert_text differs from label.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // Inferred range covers the 3-char word "fqn" before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal; no insert_text, so the
    // label itself becomes the new text.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // Inferred range covers "cmp" before the cursor (which sits before the
    // closing quote).
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2399
// Completion text containing carriage returns ("\r" and "\r\n") must be
// normalized to plain "\n" before being offered as the buffer's new text.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    // Respond with an insert_text that mixes bare "\r" and "\r\n".
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both CR variants collapse to Unix newlines.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2460
// Applying a code action that resolves to a *command* (no edits): the command
// is executed on the server, which then pushes its edits back to the client
// via a `workspace/applyEdit` request; those edits must land in the returned
// project transaction and be undoable.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Advertise lazy code-action resolution (`resolve_provider: true`).
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying resolve `data`).
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2593
2594#[gpui::test(iterations = 10)]
2595async fn test_save_file(cx: &mut gpui::TestAppContext) {
2596 init_test(cx);
2597
2598 let fs = FakeFs::new(cx.executor());
2599 fs.insert_tree(
2600 "/dir",
2601 json!({
2602 "file1": "the old contents",
2603 }),
2604 )
2605 .await;
2606
2607 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2608 let buffer = project
2609 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2610 .await
2611 .unwrap();
2612 buffer.update(cx, |buffer, cx| {
2613 assert_eq!(buffer.text(), "the old contents");
2614 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2615 });
2616
2617 project
2618 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2619 .await
2620 .unwrap();
2621
2622 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2623 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2624}
2625
2626#[gpui::test(iterations = 30)]
2627async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2628 init_test(cx);
2629
2630 let fs = FakeFs::new(cx.executor().clone());
2631 fs.insert_tree(
2632 "/dir",
2633 json!({
2634 "file1": "the original contents",
2635 }),
2636 )
2637 .await;
2638
2639 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2640 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2641 let buffer = project
2642 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2643 .await
2644 .unwrap();
2645
2646 // Simulate buffer diffs being slow, so that they don't complete before
2647 // the next file change occurs.
2648 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2649
2650 // Change the buffer's file on disk, and then wait for the file change
2651 // to be detected by the worktree, so that the buffer starts reloading.
2652 fs.save(
2653 "/dir/file1".as_ref(),
2654 &"the first contents".into(),
2655 Default::default(),
2656 )
2657 .await
2658 .unwrap();
2659 worktree.next_event(cx);
2660
2661 // Change the buffer's file again. Depending on the random seed, the
2662 // previous file change may still be in progress.
2663 fs.save(
2664 "/dir/file1".as_ref(),
2665 &"the second contents".into(),
2666 Default::default(),
2667 )
2668 .await
2669 .unwrap();
2670 worktree.next_event(cx);
2671
2672 cx.executor().run_until_parked();
2673 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2674 buffer.read_with(cx, |buffer, _| {
2675 assert_eq!(buffer.text(), on_disk_text);
2676 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2677 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2678 });
2679}
2680
2681#[gpui::test(iterations = 30)]
2682async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2683 init_test(cx);
2684
2685 let fs = FakeFs::new(cx.executor().clone());
2686 fs.insert_tree(
2687 "/dir",
2688 json!({
2689 "file1": "the original contents",
2690 }),
2691 )
2692 .await;
2693
2694 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2695 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2696 let buffer = project
2697 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2698 .await
2699 .unwrap();
2700
2701 // Simulate buffer diffs being slow, so that they don't complete before
2702 // the next file change occurs.
2703 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2704
2705 // Change the buffer's file on disk, and then wait for the file change
2706 // to be detected by the worktree, so that the buffer starts reloading.
2707 fs.save(
2708 "/dir/file1".as_ref(),
2709 &"the first contents".into(),
2710 Default::default(),
2711 )
2712 .await
2713 .unwrap();
2714 worktree.next_event(cx);
2715
2716 cx.executor()
2717 .spawn(cx.executor().simulate_random_delay())
2718 .await;
2719
2720 // Perform a noop edit, causing the buffer's version to increase.
2721 buffer.update(cx, |buffer, cx| {
2722 buffer.edit([(0..0, " ")], None, cx);
2723 buffer.undo(cx);
2724 });
2725
2726 cx.executor().run_until_parked();
2727 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2728 buffer.read_with(cx, |buffer, _| {
2729 let buffer_text = buffer.text();
2730 if buffer_text == on_disk_text {
2731 assert!(
2732 !buffer.is_dirty() && !buffer.has_conflict(),
2733 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2734 );
2735 }
2736 // If the file change occurred while the buffer was processing the first
2737 // change, the buffer will be in a conflicting state.
2738 else {
2739 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2740 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2741 }
2742 });
2743}
2744
2745#[gpui::test]
2746async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2747 init_test(cx);
2748
2749 let fs = FakeFs::new(cx.executor());
2750 fs.insert_tree(
2751 "/dir",
2752 json!({
2753 "file1": "the old contents",
2754 }),
2755 )
2756 .await;
2757
2758 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2759 let buffer = project
2760 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2761 .await
2762 .unwrap();
2763 buffer.update(cx, |buffer, cx| {
2764 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2765 });
2766
2767 project
2768 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2769 .await
2770 .unwrap();
2771
2772 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2773 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2774}
2775
2776#[gpui::test]
2777async fn test_save_as(cx: &mut gpui::TestAppContext) {
2778 init_test(cx);
2779
2780 let fs = FakeFs::new(cx.executor());
2781 fs.insert_tree("/dir", json!({})).await;
2782
2783 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2784
2785 let languages = project.update(cx, |project, _| project.languages().clone());
2786 languages.add(rust_lang());
2787
2788 let buffer = project.update(cx, |project, cx| {
2789 project.create_buffer("", None, cx).unwrap()
2790 });
2791 buffer.update(cx, |buffer, cx| {
2792 buffer.edit([(0..0, "abc")], None, cx);
2793 assert!(buffer.is_dirty());
2794 assert!(!buffer.has_conflict());
2795 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2796 });
2797 project
2798 .update(cx, |project, cx| {
2799 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2800 })
2801 .await
2802 .unwrap();
2803 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2804
2805 cx.executor().run_until_parked();
2806 buffer.update(cx, |buffer, cx| {
2807 assert_eq!(
2808 buffer.file().unwrap().full_path(cx),
2809 Path::new("dir/file1.rs")
2810 );
2811 assert!(!buffer.is_dirty());
2812 assert!(!buffer.has_conflict());
2813 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2814 });
2815
2816 let opened_buffer = project
2817 .update(cx, |project, cx| {
2818 project.open_local_buffer("/dir/file1.rs", cx)
2819 })
2820 .await
2821 .unwrap();
2822 assert_eq!(opened_buffer, buffer);
2823}
2824
// Real-filesystem test: renames/deletions on disk must (a) preserve worktree
// entry ids, (b) update open buffers' file paths, and (c) replicate to a
// remote copy of the worktree via the observed update stream.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
    let rpc = project.update(cx, |p, _| p.client.clone());

    // Helper: open a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up a path's worktree entry id (panics if missing).
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());

    let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());

    // Capture every update the local worktree emits so we can replay them
    // against the remote copy later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids are stable across renames (including the directory rename).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers track their files' new paths; the deleted file's buffer
    // keeps its old path but is flagged as deleted.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
2973
2974#[gpui::test(iterations = 10)]
2975async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
2976 init_test(cx);
2977
2978 let fs = FakeFs::new(cx.executor());
2979 fs.insert_tree(
2980 "/dir",
2981 json!({
2982 "a": {
2983 "file1": "",
2984 }
2985 }),
2986 )
2987 .await;
2988
2989 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2990 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2991 let tree_id = tree.update(cx, |tree, _| tree.id());
2992
2993 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2994 project.update(cx, |project, cx| {
2995 let tree = project.worktrees().next().unwrap();
2996 tree.read(cx)
2997 .entry_for_path(path)
2998 .unwrap_or_else(|| panic!("no entry for path {}", path))
2999 .id
3000 })
3001 };
3002
3003 let dir_id = id_for_path("a", cx);
3004 let file_id = id_for_path("a/file1", cx);
3005 let buffer = project
3006 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3007 .await
3008 .unwrap();
3009 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3010
3011 project
3012 .update(cx, |project, cx| {
3013 project.rename_entry(dir_id, Path::new("b"), cx)
3014 })
3015 .unwrap()
3016 .await
3017 .unwrap();
3018 cx.executor().run_until_parked();
3019
3020 assert_eq!(id_for_path("b", cx), dir_id);
3021 assert_eq!(id_for_path("b/file1", cx), file_id);
3022 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3023}
3024
3025#[gpui::test]
3026async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3027 init_test(cx);
3028
3029 let fs = FakeFs::new(cx.executor());
3030 fs.insert_tree(
3031 "/dir",
3032 json!({
3033 "a.txt": "a-contents",
3034 "b.txt": "b-contents",
3035 }),
3036 )
3037 .await;
3038
3039 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3040
3041 // Spawn multiple tasks to open paths, repeating some paths.
3042 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3043 (
3044 p.open_local_buffer("/dir/a.txt", cx),
3045 p.open_local_buffer("/dir/b.txt", cx),
3046 p.open_local_buffer("/dir/a.txt", cx),
3047 )
3048 });
3049
3050 let buffer_a_1 = buffer_a_1.await.unwrap();
3051 let buffer_a_2 = buffer_a_2.await.unwrap();
3052 let buffer_b = buffer_b.await.unwrap();
3053 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3054 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3055
3056 // There is only one buffer per path.
3057 let buffer_a_id = buffer_a_1.entity_id();
3058 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3059
3060 // Open the same path again while it is still open.
3061 drop(buffer_a_1);
3062 let buffer_a_3 = project
3063 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3064 .await
3065 .unwrap();
3066
3067 // There's still only one buffer per path.
3068 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3069}
3070
// Dirty-state lifecycle: edits set the dirty flag (emitting DirtyChanged),
// saving clears it, reverting to the saved text clears it, and deleting the
// file on disk marks an open clean buffer dirty (but a buffer that was
// already dirty emits no extra DirtyChanged on deletion).
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    // Collect every non-Operation event the buffer emits.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation(_) => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[language::Event::Edited, language::Event::DirtyChanged]
        );
        events.lock().clear();
        // Simulate a save by telling the buffer its current state was saved.
        buffer.did_save(
            buffer.version(),
            buffer.as_rope().fingerprint(),
            buffer.file().unwrap().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::Event::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only the first of the two edits emits DirtyChanged, since
        // the buffer was already dirty for the second one.
        assert_eq!(
            *events.lock(),
            &[
                language::Event::Edited,
                language::Event::DirtyChanged,
                language::Event::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[language::Event::Edited, language::Event::DirtyChanged]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::Event::DirtyChanged,
            language::Event::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3216
// Disk-change handling for an open buffer: a clean buffer reloads via a diff
// (so anchors track their lines through the edit), while a dirty buffer does
// not reload and is instead marked as conflicted.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the first three rows, so we can
    // verify that anchors survive the reload-by-diff.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors followed their original lines: "aaa" moved to row 1,
        // "bbbbb" to row 3, and the removed "c" line's anchor landed at the
        // nearest surviving position.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3297
// Line-ending handling: buffers normalize text to "\n" internally while
// remembering the file's line-ending style, track style changes on reload,
// and write the remembered style back out on save.
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();

    // In-memory text is always "\n"-separated; the detected style is kept
    // alongside the buffer.
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        "/dir/file1".as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load("/dir/file2".as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
3359
/// Verifies that LSP diagnostics connected through `related_information` are
/// grouped: a primary diagnostic and the hint diagnostics pointing back at it
/// share a `group_id`, only the primary entry has `is_primary: true`,
/// diagnostics_in_range returns entries ordered by position, and
/// `diagnostic_group` returns exactly the members of one group.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Publish five diagnostics forming two groups:
    //  - "error 1" (WARNING) with one hint, linked both ways via
    //    `related_information` — asserted below as group 1.
    //  - "error 2" (ERROR) with two hints, likewise cross-linked — asserted
    //    below as group 0.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All five entries come back ordered by range, with group ids assigned and
    // exactly one primary per group.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 is the "error 2" group: its two hints plus the primary error.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 is the "error 1" group: the primary warning and its hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3601
/// Exercises the rename flow against a fake LSP server: `prepare_rename`
/// returns the symbol's range, and `perform_rename` applies a multi-file
/// `WorkspaceEdit` — including edits to `two.rs`, which was never explicitly
/// opened — to the corresponding buffers.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Register a fake Rust language server that advertises rename support
    // with prepare-rename enabled.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // prepare_rename at offset 7 (inside "ONE") yields the symbol's range.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // perform_rename: the fake server responds with edits to both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction covers both edited buffers; check each one's new text.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3734
/// Verifies project-wide text search: matches are found across on-disk files,
/// and a second search reflects unsaved in-memory edits made to an open
/// buffer.
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    // Whole-word search for "TWO" only matches the definition and the
    // qualified reference, not substrings.
    assert_eq!(
        search(
            &project,
            SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("two.rs".to_string(), vec![6..9]),
            ("three.rs".to_string(), vec![37..40])
        ])
    );

    // Replace "one::ONE" and "three::THREE" in four.rs with "two::TWO"
    // without saving, so the new matches exist only in the buffer.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/four.rs", cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The search now also finds the unsaved occurrences in four.rs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("two.rs".to_string(), vec![6..9]),
            ("three.rs".to_string(), vec![37..40]),
            ("four.rs".to_string(), vec![25..28, 36..39])
        ])
    );
}
3791
/// Verifies the inclusion-glob parameter of `SearchQuery::text`: only files
/// matched by at least one inclusion are searched, and inclusions that match
/// nothing are simply ignored when combined with ones that do.
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    // Two Rust and two TypeScript files, all containing the query word.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![PathMatcher::new("*.odd").unwrap()],
                Vec::new()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![PathMatcher::new("*.rs").unwrap()],
                Vec::new()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.rs".to_string(), vec![8..12]),
            ("two.rs".to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
                Vec::new()
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.ts".to_string(), vec![14..18]),
            ("two.ts".to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![
                    PathMatcher::new("*.rs").unwrap(),
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
                Vec::new()
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.rs".to_string(), vec![8..12]),
            ("one.ts".to_string(), vec![14..18]),
            ("two.rs".to_string(), vec![8..12]),
            ("two.ts".to_string(), vec![14..18]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
3907
/// Verifies the exclusion-glob parameter of `SearchQuery::text`: excluded
/// files are dropped from the results, non-matching exclusions are no-ops,
/// and excluding every file yields an empty result.
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    // Two Rust and two TypeScript files, all containing the query word.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![PathMatcher::new("*.odd").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.rs".to_string(), vec![8..12]),
            ("one.ts".to_string(), vec![14..18]),
            ("two.rs".to_string(), vec![8..12]),
            ("two.ts".to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![PathMatcher::new("*.rs").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.ts".to_string(), vec![14..18]),
            ("two.ts".to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.rs".to_string(), vec![8..12]),
            ("two.rs".to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![
                    PathMatcher::new("*.rs").unwrap(),
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
            ).unwrap(),
            cx
        )
        .await
        .unwrap().is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
4022
4023#[gpui::test]
4024async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4025 init_test(cx);
4026
4027 let search_query = "file";
4028
4029 let fs = FakeFs::new(cx.executor());
4030 fs.insert_tree(
4031 "/dir",
4032 json!({
4033 "one.rs": r#"// Rust file one"#,
4034 "one.ts": r#"// TypeScript file one"#,
4035 "two.rs": r#"// Rust file two"#,
4036 "two.ts": r#"// TypeScript file two"#,
4037 }),
4038 )
4039 .await;
4040 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4041
4042 assert!(
4043 search(
4044 &project,
4045 SearchQuery::text(
4046 search_query,
4047 false,
4048 true,
4049 false,
4050 vec![PathMatcher::new("*.odd").unwrap()],
4051 vec![PathMatcher::new("*.odd").unwrap()],
4052 )
4053 .unwrap(),
4054 cx
4055 )
4056 .await
4057 .unwrap()
4058 .is_empty(),
4059 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4060 );
4061
4062 assert!(
4063 search(
4064 &project,
4065 SearchQuery::text(
4066 search_query,
4067 false,
4068 true,
4069 false,
4070 vec![PathMatcher::new("*.ts").unwrap()],
4071 vec![PathMatcher::new("*.ts").unwrap()],
4072 ).unwrap(),
4073 cx
4074 )
4075 .await
4076 .unwrap()
4077 .is_empty(),
4078 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4079 );
4080
4081 assert!(
4082 search(
4083 &project,
4084 SearchQuery::text(
4085 search_query,
4086 false,
4087 true,
4088 false,
4089 vec![
4090 PathMatcher::new("*.ts").unwrap(),
4091 PathMatcher::new("*.odd").unwrap()
4092 ],
4093 vec![
4094 PathMatcher::new("*.ts").unwrap(),
4095 PathMatcher::new("*.odd").unwrap()
4096 ],
4097 )
4098 .unwrap(),
4099 cx
4100 )
4101 .await
4102 .unwrap()
4103 .is_empty(),
4104 "Non-matching inclusions and exclusions should not change that."
4105 );
4106
4107 assert_eq!(
4108 search(
4109 &project,
4110 SearchQuery::text(
4111 search_query,
4112 false,
4113 true,
4114 false,
4115 vec![
4116 PathMatcher::new("*.ts").unwrap(),
4117 PathMatcher::new("*.odd").unwrap()
4118 ],
4119 vec![
4120 PathMatcher::new("*.rs").unwrap(),
4121 PathMatcher::new("*.odd").unwrap()
4122 ],
4123 )
4124 .unwrap(),
4125 cx
4126 )
4127 .await
4128 .unwrap(),
4129 HashMap::from_iter([
4130 ("one.ts".to_string(), vec![14..18]),
4131 ("two.ts".to_string(), vec![14..18]),
4132 ]),
4133 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4134 );
4135}
4136
/// Verifies search behavior with respect to gitignored directories: ignored
/// files are skipped by default, included when the "include ignored" flag
/// (the fourth `SearchQuery::text` boolean here) is set, and the flag
/// composes with inclusion/exclusion globs.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // "target" and "node_modules" are ignored via .gitignore; only the
    // top-level package.json is tracked.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("package.json".to_string(), vec![8..11]),
            ("target/index.txt".to_string(), vec![6..9]),
            (
                "node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            ("node_modules/prettier/index.ts".to_string(), vec![15..18]),
            ("node_modules/eslint/index.ts".to_string(), vec![13..16]),
            ("node_modules/eslint/package.json".to_string(), vec![8..11]),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                vec![PathMatcher::new("node_modules/prettier/**").unwrap()],
                vec![PathMatcher::new("*.ts").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4224
/// `glob_literal_prefix` should return the leading portion of a glob pattern
/// that contains no glob metacharacters (`*`, `{…}`); a pattern with no
/// metacharacters is returned whole.
#[test]
fn test_glob_literal_prefix() {
    // (glob pattern, expected literal prefix)
    let cases = [
        ("**/*.js", ""),
        ("node_modules/**/*.js", "node_modules"),
        ("foo/{bar,baz}.js", "foo"),
        ("foo/bar/baz.js", "foo/bar/baz.js"),
    ];
    for (glob, expected_prefix) in cases {
        assert_eq!(
            glob_literal_prefix(glob),
            expected_prefix,
            "literal prefix of {glob:?}"
        );
    }
}
4232
/// Verifies `Project::create_entry` path validation: a name like "b.." is
/// allowed (".." as a suffix is not a traversal), while paths that escape the
/// worktree ("../../boop") or contain a ".." component ("four/../beep") are
/// rejected, and buffers cannot be opened via ".." paths either.
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The worktree root is "three"; "c.rs" lives outside it.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // A filename that merely ends in ".." is valid and gets created.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .unwrap()
        .await
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only "b.." was added to the filesystem; the rejected paths were not.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
4301
4302async fn search(
4303 project: &Model<Project>,
4304 query: SearchQuery,
4305 cx: &mut gpui::TestAppContext,
4306) -> Result<HashMap<String, Vec<Range<usize>>>> {
4307 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
4308 let mut result = HashMap::default();
4309 while let Some((buffer, range)) = search_rx.next().await {
4310 result.entry(buffer).or_insert(range);
4311 }
4312 Ok(result
4313 .into_iter()
4314 .map(|(buffer, ranges)| {
4315 buffer.update(cx, |buffer, _| {
4316 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
4317 let ranges = ranges
4318 .into_iter()
4319 .map(|range| range.to_offset(buffer))
4320 .collect::<Vec<_>>();
4321 (path, ranges)
4322 })
4323 })
4324 .collect())
4325}
4326
/// Shared setup for the tests in this file: installs a test `SettingsStore`
/// as a global, then initializes the release-channel, language, and project
/// settings state. Enables `env_logger` when `RUST_LOG` is set.
fn init_test(cx: &mut gpui::TestAppContext) {
    // Opt-in logging for debugging test runs; `try_init` tolerates repeated
    // calls across tests.
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::try_init().ok();
    }

    cx.update(|cx| {
        // The settings store must be in place before the init calls below,
        // which read settings from it.
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init("0.0.0", cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
4340
4341fn json_lang() -> Arc<Language> {
4342 Arc::new(Language::new(
4343 LanguageConfig {
4344 name: "JSON".into(),
4345 matcher: LanguageMatcher {
4346 path_suffixes: vec!["json".to_string()],
4347 ..Default::default()
4348 },
4349 ..Default::default()
4350 },
4351 None,
4352 ))
4353}
4354
4355fn js_lang() -> Arc<Language> {
4356 Arc::new(Language::new(
4357 LanguageConfig {
4358 name: Arc::from("JavaScript"),
4359 matcher: LanguageMatcher {
4360 path_suffixes: vec!["js".to_string()],
4361 ..Default::default()
4362 },
4363 ..Default::default()
4364 },
4365 None,
4366 ))
4367}
4368
4369fn rust_lang() -> Arc<Language> {
4370 Arc::new(Language::new(
4371 LanguageConfig {
4372 name: "Rust".into(),
4373 matcher: LanguageMatcher {
4374 path_suffixes: vec!["rs".to_string()],
4375 ..Default::default()
4376 },
4377 ..Default::default()
4378 },
4379 Some(tree_sitter_rust::language()),
4380 ))
4381}
4382
4383fn typescript_lang() -> Arc<Language> {
4384 Arc::new(Language::new(
4385 LanguageConfig {
4386 name: "TypeScript".into(),
4387 matcher: LanguageMatcher {
4388 path_suffixes: vec!["ts".to_string()],
4389 ..Default::default()
4390 },
4391 ..Default::default()
4392 },
4393 Some(tree_sitter_typescript::language_typescript()),
4394 ))
4395}