1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::AppContext;
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14use std::{os, task::Poll};
15use unindent::Unindent as _;
16use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
17use worktree::WorktreeModelHandle as _;
18
19#[gpui::test]
20async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
21 cx.executor().allow_parking();
22
23 let (tx, mut rx) = futures::channel::mpsc::unbounded();
24 let _thread = std::thread::spawn(move || {
25 std::fs::metadata("/Users").unwrap();
26 std::thread::sleep(Duration::from_millis(1000));
27 tx.unbounded_send(1).unwrap();
28 });
29 rx.next().await.unwrap();
30}
31
32#[gpui::test]
33async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
34 cx.executor().allow_parking();
35
36 let io_task = smol::unblock(move || {
37 println!("sleeping on thread {:?}", std::thread::current().id());
38 std::thread::sleep(Duration::from_millis(10));
39 1
40 });
41
42 let task = cx.foreground_executor().spawn(async move {
43 io_task.await;
44 });
45
46 task.await;
47}
48
49#[cfg(not(windows))]
50#[gpui::test]
51async fn test_symlinks(cx: &mut gpui::TestAppContext) {
52 init_test(cx);
53 cx.executor().allow_parking();
54
55 let dir = temp_tree(json!({
56 "root": {
57 "apple": "",
58 "banana": {
59 "carrot": {
60 "date": "",
61 "endive": "",
62 }
63 },
64 "fennel": {
65 "grape": "",
66 }
67 }
68 }));
69
70 let root_link_path = dir.path().join("root_link");
71 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
72 os::unix::fs::symlink(
73 &dir.path().join("root/fennel"),
74 &dir.path().join("root/finnochio"),
75 )
76 .unwrap();
77
78 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
79
80 project.update(cx, |project, cx| {
81 let tree = project.worktrees().next().unwrap().read(cx);
82 assert_eq!(tree.file_count(), 5);
83 assert_eq!(
84 tree.inode_for_path("fennel/grape"),
85 tree.inode_for_path("finnochio/grape")
86 );
87 });
88}
89
90#[gpui::test]
91async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
92 init_test(cx);
93
94 let fs = FakeFs::new(cx.executor());
95 fs.insert_tree(
96 "/the-root",
97 json!({
98 ".zed": {
99 "settings.json": r#"{ "tab_size": 8 }"#,
100 "tasks.json": r#"[{
101 "label": "cargo check",
102 "command": "cargo",
103 "args": ["check", "--all"]
104 },]"#,
105 },
106 "a": {
107 "a.rs": "fn a() {\n A\n}"
108 },
109 "b": {
110 ".zed": {
111 "settings.json": r#"{ "tab_size": 2 }"#,
112 "tasks.json": r#"[{
113 "label": "cargo check",
114 "command": "cargo",
115 "args": ["check"]
116 },]"#,
117 },
118 "b.rs": "fn b() {\n B\n}"
119 }
120 }),
121 )
122 .await;
123
124 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
125 let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
126
127 cx.executor().run_until_parked();
128 cx.update(|cx| {
129 let tree = worktree.read(cx);
130
131 let settings_a = language_settings(
132 None,
133 Some(
134 &(File::for_entry(
135 tree.entry_for_path("a/a.rs").unwrap().clone(),
136 worktree.clone(),
137 ) as _),
138 ),
139 cx,
140 );
141 let settings_b = language_settings(
142 None,
143 Some(
144 &(File::for_entry(
145 tree.entry_for_path("b/b.rs").unwrap().clone(),
146 worktree.clone(),
147 ) as _),
148 ),
149 cx,
150 );
151
152 assert_eq!(settings_a.tab_size.get(), 8);
153 assert_eq!(settings_b.tab_size.get(), 2);
154
155 let workree_id = project.update(cx, |project, cx| {
156 project.worktrees().next().unwrap().read(cx).id()
157 });
158 let all_tasks = project
159 .update(cx, |project, cx| {
160 project.task_inventory().update(cx, |inventory, cx| {
161 inventory.list_tasks(None, None, false, cx)
162 })
163 })
164 .into_iter()
165 .map(|(source_kind, task)| (source_kind, task.name().to_string()))
166 .collect::<Vec<_>>();
167 assert_eq!(
168 all_tasks,
169 vec![
170 (
171 TaskSourceKind::Worktree {
172 id: workree_id,
173 abs_path: PathBuf::from("/the-root/.zed/tasks.json")
174 },
175 "cargo check".to_string()
176 ),
177 (
178 TaskSourceKind::Worktree {
179 id: workree_id,
180 abs_path: PathBuf::from("/the-root/b/.zed/tasks.json")
181 },
182 "cargo check".to_string()
183 ),
184 ]
185 );
186 });
187}
188
189#[gpui::test]
190async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
191 init_test(cx);
192
193 let fs = FakeFs::new(cx.executor());
194 fs.insert_tree(
195 "/the-root",
196 json!({
197 "test.rs": "const A: i32 = 1;",
198 "test2.rs": "",
199 "Cargo.toml": "a = 1",
200 "package.json": "{\"a\": 1}",
201 }),
202 )
203 .await;
204
205 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
206 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
207
208 let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
209 "Rust",
210 FakeLspAdapter {
211 name: "the-rust-language-server",
212 capabilities: lsp::ServerCapabilities {
213 completion_provider: Some(lsp::CompletionOptions {
214 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
215 ..Default::default()
216 }),
217 ..Default::default()
218 },
219 ..Default::default()
220 },
221 );
222 let mut fake_json_servers = language_registry.register_fake_lsp_adapter(
223 "JSON",
224 FakeLspAdapter {
225 name: "the-json-language-server",
226 capabilities: lsp::ServerCapabilities {
227 completion_provider: Some(lsp::CompletionOptions {
228 trigger_characters: Some(vec![":".to_string()]),
229 ..Default::default()
230 }),
231 ..Default::default()
232 },
233 ..Default::default()
234 },
235 );
236
237 // Open a buffer without an associated language server.
238 let toml_buffer = project
239 .update(cx, |project, cx| {
240 project.open_local_buffer("/the-root/Cargo.toml", cx)
241 })
242 .await
243 .unwrap();
244
245 // Open a buffer with an associated language server before the language for it has been loaded.
246 let rust_buffer = project
247 .update(cx, |project, cx| {
248 project.open_local_buffer("/the-root/test.rs", cx)
249 })
250 .await
251 .unwrap();
252 rust_buffer.update(cx, |buffer, _| {
253 assert_eq!(buffer.language().map(|l| l.name()), None);
254 });
255
256 // Now we add the languages to the project, and ensure they get assigned to all
257 // the relevant open buffers.
258 language_registry.add(json_lang());
259 language_registry.add(rust_lang());
260 cx.executor().run_until_parked();
261 rust_buffer.update(cx, |buffer, _| {
262 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
263 });
264
265 // A server is started up, and it is notified about Rust files.
266 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
267 assert_eq!(
268 fake_rust_server
269 .receive_notification::<lsp::notification::DidOpenTextDocument>()
270 .await
271 .text_document,
272 lsp::TextDocumentItem {
273 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
274 version: 0,
275 text: "const A: i32 = 1;".to_string(),
276 language_id: Default::default()
277 }
278 );
279
280 // The buffer is configured based on the language server's capabilities.
281 rust_buffer.update(cx, |buffer, _| {
282 assert_eq!(
283 buffer.completion_triggers(),
284 &[".".to_string(), "::".to_string()]
285 );
286 });
287 toml_buffer.update(cx, |buffer, _| {
288 assert!(buffer.completion_triggers().is_empty());
289 });
290
291 // Edit a buffer. The changes are reported to the language server.
292 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
293 assert_eq!(
294 fake_rust_server
295 .receive_notification::<lsp::notification::DidChangeTextDocument>()
296 .await
297 .text_document,
298 lsp::VersionedTextDocumentIdentifier::new(
299 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
300 1
301 )
302 );
303
304 // Open a third buffer with a different associated language server.
305 let json_buffer = project
306 .update(cx, |project, cx| {
307 project.open_local_buffer("/the-root/package.json", cx)
308 })
309 .await
310 .unwrap();
311
312 // A json language server is started up and is only notified about the json buffer.
313 let mut fake_json_server = fake_json_servers.next().await.unwrap();
314 assert_eq!(
315 fake_json_server
316 .receive_notification::<lsp::notification::DidOpenTextDocument>()
317 .await
318 .text_document,
319 lsp::TextDocumentItem {
320 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
321 version: 0,
322 text: "{\"a\": 1}".to_string(),
323 language_id: Default::default()
324 }
325 );
326
327 // This buffer is configured based on the second language server's
328 // capabilities.
329 json_buffer.update(cx, |buffer, _| {
330 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
331 });
332
333 // When opening another buffer whose language server is already running,
334 // it is also configured based on the existing language server's capabilities.
335 let rust_buffer2 = project
336 .update(cx, |project, cx| {
337 project.open_local_buffer("/the-root/test2.rs", cx)
338 })
339 .await
340 .unwrap();
341 rust_buffer2.update(cx, |buffer, _| {
342 assert_eq!(
343 buffer.completion_triggers(),
344 &[".".to_string(), "::".to_string()]
345 );
346 });
347
348 // Changes are reported only to servers matching the buffer's language.
349 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
350 rust_buffer2.update(cx, |buffer, cx| {
351 buffer.edit([(0..0, "let x = 1;")], None, cx)
352 });
353 assert_eq!(
354 fake_rust_server
355 .receive_notification::<lsp::notification::DidChangeTextDocument>()
356 .await
357 .text_document,
358 lsp::VersionedTextDocumentIdentifier::new(
359 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
360 1
361 )
362 );
363
364 // Save notifications are reported to all servers.
365 project
366 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
367 .await
368 .unwrap();
369 assert_eq!(
370 fake_rust_server
371 .receive_notification::<lsp::notification::DidSaveTextDocument>()
372 .await
373 .text_document,
374 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
375 );
376 assert_eq!(
377 fake_json_server
378 .receive_notification::<lsp::notification::DidSaveTextDocument>()
379 .await
380 .text_document,
381 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
382 );
383
384 // Renames are reported only to servers matching the buffer's language.
385 fs.rename(
386 Path::new("/the-root/test2.rs"),
387 Path::new("/the-root/test3.rs"),
388 Default::default(),
389 )
390 .await
391 .unwrap();
392 assert_eq!(
393 fake_rust_server
394 .receive_notification::<lsp::notification::DidCloseTextDocument>()
395 .await
396 .text_document,
397 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
398 );
399 assert_eq!(
400 fake_rust_server
401 .receive_notification::<lsp::notification::DidOpenTextDocument>()
402 .await
403 .text_document,
404 lsp::TextDocumentItem {
405 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
406 version: 0,
407 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
408 language_id: Default::default()
409 },
410 );
411
412 rust_buffer2.update(cx, |buffer, cx| {
413 buffer.update_diagnostics(
414 LanguageServerId(0),
415 DiagnosticSet::from_sorted_entries(
416 vec![DiagnosticEntry {
417 diagnostic: Default::default(),
418 range: Anchor::MIN..Anchor::MAX,
419 }],
420 &buffer.snapshot(),
421 ),
422 cx,
423 );
424 assert_eq!(
425 buffer
426 .snapshot()
427 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
428 .count(),
429 1
430 );
431 });
432
433 // When the rename changes the extension of the file, the buffer gets closed on the old
434 // language server and gets opened on the new one.
435 fs.rename(
436 Path::new("/the-root/test3.rs"),
437 Path::new("/the-root/test3.json"),
438 Default::default(),
439 )
440 .await
441 .unwrap();
442 assert_eq!(
443 fake_rust_server
444 .receive_notification::<lsp::notification::DidCloseTextDocument>()
445 .await
446 .text_document,
447 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
448 );
449 assert_eq!(
450 fake_json_server
451 .receive_notification::<lsp::notification::DidOpenTextDocument>()
452 .await
453 .text_document,
454 lsp::TextDocumentItem {
455 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
456 version: 0,
457 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
458 language_id: Default::default()
459 },
460 );
461
462 // We clear the diagnostics, since the language has changed.
463 rust_buffer2.update(cx, |buffer, _| {
464 assert_eq!(
465 buffer
466 .snapshot()
467 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
468 .count(),
469 0
470 );
471 });
472
473 // The renamed file's version resets after changing language server.
474 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
475 assert_eq!(
476 fake_json_server
477 .receive_notification::<lsp::notification::DidChangeTextDocument>()
478 .await
479 .text_document,
480 lsp::VersionedTextDocumentIdentifier::new(
481 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
482 1
483 )
484 );
485
486 // Restart language servers
487 project.update(cx, |project, cx| {
488 project.restart_language_servers_for_buffers(
489 vec![rust_buffer.clone(), json_buffer.clone()],
490 cx,
491 );
492 });
493
494 let mut rust_shutdown_requests = fake_rust_server
495 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
496 let mut json_shutdown_requests = fake_json_server
497 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
498 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
499
500 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
501 let mut fake_json_server = fake_json_servers.next().await.unwrap();
502
503 // Ensure rust document is reopened in new rust language server
504 assert_eq!(
505 fake_rust_server
506 .receive_notification::<lsp::notification::DidOpenTextDocument>()
507 .await
508 .text_document,
509 lsp::TextDocumentItem {
510 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
511 version: 0,
512 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
513 language_id: Default::default()
514 }
515 );
516
517 // Ensure json documents are reopened in new json language server
518 assert_set_eq!(
519 [
520 fake_json_server
521 .receive_notification::<lsp::notification::DidOpenTextDocument>()
522 .await
523 .text_document,
524 fake_json_server
525 .receive_notification::<lsp::notification::DidOpenTextDocument>()
526 .await
527 .text_document,
528 ],
529 [
530 lsp::TextDocumentItem {
531 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
532 version: 0,
533 text: json_buffer.update(cx, |buffer, _| buffer.text()),
534 language_id: Default::default()
535 },
536 lsp::TextDocumentItem {
537 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
538 version: 0,
539 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
540 language_id: Default::default()
541 }
542 ]
543 );
544
545 // Close notifications are reported only to servers matching the buffer's language.
546 cx.update(|_| drop(json_buffer));
547 let close_message = lsp::DidCloseTextDocumentParams {
548 text_document: lsp::TextDocumentIdentifier::new(
549 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
550 ),
551 };
552 assert_eq!(
553 fake_json_server
554 .receive_notification::<lsp::notification::DidCloseTextDocument>()
555 .await,
556 close_message,
557 );
558}
559
560#[gpui::test]
561async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
562 init_test(cx);
563
564 let fs = FakeFs::new(cx.executor());
565 fs.insert_tree(
566 "/the-root",
567 json!({
568 ".gitignore": "target\n",
569 "src": {
570 "a.rs": "",
571 "b.rs": "",
572 },
573 "target": {
574 "x": {
575 "out": {
576 "x.rs": ""
577 }
578 },
579 "y": {
580 "out": {
581 "y.rs": "",
582 }
583 },
584 "z": {
585 "out": {
586 "z.rs": ""
587 }
588 }
589 }
590 }),
591 )
592 .await;
593
594 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
595 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
596 language_registry.add(rust_lang());
597 let mut fake_servers = language_registry.register_fake_lsp_adapter(
598 "Rust",
599 FakeLspAdapter {
600 name: "the-language-server",
601 ..Default::default()
602 },
603 );
604
605 cx.executor().run_until_parked();
606
607 // Start the language server by opening a buffer with a compatible file extension.
608 let _buffer = project
609 .update(cx, |project, cx| {
610 project.open_local_buffer("/the-root/src/a.rs", cx)
611 })
612 .await
613 .unwrap();
614
615 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
616 project.update(cx, |project, cx| {
617 let worktree = project.worktrees().next().unwrap();
618 assert_eq!(
619 worktree
620 .read(cx)
621 .snapshot()
622 .entries(true)
623 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
624 .collect::<Vec<_>>(),
625 &[
626 (Path::new(""), false),
627 (Path::new(".gitignore"), false),
628 (Path::new("src"), false),
629 (Path::new("src/a.rs"), false),
630 (Path::new("src/b.rs"), false),
631 (Path::new("target"), true),
632 ]
633 );
634 });
635
636 let prev_read_dir_count = fs.read_dir_call_count();
637
638 // Keep track of the FS events reported to the language server.
639 let fake_server = fake_servers.next().await.unwrap();
640 let file_changes = Arc::new(Mutex::new(Vec::new()));
641 fake_server
642 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
643 registrations: vec![lsp::Registration {
644 id: Default::default(),
645 method: "workspace/didChangeWatchedFiles".to_string(),
646 register_options: serde_json::to_value(
647 lsp::DidChangeWatchedFilesRegistrationOptions {
648 watchers: vec![
649 lsp::FileSystemWatcher {
650 glob_pattern: lsp::GlobPattern::String(
651 "/the-root/Cargo.toml".to_string(),
652 ),
653 kind: None,
654 },
655 lsp::FileSystemWatcher {
656 glob_pattern: lsp::GlobPattern::String(
657 "/the-root/src/*.{rs,c}".to_string(),
658 ),
659 kind: None,
660 },
661 lsp::FileSystemWatcher {
662 glob_pattern: lsp::GlobPattern::String(
663 "/the-root/target/y/**/*.rs".to_string(),
664 ),
665 kind: None,
666 },
667 ],
668 },
669 )
670 .ok(),
671 }],
672 })
673 .await
674 .unwrap();
675 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
676 let file_changes = file_changes.clone();
677 move |params, _| {
678 let mut file_changes = file_changes.lock();
679 file_changes.extend(params.changes);
680 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
681 }
682 });
683
684 cx.executor().run_until_parked();
685 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
686 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
687
688 // Now the language server has asked us to watch an ignored directory path,
689 // so we recursively load it.
690 project.update(cx, |project, cx| {
691 let worktree = project.worktrees().next().unwrap();
692 assert_eq!(
693 worktree
694 .read(cx)
695 .snapshot()
696 .entries(true)
697 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
698 .collect::<Vec<_>>(),
699 &[
700 (Path::new(""), false),
701 (Path::new(".gitignore"), false),
702 (Path::new("src"), false),
703 (Path::new("src/a.rs"), false),
704 (Path::new("src/b.rs"), false),
705 (Path::new("target"), true),
706 (Path::new("target/x"), true),
707 (Path::new("target/y"), true),
708 (Path::new("target/y/out"), true),
709 (Path::new("target/y/out/y.rs"), true),
710 (Path::new("target/z"), true),
711 ]
712 );
713 });
714
715 // Perform some file system mutations, two of which match the watched patterns,
716 // and one of which does not.
717 fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
718 .await
719 .unwrap();
720 fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
721 .await
722 .unwrap();
723 fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
724 .await
725 .unwrap();
726 fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
727 .await
728 .unwrap();
729 fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
730 .await
731 .unwrap();
732
733 // The language server receives events for the FS mutations that match its watch patterns.
734 cx.executor().run_until_parked();
735 assert_eq!(
736 &*file_changes.lock(),
737 &[
738 lsp::FileEvent {
739 uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
740 typ: lsp::FileChangeType::DELETED,
741 },
742 lsp::FileEvent {
743 uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
744 typ: lsp::FileChangeType::CREATED,
745 },
746 lsp::FileEvent {
747 uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
748 typ: lsp::FileChangeType::CREATED,
749 },
750 ]
751 );
752}
753
754#[gpui::test]
755async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
756 init_test(cx);
757
758 let fs = FakeFs::new(cx.executor());
759 fs.insert_tree(
760 "/dir",
761 json!({
762 "a.rs": "let a = 1;",
763 "b.rs": "let b = 2;"
764 }),
765 )
766 .await;
767
768 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
769
770 let buffer_a = project
771 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
772 .await
773 .unwrap();
774 let buffer_b = project
775 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
776 .await
777 .unwrap();
778
779 project.update(cx, |project, cx| {
780 project
781 .update_diagnostics(
782 LanguageServerId(0),
783 lsp::PublishDiagnosticsParams {
784 uri: Url::from_file_path("/dir/a.rs").unwrap(),
785 version: None,
786 diagnostics: vec![lsp::Diagnostic {
787 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
788 severity: Some(lsp::DiagnosticSeverity::ERROR),
789 message: "error 1".to_string(),
790 ..Default::default()
791 }],
792 },
793 &[],
794 cx,
795 )
796 .unwrap();
797 project
798 .update_diagnostics(
799 LanguageServerId(0),
800 lsp::PublishDiagnosticsParams {
801 uri: Url::from_file_path("/dir/b.rs").unwrap(),
802 version: None,
803 diagnostics: vec![lsp::Diagnostic {
804 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
805 severity: Some(lsp::DiagnosticSeverity::WARNING),
806 message: "error 2".to_string(),
807 ..Default::default()
808 }],
809 },
810 &[],
811 cx,
812 )
813 .unwrap();
814 });
815
816 buffer_a.update(cx, |buffer, _| {
817 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
818 assert_eq!(
819 chunks
820 .iter()
821 .map(|(s, d)| (s.as_str(), *d))
822 .collect::<Vec<_>>(),
823 &[
824 ("let ", None),
825 ("a", Some(DiagnosticSeverity::ERROR)),
826 (" = 1;", None),
827 ]
828 );
829 });
830 buffer_b.update(cx, |buffer, _| {
831 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
832 assert_eq!(
833 chunks
834 .iter()
835 .map(|(s, d)| (s.as_str(), *d))
836 .collect::<Vec<_>>(),
837 &[
838 ("let ", None),
839 ("b", Some(DiagnosticSeverity::WARNING)),
840 (" = 2;", None),
841 ]
842 );
843 });
844}
845
846#[gpui::test]
847async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
848 init_test(cx);
849
850 let fs = FakeFs::new(cx.executor());
851 fs.insert_tree(
852 "/root",
853 json!({
854 "dir": {
855 ".git": {
856 "HEAD": "ref: refs/heads/main",
857 },
858 ".gitignore": "b.rs",
859 "a.rs": "let a = 1;",
860 "b.rs": "let b = 2;",
861 },
862 "other.rs": "let b = c;"
863 }),
864 )
865 .await;
866
867 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
868 let (worktree, _) = project
869 .update(cx, |project, cx| {
870 project.find_or_create_local_worktree("/root/dir", true, cx)
871 })
872 .await
873 .unwrap();
874 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
875
876 let (worktree, _) = project
877 .update(cx, |project, cx| {
878 project.find_or_create_local_worktree("/root/other.rs", false, cx)
879 })
880 .await
881 .unwrap();
882 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
883
884 let server_id = LanguageServerId(0);
885 project.update(cx, |project, cx| {
886 project
887 .update_diagnostics(
888 server_id,
889 lsp::PublishDiagnosticsParams {
890 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
891 version: None,
892 diagnostics: vec![lsp::Diagnostic {
893 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
894 severity: Some(lsp::DiagnosticSeverity::ERROR),
895 message: "unused variable 'b'".to_string(),
896 ..Default::default()
897 }],
898 },
899 &[],
900 cx,
901 )
902 .unwrap();
903 project
904 .update_diagnostics(
905 server_id,
906 lsp::PublishDiagnosticsParams {
907 uri: Url::from_file_path("/root/other.rs").unwrap(),
908 version: None,
909 diagnostics: vec![lsp::Diagnostic {
910 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
911 severity: Some(lsp::DiagnosticSeverity::ERROR),
912 message: "unknown variable 'c'".to_string(),
913 ..Default::default()
914 }],
915 },
916 &[],
917 cx,
918 )
919 .unwrap();
920 });
921
922 let main_ignored_buffer = project
923 .update(cx, |project, cx| {
924 project.open_buffer((main_worktree_id, "b.rs"), cx)
925 })
926 .await
927 .unwrap();
928 main_ignored_buffer.update(cx, |buffer, _| {
929 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
930 assert_eq!(
931 chunks
932 .iter()
933 .map(|(s, d)| (s.as_str(), *d))
934 .collect::<Vec<_>>(),
935 &[
936 ("let ", None),
937 ("b", Some(DiagnosticSeverity::ERROR)),
938 (" = 2;", None),
939 ],
940 "Gigitnored buffers should still get in-buffer diagnostics",
941 );
942 });
943 let other_buffer = project
944 .update(cx, |project, cx| {
945 project.open_buffer((other_worktree_id, ""), cx)
946 })
947 .await
948 .unwrap();
949 other_buffer.update(cx, |buffer, _| {
950 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
951 assert_eq!(
952 chunks
953 .iter()
954 .map(|(s, d)| (s.as_str(), *d))
955 .collect::<Vec<_>>(),
956 &[
957 ("let b = ", None),
958 ("c", Some(DiagnosticSeverity::ERROR)),
959 (";", None),
960 ],
961 "Buffers from hidden projects should still get in-buffer diagnostics"
962 );
963 });
964
965 project.update(cx, |project, cx| {
966 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
967 assert_eq!(
968 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
969 vec![(
970 ProjectPath {
971 worktree_id: main_worktree_id,
972 path: Arc::from(Path::new("b.rs")),
973 },
974 server_id,
975 DiagnosticSummary {
976 error_count: 1,
977 warning_count: 0,
978 }
979 )]
980 );
981 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
982 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
983 });
984}
985
986#[gpui::test]
987async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
988 init_test(cx);
989
990 let progress_token = "the-progress-token";
991
992 let fs = FakeFs::new(cx.executor());
993 fs.insert_tree(
994 "/dir",
995 json!({
996 "a.rs": "fn a() { A }",
997 "b.rs": "const y: i32 = 1",
998 }),
999 )
1000 .await;
1001
1002 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1003 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1004
1005 language_registry.add(rust_lang());
1006 let mut fake_servers = language_registry.register_fake_lsp_adapter(
1007 "Rust",
1008 FakeLspAdapter {
1009 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1010 disk_based_diagnostics_sources: vec!["disk".into()],
1011 ..Default::default()
1012 },
1013 );
1014
1015 let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());
1016
1017 // Cause worktree to start the fake language server
1018 let _buffer = project
1019 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1020 .await
1021 .unwrap();
1022
1023 let mut events = cx.events(&project);
1024
1025 let fake_server = fake_servers.next().await.unwrap();
1026 assert_eq!(
1027 events.next().await.unwrap(),
1028 Event::LanguageServerAdded(LanguageServerId(0)),
1029 );
1030
1031 fake_server
1032 .start_progress(format!("{}/0", progress_token))
1033 .await;
1034 assert_eq!(
1035 events.next().await.unwrap(),
1036 Event::DiskBasedDiagnosticsStarted {
1037 language_server_id: LanguageServerId(0),
1038 }
1039 );
1040
1041 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1042 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1043 version: None,
1044 diagnostics: vec![lsp::Diagnostic {
1045 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1046 severity: Some(lsp::DiagnosticSeverity::ERROR),
1047 message: "undefined variable 'A'".to_string(),
1048 ..Default::default()
1049 }],
1050 });
1051 assert_eq!(
1052 events.next().await.unwrap(),
1053 Event::DiagnosticsUpdated {
1054 language_server_id: LanguageServerId(0),
1055 path: (worktree_id, Path::new("a.rs")).into()
1056 }
1057 );
1058
1059 fake_server.end_progress(format!("{}/0", progress_token));
1060 assert_eq!(
1061 events.next().await.unwrap(),
1062 Event::DiskBasedDiagnosticsFinished {
1063 language_server_id: LanguageServerId(0)
1064 }
1065 );
1066
1067 let buffer = project
1068 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
1069 .await
1070 .unwrap();
1071
1072 buffer.update(cx, |buffer, _| {
1073 let snapshot = buffer.snapshot();
1074 let diagnostics = snapshot
1075 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1076 .collect::<Vec<_>>();
1077 assert_eq!(
1078 diagnostics,
1079 &[DiagnosticEntry {
1080 range: Point::new(0, 9)..Point::new(0, 10),
1081 diagnostic: Diagnostic {
1082 severity: lsp::DiagnosticSeverity::ERROR,
1083 message: "undefined variable 'A'".to_string(),
1084 group_id: 0,
1085 is_primary: true,
1086 ..Default::default()
1087 }
1088 }]
1089 )
1090 });
1091
1092 // Ensure publishing empty diagnostics twice only results in one update event.
1093 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1094 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1095 version: None,
1096 diagnostics: Default::default(),
1097 });
1098 assert_eq!(
1099 events.next().await.unwrap(),
1100 Event::DiagnosticsUpdated {
1101 language_server_id: LanguageServerId(0),
1102 path: (worktree_id, Path::new("a.rs")).into()
1103 }
1104 );
1105
1106 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1107 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1108 version: None,
1109 diagnostics: Default::default(),
1110 });
1111 cx.executor().run_until_parked();
1112 assert_eq!(futures::poll!(events.next()), Poll::Pending);
1113}
1114
1115#[gpui::test]
1116async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
1117 init_test(cx);
1118
1119 let progress_token = "the-progress-token";
1120
1121 let fs = FakeFs::new(cx.executor());
1122 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1123
1124 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1125
1126 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1127 language_registry.add(rust_lang());
1128 let mut fake_servers = language_registry.register_fake_lsp_adapter(
1129 "Rust",
1130 FakeLspAdapter {
1131 name: "the-language-server",
1132 disk_based_diagnostics_sources: vec!["disk".into()],
1133 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1134 ..Default::default()
1135 },
1136 );
1137
1138 let buffer = project
1139 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1140 .await
1141 .unwrap();
1142
1143 // Simulate diagnostics starting to update.
1144 let fake_server = fake_servers.next().await.unwrap();
1145 fake_server.start_progress(progress_token).await;
1146
1147 // Restart the server before the diagnostics finish updating.
1148 project.update(cx, |project, cx| {
1149 project.restart_language_servers_for_buffers([buffer], cx);
1150 });
1151 let mut events = cx.events(&project);
1152
1153 // Simulate the newly started server sending more diagnostics.
1154 let fake_server = fake_servers.next().await.unwrap();
1155 assert_eq!(
1156 events.next().await.unwrap(),
1157 Event::LanguageServerAdded(LanguageServerId(1))
1158 );
1159 fake_server.start_progress(progress_token).await;
1160 assert_eq!(
1161 events.next().await.unwrap(),
1162 Event::DiskBasedDiagnosticsStarted {
1163 language_server_id: LanguageServerId(1)
1164 }
1165 );
1166 project.update(cx, |project, _| {
1167 assert_eq!(
1168 project
1169 .language_servers_running_disk_based_diagnostics()
1170 .collect::<Vec<_>>(),
1171 [LanguageServerId(1)]
1172 );
1173 });
1174
1175 // All diagnostics are considered done, despite the old server's diagnostic
1176 // task never completing.
1177 fake_server.end_progress(progress_token);
1178 assert_eq!(
1179 events.next().await.unwrap(),
1180 Event::DiskBasedDiagnosticsFinished {
1181 language_server_id: LanguageServerId(1)
1182 }
1183 );
1184 project.update(cx, |project, _| {
1185 assert_eq!(
1186 project
1187 .language_servers_running_disk_based_diagnostics()
1188 .collect::<Vec<_>>(),
1189 [LanguageServerId(0); 0]
1190 );
1191 });
1192}
1193
1194#[gpui::test]
1195async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1196 init_test(cx);
1197
1198 let fs = FakeFs::new(cx.executor());
1199 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1200
1201 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1202
1203 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1204 language_registry.add(rust_lang());
1205 let mut fake_servers =
1206 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1207
1208 let buffer = project
1209 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1210 .await
1211 .unwrap();
1212
1213 // Publish diagnostics
1214 let fake_server = fake_servers.next().await.unwrap();
1215 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1216 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1217 version: None,
1218 diagnostics: vec![lsp::Diagnostic {
1219 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1220 severity: Some(lsp::DiagnosticSeverity::ERROR),
1221 message: "the message".to_string(),
1222 ..Default::default()
1223 }],
1224 });
1225
1226 cx.executor().run_until_parked();
1227 buffer.update(cx, |buffer, _| {
1228 assert_eq!(
1229 buffer
1230 .snapshot()
1231 .diagnostics_in_range::<_, usize>(0..1, false)
1232 .map(|entry| entry.diagnostic.message.clone())
1233 .collect::<Vec<_>>(),
1234 ["the message".to_string()]
1235 );
1236 });
1237 project.update(cx, |project, cx| {
1238 assert_eq!(
1239 project.diagnostic_summary(false, cx),
1240 DiagnosticSummary {
1241 error_count: 1,
1242 warning_count: 0,
1243 }
1244 );
1245 });
1246
1247 project.update(cx, |project, cx| {
1248 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1249 });
1250
1251 // The diagnostics are cleared.
1252 cx.executor().run_until_parked();
1253 buffer.update(cx, |buffer, _| {
1254 assert_eq!(
1255 buffer
1256 .snapshot()
1257 .diagnostics_in_range::<_, usize>(0..1, false)
1258 .map(|entry| entry.diagnostic.message.clone())
1259 .collect::<Vec<_>>(),
1260 Vec::<String>::new(),
1261 );
1262 });
1263 project.update(cx, |project, cx| {
1264 assert_eq!(
1265 project.diagnostic_summary(false, cx),
1266 DiagnosticSummary {
1267 error_count: 0,
1268 warning_count: 0,
1269 }
1270 );
1271 });
1272}
1273
1274#[gpui::test]
1275async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1276 init_test(cx);
1277
1278 let fs = FakeFs::new(cx.executor());
1279 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1280
1281 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1282 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1283
1284 language_registry.add(rust_lang());
1285 let mut fake_servers =
1286 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1287
1288 let buffer = project
1289 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1290 .await
1291 .unwrap();
1292
1293 // Before restarting the server, report diagnostics with an unknown buffer version.
1294 let fake_server = fake_servers.next().await.unwrap();
1295 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1296 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1297 version: Some(10000),
1298 diagnostics: Vec::new(),
1299 });
1300 cx.executor().run_until_parked();
1301
1302 project.update(cx, |project, cx| {
1303 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1304 });
1305 let mut fake_server = fake_servers.next().await.unwrap();
1306 let notification = fake_server
1307 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1308 .await
1309 .text_document;
1310 assert_eq!(notification.version, 0);
1311}
1312
1313#[gpui::test]
1314async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1315 init_test(cx);
1316
1317 let fs = FakeFs::new(cx.executor());
1318 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1319 .await;
1320
1321 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1322 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1323
1324 let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
1325 "Rust",
1326 FakeLspAdapter {
1327 name: "rust-lsp",
1328 ..Default::default()
1329 },
1330 );
1331 let mut fake_js_servers = language_registry.register_fake_lsp_adapter(
1332 "JavaScript",
1333 FakeLspAdapter {
1334 name: "js-lsp",
1335 ..Default::default()
1336 },
1337 );
1338 language_registry.add(rust_lang());
1339 language_registry.add(js_lang());
1340
1341 let _rs_buffer = project
1342 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1343 .await
1344 .unwrap();
1345 let _js_buffer = project
1346 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1347 .await
1348 .unwrap();
1349
1350 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1351 assert_eq!(
1352 fake_rust_server_1
1353 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1354 .await
1355 .text_document
1356 .uri
1357 .as_str(),
1358 "file:///dir/a.rs"
1359 );
1360
1361 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1362 assert_eq!(
1363 fake_js_server
1364 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1365 .await
1366 .text_document
1367 .uri
1368 .as_str(),
1369 "file:///dir/b.js"
1370 );
1371
1372 // Disable Rust language server, ensuring only that server gets stopped.
1373 cx.update(|cx| {
1374 cx.update_global(|settings: &mut SettingsStore, cx| {
1375 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1376 settings.languages.insert(
1377 Arc::from("Rust"),
1378 LanguageSettingsContent {
1379 enable_language_server: Some(false),
1380 ..Default::default()
1381 },
1382 );
1383 });
1384 })
1385 });
1386 fake_rust_server_1
1387 .receive_notification::<lsp::notification::Exit>()
1388 .await;
1389
1390 // Enable Rust and disable JavaScript language servers, ensuring that the
1391 // former gets started again and that the latter stops.
1392 cx.update(|cx| {
1393 cx.update_global(|settings: &mut SettingsStore, cx| {
1394 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1395 settings.languages.insert(
1396 Arc::from("Rust"),
1397 LanguageSettingsContent {
1398 enable_language_server: Some(true),
1399 ..Default::default()
1400 },
1401 );
1402 settings.languages.insert(
1403 Arc::from("JavaScript"),
1404 LanguageSettingsContent {
1405 enable_language_server: Some(false),
1406 ..Default::default()
1407 },
1408 );
1409 });
1410 })
1411 });
1412 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1413 assert_eq!(
1414 fake_rust_server_2
1415 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1416 .await
1417 .text_document
1418 .uri
1419 .as_str(),
1420 "file:///dir/a.rs"
1421 );
1422 fake_js_server
1423 .receive_notification::<lsp::notification::Exit>()
1424 .await;
1425}
1426
1427#[gpui::test(iterations = 3)]
1428async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1429 init_test(cx);
1430
1431 let text = "
1432 fn a() { A }
1433 fn b() { BB }
1434 fn c() { CCC }
1435 "
1436 .unindent();
1437
1438 let fs = FakeFs::new(cx.executor());
1439 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1440
1441 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1442 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1443
1444 language_registry.add(rust_lang());
1445 let mut fake_servers = language_registry.register_fake_lsp_adapter(
1446 "Rust",
1447 FakeLspAdapter {
1448 disk_based_diagnostics_sources: vec!["disk".into()],
1449 ..Default::default()
1450 },
1451 );
1452
1453 let buffer = project
1454 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1455 .await
1456 .unwrap();
1457
1458 let mut fake_server = fake_servers.next().await.unwrap();
1459 let open_notification = fake_server
1460 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1461 .await;
1462
1463 // Edit the buffer, moving the content down
1464 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1465 let change_notification_1 = fake_server
1466 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1467 .await;
1468 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1469
1470 // Report some diagnostics for the initial version of the buffer
1471 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1472 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1473 version: Some(open_notification.text_document.version),
1474 diagnostics: vec![
1475 lsp::Diagnostic {
1476 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1477 severity: Some(DiagnosticSeverity::ERROR),
1478 message: "undefined variable 'A'".to_string(),
1479 source: Some("disk".to_string()),
1480 ..Default::default()
1481 },
1482 lsp::Diagnostic {
1483 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1484 severity: Some(DiagnosticSeverity::ERROR),
1485 message: "undefined variable 'BB'".to_string(),
1486 source: Some("disk".to_string()),
1487 ..Default::default()
1488 },
1489 lsp::Diagnostic {
1490 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1491 severity: Some(DiagnosticSeverity::ERROR),
1492 source: Some("disk".to_string()),
1493 message: "undefined variable 'CCC'".to_string(),
1494 ..Default::default()
1495 },
1496 ],
1497 });
1498
1499 // The diagnostics have moved down since they were created.
1500 cx.executor().run_until_parked();
1501 buffer.update(cx, |buffer, _| {
1502 assert_eq!(
1503 buffer
1504 .snapshot()
1505 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1506 .collect::<Vec<_>>(),
1507 &[
1508 DiagnosticEntry {
1509 range: Point::new(3, 9)..Point::new(3, 11),
1510 diagnostic: Diagnostic {
1511 source: Some("disk".into()),
1512 severity: DiagnosticSeverity::ERROR,
1513 message: "undefined variable 'BB'".to_string(),
1514 is_disk_based: true,
1515 group_id: 1,
1516 is_primary: true,
1517 ..Default::default()
1518 },
1519 },
1520 DiagnosticEntry {
1521 range: Point::new(4, 9)..Point::new(4, 12),
1522 diagnostic: Diagnostic {
1523 source: Some("disk".into()),
1524 severity: DiagnosticSeverity::ERROR,
1525 message: "undefined variable 'CCC'".to_string(),
1526 is_disk_based: true,
1527 group_id: 2,
1528 is_primary: true,
1529 ..Default::default()
1530 }
1531 }
1532 ]
1533 );
1534 assert_eq!(
1535 chunks_with_diagnostics(buffer, 0..buffer.len()),
1536 [
1537 ("\n\nfn a() { ".to_string(), None),
1538 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1539 (" }\nfn b() { ".to_string(), None),
1540 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1541 (" }\nfn c() { ".to_string(), None),
1542 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1543 (" }\n".to_string(), None),
1544 ]
1545 );
1546 assert_eq!(
1547 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1548 [
1549 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1550 (" }\nfn c() { ".to_string(), None),
1551 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1552 ]
1553 );
1554 });
1555
1556 // Ensure overlapping diagnostics are highlighted correctly.
1557 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1558 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1559 version: Some(open_notification.text_document.version),
1560 diagnostics: vec![
1561 lsp::Diagnostic {
1562 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1563 severity: Some(DiagnosticSeverity::ERROR),
1564 message: "undefined variable 'A'".to_string(),
1565 source: Some("disk".to_string()),
1566 ..Default::default()
1567 },
1568 lsp::Diagnostic {
1569 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1570 severity: Some(DiagnosticSeverity::WARNING),
1571 message: "unreachable statement".to_string(),
1572 source: Some("disk".to_string()),
1573 ..Default::default()
1574 },
1575 ],
1576 });
1577
1578 cx.executor().run_until_parked();
1579 buffer.update(cx, |buffer, _| {
1580 assert_eq!(
1581 buffer
1582 .snapshot()
1583 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1584 .collect::<Vec<_>>(),
1585 &[
1586 DiagnosticEntry {
1587 range: Point::new(2, 9)..Point::new(2, 12),
1588 diagnostic: Diagnostic {
1589 source: Some("disk".into()),
1590 severity: DiagnosticSeverity::WARNING,
1591 message: "unreachable statement".to_string(),
1592 is_disk_based: true,
1593 group_id: 4,
1594 is_primary: true,
1595 ..Default::default()
1596 }
1597 },
1598 DiagnosticEntry {
1599 range: Point::new(2, 9)..Point::new(2, 10),
1600 diagnostic: Diagnostic {
1601 source: Some("disk".into()),
1602 severity: DiagnosticSeverity::ERROR,
1603 message: "undefined variable 'A'".to_string(),
1604 is_disk_based: true,
1605 group_id: 3,
1606 is_primary: true,
1607 ..Default::default()
1608 },
1609 }
1610 ]
1611 );
1612 assert_eq!(
1613 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1614 [
1615 ("fn a() { ".to_string(), None),
1616 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1617 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1618 ("\n".to_string(), None),
1619 ]
1620 );
1621 assert_eq!(
1622 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1623 [
1624 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1625 ("\n".to_string(), None),
1626 ]
1627 );
1628 });
1629
1630 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1631 // changes since the last save.
1632 buffer.update(cx, |buffer, cx| {
1633 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1634 buffer.edit(
1635 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1636 None,
1637 cx,
1638 );
1639 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1640 });
1641 let change_notification_2 = fake_server
1642 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1643 .await;
1644 assert!(
1645 change_notification_2.text_document.version > change_notification_1.text_document.version
1646 );
1647
1648 // Handle out-of-order diagnostics
1649 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1650 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1651 version: Some(change_notification_2.text_document.version),
1652 diagnostics: vec![
1653 lsp::Diagnostic {
1654 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1655 severity: Some(DiagnosticSeverity::ERROR),
1656 message: "undefined variable 'BB'".to_string(),
1657 source: Some("disk".to_string()),
1658 ..Default::default()
1659 },
1660 lsp::Diagnostic {
1661 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1662 severity: Some(DiagnosticSeverity::WARNING),
1663 message: "undefined variable 'A'".to_string(),
1664 source: Some("disk".to_string()),
1665 ..Default::default()
1666 },
1667 ],
1668 });
1669
1670 cx.executor().run_until_parked();
1671 buffer.update(cx, |buffer, _| {
1672 assert_eq!(
1673 buffer
1674 .snapshot()
1675 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1676 .collect::<Vec<_>>(),
1677 &[
1678 DiagnosticEntry {
1679 range: Point::new(2, 21)..Point::new(2, 22),
1680 diagnostic: Diagnostic {
1681 source: Some("disk".into()),
1682 severity: DiagnosticSeverity::WARNING,
1683 message: "undefined variable 'A'".to_string(),
1684 is_disk_based: true,
1685 group_id: 6,
1686 is_primary: true,
1687 ..Default::default()
1688 }
1689 },
1690 DiagnosticEntry {
1691 range: Point::new(3, 9)..Point::new(3, 14),
1692 diagnostic: Diagnostic {
1693 source: Some("disk".into()),
1694 severity: DiagnosticSeverity::ERROR,
1695 message: "undefined variable 'BB'".to_string(),
1696 is_disk_based: true,
1697 group_id: 5,
1698 is_primary: true,
1699 ..Default::default()
1700 },
1701 }
1702 ]
1703 );
1704 });
1705}
1706
1707#[gpui::test]
1708async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1709 init_test(cx);
1710
1711 let text = concat!(
1712 "let one = ;\n", //
1713 "let two = \n",
1714 "let three = 3;\n",
1715 );
1716
1717 let fs = FakeFs::new(cx.executor());
1718 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1719
1720 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1721 let buffer = project
1722 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1723 .await
1724 .unwrap();
1725
1726 project.update(cx, |project, cx| {
1727 project
1728 .update_buffer_diagnostics(
1729 &buffer,
1730 LanguageServerId(0),
1731 None,
1732 vec![
1733 DiagnosticEntry {
1734 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1735 diagnostic: Diagnostic {
1736 severity: DiagnosticSeverity::ERROR,
1737 message: "syntax error 1".to_string(),
1738 ..Default::default()
1739 },
1740 },
1741 DiagnosticEntry {
1742 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1743 diagnostic: Diagnostic {
1744 severity: DiagnosticSeverity::ERROR,
1745 message: "syntax error 2".to_string(),
1746 ..Default::default()
1747 },
1748 },
1749 ],
1750 cx,
1751 )
1752 .unwrap();
1753 });
1754
1755 // An empty range is extended forward to include the following character.
1756 // At the end of a line, an empty range is extended backward to include
1757 // the preceding character.
1758 buffer.update(cx, |buffer, _| {
1759 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1760 assert_eq!(
1761 chunks
1762 .iter()
1763 .map(|(s, d)| (s.as_str(), *d))
1764 .collect::<Vec<_>>(),
1765 &[
1766 ("let one = ", None),
1767 (";", Some(DiagnosticSeverity::ERROR)),
1768 ("\nlet two =", None),
1769 (" ", Some(DiagnosticSeverity::ERROR)),
1770 ("\nlet three = 3;\n", None)
1771 ]
1772 );
1773 });
1774}
1775
1776#[gpui::test]
1777async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1778 init_test(cx);
1779
1780 let fs = FakeFs::new(cx.executor());
1781 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1782 .await;
1783
1784 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1785
1786 project.update(cx, |project, cx| {
1787 project
1788 .update_diagnostic_entries(
1789 LanguageServerId(0),
1790 Path::new("/dir/a.rs").to_owned(),
1791 None,
1792 vec![DiagnosticEntry {
1793 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1794 diagnostic: Diagnostic {
1795 severity: DiagnosticSeverity::ERROR,
1796 is_primary: true,
1797 message: "syntax error a1".to_string(),
1798 ..Default::default()
1799 },
1800 }],
1801 cx,
1802 )
1803 .unwrap();
1804 project
1805 .update_diagnostic_entries(
1806 LanguageServerId(1),
1807 Path::new("/dir/a.rs").to_owned(),
1808 None,
1809 vec![DiagnosticEntry {
1810 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1811 diagnostic: Diagnostic {
1812 severity: DiagnosticSeverity::ERROR,
1813 is_primary: true,
1814 message: "syntax error b1".to_string(),
1815 ..Default::default()
1816 },
1817 }],
1818 cx,
1819 )
1820 .unwrap();
1821
1822 assert_eq!(
1823 project.diagnostic_summary(false, cx),
1824 DiagnosticSummary {
1825 error_count: 2,
1826 warning_count: 0,
1827 }
1828 );
1829 });
1830}
1831
1832#[gpui::test]
1833async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
1834 init_test(cx);
1835
1836 let text = "
1837 fn a() {
1838 f1();
1839 }
1840 fn b() {
1841 f2();
1842 }
1843 fn c() {
1844 f3();
1845 }
1846 "
1847 .unindent();
1848
1849 let fs = FakeFs::new(cx.executor());
1850 fs.insert_tree(
1851 "/dir",
1852 json!({
1853 "a.rs": text.clone(),
1854 }),
1855 )
1856 .await;
1857
1858 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1859
1860 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1861 language_registry.add(rust_lang());
1862 let mut fake_servers =
1863 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1864
1865 let buffer = project
1866 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1867 .await
1868 .unwrap();
1869
1870 let mut fake_server = fake_servers.next().await.unwrap();
1871 let lsp_document_version = fake_server
1872 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1873 .await
1874 .text_document
1875 .version;
1876
1877 // Simulate editing the buffer after the language server computes some edits.
1878 buffer.update(cx, |buffer, cx| {
1879 buffer.edit(
1880 [(
1881 Point::new(0, 0)..Point::new(0, 0),
1882 "// above first function\n",
1883 )],
1884 None,
1885 cx,
1886 );
1887 buffer.edit(
1888 [(
1889 Point::new(2, 0)..Point::new(2, 0),
1890 " // inside first function\n",
1891 )],
1892 None,
1893 cx,
1894 );
1895 buffer.edit(
1896 [(
1897 Point::new(6, 4)..Point::new(6, 4),
1898 "// inside second function ",
1899 )],
1900 None,
1901 cx,
1902 );
1903
1904 assert_eq!(
1905 buffer.text(),
1906 "
1907 // above first function
1908 fn a() {
1909 // inside first function
1910 f1();
1911 }
1912 fn b() {
1913 // inside second function f2();
1914 }
1915 fn c() {
1916 f3();
1917 }
1918 "
1919 .unindent()
1920 );
1921 });
1922
1923 let edits = project
1924 .update(cx, |project, cx| {
1925 project.edits_from_lsp(
1926 &buffer,
1927 vec![
1928 // replace body of first function
1929 lsp::TextEdit {
1930 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1931 new_text: "
1932 fn a() {
1933 f10();
1934 }
1935 "
1936 .unindent(),
1937 },
1938 // edit inside second function
1939 lsp::TextEdit {
1940 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1941 new_text: "00".into(),
1942 },
1943 // edit inside third function via two distinct edits
1944 lsp::TextEdit {
1945 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1946 new_text: "4000".into(),
1947 },
1948 lsp::TextEdit {
1949 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1950 new_text: "".into(),
1951 },
1952 ],
1953 LanguageServerId(0),
1954 Some(lsp_document_version),
1955 cx,
1956 )
1957 })
1958 .await
1959 .unwrap();
1960
1961 buffer.update(cx, |buffer, cx| {
1962 for (range, new_text) in edits {
1963 buffer.edit([(range, new_text)], None, cx);
1964 }
1965 assert_eq!(
1966 buffer.text(),
1967 "
1968 // above first function
1969 fn a() {
1970 // inside first function
1971 f10();
1972 }
1973 fn b() {
1974 // inside second function f200();
1975 }
1976 fn c() {
1977 f4000();
1978 }
1979 "
1980 .unindent()
1981 );
1982 });
1983}
1984
1985#[gpui::test]
1986async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1987 init_test(cx);
1988
1989 let text = "
1990 use a::b;
1991 use a::c;
1992
1993 fn f() {
1994 b();
1995 c();
1996 }
1997 "
1998 .unindent();
1999
2000 let fs = FakeFs::new(cx.executor());
2001 fs.insert_tree(
2002 "/dir",
2003 json!({
2004 "a.rs": text.clone(),
2005 }),
2006 )
2007 .await;
2008
2009 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2010 let buffer = project
2011 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2012 .await
2013 .unwrap();
2014
2015 // Simulate the language server sending us a small edit in the form of a very large diff.
2016 // Rust-analyzer does this when performing a merge-imports code action.
2017 let edits = project
2018 .update(cx, |project, cx| {
2019 project.edits_from_lsp(
2020 &buffer,
2021 [
2022 // Replace the first use statement without editing the semicolon.
2023 lsp::TextEdit {
2024 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
2025 new_text: "a::{b, c}".into(),
2026 },
2027 // Reinsert the remainder of the file between the semicolon and the final
2028 // newline of the file.
2029 lsp::TextEdit {
2030 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2031 new_text: "\n\n".into(),
2032 },
2033 lsp::TextEdit {
2034 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2035 new_text: "
2036 fn f() {
2037 b();
2038 c();
2039 }"
2040 .unindent(),
2041 },
2042 // Delete everything after the first newline of the file.
2043 lsp::TextEdit {
2044 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
2045 new_text: "".into(),
2046 },
2047 ],
2048 LanguageServerId(0),
2049 None,
2050 cx,
2051 )
2052 })
2053 .await
2054 .unwrap();
2055
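    // The large diff collapses down to a minimal pair of edits: rewriting the first use
    // statement and deleting the now-redundant second one.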
2056 buffer.update(cx, |buffer, cx| {
2057 let edits = edits
2058 .into_iter()
2059 .map(|(range, text)| {
2060 (
2061 range.start.to_point(buffer)..range.end.to_point(buffer),
2062 text,
2063 )
2064 })
2065 .collect::<Vec<_>>();
2066
2067 assert_eq!(
2068 edits,
2069 [
2070 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2071 (Point::new(1, 0)..Point::new(2, 0), "".into())
2072 ]
2073 );
2074
2075 for (range, new_text) in edits {
2076 buffer.edit([(range, new_text)], None, cx);
2077 }
2078 assert_eq!(
2079 buffer.text(),
2080 "
2081 use a::{b, c};
2082
2083 fn f() {
2084 b();
2085 c();
2086 }
2087 "
2088 .unindent()
2089 );
2090 });
2091}
2092
2093#[gpui::test]
2094async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
2095 init_test(cx);
2096
2097 let text = "
2098 use a::b;
2099 use a::c;
2100
2101 fn f() {
2102 b();
2103 c();
2104 }
2105 "
2106 .unindent();
2107
2108 let fs = FakeFs::new(cx.executor());
2109 fs.insert_tree(
2110 "/dir",
2111 json!({
2112 "a.rs": text.clone(),
2113 }),
2114 )
2115 .await;
2116
2117 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2118 let buffer = project
2119 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2120 .await
2121 .unwrap();
2122
    // Simulate the language server sending us edits out of order, with ranges that are
    // sometimes inverted or that point to invalid locations.
2125 let edits = project
2126 .update(cx, |project, cx| {
2127 project.edits_from_lsp(
2128 &buffer,
2129 [
2130 lsp::TextEdit {
2131 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2132 new_text: "\n\n".into(),
2133 },
2134 lsp::TextEdit {
2135 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
2136 new_text: "a::{b, c}".into(),
2137 },
2138 lsp::TextEdit {
2139 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
2140 new_text: "".into(),
2141 },
2142 lsp::TextEdit {
2143 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2144 new_text: "
2145 fn f() {
2146 b();
2147 c();
2148 }"
2149 .unindent(),
2150 },
2151 ],
2152 LanguageServerId(0),
2153 None,
2154 cx,
2155 )
2156 })
2157 .await
2158 .unwrap();
2159
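    // Despite the inverted and out-of-bounds ranges, the edits are normalized into the same
    // minimal, ordered set as in the previous test.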
2160 buffer.update(cx, |buffer, cx| {
2161 let edits = edits
2162 .into_iter()
2163 .map(|(range, text)| {
2164 (
2165 range.start.to_point(buffer)..range.end.to_point(buffer),
2166 text,
2167 )
2168 })
2169 .collect::<Vec<_>>();
2170
2171 assert_eq!(
2172 edits,
2173 [
2174 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2175 (Point::new(1, 0)..Point::new(2, 0), "".into())
2176 ]
2177 );
2178
2179 for (range, new_text) in edits {
2180 buffer.edit([(range, new_text)], None, cx);
2181 }
2182 assert_eq!(
2183 buffer.text(),
2184 "
2185 use a::{b, c};
2186
2187 fn f() {
2188 b();
2189 c();
2190 }
2191 "
2192 .unindent()
2193 );
2194 });
2195}
2196
2197fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2198 buffer: &Buffer,
2199 range: Range<T>,
2200) -> Vec<(String, Option<DiagnosticSeverity>)> {
2201 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2202 for chunk in buffer.snapshot().chunks(range, true) {
2203 if chunks.last().map_or(false, |prev_chunk| {
2204 prev_chunk.1 == chunk.diagnostic_severity
2205 }) {
2206 chunks.last_mut().unwrap().0.push_str(chunk.text);
2207 } else {
2208 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2209 }
2210 }
2211 chunks
2212}
2213
2214#[gpui::test(iterations = 10)]
2215async fn test_definition(cx: &mut gpui::TestAppContext) {
2216 init_test(cx);
2217
2218 let fs = FakeFs::new(cx.executor());
2219 fs.insert_tree(
2220 "/dir",
2221 json!({
2222 "a.rs": "const fn a() { A }",
2223 "b.rs": "const y: i32 = crate::a()",
2224 }),
2225 )
2226 .await;
2227
2228 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2229
2230 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2231 language_registry.add(rust_lang());
2232 let mut fake_servers =
2233 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
2234
2235 let buffer = project
2236 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2237 .await
2238 .unwrap();
2239
2240 let fake_server = fake_servers.next().await.unwrap();
2241 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2242 let params = params.text_document_position_params;
2243 assert_eq!(
2244 params.text_document.uri.to_file_path().unwrap(),
2245 Path::new("/dir/b.rs"),
2246 );
2247 assert_eq!(params.position, lsp::Position::new(0, 22));
2248
2249 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2250 lsp::Location::new(
2251 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2252 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2253 ),
2254 )))
2255 });
2256
2257 let mut definitions = project
2258 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2259 .await
2260 .unwrap();
2261
    // Assert that no new language server was started
2263 cx.executor().run_until_parked();
2264 assert!(fake_servers.try_next().is_err());
2265
2266 assert_eq!(definitions.len(), 1);
2267 let definition = definitions.pop().unwrap();
2268 cx.update(|cx| {
2269 let target_buffer = definition.target.buffer.read(cx);
2270 assert_eq!(
2271 target_buffer
2272 .file()
2273 .unwrap()
2274 .as_local()
2275 .unwrap()
2276 .abs_path(cx),
2277 Path::new("/dir/a.rs"),
2278 );
2279 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
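        // Resolving the definition added a hidden (non-visible) worktree for /dir/a.rs;
        // it is released once the definition is dropped.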
2280 assert_eq!(
2281 list_worktrees(&project, cx),
2282 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2283 );
2284
2285 drop(definition);
2286 });
2287 cx.update(|cx| {
2288 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2289 });
2290
2291 fn list_worktrees<'a>(
2292 project: &'a Model<Project>,
2293 cx: &'a AppContext,
2294 ) -> Vec<(&'a Path, bool)> {
2295 project
2296 .read(cx)
2297 .worktrees()
2298 .map(|worktree| {
2299 let worktree = worktree.read(cx);
2300 (
2301 worktree.as_local().unwrap().abs_path().as_ref(),
2302 worktree.is_visible(),
2303 )
2304 })
2305 .collect::<Vec<_>>()
2306 }
2307}
2308
2309#[gpui::test]
2310async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2311 init_test(cx);
2312
2313 let fs = FakeFs::new(cx.executor());
2314 fs.insert_tree(
2315 "/dir",
2316 json!({
2317 "a.ts": "",
2318 }),
2319 )
2320 .await;
2321
2322 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2323
2324 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2325 language_registry.add(typescript_lang());
2326 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
2327 "TypeScript",
2328 FakeLspAdapter {
2329 capabilities: lsp::ServerCapabilities {
2330 completion_provider: Some(lsp::CompletionOptions {
2331 trigger_characters: Some(vec![":".to_string()]),
2332 ..Default::default()
2333 }),
2334 ..Default::default()
2335 },
2336 ..Default::default()
2337 },
2338 );
2339
2340 let buffer = project
2341 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2342 .await
2343 .unwrap();
2344
2345 let fake_server = fake_language_servers.next().await.unwrap();
2346
2347 let text = "let a = b.fqn";
2348 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2349 let completions = project.update(cx, |project, cx| {
2350 project.completions(&buffer, text.len(), cx)
2351 });
2352
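    // The completion item carries no text edit, so the replaced range must be inferred from
    // the word fragment ("fqn") preceding the cursor.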
2353 fake_server
2354 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2355 Ok(Some(lsp::CompletionResponse::Array(vec![
2356 lsp::CompletionItem {
2357 label: "fullyQualifiedName?".into(),
2358 insert_text: Some("fullyQualifiedName".into()),
2359 ..Default::default()
2360 },
2361 ])))
2362 })
2363 .next()
2364 .await;
2365 let completions = completions.await.unwrap();
2366 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2367 assert_eq!(completions.len(), 1);
2368 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2369 assert_eq!(
2370 completions[0].old_range.to_offset(&snapshot),
2371 text.len() - 3..text.len()
2372 );
2373
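    // Request completions inside a string literal: the inferred range should cover the path
    // segment before the cursor but exclude the closing quote.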
2374 let text = "let a = \"atoms/cmp\"";
2375 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2376 let completions = project.update(cx, |project, cx| {
2377 project.completions(&buffer, text.len() - 1, cx)
2378 });
2379
2380 fake_server
2381 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2382 Ok(Some(lsp::CompletionResponse::Array(vec![
2383 lsp::CompletionItem {
2384 label: "component".into(),
2385 ..Default::default()
2386 },
2387 ])))
2388 })
2389 .next()
2390 .await;
2391 let completions = completions.await.unwrap();
2392 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2393 assert_eq!(completions.len(), 1);
2394 assert_eq!(completions[0].new_text, "component");
2395 assert_eq!(
2396 completions[0].old_range.to_offset(&snapshot),
2397 text.len() - 4..text.len() - 1
2398 );
2399}
2400
2401#[gpui::test]
2402async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2403 init_test(cx);
2404
2405 let fs = FakeFs::new(cx.executor());
2406 fs.insert_tree(
2407 "/dir",
2408 json!({
2409 "a.ts": "",
2410 }),
2411 )
2412 .await;
2413
2414 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2415
2416 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2417 language_registry.add(typescript_lang());
2418 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
2419 "TypeScript",
2420 FakeLspAdapter {
2421 capabilities: lsp::ServerCapabilities {
2422 completion_provider: Some(lsp::CompletionOptions {
2423 trigger_characters: Some(vec![":".to_string()]),
2424 ..Default::default()
2425 }),
2426 ..Default::default()
2427 },
2428 ..Default::default()
2429 },
2430 );
2431
2432 let buffer = project
2433 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2434 .await
2435 .unwrap();
2436
2437 let fake_server = fake_language_servers.next().await.unwrap();
2438
2439 let text = "let a = b.fqn";
2440 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2441 let completions = project.update(cx, |project, cx| {
2442 project.completions(&buffer, text.len(), cx)
2443 });
2444
2445 fake_server
2446 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2447 Ok(Some(lsp::CompletionResponse::Array(vec![
2448 lsp::CompletionItem {
2449 label: "fullyQualifiedName?".into(),
2450 insert_text: Some("fully\rQualified\r\nName".into()),
2451 ..Default::default()
2452 },
2453 ])))
2454 })
2455 .next()
2456 .await;
2457 let completions = completions.await.unwrap();
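    // Carriage returns in the server-provided insert text are normalized to plain newlines.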
2458 assert_eq!(completions.len(), 1);
2459 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2460}
2461
2462#[gpui::test(iterations = 10)]
2463async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2464 init_test(cx);
2465
2466 let fs = FakeFs::new(cx.executor());
2467 fs.insert_tree(
2468 "/dir",
2469 json!({
2470 "a.ts": "a",
2471 }),
2472 )
2473 .await;
2474
2475 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2476
2477 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2478 language_registry.add(typescript_lang());
2479 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
2480 "TypeScript",
2481 FakeLspAdapter {
2482 capabilities: lsp::ServerCapabilities {
2483 code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
2484 lsp::CodeActionOptions {
2485 resolve_provider: Some(true),
2486 ..lsp::CodeActionOptions::default()
2487 },
2488 )),
2489 ..lsp::ServerCapabilities::default()
2490 },
2491 ..FakeLspAdapter::default()
2492 },
2493 );
2494
2495 let buffer = project
2496 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2497 .await
2498 .unwrap();
2499
2500 let fake_server = fake_language_servers.next().await.unwrap();
2501
    // The language server returns code actions that contain commands rather than edits.
2503 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2504 fake_server
2505 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2506 Ok(Some(vec![
2507 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2508 title: "The code action".into(),
2509 data: Some(serde_json::json!({
2510 "command": "_the/command",
2511 })),
2512 ..lsp::CodeAction::default()
2513 }),
2514 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2515 title: "two".into(),
2516 ..lsp::CodeAction::default()
2517 }),
2518 ]))
2519 })
2520 .next()
2521 .await;
2522
2523 let action = actions.await.unwrap()[0].clone();
2524 let apply = project.update(cx, |project, cx| {
2525 project.apply_code_action(buffer.clone(), action, true, cx)
2526 });
2527
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the given command.
2530 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2531 |mut action, _| async move {
2532 if action.data.is_some() {
2533 action.command = Some(lsp::Command {
2534 title: "The command".into(),
2535 command: "_the/command".into(),
2536 arguments: Some(vec![json!("the-argument")]),
2537 });
2538 }
2539 Ok(action)
2540 },
2541 );
2542
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2545 fake_server
2546 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2547 let fake = fake_server.clone();
2548 move |params, _| {
2549 assert_eq!(params.command, "_the/command");
2550 let fake = fake.clone();
2551 async move {
2552 fake.server
2553 .request::<lsp::request::ApplyWorkspaceEdit>(
2554 lsp::ApplyWorkspaceEditParams {
2555 label: None,
2556 edit: lsp::WorkspaceEdit {
2557 changes: Some(
2558 [(
2559 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2560 vec![lsp::TextEdit {
2561 range: lsp::Range::new(
2562 lsp::Position::new(0, 0),
2563 lsp::Position::new(0, 0),
2564 ),
2565 new_text: "X".into(),
2566 }],
2567 )]
2568 .into_iter()
2569 .collect(),
2570 ),
2571 ..Default::default()
2572 },
2573 },
2574 )
2575 .await
2576 .unwrap();
2577 Ok(Some(json!(null)))
2578 }
2579 }
2580 })
2581 .next()
2582 .await;
2583
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2586 let transaction = apply.await.unwrap();
2587 assert!(transaction.0.contains_key(&buffer));
2588 buffer.update(cx, |buffer, cx| {
2589 assert_eq!(buffer.text(), "Xa");
2590 buffer.undo(cx);
2591 assert_eq!(buffer.text(), "a");
2592 });
2593}
2594
2595#[gpui::test(iterations = 10)]
2596async fn test_save_file(cx: &mut gpui::TestAppContext) {
2597 init_test(cx);
2598
2599 let fs = FakeFs::new(cx.executor());
2600 fs.insert_tree(
2601 "/dir",
2602 json!({
2603 "file1": "the old contents",
2604 }),
2605 )
2606 .await;
2607
2608 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2609 let buffer = project
2610 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2611 .await
2612 .unwrap();
2613 buffer.update(cx, |buffer, cx| {
2614 assert_eq!(buffer.text(), "the old contents");
2615 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2616 });
2617
2618 project
2619 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2620 .await
2621 .unwrap();
2622
2623 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2624 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2625}
2626
2627#[gpui::test(iterations = 30)]
2628async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2629 init_test(cx);
2630
2631 let fs = FakeFs::new(cx.executor().clone());
2632 fs.insert_tree(
2633 "/dir",
2634 json!({
2635 "file1": "the original contents",
2636 }),
2637 )
2638 .await;
2639
2640 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2641 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2642 let buffer = project
2643 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2644 .await
2645 .unwrap();
2646
2647 // Simulate buffer diffs being slow, so that they don't complete before
2648 // the next file change occurs.
2649 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2650
2651 // Change the buffer's file on disk, and then wait for the file change
2652 // to be detected by the worktree, so that the buffer starts reloading.
2653 fs.save(
2654 "/dir/file1".as_ref(),
2655 &"the first contents".into(),
2656 Default::default(),
2657 )
2658 .await
2659 .unwrap();
2660 worktree.next_event(cx);
2661
2662 // Change the buffer's file again. Depending on the random seed, the
2663 // previous file change may still be in progress.
2664 fs.save(
2665 "/dir/file1".as_ref(),
2666 &"the second contents".into(),
2667 Default::default(),
2668 )
2669 .await
2670 .unwrap();
2671 worktree.next_event(cx);
2672
2673 cx.executor().run_until_parked();
2674 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2675 buffer.read_with(cx, |buffer, _| {
2676 assert_eq!(buffer.text(), on_disk_text);
2677 assert!(!buffer.is_dirty(), "buffer should not be dirty");
        assert!(!buffer.has_conflict(), "buffer should not have a conflict");
2679 });
2680}
2681
2682#[gpui::test(iterations = 30)]
2683async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2684 init_test(cx);
2685
2686 let fs = FakeFs::new(cx.executor().clone());
2687 fs.insert_tree(
2688 "/dir",
2689 json!({
2690 "file1": "the original contents",
2691 }),
2692 )
2693 .await;
2694
2695 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2696 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2697 let buffer = project
2698 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2699 .await
2700 .unwrap();
2701
2702 // Simulate buffer diffs being slow, so that they don't complete before
2703 // the next file change occurs.
2704 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2705
2706 // Change the buffer's file on disk, and then wait for the file change
2707 // to be detected by the worktree, so that the buffer starts reloading.
2708 fs.save(
2709 "/dir/file1".as_ref(),
2710 &"the first contents".into(),
2711 Default::default(),
2712 )
2713 .await
2714 .unwrap();
2715 worktree.next_event(cx);
2716
2717 cx.executor()
2718 .spawn(cx.executor().simulate_random_delay())
2719 .await;
2720
2721 // Perform a noop edit, causing the buffer's version to increase.
2722 buffer.update(cx, |buffer, cx| {
2723 buffer.edit([(0..0, " ")], None, cx);
2724 buffer.undo(cx);
2725 });
2726
2727 cx.executor().run_until_parked();
2728 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2729 buffer.read_with(cx, |buffer, _| {
2730 let buffer_text = buffer.text();
2731 if buffer_text == on_disk_text {
2732 assert!(
2733 !buffer.is_dirty() && !buffer.has_conflict(),
2734 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2735 );
2736 }
2737 // If the file change occurred while the buffer was processing the first
2738 // change, the buffer will be in a conflicting state.
2739 else {
2740 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
            assert!(buffer.has_conflict(), "buffer should report that it has a conflict. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2742 }
2743 });
2744}
2745
2746#[gpui::test]
2747async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2748 init_test(cx);
2749
2750 let fs = FakeFs::new(cx.executor());
2751 fs.insert_tree(
2752 "/dir",
2753 json!({
2754 "file1": "the old contents",
2755 }),
2756 )
2757 .await;
2758
2759 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2760 let buffer = project
2761 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2762 .await
2763 .unwrap();
2764 buffer.update(cx, |buffer, cx| {
2765 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2766 });
2767
2768 project
2769 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2770 .await
2771 .unwrap();
2772
2773 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2774 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2775}
2776
2777#[gpui::test]
2778async fn test_save_as(cx: &mut gpui::TestAppContext) {
2779 init_test(cx);
2780
2781 let fs = FakeFs::new(cx.executor());
2782 fs.insert_tree("/dir", json!({})).await;
2783
2784 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2785
2786 let languages = project.update(cx, |project, _| project.languages().clone());
2787 languages.add(rust_lang());
2788
2789 let buffer = project.update(cx, |project, cx| {
2790 project.create_buffer("", None, cx).unwrap()
2791 });
2792 buffer.update(cx, |buffer, cx| {
2793 buffer.edit([(0..0, "abc")], None, cx);
2794 assert!(buffer.is_dirty());
2795 assert!(!buffer.has_conflict());
2796 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2797 });
2798 project
2799 .update(cx, |project, cx| {
2800 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2801 })
2802 .await
2803 .unwrap();
2804 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2805
2806 cx.executor().run_until_parked();
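    // After save-as, the buffer points at the new path, is no longer dirty, and picks up the
    // Rust language from the .rs extension.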
2807 buffer.update(cx, |buffer, cx| {
2808 assert_eq!(
2809 buffer.file().unwrap().full_path(cx),
2810 Path::new("dir/file1.rs")
2811 );
2812 assert!(!buffer.is_dirty());
2813 assert!(!buffer.has_conflict());
2814 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2815 });
2816
2817 let opened_buffer = project
2818 .update(cx, |project, cx| {
2819 project.open_local_buffer("/dir/file1.rs", cx)
2820 })
2821 .await
2822 .unwrap();
2823 assert_eq!(opened_buffer, buffer);
2824}
2825
2826#[gpui::test(retries = 5)]
2827async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
2828 init_test(cx);
2829 cx.executor().allow_parking();
2830
2831 let dir = temp_tree(json!({
2832 "a": {
2833 "file1": "",
2834 "file2": "",
2835 "file3": "",
2836 },
2837 "b": {
2838 "c": {
2839 "file4": "",
2840 "file5": "",
2841 }
2842 }
2843 }));
2844
2845 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2846 let rpc = project.update(cx, |p, _| p.client.clone());
2847
2848 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2849 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2850 async move { buffer.await.unwrap() }
2851 };
2852 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2853 project.update(cx, |project, cx| {
2854 let tree = project.worktrees().next().unwrap();
2855 tree.read(cx)
2856 .entry_for_path(path)
2857 .unwrap_or_else(|| panic!("no entry for path {}", path))
2858 .id
2859 })
2860 };
2861
2862 let buffer2 = buffer_for_path("a/file2", cx).await;
2863 let buffer3 = buffer_for_path("a/file3", cx).await;
2864 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2865 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2866
2867 let file2_id = id_for_path("a/file2", cx);
2868 let file3_id = id_for_path("a/file3", cx);
2869 let file4_id = id_for_path("b/c/file4", cx);
2870
2871 // Create a remote copy of this worktree.
2872 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2873
2874 let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2875
2876 let updates = Arc::new(Mutex::new(Vec::new()));
2877 tree.update(cx, |tree, cx| {
2878 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2879 let updates = updates.clone();
2880 move |update| {
2881 updates.lock().push(update);
2882 async { true }
2883 }
2884 });
2885 });
2886
2887 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2888
2889 cx.executor().run_until_parked();
2890
2891 cx.update(|cx| {
2892 assert!(!buffer2.read(cx).is_dirty());
2893 assert!(!buffer3.read(cx).is_dirty());
2894 assert!(!buffer4.read(cx).is_dirty());
2895 assert!(!buffer5.read(cx).is_dirty());
2896 });
2897
2898 // Rename and delete files and directories.
2899 tree.flush_fs_events(cx).await;
2900 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2901 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2902 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2903 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2904 tree.flush_fs_events(cx).await;
2905
2906 let expected_paths = vec![
2907 "a",
2908 "a/file1",
2909 "a/file2.new",
2910 "b",
2911 "d",
2912 "d/file3",
2913 "d/file4",
2914 ];
2915
2916 cx.update(|app| {
2917 assert_eq!(
2918 tree.read(app)
2919 .paths()
2920 .map(|p| p.to_str().unwrap())
2921 .collect::<Vec<_>>(),
2922 expected_paths
2923 );
2924 });
2925
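    // Entry ids survive the renames and moves performed above.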
2926 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2927 assert_eq!(id_for_path("d/file3", cx), file3_id);
2928 assert_eq!(id_for_path("d/file4", cx), file4_id);
2929
2930 cx.update(|cx| {
2931 assert_eq!(
2932 buffer2.read(cx).file().unwrap().path().as_ref(),
2933 Path::new("a/file2.new")
2934 );
2935 assert_eq!(
2936 buffer3.read(cx).file().unwrap().path().as_ref(),
2937 Path::new("d/file3")
2938 );
2939 assert_eq!(
2940 buffer4.read(cx).file().unwrap().path().as_ref(),
2941 Path::new("d/file4")
2942 );
2943 assert_eq!(
2944 buffer5.read(cx).file().unwrap().path().as_ref(),
2945 Path::new("b/c/file5")
2946 );
2947
2948 assert!(!buffer2.read(cx).file().unwrap().is_deleted());
2949 assert!(!buffer3.read(cx).file().unwrap().is_deleted());
2950 assert!(!buffer4.read(cx).file().unwrap().is_deleted());
2951 assert!(buffer5.read(cx).file().unwrap().is_deleted());
2952 });
2953
2954 // Update the remote worktree. Check that it becomes consistent with the
2955 // local worktree.
2956 cx.executor().run_until_parked();
2957
2958 remote.update(cx, |remote, _| {
2959 for update in updates.lock().drain(..) {
2960 remote.as_remote_mut().unwrap().update_from_remote(update);
2961 }
2962 });
2963 cx.executor().run_until_parked();
2964 remote.update(cx, |remote, _| {
2965 assert_eq!(
2966 remote
2967 .paths()
2968 .map(|p| p.to_str().unwrap())
2969 .collect::<Vec<_>>(),
2970 expected_paths
2971 );
2972 });
2973}
2974
2975#[gpui::test(iterations = 10)]
2976async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
2977 init_test(cx);
2978
2979 let fs = FakeFs::new(cx.executor());
2980 fs.insert_tree(
2981 "/dir",
2982 json!({
2983 "a": {
2984 "file1": "",
2985 }
2986 }),
2987 )
2988 .await;
2989
2990 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2991 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2992 let tree_id = tree.update(cx, |tree, _| tree.id());
2993
2994 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2995 project.update(cx, |project, cx| {
2996 let tree = project.worktrees().next().unwrap();
2997 tree.read(cx)
2998 .entry_for_path(path)
2999 .unwrap_or_else(|| panic!("no entry for path {}", path))
3000 .id
3001 })
3002 };
3003
3004 let dir_id = id_for_path("a", cx);
3005 let file_id = id_for_path("a/file1", cx);
3006 let buffer = project
3007 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3008 .await
3009 .unwrap();
3010 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3011
3012 project
3013 .update(cx, |project, cx| {
3014 project.rename_entry(dir_id, Path::new("b"), cx)
3015 })
3016 .unwrap()
3017 .await
3018 .unwrap();
3019 cx.executor().run_until_parked();
3020
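    // The renamed directory and its file keep their entry ids, and the open buffer stays clean.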
3021 assert_eq!(id_for_path("b", cx), dir_id);
3022 assert_eq!(id_for_path("b/file1", cx), file_id);
3023 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3024}
3025
3026#[gpui::test]
3027async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3028 init_test(cx);
3029
3030 let fs = FakeFs::new(cx.executor());
3031 fs.insert_tree(
3032 "/dir",
3033 json!({
3034 "a.txt": "a-contents",
3035 "b.txt": "b-contents",
3036 }),
3037 )
3038 .await;
3039
3040 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3041
3042 // Spawn multiple tasks to open paths, repeating some paths.
3043 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3044 (
3045 p.open_local_buffer("/dir/a.txt", cx),
3046 p.open_local_buffer("/dir/b.txt", cx),
3047 p.open_local_buffer("/dir/a.txt", cx),
3048 )
3049 });
3050
3051 let buffer_a_1 = buffer_a_1.await.unwrap();
3052 let buffer_a_2 = buffer_a_2.await.unwrap();
3053 let buffer_b = buffer_b.await.unwrap();
3054 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3055 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3056
3057 // There is only one buffer per path.
3058 let buffer_a_id = buffer_a_1.entity_id();
3059 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3060
3061 // Open the same path again while it is still open.
3062 drop(buffer_a_1);
3063 let buffer_a_3 = project
3064 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3065 .await
3066 .unwrap();
3067
3068 // There's still only one buffer per path.
3069 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3070}
3071
3072#[gpui::test]
3073async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3074 init_test(cx);
3075
3076 let fs = FakeFs::new(cx.executor());
3077 fs.insert_tree(
3078 "/dir",
3079 json!({
3080 "file1": "abc",
3081 "file2": "def",
3082 "file3": "ghi",
3083 }),
3084 )
3085 .await;
3086
3087 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3088
3089 let buffer1 = project
3090 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3091 .await
3092 .unwrap();
3093 let events = Arc::new(Mutex::new(Vec::new()));
3094
3095 // initially, the buffer isn't dirty.
3096 buffer1.update(cx, |buffer, cx| {
3097 cx.subscribe(&buffer1, {
3098 let events = events.clone();
3099 move |_, _, event, _| match event {
3100 BufferEvent::Operation(_) => {}
3101 _ => events.lock().push(event.clone()),
3102 }
3103 })
3104 .detach();
3105
3106 assert!(!buffer.is_dirty());
3107 assert!(events.lock().is_empty());
3108
3109 buffer.edit([(1..2, "")], None, cx);
3110 });
3111
3112 // after the first edit, the buffer is dirty, and emits a dirtied event.
3113 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "ac");
3115 assert!(buffer.is_dirty());
3116 assert_eq!(
3117 *events.lock(),
3118 &[language::Event::Edited, language::Event::DirtyChanged]
3119 );
3120 events.lock().clear();
3121 buffer.did_save(
3122 buffer.version(),
3123 buffer.as_rope().fingerprint(),
3124 buffer.file().unwrap().mtime(),
3125 cx,
3126 );
3127 });
3128
3129 // after saving, the buffer is not dirty, and emits a saved event.
3130 buffer1.update(cx, |buffer, cx| {
3131 assert!(!buffer.is_dirty());
3132 assert_eq!(*events.lock(), &[language::Event::Saved]);
3133 events.lock().clear();
3134
3135 buffer.edit([(1..1, "B")], None, cx);
3136 buffer.edit([(2..2, "D")], None, cx);
3137 });
3138
3139 // after editing again, the buffer is dirty, and emits another dirty event.
3140 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "aBDc");
3142 assert!(buffer.is_dirty());
3143 assert_eq!(
3144 *events.lock(),
3145 &[
3146 language::Event::Edited,
3147 language::Event::DirtyChanged,
3148 language::Event::Edited,
3149 ],
3150 );
3151 events.lock().clear();
3152
3153 // After restoring the buffer to its previously-saved state,
3154 // the buffer is not considered dirty anymore.
3155 buffer.edit([(1..3, "")], None, cx);
        assert_eq!(buffer.text(), "ac");
3157 assert!(!buffer.is_dirty());
3158 });
3159
3160 assert_eq!(
3161 *events.lock(),
3162 &[language::Event::Edited, language::Event::DirtyChanged]
3163 );
3164
3165 // When a file is deleted, the buffer is considered dirty.
3166 let events = Arc::new(Mutex::new(Vec::new()));
3167 let buffer2 = project
3168 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3169 .await
3170 .unwrap();
3171 buffer2.update(cx, |_, cx| {
3172 cx.subscribe(&buffer2, {
3173 let events = events.clone();
3174 move |_, _, event, _| events.lock().push(event.clone())
3175 })
3176 .detach();
3177 });
3178
3179 fs.remove_file("/dir/file2".as_ref(), Default::default())
3180 .await
3181 .unwrap();
3182 cx.executor().run_until_parked();
3183 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3184 assert_eq!(
3185 *events.lock(),
3186 &[
3187 language::Event::DirtyChanged,
3188 language::Event::FileHandleChanged
3189 ]
3190 );
3191
    // When a file that is already dirty is deleted, no additional DirtyChanged event is emitted.
3193 let events = Arc::new(Mutex::new(Vec::new()));
3194 let buffer3 = project
3195 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3196 .await
3197 .unwrap();
3198 buffer3.update(cx, |_, cx| {
3199 cx.subscribe(&buffer3, {
3200 let events = events.clone();
3201 move |_, _, event, _| events.lock().push(event.clone())
3202 })
3203 .detach();
3204 });
3205
3206 buffer3.update(cx, |buffer, cx| {
3207 buffer.edit([(0..0, "x")], None, cx);
3208 });
3209 events.lock().clear();
3210 fs.remove_file("/dir/file3".as_ref(), Default::default())
3211 .await
3212 .unwrap();
3213 cx.executor().run_until_parked();
3214 assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
3215 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3216}
3217
3218#[gpui::test]
3219async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3220 init_test(cx);
3221
3222 let initial_contents = "aaa\nbbbbb\nc\n";
3223 let fs = FakeFs::new(cx.executor());
3224 fs.insert_tree(
3225 "/dir",
3226 json!({
3227 "the-file": initial_contents,
3228 }),
3229 )
3230 .await;
3231 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3232 let buffer = project
3233 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3234 .await
3235 .unwrap();
3236
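    // Create anchors at column 1 of the first three rows so we can verify that they survive
    // the on-disk reload.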
3237 let anchors = (0..3)
3238 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3239 .collect::<Vec<_>>();
3240
3241 // Change the file on disk, adding two new lines of text, and removing
3242 // one line.
3243 buffer.update(cx, |buffer, _| {
3244 assert!(!buffer.is_dirty());
3245 assert!(!buffer.has_conflict());
3246 });
3247 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3248 fs.save(
3249 "/dir/the-file".as_ref(),
3250 &new_contents.into(),
3251 LineEnding::Unix,
3252 )
3253 .await
3254 .unwrap();
3255
3256 // Because the buffer was not modified, it is reloaded from disk. Its
3257 // contents are edited according to the diff between the old and new
3258 // file contents.
3259 cx.executor().run_until_parked();
3260 buffer.update(cx, |buffer, _| {
3261 assert_eq!(buffer.text(), new_contents);
3262 assert!(!buffer.is_dirty());
3263 assert!(!buffer.has_conflict());
3264
3265 let anchor_positions = anchors
3266 .iter()
3267 .map(|anchor| anchor.to_point(&*buffer))
3268 .collect::<Vec<_>>();
3269 assert_eq!(
3270 anchor_positions,
3271 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3272 );
3273 });
3274
3275 // Modify the buffer
3276 buffer.update(cx, |buffer, cx| {
3277 buffer.edit([(0..0, " ")], None, cx);
3278 assert!(buffer.is_dirty());
3279 assert!(!buffer.has_conflict());
3280 });
3281
3282 // Change the file on disk again, adding blank lines to the beginning.
3283 fs.save(
3284 "/dir/the-file".as_ref(),
3285 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3286 LineEnding::Unix,
3287 )
3288 .await
3289 .unwrap();
3290
3291 // Because the buffer is modified, it doesn't reload from disk, but is
3292 // marked as having a conflict.
3293 cx.executor().run_until_parked();
3294 buffer.update(cx, |buffer, _| {
3295 assert!(buffer.has_conflict());
3296 });
3297}
3298
3299#[gpui::test]
3300async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3301 init_test(cx);
3302
3303 let fs = FakeFs::new(cx.executor());
3304 fs.insert_tree(
3305 "/dir",
3306 json!({
3307 "file1": "a\nb\nc\n",
3308 "file2": "one\r\ntwo\r\nthree\r\n",
3309 }),
3310 )
3311 .await;
3312
3313 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3314 let buffer1 = project
3315 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3316 .await
3317 .unwrap();
3318 let buffer2 = project
3319 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3320 .await
3321 .unwrap();
3322
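    // Buffer text is normalized to "\n" line endings; the original style is reported by
    // line_ending().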
3323 buffer1.update(cx, |buffer, _| {
3324 assert_eq!(buffer.text(), "a\nb\nc\n");
3325 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3326 });
3327 buffer2.update(cx, |buffer, _| {
3328 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3329 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3330 });
3331
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
3334 fs.save(
3335 "/dir/file1".as_ref(),
3336 &"aaa\nb\nc\n".into(),
3337 LineEnding::Windows,
3338 )
3339 .await
3340 .unwrap();
3341 cx.executor().run_until_parked();
3342 buffer1.update(cx, |buffer, _| {
3343 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3344 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3345 });
3346
    // Save a file with Windows line endings. The file is written correctly.
3348 buffer2.update(cx, |buffer, cx| {
3349 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3350 });
3351 project
3352 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3353 .await
3354 .unwrap();
3355 assert_eq!(
3356 fs.load("/dir/file2".as_ref()).await.unwrap(),
3357 "one\r\ntwo\r\nthree\r\nfour\r\n",
3358 );
3359}
3360
3361#[gpui::test]
3362async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3363 init_test(cx);
3364
3365 let fs = FakeFs::new(cx.executor());
3366 fs.insert_tree(
3367 "/the-dir",
3368 json!({
3369 "a.rs": "
3370 fn foo(mut v: Vec<usize>) {
3371 for x in &v {
3372 v.push(1);
3373 }
3374 }
3375 "
3376 .unindent(),
3377 }),
3378 )
3379 .await;
3380
3381 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3382 let buffer = project
3383 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3384 .await
3385 .unwrap();
3386
3387 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3388 let message = lsp::PublishDiagnosticsParams {
3389 uri: buffer_uri.clone(),
3390 diagnostics: vec![
3391 lsp::Diagnostic {
3392 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3393 severity: Some(DiagnosticSeverity::WARNING),
3394 message: "error 1".to_string(),
3395 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3396 location: lsp::Location {
3397 uri: buffer_uri.clone(),
3398 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3399 },
3400 message: "error 1 hint 1".to_string(),
3401 }]),
3402 ..Default::default()
3403 },
3404 lsp::Diagnostic {
3405 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3406 severity: Some(DiagnosticSeverity::HINT),
3407 message: "error 1 hint 1".to_string(),
3408 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3409 location: lsp::Location {
3410 uri: buffer_uri.clone(),
3411 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3412 },
3413 message: "original diagnostic".to_string(),
3414 }]),
3415 ..Default::default()
3416 },
3417 lsp::Diagnostic {
3418 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3419 severity: Some(DiagnosticSeverity::ERROR),
3420 message: "error 2".to_string(),
3421 related_information: Some(vec![
3422 lsp::DiagnosticRelatedInformation {
3423 location: lsp::Location {
3424 uri: buffer_uri.clone(),
3425 range: lsp::Range::new(
3426 lsp::Position::new(1, 13),
3427 lsp::Position::new(1, 15),
3428 ),
3429 },
3430 message: "error 2 hint 1".to_string(),
3431 },
3432 lsp::DiagnosticRelatedInformation {
3433 location: lsp::Location {
3434 uri: buffer_uri.clone(),
3435 range: lsp::Range::new(
3436 lsp::Position::new(1, 13),
3437 lsp::Position::new(1, 15),
3438 ),
3439 },
3440 message: "error 2 hint 2".to_string(),
3441 },
3442 ]),
3443 ..Default::default()
3444 },
3445 lsp::Diagnostic {
3446 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3447 severity: Some(DiagnosticSeverity::HINT),
3448 message: "error 2 hint 1".to_string(),
3449 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3450 location: lsp::Location {
3451 uri: buffer_uri.clone(),
3452 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3453 },
3454 message: "original diagnostic".to_string(),
3455 }]),
3456 ..Default::default()
3457 },
3458 lsp::Diagnostic {
3459 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3460 severity: Some(DiagnosticSeverity::HINT),
3461 message: "error 2 hint 2".to_string(),
3462 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3463 location: lsp::Location {
3464 uri: buffer_uri,
3465 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3466 },
3467 message: "original diagnostic".to_string(),
3468 }]),
3469 ..Default::default()
3470 },
3471 ],
3472 version: None,
3473 };
3474
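    // Publishing these diagnostics groups each primary entry with the hints that reference it
    // via related_information, so a primary and its hints share a group_id.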
3475 project
3476 .update(cx, |p, cx| {
3477 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3478 })
3479 .unwrap();
3480 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
3481
3482 assert_eq!(
3483 buffer
3484 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3485 .collect::<Vec<_>>(),
3486 &[
3487 DiagnosticEntry {
3488 range: Point::new(1, 8)..Point::new(1, 9),
3489 diagnostic: Diagnostic {
3490 severity: DiagnosticSeverity::WARNING,
3491 message: "error 1".to_string(),
3492 group_id: 1,
3493 is_primary: true,
3494 ..Default::default()
3495 }
3496 },
3497 DiagnosticEntry {
3498 range: Point::new(1, 8)..Point::new(1, 9),
3499 diagnostic: Diagnostic {
3500 severity: DiagnosticSeverity::HINT,
3501 message: "error 1 hint 1".to_string(),
3502 group_id: 1,
3503 is_primary: false,
3504 ..Default::default()
3505 }
3506 },
3507 DiagnosticEntry {
3508 range: Point::new(1, 13)..Point::new(1, 15),
3509 diagnostic: Diagnostic {
3510 severity: DiagnosticSeverity::HINT,
3511 message: "error 2 hint 1".to_string(),
3512 group_id: 0,
3513 is_primary: false,
3514 ..Default::default()
3515 }
3516 },
3517 DiagnosticEntry {
3518 range: Point::new(1, 13)..Point::new(1, 15),
3519 diagnostic: Diagnostic {
3520 severity: DiagnosticSeverity::HINT,
3521 message: "error 2 hint 2".to_string(),
3522 group_id: 0,
3523 is_primary: false,
3524 ..Default::default()
3525 }
3526 },
3527 DiagnosticEntry {
3528 range: Point::new(2, 8)..Point::new(2, 17),
3529 diagnostic: Diagnostic {
3530 severity: DiagnosticSeverity::ERROR,
3531 message: "error 2".to_string(),
3532 group_id: 0,
3533 is_primary: true,
3534 ..Default::default()
3535 }
3536 }
3537 ]
3538 );
3539
3540 assert_eq!(
3541 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3542 &[
3543 DiagnosticEntry {
3544 range: Point::new(1, 13)..Point::new(1, 15),
3545 diagnostic: Diagnostic {
3546 severity: DiagnosticSeverity::HINT,
3547 message: "error 2 hint 1".to_string(),
3548 group_id: 0,
3549 is_primary: false,
3550 ..Default::default()
3551 }
3552 },
3553 DiagnosticEntry {
3554 range: Point::new(1, 13)..Point::new(1, 15),
3555 diagnostic: Diagnostic {
3556 severity: DiagnosticSeverity::HINT,
3557 message: "error 2 hint 2".to_string(),
3558 group_id: 0,
3559 is_primary: false,
3560 ..Default::default()
3561 }
3562 },
3563 DiagnosticEntry {
3564 range: Point::new(2, 8)..Point::new(2, 17),
3565 diagnostic: Diagnostic {
3566 severity: DiagnosticSeverity::ERROR,
3567 message: "error 2".to_string(),
3568 group_id: 0,
3569 is_primary: true,
3570 ..Default::default()
3571 }
3572 }
3573 ]
3574 );
3575
3576 assert_eq!(
3577 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3578 &[
3579 DiagnosticEntry {
3580 range: Point::new(1, 8)..Point::new(1, 9),
3581 diagnostic: Diagnostic {
3582 severity: DiagnosticSeverity::WARNING,
3583 message: "error 1".to_string(),
3584 group_id: 1,
3585 is_primary: true,
3586 ..Default::default()
3587 }
3588 },
3589 DiagnosticEntry {
3590 range: Point::new(1, 8)..Point::new(1, 9),
3591 diagnostic: Diagnostic {
3592 severity: DiagnosticSeverity::HINT,
3593 message: "error 1 hint 1".to_string(),
3594 group_id: 1,
3595 is_primary: false,
3596 ..Default::default()
3597 }
3598 },
3599 ]
3600 );
3601}
3602
3603#[gpui::test]
3604async fn test_rename(cx: &mut gpui::TestAppContext) {
3605 init_test(cx);
3606
3607 let fs = FakeFs::new(cx.executor());
3608 fs.insert_tree(
3609 "/dir",
3610 json!({
3611 "one.rs": "const ONE: usize = 1;",
3612 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3613 }),
3614 )
3615 .await;
3616
3617 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3618
3619 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3620 language_registry.add(rust_lang());
3621 let mut fake_servers = language_registry.register_fake_lsp_adapter(
3622 "Rust",
3623 FakeLspAdapter {
3624 capabilities: lsp::ServerCapabilities {
3625 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3626 prepare_provider: Some(true),
3627 work_done_progress_options: Default::default(),
3628 })),
3629 ..Default::default()
3630 },
3631 ..Default::default()
3632 },
3633 );
3634
3635 let buffer = project
3636 .update(cx, |project, cx| {
3637 project.open_local_buffer("/dir/one.rs", cx)
3638 })
3639 .await
3640 .unwrap();
3641
3642 let fake_server = fake_servers.next().await.unwrap();
3643
3644 let response = project.update(cx, |project, cx| {
3645 project.prepare_rename(buffer.clone(), 7, cx)
3646 });
3647 fake_server
3648 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3649 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3650 assert_eq!(params.position, lsp::Position::new(0, 7));
3651 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3652 lsp::Position::new(0, 6),
3653 lsp::Position::new(0, 9),
3654 ))))
3655 })
3656 .next()
3657 .await
3658 .unwrap();
3659 let range = response.await.unwrap().unwrap();
3660 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
3661 assert_eq!(range, 6..9);
3662
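    // Perform the rename. The server's workspace edit touches both files, so the resulting
    // transaction contains two buffers.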
3663 let response = project.update(cx, |project, cx| {
3664 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3665 });
3666 fake_server
3667 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3668 assert_eq!(
3669 params.text_document_position.text_document.uri.as_str(),
3670 "file:///dir/one.rs"
3671 );
3672 assert_eq!(
3673 params.text_document_position.position,
3674 lsp::Position::new(0, 7)
3675 );
3676 assert_eq!(params.new_name, "THREE");
3677 Ok(Some(lsp::WorkspaceEdit {
3678 changes: Some(
3679 [
3680 (
3681 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3682 vec![lsp::TextEdit::new(
3683 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3684 "THREE".to_string(),
3685 )],
3686 ),
3687 (
3688 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3689 vec![
3690 lsp::TextEdit::new(
3691 lsp::Range::new(
3692 lsp::Position::new(0, 24),
3693 lsp::Position::new(0, 27),
3694 ),
3695 "THREE".to_string(),
3696 ),
3697 lsp::TextEdit::new(
3698 lsp::Range::new(
3699 lsp::Position::new(0, 35),
3700 lsp::Position::new(0, 38),
3701 ),
3702 "THREE".to_string(),
3703 ),
3704 ],
3705 ),
3706 ]
3707 .into_iter()
3708 .collect(),
3709 ),
3710 ..Default::default()
3711 }))
3712 })
3713 .next()
3714 .await
3715 .unwrap();
3716 let mut transaction = response.await.unwrap().0;
3717 assert_eq!(transaction.len(), 2);
3718 assert_eq!(
3719 transaction
3720 .remove_entry(&buffer)
3721 .unwrap()
3722 .0
3723 .update(cx, |buffer, _| buffer.text()),
3724 "const THREE: usize = 1;"
3725 );
3726 assert_eq!(
3727 transaction
3728 .into_keys()
3729 .next()
3730 .unwrap()
3731 .update(cx, |buffer, _| buffer.text()),
3732 "const TWO: usize = one::THREE + one::THREE;"
3733 );
3734}
3735
3736#[gpui::test]
3737async fn test_search(cx: &mut gpui::TestAppContext) {
3738 init_test(cx);
3739
3740 let fs = FakeFs::new(cx.executor());
3741 fs.insert_tree(
3742 "/dir",
3743 json!({
3744 "one.rs": "const ONE: usize = 1;",
3745 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3746 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3747 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3748 }),
3749 )
3750 .await;
3751 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3752 assert_eq!(
3753 search(
3754 &project,
3755 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3756 cx
3757 )
3758 .await
3759 .unwrap(),
3760 HashMap::from_iter([
3761 ("two.rs".to_string(), vec![6..9]),
3762 ("three.rs".to_string(), vec![37..40])
3763 ])
3764 );
3765
3766 let buffer_4 = project
3767 .update(cx, |project, cx| {
3768 project.open_local_buffer("/dir/four.rs", cx)
3769 })
3770 .await
3771 .unwrap();
3772 buffer_4.update(cx, |buffer, cx| {
3773 let text = "two::TWO";
3774 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3775 });
3776
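    // Project search also reflects unsaved changes in open buffers, so the in-memory edits to
    // four.rs show up in the results.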
3777 assert_eq!(
3778 search(
3779 &project,
3780 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3781 cx
3782 )
3783 .await
3784 .unwrap(),
3785 HashMap::from_iter([
3786 ("two.rs".to_string(), vec![6..9]),
3787 ("three.rs".to_string(), vec![37..40]),
3788 ("four.rs".to_string(), vec![25..28, 36..39])
3789 ])
3790 );
3791}
3792
3793#[gpui::test]
3794async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3795 init_test(cx);
3796
3797 let search_query = "file";
3798
3799 let fs = FakeFs::new(cx.executor());
3800 fs.insert_tree(
3801 "/dir",
3802 json!({
3803 "one.rs": r#"// Rust file one"#,
3804 "one.ts": r#"// TypeScript file one"#,
3805 "two.rs": r#"// Rust file two"#,
3806 "two.ts": r#"// TypeScript file two"#,
3807 }),
3808 )
3809 .await;
3810 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3811
3812 assert!(
3813 search(
3814 &project,
3815 SearchQuery::text(
3816 search_query,
3817 false,
3818 true,
3819 false,
3820 vec![PathMatcher::new("*.odd").unwrap()],
3821 Vec::new()
3822 )
3823 .unwrap(),
3824 cx
3825 )
3826 .await
3827 .unwrap()
3828 .is_empty(),
3829 "If no inclusions match, no files should be returned"
3830 );
3831
3832 assert_eq!(
3833 search(
3834 &project,
3835 SearchQuery::text(
3836 search_query,
3837 false,
3838 true,
3839 false,
3840 vec![PathMatcher::new("*.rs").unwrap()],
3841 Vec::new()
3842 )
3843 .unwrap(),
3844 cx
3845 )
3846 .await
3847 .unwrap(),
3848 HashMap::from_iter([
3849 ("one.rs".to_string(), vec![8..12]),
3850 ("two.rs".to_string(), vec![8..12]),
3851 ]),
3852 "Rust only search should give only Rust files"
3853 );
3854
3855 assert_eq!(
3856 search(
3857 &project,
3858 SearchQuery::text(
3859 search_query,
3860 false,
3861 true,
3862 false,
3863 vec![
3864 PathMatcher::new("*.ts").unwrap(),
3865 PathMatcher::new("*.odd").unwrap(),
3866 ],
3867 Vec::new()
3868 ).unwrap(),
3869 cx
3870 )
3871 .await
3872 .unwrap(),
3873 HashMap::from_iter([
3874 ("one.ts".to_string(), vec![14..18]),
3875 ("two.ts".to_string(), vec![14..18]),
3876 ]),
3877 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3878 );
3879
3880 assert_eq!(
3881 search(
3882 &project,
3883 SearchQuery::text(
3884 search_query,
3885 false,
3886 true,
3887 false,
3888 vec![
3889 PathMatcher::new("*.rs").unwrap(),
3890 PathMatcher::new("*.ts").unwrap(),
3891 PathMatcher::new("*.odd").unwrap(),
3892 ],
3893 Vec::new()
3894 ).unwrap(),
3895 cx
3896 )
3897 .await
3898 .unwrap(),
3899 HashMap::from_iter([
3900 ("one.rs".to_string(), vec![8..12]),
3901 ("one.ts".to_string(), vec![14..18]),
3902 ("two.rs".to_string(), vec![8..12]),
3903 ("two.ts".to_string(), vec![14..18]),
3904 ]),
        "Rust and TypeScript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3906 );
3907}
3908
3909#[gpui::test]
3910async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3911 init_test(cx);
3912
3913 let search_query = "file";
3914
3915 let fs = FakeFs::new(cx.executor());
3916 fs.insert_tree(
3917 "/dir",
3918 json!({
3919 "one.rs": r#"// Rust file one"#,
3920 "one.ts": r#"// TypeScript file one"#,
3921 "two.rs": r#"// Rust file two"#,
3922 "two.ts": r#"// TypeScript file two"#,
3923 }),
3924 )
3925 .await;
3926 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3927
3928 assert_eq!(
3929 search(
3930 &project,
3931 SearchQuery::text(
3932 search_query,
3933 false,
3934 true,
3935 false,
3936 Vec::new(),
3937 vec![PathMatcher::new("*.odd").unwrap()],
3938 )
3939 .unwrap(),
3940 cx
3941 )
3942 .await
3943 .unwrap(),
3944 HashMap::from_iter([
3945 ("one.rs".to_string(), vec![8..12]),
3946 ("one.ts".to_string(), vec![14..18]),
3947 ("two.rs".to_string(), vec![8..12]),
3948 ("two.ts".to_string(), vec![14..18]),
3949 ]),
3950 "If no exclusions match, all files should be returned"
3951 );
3952
3953 assert_eq!(
3954 search(
3955 &project,
3956 SearchQuery::text(
3957 search_query,
3958 false,
3959 true,
3960 false,
3961 Vec::new(),
3962 vec![PathMatcher::new("*.rs").unwrap()],
3963 )
3964 .unwrap(),
3965 cx
3966 )
3967 .await
3968 .unwrap(),
3969 HashMap::from_iter([
3970 ("one.ts".to_string(), vec![14..18]),
3971 ("two.ts".to_string(), vec![14..18]),
3972 ]),
3973 "Rust exclusion search should give only TypeScript files"
3974 );
3975
3976 assert_eq!(
3977 search(
3978 &project,
3979 SearchQuery::text(
3980 search_query,
3981 false,
3982 true,
3983 false,
3984 Vec::new(),
3985 vec![
3986 PathMatcher::new("*.ts").unwrap(),
3987 PathMatcher::new("*.odd").unwrap(),
3988 ],
3989 ).unwrap(),
3990 cx
3991 )
3992 .await
3993 .unwrap(),
3994 HashMap::from_iter([
3995 ("one.rs".to_string(), vec![8..12]),
3996 ("two.rs".to_string(), vec![8..12]),
3997 ]),
3998 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
3999 );
4000
4001 assert!(
4002 search(
4003 &project,
4004 SearchQuery::text(
4005 search_query,
4006 false,
4007 true,
4008 false,
4009 Vec::new(),
4010 vec![
4011 PathMatcher::new("*.rs").unwrap(),
4012 PathMatcher::new("*.ts").unwrap(),
4013 PathMatcher::new("*.odd").unwrap(),
4014 ],
4015 ).unwrap(),
4016 cx
4017 )
4018 .await
4019 .unwrap().is_empty(),
        "Rust and TypeScript exclusions should give no files, even if other exclusions don't match anything"
4021 );
4022}
4023
#[gpui::test]
async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![PathMatcher::new("*.odd").unwrap()],
                vec![PathMatcher::new("*.odd").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If neither inclusions nor exclusions match anything, no files should be returned"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![PathMatcher::new("*.ts").unwrap()],
                vec![PathMatcher::new("*.ts").unwrap()],
            ).unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If the same TypeScript files are both included and excluded, exclusions should win and no files should be returned"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Adding non-matching inclusions and exclusions should not change that"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
                vec![
                    PathMatcher::new("*.rs").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.ts".to_string(), vec![14..18]),
            ("two.ts".to_string(), vec![14..18]),
        ]),
        "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
    );
}

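// Gitignored entries are skipped by default; flipping the fourth `SearchQuery::text`
// argument searches them as well.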
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should contain the query"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("package.json".to_string(), vec![8..11]),
            ("target/index.txt".to_string(), vec![6..9]),
            (
                "node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            ("node_modules/prettier/index.ts".to_string(), vec![15..18]),
            ("node_modules/eslint/index.ts".to_string(), vec![13..16]),
            ("node_modules/eslint/package.json".to_string(), vec![8..11]),
        ]),
        "A search that includes ignored directories should find every file containing the query"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                vec![PathMatcher::new("node_modules/prettier/**").unwrap()],
                vec![PathMatcher::new("*.ts").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "A search including the ignored prettier directory but excluding TS files should find only one file"
    );
}

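// `glob_literal_prefix` should return the longest path prefix that contains no glob metacharacters.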
#[test]
fn test_glob_literal_prefix() {
    assert_eq!(glob_literal_prefix("**/*.js"), "");
    assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
    assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
    assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
}

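// Entry creation must stay inside the worktree: paths that escape the root or contain `..`
// are rejected, and the same restriction applies when opening buffers.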
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .unwrap()
        .await
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err());
}

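/// Runs `query` against `project` and collects the results into a map from buffer
/// path to matching offset ranges, so the tests above can assert on them directly.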
async fn search(
    project: &Model<Project>,
    query: SearchQuery,
    cx: &mut gpui::TestAppContext,
) -> Result<HashMap<String, Vec<Range<usize>>>> {
    let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
    let mut result = HashMap::default();
    while let Some((buffer, range)) = search_rx.next().await {
        result.entry(buffer).or_insert(range);
    }
    Ok(result
        .into_iter()
        .map(|(buffer, ranges)| {
            buffer.update(cx, |buffer, _| {
                let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                let ranges = ranges
                    .into_iter()
                    .map(|range| range.to_offset(buffer))
                    .collect::<Vec<_>>();
                (path, ranges)
            })
        })
        .collect())
}

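/// Sets up the global state these tests rely on: optional logging (when `RUST_LOG` is set),
/// a test `SettingsStore`, the release channel, languages, and project settings.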
fn init_test(cx: &mut gpui::TestAppContext) {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::try_init().ok();
    }

    cx.update(|cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init("0.0.0", cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}

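// Minimal language definitions used as fixtures throughout this file.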
fn json_lang() -> Arc<Language> {
    Arc::new(Language::new(
        LanguageConfig {
            name: "JSON".into(),
            matcher: LanguageMatcher {
                path_suffixes: vec!["json".to_string()],
                ..Default::default()
            },
            ..Default::default()
        },
        None,
    ))
}

fn js_lang() -> Arc<Language> {
    Arc::new(Language::new(
        LanguageConfig {
            name: Arc::from("JavaScript"),
            matcher: LanguageMatcher {
                path_suffixes: vec!["js".to_string()],
                ..Default::default()
            },
            ..Default::default()
        },
        None,
    ))
}

fn rust_lang() -> Arc<Language> {
    Arc::new(Language::new(
        LanguageConfig {
            name: "Rust".into(),
            matcher: LanguageMatcher {
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    ))
}

fn typescript_lang() -> Arc<Language> {
    Arc::new(Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            matcher: LanguageMatcher {
                path_suffixes: vec!["ts".to_string()],
                ..Default::default()
            },
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    ))
}