1use crate::{worktree::WorktreeHandle, Event, *};
2use fs::RealFs;
3use futures::{future, StreamExt};
4use gpui::{executor::Deterministic, test::subscribe};
5use language::{
6 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
7 LineEnding, OffsetRangeExt, Point, ToPoint,
8};
9use lsp::Url;
10use serde_json::json;
11use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
12use unindent::Unindent as _;
13use util::{assert_set_eq, test::temp_tree};
14
15#[gpui::test]
16async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
17 let dir = temp_tree(json!({
18 "root": {
19 "apple": "",
20 "banana": {
21 "carrot": {
22 "date": "",
23 "endive": "",
24 }
25 },
26 "fennel": {
27 "grape": "",
28 }
29 }
30 }));
31
32 let root_link_path = dir.path().join("root_link");
33 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
34 unix::fs::symlink(
35 &dir.path().join("root/fennel"),
36 &dir.path().join("root/finnochio"),
37 )
38 .unwrap();
39
40 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
41
42 project.read_with(cx, |project, cx| {
43 let tree = project.worktrees(cx).next().unwrap().read(cx);
44 assert_eq!(tree.file_count(), 5);
45 assert_eq!(
46 tree.inode_for_path("fennel/grape"),
47 tree.inode_for_path("finnochio/grape")
48 );
49 });
50
51 let cancel_flag = Default::default();
52 let results = project
53 .read_with(cx, |project, cx| {
54 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
55 })
56 .await;
57 assert_eq!(
58 results
59 .into_iter()
60 .map(|result| result.path)
61 .collect::<Vec<Arc<Path>>>(),
62 vec![
63 PathBuf::from("banana/carrot/date").into(),
64 PathBuf::from("banana/carrot/endive").into(),
65 ]
66 );
67}
68
69#[gpui::test]
70async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
71 cx.foreground().forbid_parking();
72
73 let mut rust_language = Language::new(
74 LanguageConfig {
75 name: "Rust".into(),
76 path_suffixes: vec!["rs".to_string()],
77 ..Default::default()
78 },
79 Some(tree_sitter_rust::language()),
80 );
81 let mut json_language = Language::new(
82 LanguageConfig {
83 name: "JSON".into(),
84 path_suffixes: vec!["json".to_string()],
85 ..Default::default()
86 },
87 None,
88 );
89 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
90 name: "the-rust-language-server",
91 capabilities: lsp::ServerCapabilities {
92 completion_provider: Some(lsp::CompletionOptions {
93 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
94 ..Default::default()
95 }),
96 ..Default::default()
97 },
98 ..Default::default()
99 }));
100 let mut fake_json_servers = json_language.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
101 name: "the-json-language-server",
102 capabilities: lsp::ServerCapabilities {
103 completion_provider: Some(lsp::CompletionOptions {
104 trigger_characters: Some(vec![":".to_string()]),
105 ..Default::default()
106 }),
107 ..Default::default()
108 },
109 ..Default::default()
110 }));
111
112 let fs = FakeFs::new(cx.background());
113 fs.insert_tree(
114 "/the-root",
115 json!({
116 "test.rs": "const A: i32 = 1;",
117 "test2.rs": "",
118 "Cargo.toml": "a = 1",
119 "package.json": "{\"a\": 1}",
120 }),
121 )
122 .await;
123
124 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
125
126 // Open a buffer before languages have been added
127 let json_buffer = project
128 .update(cx, |project, cx| {
129 project.open_local_buffer("/the-root/package.json", cx)
130 })
131 .await
132 .unwrap();
133
134 // Assert that this buffer does not have a language
135 assert!(json_buffer.read_with(cx, |buffer, _| { buffer.language().is_none() }));
136
137 // Now we add the languages to the project, and subscribe to the watcher
138 project.update(cx, |project, cx| {
139 // Get a handle to the channel and clear out default item
140 let mut recv = project.languages.subscribe();
141 recv.blocking_recv();
142
143 // Add, then wait to be notified that JSON has been added
144 project.languages.add(Arc::new(json_language));
145 recv.blocking_recv();
146
147 // Add, then wait to be notified that Rust has been added
148 project.languages.add(Arc::new(rust_language));
149 recv.blocking_recv();
150 // Uncommenting this would cause the thread to block indefinitely:
151 // recv.blocking_recv();
152
153 // Force the assignment, we know the watcher has been notified
154 // but have no way to wait for the watcher to assign to the project
155 project.assign_language_to_buffer(&json_buffer, cx);
156 });
157
158 // Assert that the opened buffer does have a language, and that it is JSON
159 let name = json_buffer.read_with(cx, |buffer, _| buffer.language().map(|l| l.name()));
160 assert_eq!(name, Some("JSON".into()));
161
162 // Close the JSON buffer we opened
163 cx.update(|_| drop(json_buffer));
164
165 // Open a buffer without an associated language server.
166 let toml_buffer = project
167 .update(cx, |project, cx| {
168 project.open_local_buffer("/the-root/Cargo.toml", cx)
169 })
170 .await
171 .unwrap();
172
173 // Open a buffer with an associated language server.
174 let rust_buffer = project
175 .update(cx, |project, cx| {
176 project.open_local_buffer("/the-root/test.rs", cx)
177 })
178 .await
179 .unwrap();
180
181 // A server is started up, and it is notified about Rust files.
182 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
183 assert_eq!(
184 fake_rust_server
185 .receive_notification::<lsp::notification::DidOpenTextDocument>()
186 .await
187 .text_document,
188 lsp::TextDocumentItem {
189 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
190 version: 0,
191 text: "const A: i32 = 1;".to_string(),
192 language_id: Default::default()
193 }
194 );
195
196 // The buffer is configured based on the language server's capabilities.
197 rust_buffer.read_with(cx, |buffer, _| {
198 assert_eq!(
199 buffer.completion_triggers(),
200 &[".".to_string(), "::".to_string()]
201 );
202 });
203 toml_buffer.read_with(cx, |buffer, _| {
204 assert!(buffer.completion_triggers().is_empty());
205 });
206
207 // Edit a buffer. The changes are reported to the language server.
208 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
209 assert_eq!(
210 fake_rust_server
211 .receive_notification::<lsp::notification::DidChangeTextDocument>()
212 .await
213 .text_document,
214 lsp::VersionedTextDocumentIdentifier::new(
215 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
216 1
217 )
218 );
219
220 // Open a third buffer with a different associated language server.
221 let json_buffer = project
222 .update(cx, |project, cx| {
223 project.open_local_buffer("/the-root/package.json", cx)
224 })
225 .await
226 .unwrap();
227
228 // A json language server is started up and is only notified about the json buffer.
229 let mut fake_json_server = fake_json_servers.next().await.unwrap();
230 assert_eq!(
231 fake_json_server
232 .receive_notification::<lsp::notification::DidOpenTextDocument>()
233 .await
234 .text_document,
235 lsp::TextDocumentItem {
236 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
237 version: 0,
238 text: "{\"a\": 1}".to_string(),
239 language_id: Default::default()
240 }
241 );
242
243 // This buffer is configured based on the second language server's
244 // capabilities.
245 json_buffer.read_with(cx, |buffer, _| {
246 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
247 });
248
249 // When opening another buffer whose language server is already running,
250 // it is also configured based on the existing language server's capabilities.
251 let rust_buffer2 = project
252 .update(cx, |project, cx| {
253 project.open_local_buffer("/the-root/test2.rs", cx)
254 })
255 .await
256 .unwrap();
257 rust_buffer2.read_with(cx, |buffer, _| {
258 assert_eq!(
259 buffer.completion_triggers(),
260 &[".".to_string(), "::".to_string()]
261 );
262 });
263
264 // Changes are reported only to servers matching the buffer's language.
265 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
266 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
267 assert_eq!(
268 fake_rust_server
269 .receive_notification::<lsp::notification::DidChangeTextDocument>()
270 .await
271 .text_document,
272 lsp::VersionedTextDocumentIdentifier::new(
273 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
274 1
275 )
276 );
277
278 // Save notifications are reported to all servers.
279 toml_buffer
280 .update(cx, |buffer, cx| buffer.save(cx))
281 .await
282 .unwrap();
283 assert_eq!(
284 fake_rust_server
285 .receive_notification::<lsp::notification::DidSaveTextDocument>()
286 .await
287 .text_document,
288 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
289 );
290 assert_eq!(
291 fake_json_server
292 .receive_notification::<lsp::notification::DidSaveTextDocument>()
293 .await
294 .text_document,
295 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
296 );
297
298 // Renames are reported only to servers matching the buffer's language.
299 fs.rename(
300 Path::new("/the-root/test2.rs"),
301 Path::new("/the-root/test3.rs"),
302 Default::default(),
303 )
304 .await
305 .unwrap();
306 assert_eq!(
307 fake_rust_server
308 .receive_notification::<lsp::notification::DidCloseTextDocument>()
309 .await
310 .text_document,
311 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
312 );
313 assert_eq!(
314 fake_rust_server
315 .receive_notification::<lsp::notification::DidOpenTextDocument>()
316 .await
317 .text_document,
318 lsp::TextDocumentItem {
319 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
320 version: 0,
321 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
322 language_id: Default::default()
323 },
324 );
325
326 rust_buffer2.update(cx, |buffer, cx| {
327 buffer.update_diagnostics(
328 DiagnosticSet::from_sorted_entries(
329 vec![DiagnosticEntry {
330 diagnostic: Default::default(),
331 range: Anchor::MIN..Anchor::MAX,
332 }],
333 &buffer.snapshot(),
334 ),
335 cx,
336 );
337 assert_eq!(
338 buffer
339 .snapshot()
340 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
341 .count(),
342 1
343 );
344 });
345
346 // When the rename changes the extension of the file, the buffer gets closed on the old
347 // language server and gets opened on the new one.
348 fs.rename(
349 Path::new("/the-root/test3.rs"),
350 Path::new("/the-root/test3.json"),
351 Default::default(),
352 )
353 .await
354 .unwrap();
355 assert_eq!(
356 fake_rust_server
357 .receive_notification::<lsp::notification::DidCloseTextDocument>()
358 .await
359 .text_document,
360 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
361 );
362 assert_eq!(
363 fake_json_server
364 .receive_notification::<lsp::notification::DidOpenTextDocument>()
365 .await
366 .text_document,
367 lsp::TextDocumentItem {
368 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
369 version: 0,
370 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
371 language_id: Default::default()
372 },
373 );
374
375 // We clear the diagnostics, since the language has changed.
376 rust_buffer2.read_with(cx, |buffer, _| {
377 assert_eq!(
378 buffer
379 .snapshot()
380 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
381 .count(),
382 0
383 );
384 });
385
386 // The renamed file's version resets after changing language server.
387 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
388 assert_eq!(
389 fake_json_server
390 .receive_notification::<lsp::notification::DidChangeTextDocument>()
391 .await
392 .text_document,
393 lsp::VersionedTextDocumentIdentifier::new(
394 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
395 1
396 )
397 );
398
399 // Restart language servers
400 project.update(cx, |project, cx| {
401 project.restart_language_servers_for_buffers(
402 vec![rust_buffer.clone(), json_buffer.clone()],
403 cx,
404 );
405 });
406
407 let mut rust_shutdown_requests = fake_rust_server
408 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
409 let mut json_shutdown_requests = fake_json_server
410 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
411 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
412
413 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
414 let mut fake_json_server = fake_json_servers.next().await.unwrap();
415
416 // Ensure rust document is reopened in new rust language server
417 assert_eq!(
418 fake_rust_server
419 .receive_notification::<lsp::notification::DidOpenTextDocument>()
420 .await
421 .text_document,
422 lsp::TextDocumentItem {
423 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
424 version: 1,
425 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
426 language_id: Default::default()
427 }
428 );
429
430 // Ensure json documents are reopened in new json language server
431 assert_set_eq!(
432 [
433 fake_json_server
434 .receive_notification::<lsp::notification::DidOpenTextDocument>()
435 .await
436 .text_document,
437 fake_json_server
438 .receive_notification::<lsp::notification::DidOpenTextDocument>()
439 .await
440 .text_document,
441 ],
442 [
443 lsp::TextDocumentItem {
444 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
445 version: 0,
446 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
447 language_id: Default::default()
448 },
449 lsp::TextDocumentItem {
450 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
451 version: 1,
452 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
453 language_id: Default::default()
454 }
455 ]
456 );
457
458 // Close notifications are reported only to servers matching the buffer's language.
459 cx.update(|_| drop(json_buffer));
460 let close_message = lsp::DidCloseTextDocumentParams {
461 text_document: lsp::TextDocumentIdentifier::new(
462 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
463 ),
464 };
465 assert_eq!(
466 fake_json_server
467 .receive_notification::<lsp::notification::DidCloseTextDocument>()
468 .await,
469 close_message,
470 );
471}
472
473#[gpui::test]
474async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
475 cx.foreground().forbid_parking();
476
477 let fs = FakeFs::new(cx.background());
478 fs.insert_tree(
479 "/dir",
480 json!({
481 "a.rs": "let a = 1;",
482 "b.rs": "let b = 2;"
483 }),
484 )
485 .await;
486
487 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
488
489 let buffer_a = project
490 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
491 .await
492 .unwrap();
493 let buffer_b = project
494 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
495 .await
496 .unwrap();
497
498 project.update(cx, |project, cx| {
499 project
500 .update_diagnostics(
501 0,
502 lsp::PublishDiagnosticsParams {
503 uri: Url::from_file_path("/dir/a.rs").unwrap(),
504 version: None,
505 diagnostics: vec![lsp::Diagnostic {
506 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
507 severity: Some(lsp::DiagnosticSeverity::ERROR),
508 message: "error 1".to_string(),
509 ..Default::default()
510 }],
511 },
512 &[],
513 cx,
514 )
515 .unwrap();
516 project
517 .update_diagnostics(
518 0,
519 lsp::PublishDiagnosticsParams {
520 uri: Url::from_file_path("/dir/b.rs").unwrap(),
521 version: None,
522 diagnostics: vec![lsp::Diagnostic {
523 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
524 severity: Some(lsp::DiagnosticSeverity::WARNING),
525 message: "error 2".to_string(),
526 ..Default::default()
527 }],
528 },
529 &[],
530 cx,
531 )
532 .unwrap();
533 });
534
535 buffer_a.read_with(cx, |buffer, _| {
536 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
537 assert_eq!(
538 chunks
539 .iter()
540 .map(|(s, d)| (s.as_str(), *d))
541 .collect::<Vec<_>>(),
542 &[
543 ("let ", None),
544 ("a", Some(DiagnosticSeverity::ERROR)),
545 (" = 1;", None),
546 ]
547 );
548 });
549 buffer_b.read_with(cx, |buffer, _| {
550 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
551 assert_eq!(
552 chunks
553 .iter()
554 .map(|(s, d)| (s.as_str(), *d))
555 .collect::<Vec<_>>(),
556 &[
557 ("let ", None),
558 ("b", Some(DiagnosticSeverity::WARNING)),
559 (" = 2;", None),
560 ]
561 );
562 });
563}
564
565#[gpui::test]
566async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
567 cx.foreground().forbid_parking();
568
569 let fs = FakeFs::new(cx.background());
570 fs.insert_tree(
571 "/root",
572 json!({
573 "dir": {
574 "a.rs": "let a = 1;",
575 },
576 "other.rs": "let b = c;"
577 }),
578 )
579 .await;
580
581 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
582
583 let (worktree, _) = project
584 .update(cx, |project, cx| {
585 project.find_or_create_local_worktree("/root/other.rs", false, cx)
586 })
587 .await
588 .unwrap();
589 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
590
591 project.update(cx, |project, cx| {
592 project
593 .update_diagnostics(
594 0,
595 lsp::PublishDiagnosticsParams {
596 uri: Url::from_file_path("/root/other.rs").unwrap(),
597 version: None,
598 diagnostics: vec![lsp::Diagnostic {
599 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
600 severity: Some(lsp::DiagnosticSeverity::ERROR),
601 message: "unknown variable 'c'".to_string(),
602 ..Default::default()
603 }],
604 },
605 &[],
606 cx,
607 )
608 .unwrap();
609 });
610
611 let buffer = project
612 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
613 .await
614 .unwrap();
615 buffer.read_with(cx, |buffer, _| {
616 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
617 assert_eq!(
618 chunks
619 .iter()
620 .map(|(s, d)| (s.as_str(), *d))
621 .collect::<Vec<_>>(),
622 &[
623 ("let b = ", None),
624 ("c", Some(DiagnosticSeverity::ERROR)),
625 (";", None),
626 ]
627 );
628 });
629
630 project.read_with(cx, |project, cx| {
631 assert_eq!(project.diagnostic_summaries(cx).next(), None);
632 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
633 });
634}
635
636#[gpui::test]
637async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
638 cx.foreground().forbid_parking();
639
640 let progress_token = "the-progress-token";
641 let mut language = Language::new(
642 LanguageConfig {
643 name: "Rust".into(),
644 path_suffixes: vec!["rs".to_string()],
645 ..Default::default()
646 },
647 Some(tree_sitter_rust::language()),
648 );
649 let mut fake_servers = language.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
650 disk_based_diagnostics_progress_token: Some(progress_token.into()),
651 disk_based_diagnostics_sources: vec!["disk".into()],
652 ..Default::default()
653 }));
654
655 let fs = FakeFs::new(cx.background());
656 fs.insert_tree(
657 "/dir",
658 json!({
659 "a.rs": "fn a() { A }",
660 "b.rs": "const y: i32 = 1",
661 }),
662 )
663 .await;
664
665 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
666 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
667 let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
668
669 // Cause worktree to start the fake language server
670 let _buffer = project
671 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
672 .await
673 .unwrap();
674
675 let mut events = subscribe(&project, cx);
676
677 let fake_server = fake_servers.next().await.unwrap();
678 fake_server.start_progress(progress_token).await;
679 assert_eq!(
680 events.next().await.unwrap(),
681 Event::DiskBasedDiagnosticsStarted {
682 language_server_id: 0,
683 }
684 );
685
686 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
687 uri: Url::from_file_path("/dir/a.rs").unwrap(),
688 version: None,
689 diagnostics: vec![lsp::Diagnostic {
690 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
691 severity: Some(lsp::DiagnosticSeverity::ERROR),
692 message: "undefined variable 'A'".to_string(),
693 ..Default::default()
694 }],
695 });
696 assert_eq!(
697 events.next().await.unwrap(),
698 Event::DiagnosticsUpdated {
699 language_server_id: 0,
700 path: (worktree_id, Path::new("a.rs")).into()
701 }
702 );
703
704 fake_server.end_progress(progress_token);
705 assert_eq!(
706 events.next().await.unwrap(),
707 Event::DiskBasedDiagnosticsFinished {
708 language_server_id: 0
709 }
710 );
711
712 let buffer = project
713 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
714 .await
715 .unwrap();
716
717 buffer.read_with(cx, |buffer, _| {
718 let snapshot = buffer.snapshot();
719 let diagnostics = snapshot
720 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
721 .collect::<Vec<_>>();
722 assert_eq!(
723 diagnostics,
724 &[DiagnosticEntry {
725 range: Point::new(0, 9)..Point::new(0, 10),
726 diagnostic: Diagnostic {
727 severity: lsp::DiagnosticSeverity::ERROR,
728 message: "undefined variable 'A'".to_string(),
729 group_id: 0,
730 is_primary: true,
731 ..Default::default()
732 }
733 }]
734 )
735 });
736
737 // Ensure publishing empty diagnostics twice only results in one update event.
738 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
739 uri: Url::from_file_path("/dir/a.rs").unwrap(),
740 version: None,
741 diagnostics: Default::default(),
742 });
743 assert_eq!(
744 events.next().await.unwrap(),
745 Event::DiagnosticsUpdated {
746 language_server_id: 0,
747 path: (worktree_id, Path::new("a.rs")).into()
748 }
749 );
750
751 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
752 uri: Url::from_file_path("/dir/a.rs").unwrap(),
753 version: None,
754 diagnostics: Default::default(),
755 });
756 cx.foreground().run_until_parked();
757 assert_eq!(futures::poll!(events.next()), Poll::Pending);
758}
759
760#[gpui::test]
761async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
762 cx.foreground().forbid_parking();
763
764 let progress_token = "the-progress-token";
765 let mut language = Language::new(
766 LanguageConfig {
767 path_suffixes: vec!["rs".to_string()],
768 ..Default::default()
769 },
770 None,
771 );
772 let mut fake_servers = language.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
773 disk_based_diagnostics_sources: vec!["disk".into()],
774 disk_based_diagnostics_progress_token: Some(progress_token.into()),
775 ..Default::default()
776 }));
777
778 let fs = FakeFs::new(cx.background());
779 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
780
781 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
782 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
783
784 let buffer = project
785 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
786 .await
787 .unwrap();
788
789 // Simulate diagnostics starting to update.
790 let fake_server = fake_servers.next().await.unwrap();
791 fake_server.start_progress(progress_token).await;
792
793 // Restart the server before the diagnostics finish updating.
794 project.update(cx, |project, cx| {
795 project.restart_language_servers_for_buffers([buffer], cx);
796 });
797 let mut events = subscribe(&project, cx);
798
799 // Simulate the newly started server sending more diagnostics.
800 let fake_server = fake_servers.next().await.unwrap();
801 fake_server.start_progress(progress_token).await;
802 assert_eq!(
803 events.next().await.unwrap(),
804 Event::DiskBasedDiagnosticsStarted {
805 language_server_id: 1
806 }
807 );
808 project.read_with(cx, |project, _| {
809 assert_eq!(
810 project
811 .language_servers_running_disk_based_diagnostics()
812 .collect::<Vec<_>>(),
813 [1]
814 );
815 });
816
817 // All diagnostics are considered done, despite the old server's diagnostic
818 // task never completing.
819 fake_server.end_progress(progress_token);
820 assert_eq!(
821 events.next().await.unwrap(),
822 Event::DiskBasedDiagnosticsFinished {
823 language_server_id: 1
824 }
825 );
826 project.read_with(cx, |project, _| {
827 assert_eq!(
828 project
829 .language_servers_running_disk_based_diagnostics()
830 .collect::<Vec<_>>(),
831 [0; 0]
832 );
833 });
834}
835
836#[gpui::test]
837async fn test_toggling_enable_language_server(
838 deterministic: Arc<Deterministic>,
839 cx: &mut gpui::TestAppContext,
840) {
841 deterministic.forbid_parking();
842
843 let mut rust = Language::new(
844 LanguageConfig {
845 name: Arc::from("Rust"),
846 path_suffixes: vec!["rs".to_string()],
847 ..Default::default()
848 },
849 None,
850 );
851 let mut fake_rust_servers = rust.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
852 name: "rust-lsp",
853 ..Default::default()
854 }));
855 let mut js = Language::new(
856 LanguageConfig {
857 name: Arc::from("JavaScript"),
858 path_suffixes: vec!["js".to_string()],
859 ..Default::default()
860 },
861 None,
862 );
863 let mut fake_js_servers = js.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
864 name: "js-lsp",
865 ..Default::default()
866 }));
867
868 let fs = FakeFs::new(cx.background());
869 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
870 .await;
871
872 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
873 project.update(cx, |project, _| {
874 project.languages.add(Arc::new(rust));
875 project.languages.add(Arc::new(js));
876 });
877
878 let _rs_buffer = project
879 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
880 .await
881 .unwrap();
882 let _js_buffer = project
883 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
884 .await
885 .unwrap();
886
887 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
888 assert_eq!(
889 fake_rust_server_1
890 .receive_notification::<lsp::notification::DidOpenTextDocument>()
891 .await
892 .text_document
893 .uri
894 .as_str(),
895 "file:///dir/a.rs"
896 );
897
898 let mut fake_js_server = fake_js_servers.next().await.unwrap();
899 assert_eq!(
900 fake_js_server
901 .receive_notification::<lsp::notification::DidOpenTextDocument>()
902 .await
903 .text_document
904 .uri
905 .as_str(),
906 "file:///dir/b.js"
907 );
908
909 // Disable Rust language server, ensuring only that server gets stopped.
910 cx.update(|cx| {
911 cx.update_global(|settings: &mut Settings, _| {
912 settings.language_overrides.insert(
913 Arc::from("Rust"),
914 settings::LanguageSettings {
915 enable_language_server: Some(false),
916 ..Default::default()
917 },
918 );
919 })
920 });
921 fake_rust_server_1
922 .receive_notification::<lsp::notification::Exit>()
923 .await;
924
925 // Enable Rust and disable JavaScript language servers, ensuring that the
926 // former gets started again and that the latter stops.
927 cx.update(|cx| {
928 cx.update_global(|settings: &mut Settings, _| {
929 settings.language_overrides.insert(
930 Arc::from("Rust"),
931 settings::LanguageSettings {
932 enable_language_server: Some(true),
933 ..Default::default()
934 },
935 );
936 settings.language_overrides.insert(
937 Arc::from("JavaScript"),
938 settings::LanguageSettings {
939 enable_language_server: Some(false),
940 ..Default::default()
941 },
942 );
943 })
944 });
945 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
946 assert_eq!(
947 fake_rust_server_2
948 .receive_notification::<lsp::notification::DidOpenTextDocument>()
949 .await
950 .text_document
951 .uri
952 .as_str(),
953 "file:///dir/a.rs"
954 );
955 fake_js_server
956 .receive_notification::<lsp::notification::Exit>()
957 .await;
958}
959
960#[gpui::test]
961async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
962 cx.foreground().forbid_parking();
963
964 let mut language = Language::new(
965 LanguageConfig {
966 name: "Rust".into(),
967 path_suffixes: vec!["rs".to_string()],
968 ..Default::default()
969 },
970 Some(tree_sitter_rust::language()),
971 );
972 let mut fake_servers = language.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
973 disk_based_diagnostics_sources: vec!["disk".into()],
974 ..Default::default()
975 }));
976
977 let text = "
978 fn a() { A }
979 fn b() { BB }
980 fn c() { CCC }
981 "
982 .unindent();
983
984 let fs = FakeFs::new(cx.background());
985 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
986
987 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
988 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
989
990 let buffer = project
991 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
992 .await
993 .unwrap();
994
995 let mut fake_server = fake_servers.next().await.unwrap();
996 let open_notification = fake_server
997 .receive_notification::<lsp::notification::DidOpenTextDocument>()
998 .await;
999
1000 // Edit the buffer, moving the content down
1001 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
1002 let change_notification_1 = fake_server
1003 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1004 .await;
1005 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1006
1007 // Report some diagnostics for the initial version of the buffer
1008 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1009 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1010 version: Some(open_notification.text_document.version),
1011 diagnostics: vec![
1012 lsp::Diagnostic {
1013 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1014 severity: Some(DiagnosticSeverity::ERROR),
1015 message: "undefined variable 'A'".to_string(),
1016 source: Some("disk".to_string()),
1017 ..Default::default()
1018 },
1019 lsp::Diagnostic {
1020 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1021 severity: Some(DiagnosticSeverity::ERROR),
1022 message: "undefined variable 'BB'".to_string(),
1023 source: Some("disk".to_string()),
1024 ..Default::default()
1025 },
1026 lsp::Diagnostic {
1027 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1028 severity: Some(DiagnosticSeverity::ERROR),
1029 source: Some("disk".to_string()),
1030 message: "undefined variable 'CCC'".to_string(),
1031 ..Default::default()
1032 },
1033 ],
1034 });
1035
1036 // The diagnostics have moved down since they were created.
1037 buffer.next_notification(cx).await;
1038 buffer.read_with(cx, |buffer, _| {
1039 assert_eq!(
1040 buffer
1041 .snapshot()
1042 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1043 .collect::<Vec<_>>(),
1044 &[
1045 DiagnosticEntry {
1046 range: Point::new(3, 9)..Point::new(3, 11),
1047 diagnostic: Diagnostic {
1048 severity: DiagnosticSeverity::ERROR,
1049 message: "undefined variable 'BB'".to_string(),
1050 is_disk_based: true,
1051 group_id: 1,
1052 is_primary: true,
1053 ..Default::default()
1054 },
1055 },
1056 DiagnosticEntry {
1057 range: Point::new(4, 9)..Point::new(4, 12),
1058 diagnostic: Diagnostic {
1059 severity: DiagnosticSeverity::ERROR,
1060 message: "undefined variable 'CCC'".to_string(),
1061 is_disk_based: true,
1062 group_id: 2,
1063 is_primary: true,
1064 ..Default::default()
1065 }
1066 }
1067 ]
1068 );
1069 assert_eq!(
1070 chunks_with_diagnostics(buffer, 0..buffer.len()),
1071 [
1072 ("\n\nfn a() { ".to_string(), None),
1073 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1074 (" }\nfn b() { ".to_string(), None),
1075 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1076 (" }\nfn c() { ".to_string(), None),
1077 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1078 (" }\n".to_string(), None),
1079 ]
1080 );
1081 assert_eq!(
1082 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1083 [
1084 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1085 (" }\nfn c() { ".to_string(), None),
1086 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1087 ]
1088 );
1089 });
1090
1091 // Ensure overlapping diagnostics are highlighted correctly.
1092 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1093 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1094 version: Some(open_notification.text_document.version),
1095 diagnostics: vec![
1096 lsp::Diagnostic {
1097 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1098 severity: Some(DiagnosticSeverity::ERROR),
1099 message: "undefined variable 'A'".to_string(),
1100 source: Some("disk".to_string()),
1101 ..Default::default()
1102 },
1103 lsp::Diagnostic {
1104 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1105 severity: Some(DiagnosticSeverity::WARNING),
1106 message: "unreachable statement".to_string(),
1107 source: Some("disk".to_string()),
1108 ..Default::default()
1109 },
1110 ],
1111 });
1112
1113 buffer.next_notification(cx).await;
1114 buffer.read_with(cx, |buffer, _| {
1115 assert_eq!(
1116 buffer
1117 .snapshot()
1118 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1119 .collect::<Vec<_>>(),
1120 &[
1121 DiagnosticEntry {
1122 range: Point::new(2, 9)..Point::new(2, 12),
1123 diagnostic: Diagnostic {
1124 severity: DiagnosticSeverity::WARNING,
1125 message: "unreachable statement".to_string(),
1126 is_disk_based: true,
1127 group_id: 4,
1128 is_primary: true,
1129 ..Default::default()
1130 }
1131 },
1132 DiagnosticEntry {
1133 range: Point::new(2, 9)..Point::new(2, 10),
1134 diagnostic: Diagnostic {
1135 severity: DiagnosticSeverity::ERROR,
1136 message: "undefined variable 'A'".to_string(),
1137 is_disk_based: true,
1138 group_id: 3,
1139 is_primary: true,
1140 ..Default::default()
1141 },
1142 }
1143 ]
1144 );
1145 assert_eq!(
1146 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1147 [
1148 ("fn a() { ".to_string(), None),
1149 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1150 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1151 ("\n".to_string(), None),
1152 ]
1153 );
1154 assert_eq!(
1155 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1156 [
1157 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1158 ("\n".to_string(), None),
1159 ]
1160 );
1161 });
1162
1163 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1164 // changes since the last save.
1165 buffer.update(cx, |buffer, cx| {
1166 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
1167 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
1168 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
1169 });
1170 let change_notification_2 = fake_server
1171 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1172 .await;
1173 assert!(
1174 change_notification_2.text_document.version > change_notification_1.text_document.version
1175 );
1176
1177 // Handle out-of-order diagnostics
1178 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1179 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1180 version: Some(change_notification_2.text_document.version),
1181 diagnostics: vec![
1182 lsp::Diagnostic {
1183 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1184 severity: Some(DiagnosticSeverity::ERROR),
1185 message: "undefined variable 'BB'".to_string(),
1186 source: Some("disk".to_string()),
1187 ..Default::default()
1188 },
1189 lsp::Diagnostic {
1190 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1191 severity: Some(DiagnosticSeverity::WARNING),
1192 message: "undefined variable 'A'".to_string(),
1193 source: Some("disk".to_string()),
1194 ..Default::default()
1195 },
1196 ],
1197 });
1198
1199 buffer.next_notification(cx).await;
1200 buffer.read_with(cx, |buffer, _| {
1201 assert_eq!(
1202 buffer
1203 .snapshot()
1204 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1205 .collect::<Vec<_>>(),
1206 &[
1207 DiagnosticEntry {
1208 range: Point::new(2, 21)..Point::new(2, 22),
1209 diagnostic: Diagnostic {
1210 severity: DiagnosticSeverity::WARNING,
1211 message: "undefined variable 'A'".to_string(),
1212 is_disk_based: true,
1213 group_id: 6,
1214 is_primary: true,
1215 ..Default::default()
1216 }
1217 },
1218 DiagnosticEntry {
1219 range: Point::new(3, 9)..Point::new(3, 14),
1220 diagnostic: Diagnostic {
1221 severity: DiagnosticSeverity::ERROR,
1222 message: "undefined variable 'BB'".to_string(),
1223 is_disk_based: true,
1224 group_id: 5,
1225 is_primary: true,
1226 ..Default::default()
1227 },
1228 }
1229 ]
1230 );
1231 });
1232}
1233
1234#[gpui::test]
1235async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1236 cx.foreground().forbid_parking();
1237
1238 let text = concat!(
1239 "let one = ;\n", //
1240 "let two = \n",
1241 "let three = 3;\n",
1242 );
1243
1244 let fs = FakeFs::new(cx.background());
1245 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1246
1247 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1248 let buffer = project
1249 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1250 .await
1251 .unwrap();
1252
1253 project.update(cx, |project, cx| {
1254 project
1255 .update_buffer_diagnostics(
1256 &buffer,
1257 vec![
1258 DiagnosticEntry {
1259 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
1260 diagnostic: Diagnostic {
1261 severity: DiagnosticSeverity::ERROR,
1262 message: "syntax error 1".to_string(),
1263 ..Default::default()
1264 },
1265 },
1266 DiagnosticEntry {
1267 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
1268 diagnostic: Diagnostic {
1269 severity: DiagnosticSeverity::ERROR,
1270 message: "syntax error 2".to_string(),
1271 ..Default::default()
1272 },
1273 },
1274 ],
1275 None,
1276 cx,
1277 )
1278 .unwrap();
1279 });
1280
1281 // An empty range is extended forward to include the following character.
1282 // At the end of a line, an empty range is extended backward to include
1283 // the preceding character.
1284 buffer.read_with(cx, |buffer, _| {
1285 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
1286 assert_eq!(
1287 chunks
1288 .iter()
1289 .map(|(s, d)| (s.as_str(), *d))
1290 .collect::<Vec<_>>(),
1291 &[
1292 ("let one = ", None),
1293 (";", Some(DiagnosticSeverity::ERROR)),
1294 ("\nlet two =", None),
1295 (" ", Some(DiagnosticSeverity::ERROR)),
1296 ("\nlet three = 3;\n", None)
1297 ]
1298 );
1299 });
1300}
1301
1302#[gpui::test]
1303async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
1304 cx.foreground().forbid_parking();
1305
1306 let mut language = Language::new(
1307 LanguageConfig {
1308 name: "Rust".into(),
1309 path_suffixes: vec!["rs".to_string()],
1310 ..Default::default()
1311 },
1312 Some(tree_sitter_rust::language()),
1313 );
1314 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
1315
1316 let text = "
1317 fn a() {
1318 f1();
1319 }
1320 fn b() {
1321 f2();
1322 }
1323 fn c() {
1324 f3();
1325 }
1326 "
1327 .unindent();
1328
1329 let fs = FakeFs::new(cx.background());
1330 fs.insert_tree(
1331 "/dir",
1332 json!({
1333 "a.rs": text.clone(),
1334 }),
1335 )
1336 .await;
1337
1338 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1339 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1340 let buffer = project
1341 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1342 .await
1343 .unwrap();
1344
1345 let mut fake_server = fake_servers.next().await.unwrap();
1346 let lsp_document_version = fake_server
1347 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1348 .await
1349 .text_document
1350 .version;
1351
1352 // Simulate editing the buffer after the language server computes some edits.
1353 buffer.update(cx, |buffer, cx| {
1354 buffer.edit(
1355 [(
1356 Point::new(0, 0)..Point::new(0, 0),
1357 "// above first function\n",
1358 )],
1359 cx,
1360 );
1361 buffer.edit(
1362 [(
1363 Point::new(2, 0)..Point::new(2, 0),
1364 " // inside first function\n",
1365 )],
1366 cx,
1367 );
1368 buffer.edit(
1369 [(
1370 Point::new(6, 4)..Point::new(6, 4),
1371 "// inside second function ",
1372 )],
1373 cx,
1374 );
1375
1376 assert_eq!(
1377 buffer.text(),
1378 "
1379 // above first function
1380 fn a() {
1381 // inside first function
1382 f1();
1383 }
1384 fn b() {
1385 // inside second function f2();
1386 }
1387 fn c() {
1388 f3();
1389 }
1390 "
1391 .unindent()
1392 );
1393 });
1394
1395 let edits = project
1396 .update(cx, |project, cx| {
1397 project.edits_from_lsp(
1398 &buffer,
1399 vec![
1400 // replace body of first function
1401 lsp::TextEdit {
1402 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1403 new_text: "
1404 fn a() {
1405 f10();
1406 }
1407 "
1408 .unindent(),
1409 },
1410 // edit inside second function
1411 lsp::TextEdit {
1412 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1413 new_text: "00".into(),
1414 },
1415 // edit inside third function via two distinct edits
1416 lsp::TextEdit {
1417 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1418 new_text: "4000".into(),
1419 },
1420 lsp::TextEdit {
1421 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1422 new_text: "".into(),
1423 },
1424 ],
1425 Some(lsp_document_version),
1426 cx,
1427 )
1428 })
1429 .await
1430 .unwrap();
1431
1432 buffer.update(cx, |buffer, cx| {
1433 for (range, new_text) in edits {
1434 buffer.edit([(range, new_text)], cx);
1435 }
1436 assert_eq!(
1437 buffer.text(),
1438 "
1439 // above first function
1440 fn a() {
1441 // inside first function
1442 f10();
1443 }
1444 fn b() {
1445 // inside second function f200();
1446 }
1447 fn c() {
1448 f4000();
1449 }
1450 "
1451 .unindent()
1452 );
1453 });
1454}
1455
1456#[gpui::test]
1457async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1458 cx.foreground().forbid_parking();
1459
1460 let text = "
1461 use a::b;
1462 use a::c;
1463
1464 fn f() {
1465 b();
1466 c();
1467 }
1468 "
1469 .unindent();
1470
1471 let fs = FakeFs::new(cx.background());
1472 fs.insert_tree(
1473 "/dir",
1474 json!({
1475 "a.rs": text.clone(),
1476 }),
1477 )
1478 .await;
1479
1480 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1481 let buffer = project
1482 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1483 .await
1484 .unwrap();
1485
1486 // Simulate the language server sending us a small edit in the form of a very large diff.
1487 // Rust-analyzer does this when performing a merge-imports code action.
1488 let edits = project
1489 .update(cx, |project, cx| {
1490 project.edits_from_lsp(
1491 &buffer,
1492 [
1493 // Replace the first use statement without editing the semicolon.
1494 lsp::TextEdit {
1495 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
1496 new_text: "a::{b, c}".into(),
1497 },
1498 // Reinsert the remainder of the file between the semicolon and the final
1499 // newline of the file.
1500 lsp::TextEdit {
1501 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1502 new_text: "\n\n".into(),
1503 },
1504 lsp::TextEdit {
1505 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1506 new_text: "
1507 fn f() {
1508 b();
1509 c();
1510 }"
1511 .unindent(),
1512 },
1513 // Delete everything after the first newline of the file.
1514 lsp::TextEdit {
1515 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1516 new_text: "".into(),
1517 },
1518 ],
1519 None,
1520 cx,
1521 )
1522 })
1523 .await
1524 .unwrap();
1525
1526 buffer.update(cx, |buffer, cx| {
1527 let edits = edits
1528 .into_iter()
1529 .map(|(range, text)| {
1530 (
1531 range.start.to_point(&buffer)..range.end.to_point(&buffer),
1532 text,
1533 )
1534 })
1535 .collect::<Vec<_>>();
1536
1537 assert_eq!(
1538 edits,
1539 [
1540 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1541 (Point::new(1, 0)..Point::new(2, 0), "".into())
1542 ]
1543 );
1544
1545 for (range, new_text) in edits {
1546 buffer.edit([(range, new_text)], cx);
1547 }
1548 assert_eq!(
1549 buffer.text(),
1550 "
1551 use a::{b, c};
1552
1553 fn f() {
1554 b();
1555 c();
1556 }
1557 "
1558 .unindent()
1559 );
1560 });
1561}
1562
1563#[gpui::test]
1564async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
1565 cx.foreground().forbid_parking();
1566
1567 let text = "
1568 use a::b;
1569 use a::c;
1570
1571 fn f() {
1572 b();
1573 c();
1574 }
1575 "
1576 .unindent();
1577
1578 let fs = FakeFs::new(cx.background());
1579 fs.insert_tree(
1580 "/dir",
1581 json!({
1582 "a.rs": text.clone(),
1583 }),
1584 )
1585 .await;
1586
1587 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1588 let buffer = project
1589 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1590 .await
1591 .unwrap();
1592
1593 // Simulate the language server sending us edits in a non-ordered fashion,
1594 // with ranges sometimes being inverted.
1595 let edits = project
1596 .update(cx, |project, cx| {
1597 project.edits_from_lsp(
1598 &buffer,
1599 [
1600 lsp::TextEdit {
1601 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1602 new_text: "\n\n".into(),
1603 },
1604 lsp::TextEdit {
1605 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
1606 new_text: "a::{b, c}".into(),
1607 },
1608 lsp::TextEdit {
1609 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1610 new_text: "".into(),
1611 },
1612 lsp::TextEdit {
1613 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1614 new_text: "
1615 fn f() {
1616 b();
1617 c();
1618 }"
1619 .unindent(),
1620 },
1621 ],
1622 None,
1623 cx,
1624 )
1625 })
1626 .await
1627 .unwrap();
1628
1629 buffer.update(cx, |buffer, cx| {
1630 let edits = edits
1631 .into_iter()
1632 .map(|(range, text)| {
1633 (
1634 range.start.to_point(&buffer)..range.end.to_point(&buffer),
1635 text,
1636 )
1637 })
1638 .collect::<Vec<_>>();
1639
1640 assert_eq!(
1641 edits,
1642 [
1643 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1644 (Point::new(1, 0)..Point::new(2, 0), "".into())
1645 ]
1646 );
1647
1648 for (range, new_text) in edits {
1649 buffer.edit([(range, new_text)], cx);
1650 }
1651 assert_eq!(
1652 buffer.text(),
1653 "
1654 use a::{b, c};
1655
1656 fn f() {
1657 b();
1658 c();
1659 }
1660 "
1661 .unindent()
1662 );
1663 });
1664}
1665
1666fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
1667 buffer: &Buffer,
1668 range: Range<T>,
1669) -> Vec<(String, Option<DiagnosticSeverity>)> {
1670 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
1671 for chunk in buffer.snapshot().chunks(range, true) {
1672 if chunks.last().map_or(false, |prev_chunk| {
1673 prev_chunk.1 == chunk.diagnostic_severity
1674 }) {
1675 chunks.last_mut().unwrap().0.push_str(chunk.text);
1676 } else {
1677 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
1678 }
1679 }
1680 chunks
1681}
1682
1683#[gpui::test]
1684async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
1685 let dir = temp_tree(json!({
1686 "root": {
1687 "dir1": {},
1688 "dir2": {
1689 "dir3": {}
1690 }
1691 }
1692 }));
1693
1694 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
1695 let cancel_flag = Default::default();
1696 let results = project
1697 .read_with(cx, |project, cx| {
1698 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
1699 })
1700 .await;
1701
1702 assert!(results.is_empty());
1703}
1704
1705#[gpui::test(iterations = 10)]
1706async fn test_definition(cx: &mut gpui::TestAppContext) {
1707 let mut language = Language::new(
1708 LanguageConfig {
1709 name: "Rust".into(),
1710 path_suffixes: vec!["rs".to_string()],
1711 ..Default::default()
1712 },
1713 Some(tree_sitter_rust::language()),
1714 );
1715 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
1716
1717 let fs = FakeFs::new(cx.background());
1718 fs.insert_tree(
1719 "/dir",
1720 json!({
1721 "a.rs": "const fn a() { A }",
1722 "b.rs": "const y: i32 = crate::a()",
1723 }),
1724 )
1725 .await;
1726
1727 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
1728 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1729
1730 let buffer = project
1731 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1732 .await
1733 .unwrap();
1734
1735 let fake_server = fake_servers.next().await.unwrap();
1736 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
1737 let params = params.text_document_position_params;
1738 assert_eq!(
1739 params.text_document.uri.to_file_path().unwrap(),
1740 Path::new("/dir/b.rs"),
1741 );
1742 assert_eq!(params.position, lsp::Position::new(0, 22));
1743
1744 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
1745 lsp::Location::new(
1746 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1747 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1748 ),
1749 )))
1750 });
1751
1752 let mut definitions = project
1753 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
1754 .await
1755 .unwrap();
1756
1757 // Assert no new language server started
1758 cx.foreground().run_until_parked();
1759 assert!(fake_servers.try_next().is_err());
1760
1761 assert_eq!(definitions.len(), 1);
1762 let definition = definitions.pop().unwrap();
1763 cx.update(|cx| {
1764 let target_buffer = definition.target.buffer.read(cx);
1765 assert_eq!(
1766 target_buffer
1767 .file()
1768 .unwrap()
1769 .as_local()
1770 .unwrap()
1771 .abs_path(cx),
1772 Path::new("/dir/a.rs"),
1773 );
1774 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
1775 assert_eq!(
1776 list_worktrees(&project, cx),
1777 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
1778 );
1779
1780 drop(definition);
1781 });
1782 cx.read(|cx| {
1783 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
1784 });
1785
1786 fn list_worktrees<'a>(
1787 project: &'a ModelHandle<Project>,
1788 cx: &'a AppContext,
1789 ) -> Vec<(&'a Path, bool)> {
1790 project
1791 .read(cx)
1792 .worktrees(cx)
1793 .map(|worktree| {
1794 let worktree = worktree.read(cx);
1795 (
1796 worktree.as_local().unwrap().abs_path().as_ref(),
1797 worktree.is_visible(),
1798 )
1799 })
1800 .collect::<Vec<_>>()
1801 }
1802}
1803
1804#[gpui::test]
1805async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
1806 let mut language = Language::new(
1807 LanguageConfig {
1808 name: "TypeScript".into(),
1809 path_suffixes: vec!["ts".to_string()],
1810 ..Default::default()
1811 },
1812 Some(tree_sitter_typescript::language_typescript()),
1813 );
1814 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
1815
1816 let fs = FakeFs::new(cx.background());
1817 fs.insert_tree(
1818 "/dir",
1819 json!({
1820 "a.ts": "",
1821 }),
1822 )
1823 .await;
1824
1825 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1826 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1827 let buffer = project
1828 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1829 .await
1830 .unwrap();
1831
1832 let fake_server = fake_language_servers.next().await.unwrap();
1833
1834 let text = "let a = b.fqn";
1835 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1836 let completions = project.update(cx, |project, cx| {
1837 project.completions(&buffer, text.len(), cx)
1838 });
1839
1840 fake_server
1841 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1842 Ok(Some(lsp::CompletionResponse::Array(vec![
1843 lsp::CompletionItem {
1844 label: "fullyQualifiedName?".into(),
1845 insert_text: Some("fullyQualifiedName".into()),
1846 ..Default::default()
1847 },
1848 ])))
1849 })
1850 .next()
1851 .await;
1852 let completions = completions.await.unwrap();
1853 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
1854 assert_eq!(completions.len(), 1);
1855 assert_eq!(completions[0].new_text, "fullyQualifiedName");
1856 assert_eq!(
1857 completions[0].old_range.to_offset(&snapshot),
1858 text.len() - 3..text.len()
1859 );
1860
1861 let text = "let a = \"atoms/cmp\"";
1862 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1863 let completions = project.update(cx, |project, cx| {
1864 project.completions(&buffer, text.len() - 1, cx)
1865 });
1866
1867 fake_server
1868 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1869 Ok(Some(lsp::CompletionResponse::Array(vec![
1870 lsp::CompletionItem {
1871 label: "component".into(),
1872 ..Default::default()
1873 },
1874 ])))
1875 })
1876 .next()
1877 .await;
1878 let completions = completions.await.unwrap();
1879 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
1880 assert_eq!(completions.len(), 1);
1881 assert_eq!(completions[0].new_text, "component");
1882 assert_eq!(
1883 completions[0].old_range.to_offset(&snapshot),
1884 text.len() - 4..text.len() - 1
1885 );
1886}
1887
1888#[gpui::test]
1889async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
1890 let mut language = Language::new(
1891 LanguageConfig {
1892 name: "TypeScript".into(),
1893 path_suffixes: vec!["ts".to_string()],
1894 ..Default::default()
1895 },
1896 Some(tree_sitter_typescript::language_typescript()),
1897 );
1898 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
1899
1900 let fs = FakeFs::new(cx.background());
1901 fs.insert_tree(
1902 "/dir",
1903 json!({
1904 "a.ts": "",
1905 }),
1906 )
1907 .await;
1908
1909 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1910 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1911 let buffer = project
1912 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1913 .await
1914 .unwrap();
1915
1916 let fake_server = fake_language_servers.next().await.unwrap();
1917
1918 let text = "let a = b.fqn";
1919 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1920 let completions = project.update(cx, |project, cx| {
1921 project.completions(&buffer, text.len(), cx)
1922 });
1923
1924 fake_server
1925 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1926 Ok(Some(lsp::CompletionResponse::Array(vec![
1927 lsp::CompletionItem {
1928 label: "fullyQualifiedName?".into(),
1929 insert_text: Some("fully\rQualified\r\nName".into()),
1930 ..Default::default()
1931 },
1932 ])))
1933 })
1934 .next()
1935 .await;
1936 let completions = completions.await.unwrap();
1937 assert_eq!(completions.len(), 1);
1938 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
1939}
1940
1941#[gpui::test(iterations = 10)]
1942async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
1943 let mut language = Language::new(
1944 LanguageConfig {
1945 name: "TypeScript".into(),
1946 path_suffixes: vec!["ts".to_string()],
1947 ..Default::default()
1948 },
1949 None,
1950 );
1951 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
1952
1953 let fs = FakeFs::new(cx.background());
1954 fs.insert_tree(
1955 "/dir",
1956 json!({
1957 "a.ts": "a",
1958 }),
1959 )
1960 .await;
1961
1962 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1963 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1964 let buffer = project
1965 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1966 .await
1967 .unwrap();
1968
1969 let fake_server = fake_language_servers.next().await.unwrap();
1970
1971 // Language server returns code actions that contain commands, and not edits.
1972 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
1973 fake_server
1974 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
1975 Ok(Some(vec![
1976 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
1977 title: "The code action".into(),
1978 command: Some(lsp::Command {
1979 title: "The command".into(),
1980 command: "_the/command".into(),
1981 arguments: Some(vec![json!("the-argument")]),
1982 }),
1983 ..Default::default()
1984 }),
1985 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
1986 title: "two".into(),
1987 ..Default::default()
1988 }),
1989 ]))
1990 })
1991 .next()
1992 .await;
1993
1994 let action = actions.await.unwrap()[0].clone();
1995 let apply = project.update(cx, |project, cx| {
1996 project.apply_code_action(buffer.clone(), action, true, cx)
1997 });
1998
1999 // Resolving the code action does not populate its edits. In absence of
2000 // edits, we must execute the given command.
2001 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2002 |action, _| async move { Ok(action) },
2003 );
2004
2005 // While executing the command, the language server sends the editor
2006 // a `workspaceEdit` request.
2007 fake_server
2008 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2009 let fake = fake_server.clone();
2010 move |params, _| {
2011 assert_eq!(params.command, "_the/command");
2012 let fake = fake.clone();
2013 async move {
2014 fake.server
2015 .request::<lsp::request::ApplyWorkspaceEdit>(
2016 lsp::ApplyWorkspaceEditParams {
2017 label: None,
2018 edit: lsp::WorkspaceEdit {
2019 changes: Some(
2020 [(
2021 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2022 vec![lsp::TextEdit {
2023 range: lsp::Range::new(
2024 lsp::Position::new(0, 0),
2025 lsp::Position::new(0, 0),
2026 ),
2027 new_text: "X".into(),
2028 }],
2029 )]
2030 .into_iter()
2031 .collect(),
2032 ),
2033 ..Default::default()
2034 },
2035 },
2036 )
2037 .await
2038 .unwrap();
2039 Ok(Some(json!(null)))
2040 }
2041 }
2042 })
2043 .next()
2044 .await;
2045
2046 // Applying the code action returns a project transaction containing the edits
2047 // sent by the language server in its `workspaceEdit` request.
2048 let transaction = apply.await.unwrap();
2049 assert!(transaction.0.contains_key(&buffer));
2050 buffer.update(cx, |buffer, cx| {
2051 assert_eq!(buffer.text(), "Xa");
2052 buffer.undo(cx);
2053 assert_eq!(buffer.text(), "a");
2054 });
2055}
2056
2057#[gpui::test]
2058async fn test_save_file(cx: &mut gpui::TestAppContext) {
2059 let fs = FakeFs::new(cx.background());
2060 fs.insert_tree(
2061 "/dir",
2062 json!({
2063 "file1": "the old contents",
2064 }),
2065 )
2066 .await;
2067
2068 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2069 let buffer = project
2070 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2071 .await
2072 .unwrap();
2073 buffer
2074 .update(cx, |buffer, cx| {
2075 assert_eq!(buffer.text(), "the old contents");
2076 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
2077 buffer.save(cx)
2078 })
2079 .await
2080 .unwrap();
2081
2082 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2083 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2084}
2085
2086#[gpui::test]
2087async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2088 let fs = FakeFs::new(cx.background());
2089 fs.insert_tree(
2090 "/dir",
2091 json!({
2092 "file1": "the old contents",
2093 }),
2094 )
2095 .await;
2096
2097 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2098 let buffer = project
2099 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2100 .await
2101 .unwrap();
2102 buffer
2103 .update(cx, |buffer, cx| {
2104 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
2105 buffer.save(cx)
2106 })
2107 .await
2108 .unwrap();
2109
2110 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2111 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2112}
2113
2114#[gpui::test]
2115async fn test_save_as(cx: &mut gpui::TestAppContext) {
2116 let fs = FakeFs::new(cx.background());
2117 fs.insert_tree("/dir", json!({})).await;
2118
2119 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2120 let buffer = project.update(cx, |project, cx| {
2121 project.create_buffer("", None, cx).unwrap()
2122 });
2123 buffer.update(cx, |buffer, cx| {
2124 buffer.edit([(0..0, "abc")], cx);
2125 assert!(buffer.is_dirty());
2126 assert!(!buffer.has_conflict());
2127 });
2128 project
2129 .update(cx, |project, cx| {
2130 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
2131 })
2132 .await
2133 .unwrap();
2134 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
2135 buffer.read_with(cx, |buffer, cx| {
2136 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
2137 assert!(!buffer.is_dirty());
2138 assert!(!buffer.has_conflict());
2139 });
2140
2141 let opened_buffer = project
2142 .update(cx, |project, cx| {
2143 project.open_local_buffer("/dir/file1", cx)
2144 })
2145 .await
2146 .unwrap();
2147 assert_eq!(opened_buffer, buffer);
2148}
2149
2150#[gpui::test(retries = 5)]
2151async fn test_rescan_and_remote_updates(
2152 deterministic: Arc<Deterministic>,
2153 cx: &mut gpui::TestAppContext,
2154) {
2155 let dir = temp_tree(json!({
2156 "a": {
2157 "file1": "",
2158 "file2": "",
2159 "file3": "",
2160 },
2161 "b": {
2162 "c": {
2163 "file4": "",
2164 "file5": "",
2165 }
2166 }
2167 }));
2168
2169 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2170 let rpc = project.read_with(cx, |p, _| p.client.clone());
2171
2172 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2173 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2174 async move { buffer.await.unwrap() }
2175 };
2176 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2177 project.read_with(cx, |project, cx| {
2178 let tree = project.worktrees(cx).next().unwrap();
2179 tree.read(cx)
2180 .entry_for_path(path)
2181 .expect(&format!("no entry for path {}", path))
2182 .id
2183 })
2184 };
2185
2186 let buffer2 = buffer_for_path("a/file2", cx).await;
2187 let buffer3 = buffer_for_path("a/file3", cx).await;
2188 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2189 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2190
2191 let file2_id = id_for_path("a/file2", &cx);
2192 let file3_id = id_for_path("a/file3", &cx);
2193 let file4_id = id_for_path("b/c/file4", &cx);
2194
2195 // Create a remote copy of this worktree.
2196 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2197 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
2198 let remote = cx.update(|cx| {
2199 Worktree::remote(
2200 1,
2201 1,
2202 proto::WorktreeMetadata {
2203 id: initial_snapshot.id().to_proto(),
2204 root_name: initial_snapshot.root_name().into(),
2205 visible: true,
2206 },
2207 rpc.clone(),
2208 cx,
2209 )
2210 });
2211 remote.update(cx, |remote, _| {
2212 let update = initial_snapshot.build_initial_update(1);
2213 remote.as_remote_mut().unwrap().update_from_remote(update);
2214 });
2215 deterministic.run_until_parked();
2216
2217 cx.read(|cx| {
2218 assert!(!buffer2.read(cx).is_dirty());
2219 assert!(!buffer3.read(cx).is_dirty());
2220 assert!(!buffer4.read(cx).is_dirty());
2221 assert!(!buffer5.read(cx).is_dirty());
2222 });
2223
2224 // Rename and delete files and directories.
2225 tree.flush_fs_events(&cx).await;
2226 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2227 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2228 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2229 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2230 tree.flush_fs_events(&cx).await;
2231
2232 let expected_paths = vec![
2233 "a",
2234 "a/file1",
2235 "a/file2.new",
2236 "b",
2237 "d",
2238 "d/file3",
2239 "d/file4",
2240 ];
2241
2242 cx.read(|app| {
2243 assert_eq!(
2244 tree.read(app)
2245 .paths()
2246 .map(|p| p.to_str().unwrap())
2247 .collect::<Vec<_>>(),
2248 expected_paths
2249 );
2250
2251 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
2252 assert_eq!(id_for_path("d/file3", &cx), file3_id);
2253 assert_eq!(id_for_path("d/file4", &cx), file4_id);
2254
2255 assert_eq!(
2256 buffer2.read(app).file().unwrap().path().as_ref(),
2257 Path::new("a/file2.new")
2258 );
2259 assert_eq!(
2260 buffer3.read(app).file().unwrap().path().as_ref(),
2261 Path::new("d/file3")
2262 );
2263 assert_eq!(
2264 buffer4.read(app).file().unwrap().path().as_ref(),
2265 Path::new("d/file4")
2266 );
2267 assert_eq!(
2268 buffer5.read(app).file().unwrap().path().as_ref(),
2269 Path::new("b/c/file5")
2270 );
2271
2272 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2273 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2274 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2275 assert!(buffer5.read(app).file().unwrap().is_deleted());
2276 });
2277
2278 // Update the remote worktree. Check that it becomes consistent with the
2279 // local worktree.
2280 remote.update(cx, |remote, cx| {
2281 let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
2282 &initial_snapshot,
2283 1,
2284 1,
2285 true,
2286 );
2287 remote.as_remote_mut().unwrap().update_from_remote(update);
2288 });
2289 deterministic.run_until_parked();
2290 remote.read_with(cx, |remote, _| {
2291 assert_eq!(
2292 remote
2293 .paths()
2294 .map(|p| p.to_str().unwrap())
2295 .collect::<Vec<_>>(),
2296 expected_paths
2297 );
2298 });
2299}
2300
2301#[gpui::test]
2302async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2303 let fs = FakeFs::new(cx.background());
2304 fs.insert_tree(
2305 "/dir",
2306 json!({
2307 "a.txt": "a-contents",
2308 "b.txt": "b-contents",
2309 }),
2310 )
2311 .await;
2312
2313 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2314
2315 // Spawn multiple tasks to open paths, repeating some paths.
2316 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2317 (
2318 p.open_local_buffer("/dir/a.txt", cx),
2319 p.open_local_buffer("/dir/b.txt", cx),
2320 p.open_local_buffer("/dir/a.txt", cx),
2321 )
2322 });
2323
2324 let buffer_a_1 = buffer_a_1.await.unwrap();
2325 let buffer_a_2 = buffer_a_2.await.unwrap();
2326 let buffer_b = buffer_b.await.unwrap();
2327 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2328 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2329
2330 // There is only one buffer per path.
2331 let buffer_a_id = buffer_a_1.id();
2332 assert_eq!(buffer_a_2.id(), buffer_a_id);
2333
2334 // Open the same path again while it is still open.
2335 drop(buffer_a_1);
2336 let buffer_a_3 = project
2337 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2338 .await
2339 .unwrap();
2340
2341 // There's still only one buffer per path.
2342 assert_eq!(buffer_a_3.id(), buffer_a_id);
2343}
2344
2345#[gpui::test]
2346async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2347 let fs = FakeFs::new(cx.background());
2348 fs.insert_tree(
2349 "/dir",
2350 json!({
2351 "file1": "abc",
2352 "file2": "def",
2353 "file3": "ghi",
2354 }),
2355 )
2356 .await;
2357
2358 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2359
2360 let buffer1 = project
2361 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2362 .await
2363 .unwrap();
2364 let events = Rc::new(RefCell::new(Vec::new()));
2365
2366 // initially, the buffer isn't dirty.
2367 buffer1.update(cx, |buffer, cx| {
2368 cx.subscribe(&buffer1, {
2369 let events = events.clone();
2370 move |_, _, event, _| match event {
2371 BufferEvent::Operation(_) => {}
2372 _ => events.borrow_mut().push(event.clone()),
2373 }
2374 })
2375 .detach();
2376
2377 assert!(!buffer.is_dirty());
2378 assert!(events.borrow().is_empty());
2379
2380 buffer.edit([(1..2, "")], cx);
2381 });
2382
2383 // after the first edit, the buffer is dirty, and emits a dirtied event.
2384 buffer1.update(cx, |buffer, cx| {
2385 assert!(buffer.text() == "ac");
2386 assert!(buffer.is_dirty());
2387 assert_eq!(
2388 *events.borrow(),
2389 &[language::Event::Edited, language::Event::DirtyChanged]
2390 );
2391 events.borrow_mut().clear();
2392 buffer.did_save(
2393 buffer.version(),
2394 buffer.as_rope().fingerprint(),
2395 buffer.file().unwrap().mtime(),
2396 None,
2397 cx,
2398 );
2399 });
2400
2401 // after saving, the buffer is not dirty, and emits a saved event.
2402 buffer1.update(cx, |buffer, cx| {
2403 assert!(!buffer.is_dirty());
2404 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2405 events.borrow_mut().clear();
2406
2407 buffer.edit([(1..1, "B")], cx);
2408 buffer.edit([(2..2, "D")], cx);
2409 });
2410
2411 // after editing again, the buffer is dirty, and emits another dirty event.
2412 buffer1.update(cx, |buffer, cx| {
2413 assert!(buffer.text() == "aBDc");
2414 assert!(buffer.is_dirty());
2415 assert_eq!(
2416 *events.borrow(),
2417 &[
2418 language::Event::Edited,
2419 language::Event::DirtyChanged,
2420 language::Event::Edited,
2421 ],
2422 );
2423 events.borrow_mut().clear();
2424
2425 // After restoring the buffer to its previously-saved state,
2426 // the buffer is not considered dirty anymore.
2427 buffer.edit([(1..3, "")], cx);
2428 assert!(buffer.text() == "ac");
2429 assert!(!buffer.is_dirty());
2430 });
2431
2432 assert_eq!(
2433 *events.borrow(),
2434 &[language::Event::Edited, language::Event::DirtyChanged]
2435 );
2436
2437 // When a file is deleted, the buffer is considered dirty.
2438 let events = Rc::new(RefCell::new(Vec::new()));
2439 let buffer2 = project
2440 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2441 .await
2442 .unwrap();
2443 buffer2.update(cx, |_, cx| {
2444 cx.subscribe(&buffer2, {
2445 let events = events.clone();
2446 move |_, _, event, _| events.borrow_mut().push(event.clone())
2447 })
2448 .detach();
2449 });
2450
2451 fs.remove_file("/dir/file2".as_ref(), Default::default())
2452 .await
2453 .unwrap();
2454 cx.foreground().run_until_parked();
2455 assert_eq!(
2456 *events.borrow(),
2457 &[
2458 language::Event::DirtyChanged,
2459 language::Event::FileHandleChanged
2460 ]
2461 );
2462
2463 // When a file is already dirty when deleted, we don't emit a Dirtied event.
2464 let events = Rc::new(RefCell::new(Vec::new()));
2465 let buffer3 = project
2466 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2467 .await
2468 .unwrap();
2469 buffer3.update(cx, |_, cx| {
2470 cx.subscribe(&buffer3, {
2471 let events = events.clone();
2472 move |_, _, event, _| events.borrow_mut().push(event.clone())
2473 })
2474 .detach();
2475 });
2476
2477 buffer3.update(cx, |buffer, cx| {
2478 buffer.edit([(0..0, "x")], cx);
2479 });
2480 events.borrow_mut().clear();
2481 fs.remove_file("/dir/file3".as_ref(), Default::default())
2482 .await
2483 .unwrap();
2484 cx.foreground().run_until_parked();
2485 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2486 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2487}
2488
2489#[gpui::test]
2490async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
2491 let initial_contents = "aaa\nbbbbb\nc\n";
2492 let fs = FakeFs::new(cx.background());
2493 fs.insert_tree(
2494 "/dir",
2495 json!({
2496 "the-file": initial_contents,
2497 }),
2498 )
2499 .await;
2500 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2501 let buffer = project
2502 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
2503 .await
2504 .unwrap();
2505
2506 let anchors = (0..3)
2507 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
2508 .collect::<Vec<_>>();
2509
2510 // Change the file on disk, adding two new lines of text, and removing
2511 // one line.
2512 buffer.read_with(cx, |buffer, _| {
2513 assert!(!buffer.is_dirty());
2514 assert!(!buffer.has_conflict());
2515 });
2516 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
2517 fs.save(
2518 "/dir/the-file".as_ref(),
2519 &new_contents.into(),
2520 LineEnding::Unix,
2521 )
2522 .await
2523 .unwrap();
2524
2525 // Because the buffer was not modified, it is reloaded from disk. Its
2526 // contents are edited according to the diff between the old and new
2527 // file contents.
2528 cx.foreground().run_until_parked();
2529 buffer.update(cx, |buffer, _| {
2530 assert_eq!(buffer.text(), new_contents);
2531 assert!(!buffer.is_dirty());
2532 assert!(!buffer.has_conflict());
2533
2534 let anchor_positions = anchors
2535 .iter()
2536 .map(|anchor| anchor.to_point(&*buffer))
2537 .collect::<Vec<_>>();
2538 assert_eq!(
2539 anchor_positions,
2540 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
2541 );
2542 });
2543
2544 // Modify the buffer
2545 buffer.update(cx, |buffer, cx| {
2546 buffer.edit([(0..0, " ")], cx);
2547 assert!(buffer.is_dirty());
2548 assert!(!buffer.has_conflict());
2549 });
2550
2551 // Change the file on disk again, adding blank lines to the beginning.
2552 fs.save(
2553 "/dir/the-file".as_ref(),
2554 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
2555 LineEnding::Unix,
2556 )
2557 .await
2558 .unwrap();
2559
2560 // Because the buffer is modified, it doesn't reload from disk, but is
2561 // marked as having a conflict.
2562 cx.foreground().run_until_parked();
2563 buffer.read_with(cx, |buffer, _| {
2564 assert!(buffer.has_conflict());
2565 });
2566}
2567
2568#[gpui::test]
2569async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
2570 let fs = FakeFs::new(cx.background());
2571 fs.insert_tree(
2572 "/dir",
2573 json!({
2574 "file1": "a\nb\nc\n",
2575 "file2": "one\r\ntwo\r\nthree\r\n",
2576 }),
2577 )
2578 .await;
2579
2580 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2581 let buffer1 = project
2582 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2583 .await
2584 .unwrap();
2585 let buffer2 = project
2586 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2587 .await
2588 .unwrap();
2589
2590 buffer1.read_with(cx, |buffer, _| {
2591 assert_eq!(buffer.text(), "a\nb\nc\n");
2592 assert_eq!(buffer.line_ending(), LineEnding::Unix);
2593 });
2594 buffer2.read_with(cx, |buffer, _| {
2595 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
2596 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2597 });
2598
2599 // Change a file's line endings on disk from unix to windows. The buffer's
2600 // state updates correctly.
2601 fs.save(
2602 "/dir/file1".as_ref(),
2603 &"aaa\nb\nc\n".into(),
2604 LineEnding::Windows,
2605 )
2606 .await
2607 .unwrap();
2608 cx.foreground().run_until_parked();
2609 buffer1.read_with(cx, |buffer, _| {
2610 assert_eq!(buffer.text(), "aaa\nb\nc\n");
2611 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2612 });
2613
2614 // Save a file with windows line endings. The file is written correctly.
2615 buffer2
2616 .update(cx, |buffer, cx| {
2617 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
2618 buffer.save(cx)
2619 })
2620 .await
2621 .unwrap();
2622 assert_eq!(
2623 fs.load("/dir/file2".as_ref()).await.unwrap(),
2624 "one\r\ntwo\r\nthree\r\nfour\r\n",
2625 );
2626}
2627
2628#[gpui::test]
2629async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
2630 cx.foreground().forbid_parking();
2631
2632 let fs = FakeFs::new(cx.background());
2633 fs.insert_tree(
2634 "/the-dir",
2635 json!({
2636 "a.rs": "
2637 fn foo(mut v: Vec<usize>) {
2638 for x in &v {
2639 v.push(1);
2640 }
2641 }
2642 "
2643 .unindent(),
2644 }),
2645 )
2646 .await;
2647
2648 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
2649 let buffer = project
2650 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
2651 .await
2652 .unwrap();
2653
2654 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
2655 let message = lsp::PublishDiagnosticsParams {
2656 uri: buffer_uri.clone(),
2657 diagnostics: vec![
2658 lsp::Diagnostic {
2659 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2660 severity: Some(DiagnosticSeverity::WARNING),
2661 message: "error 1".to_string(),
2662 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2663 location: lsp::Location {
2664 uri: buffer_uri.clone(),
2665 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2666 },
2667 message: "error 1 hint 1".to_string(),
2668 }]),
2669 ..Default::default()
2670 },
2671 lsp::Diagnostic {
2672 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2673 severity: Some(DiagnosticSeverity::HINT),
2674 message: "error 1 hint 1".to_string(),
2675 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2676 location: lsp::Location {
2677 uri: buffer_uri.clone(),
2678 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2679 },
2680 message: "original diagnostic".to_string(),
2681 }]),
2682 ..Default::default()
2683 },
2684 lsp::Diagnostic {
2685 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2686 severity: Some(DiagnosticSeverity::ERROR),
2687 message: "error 2".to_string(),
2688 related_information: Some(vec![
2689 lsp::DiagnosticRelatedInformation {
2690 location: lsp::Location {
2691 uri: buffer_uri.clone(),
2692 range: lsp::Range::new(
2693 lsp::Position::new(1, 13),
2694 lsp::Position::new(1, 15),
2695 ),
2696 },
2697 message: "error 2 hint 1".to_string(),
2698 },
2699 lsp::DiagnosticRelatedInformation {
2700 location: lsp::Location {
2701 uri: buffer_uri.clone(),
2702 range: lsp::Range::new(
2703 lsp::Position::new(1, 13),
2704 lsp::Position::new(1, 15),
2705 ),
2706 },
2707 message: "error 2 hint 2".to_string(),
2708 },
2709 ]),
2710 ..Default::default()
2711 },
2712 lsp::Diagnostic {
2713 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2714 severity: Some(DiagnosticSeverity::HINT),
2715 message: "error 2 hint 1".to_string(),
2716 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2717 location: lsp::Location {
2718 uri: buffer_uri.clone(),
2719 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2720 },
2721 message: "original diagnostic".to_string(),
2722 }]),
2723 ..Default::default()
2724 },
2725 lsp::Diagnostic {
2726 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2727 severity: Some(DiagnosticSeverity::HINT),
2728 message: "error 2 hint 2".to_string(),
2729 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2730 location: lsp::Location {
2731 uri: buffer_uri.clone(),
2732 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2733 },
2734 message: "original diagnostic".to_string(),
2735 }]),
2736 ..Default::default()
2737 },
2738 ],
2739 version: None,
2740 };
2741
2742 project
2743 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
2744 .unwrap();
2745 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2746
2747 assert_eq!(
2748 buffer
2749 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2750 .collect::<Vec<_>>(),
2751 &[
2752 DiagnosticEntry {
2753 range: Point::new(1, 8)..Point::new(1, 9),
2754 diagnostic: Diagnostic {
2755 severity: DiagnosticSeverity::WARNING,
2756 message: "error 1".to_string(),
2757 group_id: 0,
2758 is_primary: true,
2759 ..Default::default()
2760 }
2761 },
2762 DiagnosticEntry {
2763 range: Point::new(1, 8)..Point::new(1, 9),
2764 diagnostic: Diagnostic {
2765 severity: DiagnosticSeverity::HINT,
2766 message: "error 1 hint 1".to_string(),
2767 group_id: 0,
2768 is_primary: false,
2769 ..Default::default()
2770 }
2771 },
2772 DiagnosticEntry {
2773 range: Point::new(1, 13)..Point::new(1, 15),
2774 diagnostic: Diagnostic {
2775 severity: DiagnosticSeverity::HINT,
2776 message: "error 2 hint 1".to_string(),
2777 group_id: 1,
2778 is_primary: false,
2779 ..Default::default()
2780 }
2781 },
2782 DiagnosticEntry {
2783 range: Point::new(1, 13)..Point::new(1, 15),
2784 diagnostic: Diagnostic {
2785 severity: DiagnosticSeverity::HINT,
2786 message: "error 2 hint 2".to_string(),
2787 group_id: 1,
2788 is_primary: false,
2789 ..Default::default()
2790 }
2791 },
2792 DiagnosticEntry {
2793 range: Point::new(2, 8)..Point::new(2, 17),
2794 diagnostic: Diagnostic {
2795 severity: DiagnosticSeverity::ERROR,
2796 message: "error 2".to_string(),
2797 group_id: 1,
2798 is_primary: true,
2799 ..Default::default()
2800 }
2801 }
2802 ]
2803 );
2804
2805 assert_eq!(
2806 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
2807 &[
2808 DiagnosticEntry {
2809 range: Point::new(1, 8)..Point::new(1, 9),
2810 diagnostic: Diagnostic {
2811 severity: DiagnosticSeverity::WARNING,
2812 message: "error 1".to_string(),
2813 group_id: 0,
2814 is_primary: true,
2815 ..Default::default()
2816 }
2817 },
2818 DiagnosticEntry {
2819 range: Point::new(1, 8)..Point::new(1, 9),
2820 diagnostic: Diagnostic {
2821 severity: DiagnosticSeverity::HINT,
2822 message: "error 1 hint 1".to_string(),
2823 group_id: 0,
2824 is_primary: false,
2825 ..Default::default()
2826 }
2827 },
2828 ]
2829 );
2830 assert_eq!(
2831 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
2832 &[
2833 DiagnosticEntry {
2834 range: Point::new(1, 13)..Point::new(1, 15),
2835 diagnostic: Diagnostic {
2836 severity: DiagnosticSeverity::HINT,
2837 message: "error 2 hint 1".to_string(),
2838 group_id: 1,
2839 is_primary: false,
2840 ..Default::default()
2841 }
2842 },
2843 DiagnosticEntry {
2844 range: Point::new(1, 13)..Point::new(1, 15),
2845 diagnostic: Diagnostic {
2846 severity: DiagnosticSeverity::HINT,
2847 message: "error 2 hint 2".to_string(),
2848 group_id: 1,
2849 is_primary: false,
2850 ..Default::default()
2851 }
2852 },
2853 DiagnosticEntry {
2854 range: Point::new(2, 8)..Point::new(2, 17),
2855 diagnostic: Diagnostic {
2856 severity: DiagnosticSeverity::ERROR,
2857 message: "error 2".to_string(),
2858 group_id: 1,
2859 is_primary: true,
2860 ..Default::default()
2861 }
2862 }
2863 ]
2864 );
2865}
2866
2867#[gpui::test]
2868async fn test_rename(cx: &mut gpui::TestAppContext) {
2869 cx.foreground().forbid_parking();
2870
2871 let mut language = Language::new(
2872 LanguageConfig {
2873 name: "Rust".into(),
2874 path_suffixes: vec!["rs".to_string()],
2875 ..Default::default()
2876 },
2877 Some(tree_sitter_rust::language()),
2878 );
2879 let mut fake_servers = language.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2880 capabilities: lsp::ServerCapabilities {
2881 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
2882 prepare_provider: Some(true),
2883 work_done_progress_options: Default::default(),
2884 })),
2885 ..Default::default()
2886 },
2887 ..Default::default()
2888 }));
2889
2890 let fs = FakeFs::new(cx.background());
2891 fs.insert_tree(
2892 "/dir",
2893 json!({
2894 "one.rs": "const ONE: usize = 1;",
2895 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
2896 }),
2897 )
2898 .await;
2899
2900 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2901 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2902 let buffer = project
2903 .update(cx, |project, cx| {
2904 project.open_local_buffer("/dir/one.rs", cx)
2905 })
2906 .await
2907 .unwrap();
2908
2909 let fake_server = fake_servers.next().await.unwrap();
2910
2911 let response = project.update(cx, |project, cx| {
2912 project.prepare_rename(buffer.clone(), 7, cx)
2913 });
2914 fake_server
2915 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
2916 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
2917 assert_eq!(params.position, lsp::Position::new(0, 7));
2918 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
2919 lsp::Position::new(0, 6),
2920 lsp::Position::new(0, 9),
2921 ))))
2922 })
2923 .next()
2924 .await
2925 .unwrap();
2926 let range = response.await.unwrap().unwrap();
2927 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
2928 assert_eq!(range, 6..9);
2929
2930 let response = project.update(cx, |project, cx| {
2931 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
2932 });
2933 fake_server
2934 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
2935 assert_eq!(
2936 params.text_document_position.text_document.uri.as_str(),
2937 "file:///dir/one.rs"
2938 );
2939 assert_eq!(
2940 params.text_document_position.position,
2941 lsp::Position::new(0, 7)
2942 );
2943 assert_eq!(params.new_name, "THREE");
2944 Ok(Some(lsp::WorkspaceEdit {
2945 changes: Some(
2946 [
2947 (
2948 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
2949 vec![lsp::TextEdit::new(
2950 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
2951 "THREE".to_string(),
2952 )],
2953 ),
2954 (
2955 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
2956 vec![
2957 lsp::TextEdit::new(
2958 lsp::Range::new(
2959 lsp::Position::new(0, 24),
2960 lsp::Position::new(0, 27),
2961 ),
2962 "THREE".to_string(),
2963 ),
2964 lsp::TextEdit::new(
2965 lsp::Range::new(
2966 lsp::Position::new(0, 35),
2967 lsp::Position::new(0, 38),
2968 ),
2969 "THREE".to_string(),
2970 ),
2971 ],
2972 ),
2973 ]
2974 .into_iter()
2975 .collect(),
2976 ),
2977 ..Default::default()
2978 }))
2979 })
2980 .next()
2981 .await
2982 .unwrap();
2983 let mut transaction = response.await.unwrap().0;
2984 assert_eq!(transaction.len(), 2);
2985 assert_eq!(
2986 transaction
2987 .remove_entry(&buffer)
2988 .unwrap()
2989 .0
2990 .read_with(cx, |buffer, _| buffer.text()),
2991 "const THREE: usize = 1;"
2992 );
2993 assert_eq!(
2994 transaction
2995 .into_keys()
2996 .next()
2997 .unwrap()
2998 .read_with(cx, |buffer, _| buffer.text()),
2999 "const TWO: usize = one::THREE + one::THREE;"
3000 );
3001}
3002
3003#[gpui::test]
3004async fn test_search(cx: &mut gpui::TestAppContext) {
3005 let fs = FakeFs::new(cx.background());
3006 fs.insert_tree(
3007 "/dir",
3008 json!({
3009 "one.rs": "const ONE: usize = 1;",
3010 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3011 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3012 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3013 }),
3014 )
3015 .await;
3016 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3017 assert_eq!(
3018 search(&project, SearchQuery::text("TWO", false, true), cx)
3019 .await
3020 .unwrap(),
3021 HashMap::from_iter([
3022 ("two.rs".to_string(), vec![6..9]),
3023 ("three.rs".to_string(), vec![37..40])
3024 ])
3025 );
3026
3027 let buffer_4 = project
3028 .update(cx, |project, cx| {
3029 project.open_local_buffer("/dir/four.rs", cx)
3030 })
3031 .await
3032 .unwrap();
3033 buffer_4.update(cx, |buffer, cx| {
3034 let text = "two::TWO";
3035 buffer.edit([(20..28, text), (31..43, text)], cx);
3036 });
3037
3038 assert_eq!(
3039 search(&project, SearchQuery::text("TWO", false, true), cx)
3040 .await
3041 .unwrap(),
3042 HashMap::from_iter([
3043 ("two.rs".to_string(), vec![6..9]),
3044 ("three.rs".to_string(), vec![37..40]),
3045 ("four.rs".to_string(), vec![25..28, 36..39])
3046 ])
3047 );
3048
3049 async fn search(
3050 project: &ModelHandle<Project>,
3051 query: SearchQuery,
3052 cx: &mut gpui::TestAppContext,
3053 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
3054 let results = project
3055 .update(cx, |project, cx| project.search(query, cx))
3056 .await?;
3057
3058 Ok(results
3059 .into_iter()
3060 .map(|(buffer, ranges)| {
3061 buffer.read_with(cx, |buffer, _| {
3062 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3063 let ranges = ranges
3064 .into_iter()
3065 .map(|range| range.to_offset(buffer))
3066 .collect::<Vec<_>>();
3067 (path, ranges)
3068 })
3069 })
3070 .collect())
3071 }
3072}