1use crate::{worktree::WorktreeHandle, Event, *};
2use fs::LineEnding;
3use fs::{FakeFs, RealFs};
4use futures::{future, StreamExt};
5use gpui::{executor::Deterministic, test::subscribe};
6use language::{
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use pretty_assertions::assert_eq;
12use serde_json::json;
13use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
14use unindent::Unindent as _;
15use util::{assert_set_eq, test::temp_tree};
16
17#[gpui::test]
18async fn test_symlinks(cx: &mut gpui::TestAppContext) {
19 let dir = temp_tree(json!({
20 "root": {
21 "apple": "",
22 "banana": {
23 "carrot": {
24 "date": "",
25 "endive": "",
26 }
27 },
28 "fennel": {
29 "grape": "",
30 }
31 }
32 }));
33
34 let root_link_path = dir.path().join("root_link");
35 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
36 unix::fs::symlink(
37 &dir.path().join("root/fennel"),
38 &dir.path().join("root/finnochio"),
39 )
40 .unwrap();
41
42 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
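    // Both the symlinked root and the symlink inside it are traversed: the
    // worktree reports five files (including "finnochio/grape", reached through
    // the directory symlink), and "fennel/grape" and "finnochio/grape" resolve
    // to the same inode.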
43 project.read_with(cx, |project, cx| {
44 let tree = project.worktrees(cx).next().unwrap().read(cx);
45 assert_eq!(tree.file_count(), 5);
46 assert_eq!(
47 tree.inode_for_path("fennel/grape"),
48 tree.inode_for_path("finnochio/grape")
49 );
50 });
51}
52
53#[gpui::test]
54async fn test_managing_language_servers(
55 deterministic: Arc<Deterministic>,
56 cx: &mut gpui::TestAppContext,
57) {
58 cx.foreground().forbid_parking();
59
60 let mut rust_language = Language::new(
61 LanguageConfig {
62 name: "Rust".into(),
63 path_suffixes: vec!["rs".to_string()],
64 ..Default::default()
65 },
66 Some(tree_sitter_rust::language()),
67 );
68 let mut json_language = Language::new(
69 LanguageConfig {
70 name: "JSON".into(),
71 path_suffixes: vec!["json".to_string()],
72 ..Default::default()
73 },
74 None,
75 );
76 let mut fake_rust_servers = rust_language
77 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
78 name: "the-rust-language-server",
79 capabilities: lsp::ServerCapabilities {
80 completion_provider: Some(lsp::CompletionOptions {
81 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
82 ..Default::default()
83 }),
84 ..Default::default()
85 },
86 ..Default::default()
87 }))
88 .await;
89 let mut fake_json_servers = json_language
90 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
91 name: "the-json-language-server",
92 capabilities: lsp::ServerCapabilities {
93 completion_provider: Some(lsp::CompletionOptions {
94 trigger_characters: Some(vec![":".to_string()]),
95 ..Default::default()
96 }),
97 ..Default::default()
98 },
99 ..Default::default()
100 }))
101 .await;
102
103 let fs = FakeFs::new(cx.background());
104 fs.insert_tree(
105 "/the-root",
106 json!({
107 "test.rs": "const A: i32 = 1;",
108 "test2.rs": "",
109 "Cargo.toml": "a = 1",
110 "package.json": "{\"a\": 1}",
111 }),
112 )
113 .await;
114
115 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
116
117 // Open a buffer without an associated language server.
118 let toml_buffer = project
119 .update(cx, |project, cx| {
120 project.open_local_buffer("/the-root/Cargo.toml", cx)
121 })
122 .await
123 .unwrap();
124
125 // Open a buffer with an associated language server before the language for it has been loaded.
126 let rust_buffer = project
127 .update(cx, |project, cx| {
128 project.open_local_buffer("/the-root/test.rs", cx)
129 })
130 .await
131 .unwrap();
132 rust_buffer.read_with(cx, |buffer, _| {
133 assert_eq!(buffer.language().map(|l| l.name()), None);
134 });
135
136 // Now we add the languages to the project, and ensure they get assigned to all
137 // the relevant open buffers.
138 project.update(cx, |project, _| {
139 project.languages.add(Arc::new(json_language));
140 project.languages.add(Arc::new(rust_language));
141 });
142 deterministic.run_until_parked();
143 rust_buffer.read_with(cx, |buffer, _| {
144 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
145 });
146
    // A Rust language server is started up and notified about the open Rust file.
148 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
149 assert_eq!(
150 fake_rust_server
151 .receive_notification::<lsp::notification::DidOpenTextDocument>()
152 .await
153 .text_document,
154 lsp::TextDocumentItem {
155 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
156 version: 0,
157 text: "const A: i32 = 1;".to_string(),
158 language_id: Default::default()
159 }
160 );
161
162 // The buffer is configured based on the language server's capabilities.
163 rust_buffer.read_with(cx, |buffer, _| {
164 assert_eq!(
165 buffer.completion_triggers(),
166 &[".".to_string(), "::".to_string()]
167 );
168 });
169 toml_buffer.read_with(cx, |buffer, _| {
170 assert!(buffer.completion_triggers().is_empty());
171 });
172
173 // Edit a buffer. The changes are reported to the language server.
174 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
175 assert_eq!(
176 fake_rust_server
177 .receive_notification::<lsp::notification::DidChangeTextDocument>()
178 .await
179 .text_document,
180 lsp::VersionedTextDocumentIdentifier::new(
181 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
182 1
183 )
184 );
185
186 // Open a third buffer with a different associated language server.
187 let json_buffer = project
188 .update(cx, |project, cx| {
189 project.open_local_buffer("/the-root/package.json", cx)
190 })
191 .await
192 .unwrap();
193
    // A JSON language server is started up and is notified only about the JSON buffer.
195 let mut fake_json_server = fake_json_servers.next().await.unwrap();
196 assert_eq!(
197 fake_json_server
198 .receive_notification::<lsp::notification::DidOpenTextDocument>()
199 .await
200 .text_document,
201 lsp::TextDocumentItem {
202 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
203 version: 0,
204 text: "{\"a\": 1}".to_string(),
205 language_id: Default::default()
206 }
207 );
208
209 // This buffer is configured based on the second language server's
210 // capabilities.
211 json_buffer.read_with(cx, |buffer, _| {
212 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
213 });
214
215 // When opening another buffer whose language server is already running,
216 // it is also configured based on the existing language server's capabilities.
217 let rust_buffer2 = project
218 .update(cx, |project, cx| {
219 project.open_local_buffer("/the-root/test2.rs", cx)
220 })
221 .await
222 .unwrap();
223 rust_buffer2.read_with(cx, |buffer, _| {
224 assert_eq!(
225 buffer.completion_triggers(),
226 &[".".to_string(), "::".to_string()]
227 );
228 });
229
230 // Changes are reported only to servers matching the buffer's language.
231 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
232 rust_buffer2.update(cx, |buffer, cx| {
233 buffer.edit([(0..0, "let x = 1;")], None, cx)
234 });
235 assert_eq!(
236 fake_rust_server
237 .receive_notification::<lsp::notification::DidChangeTextDocument>()
238 .await
239 .text_document,
240 lsp::VersionedTextDocumentIdentifier::new(
241 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
242 1
243 )
244 );
245
246 // Save notifications are reported to all servers.
247 project
248 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
249 .await
250 .unwrap();
251 assert_eq!(
252 fake_rust_server
253 .receive_notification::<lsp::notification::DidSaveTextDocument>()
254 .await
255 .text_document,
256 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
257 );
258 assert_eq!(
259 fake_json_server
260 .receive_notification::<lsp::notification::DidSaveTextDocument>()
261 .await
262 .text_document,
263 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
264 );
265
266 // Renames are reported only to servers matching the buffer's language.
267 fs.rename(
268 Path::new("/the-root/test2.rs"),
269 Path::new("/the-root/test3.rs"),
270 Default::default(),
271 )
272 .await
273 .unwrap();
274 assert_eq!(
275 fake_rust_server
276 .receive_notification::<lsp::notification::DidCloseTextDocument>()
277 .await
278 .text_document,
279 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
280 );
281 assert_eq!(
282 fake_rust_server
283 .receive_notification::<lsp::notification::DidOpenTextDocument>()
284 .await
285 .text_document,
286 lsp::TextDocumentItem {
287 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
288 version: 0,
289 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
290 language_id: Default::default()
291 },
292 );
293
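    // Attach a diagnostic to the buffer, so that we can verify below that it
    // gets cleared when the file's language changes.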
294 rust_buffer2.update(cx, |buffer, cx| {
295 buffer.update_diagnostics(
296 DiagnosticSet::from_sorted_entries(
297 vec![DiagnosticEntry {
298 diagnostic: Default::default(),
299 range: Anchor::MIN..Anchor::MAX,
300 }],
301 &buffer.snapshot(),
302 ),
303 cx,
304 );
305 assert_eq!(
306 buffer
307 .snapshot()
308 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
309 .count(),
310 1
311 );
312 });
313
    // When the rename changes the file's extension, the buffer is closed on the old
    // language server and opened on the new one.
316 fs.rename(
317 Path::new("/the-root/test3.rs"),
318 Path::new("/the-root/test3.json"),
319 Default::default(),
320 )
321 .await
322 .unwrap();
323 assert_eq!(
324 fake_rust_server
325 .receive_notification::<lsp::notification::DidCloseTextDocument>()
326 .await
327 .text_document,
328 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
329 );
330 assert_eq!(
331 fake_json_server
332 .receive_notification::<lsp::notification::DidOpenTextDocument>()
333 .await
334 .text_document,
335 lsp::TextDocumentItem {
336 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
337 version: 0,
338 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
339 language_id: Default::default()
340 },
341 );
342
    // The buffer's diagnostics are cleared, since its language has changed.
344 rust_buffer2.read_with(cx, |buffer, _| {
345 assert_eq!(
346 buffer
347 .snapshot()
348 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
349 .count(),
350 0
351 );
352 });
353
354 // The renamed file's version resets after changing language server.
355 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
356 assert_eq!(
357 fake_json_server
358 .receive_notification::<lsp::notification::DidChangeTextDocument>()
359 .await
360 .text_document,
361 lsp::VersionedTextDocumentIdentifier::new(
362 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
363 1
364 )
365 );
366
    // Restart the language servers for both buffers.
368 project.update(cx, |project, cx| {
369 project.restart_language_servers_for_buffers(
370 vec![rust_buffer.clone(), json_buffer.clone()],
371 cx,
372 );
373 });
374
375 let mut rust_shutdown_requests = fake_rust_server
376 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
377 let mut json_shutdown_requests = fake_json_server
378 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
379 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
380
381 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
382 let mut fake_json_server = fake_json_servers.next().await.unwrap();
383
    // Ensure the Rust document is reopened in the new Rust language server.
385 assert_eq!(
386 fake_rust_server
387 .receive_notification::<lsp::notification::DidOpenTextDocument>()
388 .await
389 .text_document,
390 lsp::TextDocumentItem {
391 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
392 version: 1,
393 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
394 language_id: Default::default()
395 }
396 );
397
    // Ensure the JSON documents are reopened in the new JSON language server.
399 assert_set_eq!(
400 [
401 fake_json_server
402 .receive_notification::<lsp::notification::DidOpenTextDocument>()
403 .await
404 .text_document,
405 fake_json_server
406 .receive_notification::<lsp::notification::DidOpenTextDocument>()
407 .await
408 .text_document,
409 ],
410 [
411 lsp::TextDocumentItem {
412 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
413 version: 0,
414 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
415 language_id: Default::default()
416 },
417 lsp::TextDocumentItem {
418 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
419 version: 1,
420 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
421 language_id: Default::default()
422 }
423 ]
424 );
425
426 // Close notifications are reported only to servers matching the buffer's language.
427 cx.update(|_| drop(json_buffer));
428 let close_message = lsp::DidCloseTextDocumentParams {
429 text_document: lsp::TextDocumentIdentifier::new(
430 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
431 ),
432 };
433 assert_eq!(
434 fake_json_server
435 .receive_notification::<lsp::notification::DidCloseTextDocument>()
436 .await,
437 close_message,
438 );
439}
440
441#[gpui::test]
442async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
443 cx.foreground().forbid_parking();
444
445 let fs = FakeFs::new(cx.background());
446 fs.insert_tree(
447 "/dir",
448 json!({
449 "a.rs": "let a = 1;",
450 "b.rs": "let b = 2;"
451 }),
452 )
453 .await;
454
455 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
456
457 let buffer_a = project
458 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
459 .await
460 .unwrap();
461 let buffer_b = project
462 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
463 .await
464 .unwrap();
465
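    // Publish an error for a.rs and a warning for b.rs, then verify that each
    // single-file worktree's buffer picks up only its own diagnostics.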
466 project.update(cx, |project, cx| {
467 project
468 .update_diagnostics(
469 0,
470 lsp::PublishDiagnosticsParams {
471 uri: Url::from_file_path("/dir/a.rs").unwrap(),
472 version: None,
473 diagnostics: vec![lsp::Diagnostic {
474 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
475 severity: Some(lsp::DiagnosticSeverity::ERROR),
476 message: "error 1".to_string(),
477 ..Default::default()
478 }],
479 },
480 &[],
481 cx,
482 )
483 .unwrap();
484 project
485 .update_diagnostics(
486 0,
487 lsp::PublishDiagnosticsParams {
488 uri: Url::from_file_path("/dir/b.rs").unwrap(),
489 version: None,
490 diagnostics: vec![lsp::Diagnostic {
491 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
492 severity: Some(lsp::DiagnosticSeverity::WARNING),
493 message: "error 2".to_string(),
494 ..Default::default()
495 }],
496 },
497 &[],
498 cx,
499 )
500 .unwrap();
501 });
502
503 buffer_a.read_with(cx, |buffer, _| {
504 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
505 assert_eq!(
506 chunks
507 .iter()
508 .map(|(s, d)| (s.as_str(), *d))
509 .collect::<Vec<_>>(),
510 &[
511 ("let ", None),
512 ("a", Some(DiagnosticSeverity::ERROR)),
513 (" = 1;", None),
514 ]
515 );
516 });
517 buffer_b.read_with(cx, |buffer, _| {
518 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
519 assert_eq!(
520 chunks
521 .iter()
522 .map(|(s, d)| (s.as_str(), *d))
523 .collect::<Vec<_>>(),
524 &[
525 ("let ", None),
526 ("b", Some(DiagnosticSeverity::WARNING)),
527 (" = 2;", None),
528 ]
529 );
530 });
531}
532
533#[gpui::test]
534async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
535 cx.foreground().forbid_parking();
536
537 let fs = FakeFs::new(cx.background());
538 fs.insert_tree(
539 "/root",
540 json!({
541 "dir": {
542 "a.rs": "let a = 1;",
543 },
544 "other.rs": "let b = c;"
545 }),
546 )
547 .await;
548
549 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
550
551 let (worktree, _) = project
552 .update(cx, |project, cx| {
553 project.find_or_create_local_worktree("/root/other.rs", false, cx)
554 })
555 .await
556 .unwrap();
557 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
558
559 project.update(cx, |project, cx| {
560 project
561 .update_diagnostics(
562 0,
563 lsp::PublishDiagnosticsParams {
564 uri: Url::from_file_path("/root/other.rs").unwrap(),
565 version: None,
566 diagnostics: vec![lsp::Diagnostic {
567 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
568 severity: Some(lsp::DiagnosticSeverity::ERROR),
569 message: "unknown variable 'c'".to_string(),
570 ..Default::default()
571 }],
572 },
573 &[],
574 cx,
575 )
576 .unwrap();
577 });
578
579 let buffer = project
580 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
581 .await
582 .unwrap();
583 buffer.read_with(cx, |buffer, _| {
584 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
585 assert_eq!(
586 chunks
587 .iter()
588 .map(|(s, d)| (s.as_str(), *d))
589 .collect::<Vec<_>>(),
590 &[
591 ("let b = ", None),
592 ("c", Some(DiagnosticSeverity::ERROR)),
593 (";", None),
594 ]
595 );
596 });
597
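    // The diagnostic is visible in the buffer, but because its worktree is not
    // visible, it is excluded from the project-wide diagnostic summaries.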
598 project.read_with(cx, |project, cx| {
599 assert_eq!(project.diagnostic_summaries(cx).next(), None);
600 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
601 });
602}
603
604#[gpui::test]
605async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
606 cx.foreground().forbid_parking();
607
608 let progress_token = "the-progress-token";
609 let mut language = Language::new(
610 LanguageConfig {
611 name: "Rust".into(),
612 path_suffixes: vec!["rs".to_string()],
613 ..Default::default()
614 },
615 Some(tree_sitter_rust::language()),
616 );
617 let mut fake_servers = language
618 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
619 disk_based_diagnostics_progress_token: Some(progress_token.into()),
620 disk_based_diagnostics_sources: vec!["disk".into()],
621 ..Default::default()
622 }))
623 .await;
624
625 let fs = FakeFs::new(cx.background());
626 fs.insert_tree(
627 "/dir",
628 json!({
629 "a.rs": "fn a() { A }",
630 "b.rs": "const y: i32 = 1",
631 }),
632 )
633 .await;
634
635 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
636 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
637 let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
638
    // Cause the worktree to start the fake language server.
640 let _buffer = project
641 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
642 .await
643 .unwrap();
644
645 let mut events = subscribe(&project, cx);
646
647 let fake_server = fake_servers.next().await.unwrap();
648 fake_server
649 .start_progress(format!("{}/0", progress_token))
650 .await;
651 assert_eq!(
652 events.next().await.unwrap(),
653 Event::DiskBasedDiagnosticsStarted {
654 language_server_id: 0,
655 }
656 );
657
658 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
659 uri: Url::from_file_path("/dir/a.rs").unwrap(),
660 version: None,
661 diagnostics: vec![lsp::Diagnostic {
662 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
663 severity: Some(lsp::DiagnosticSeverity::ERROR),
664 message: "undefined variable 'A'".to_string(),
665 ..Default::default()
666 }],
667 });
668 assert_eq!(
669 events.next().await.unwrap(),
670 Event::DiagnosticsUpdated {
671 language_server_id: 0,
672 path: (worktree_id, Path::new("a.rs")).into()
673 }
674 );
675
676 fake_server.end_progress(format!("{}/0", progress_token));
677 assert_eq!(
678 events.next().await.unwrap(),
679 Event::DiskBasedDiagnosticsFinished {
680 language_server_id: 0
681 }
682 );
683
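    // Open the file that received diagnostics and verify that the published
    // error appears in the buffer at the reported range.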
684 let buffer = project
685 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
686 .await
687 .unwrap();
688
689 buffer.read_with(cx, |buffer, _| {
690 let snapshot = buffer.snapshot();
691 let diagnostics = snapshot
692 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
693 .collect::<Vec<_>>();
694 assert_eq!(
695 diagnostics,
696 &[DiagnosticEntry {
697 range: Point::new(0, 9)..Point::new(0, 10),
698 diagnostic: Diagnostic {
699 severity: lsp::DiagnosticSeverity::ERROR,
700 message: "undefined variable 'A'".to_string(),
701 group_id: 0,
702 is_primary: true,
703 ..Default::default()
704 }
705 }]
706 )
707 });
708
709 // Ensure publishing empty diagnostics twice only results in one update event.
710 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
711 uri: Url::from_file_path("/dir/a.rs").unwrap(),
712 version: None,
713 diagnostics: Default::default(),
714 });
715 assert_eq!(
716 events.next().await.unwrap(),
717 Event::DiagnosticsUpdated {
718 language_server_id: 0,
719 path: (worktree_id, Path::new("a.rs")).into()
720 }
721 );
722
723 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
724 uri: Url::from_file_path("/dir/a.rs").unwrap(),
725 version: None,
726 diagnostics: Default::default(),
727 });
728 cx.foreground().run_until_parked();
729 assert_eq!(futures::poll!(events.next()), Poll::Pending);
730}
731
732#[gpui::test]
733async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
734 cx.foreground().forbid_parking();
735
736 let progress_token = "the-progress-token";
737 let mut language = Language::new(
738 LanguageConfig {
739 path_suffixes: vec!["rs".to_string()],
740 ..Default::default()
741 },
742 None,
743 );
744 let mut fake_servers = language
745 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
746 disk_based_diagnostics_sources: vec!["disk".into()],
747 disk_based_diagnostics_progress_token: Some(progress_token.into()),
748 ..Default::default()
749 }))
750 .await;
751
752 let fs = FakeFs::new(cx.background());
753 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
754
755 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
756 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
757
758 let buffer = project
759 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
760 .await
761 .unwrap();
762
763 // Simulate diagnostics starting to update.
764 let fake_server = fake_servers.next().await.unwrap();
765 fake_server.start_progress(progress_token).await;
766
767 // Restart the server before the diagnostics finish updating.
768 project.update(cx, |project, cx| {
769 project.restart_language_servers_for_buffers([buffer], cx);
770 });
771 let mut events = subscribe(&project, cx);
772
773 // Simulate the newly started server sending more diagnostics.
774 let fake_server = fake_servers.next().await.unwrap();
775 fake_server.start_progress(progress_token).await;
776 assert_eq!(
777 events.next().await.unwrap(),
778 Event::DiskBasedDiagnosticsStarted {
779 language_server_id: 1
780 }
781 );
782 project.read_with(cx, |project, _| {
783 assert_eq!(
784 project
785 .language_servers_running_disk_based_diagnostics()
786 .collect::<Vec<_>>(),
787 [1]
788 );
789 });
790
791 // All diagnostics are considered done, despite the old server's diagnostic
792 // task never completing.
793 fake_server.end_progress(progress_token);
794 assert_eq!(
795 events.next().await.unwrap(),
796 Event::DiskBasedDiagnosticsFinished {
797 language_server_id: 1
798 }
799 );
800 project.read_with(cx, |project, _| {
801 assert_eq!(
802 project
803 .language_servers_running_disk_based_diagnostics()
804 .collect::<Vec<_>>(),
805 [0; 0]
806 );
807 });
808}
809
810#[gpui::test]
811async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
812 cx.foreground().forbid_parking();
813
814 let mut language = Language::new(
815 LanguageConfig {
816 path_suffixes: vec!["rs".to_string()],
817 ..Default::default()
818 },
819 None,
820 );
821 let mut fake_servers = language
822 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
823 name: "the-lsp",
824 ..Default::default()
825 }))
826 .await;
827
828 let fs = FakeFs::new(cx.background());
829 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
830
831 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
832 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
833
834 let buffer = project
835 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
836 .await
837 .unwrap();
838
839 // Before restarting the server, report diagnostics with an unknown buffer version.
840 let fake_server = fake_servers.next().await.unwrap();
841 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
842 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
843 version: Some(10000),
844 diagnostics: Vec::new(),
845 });
846 cx.foreground().run_until_parked();
847
848 project.update(cx, |project, cx| {
849 project.restart_language_servers_for_buffers([buffer.clone()], cx);
850 });
851 let mut fake_server = fake_servers.next().await.unwrap();
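    // The restarted server receives the document with a fresh version of 0,
    // rather than the unknown version reported before the restart.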
852 let notification = fake_server
853 .receive_notification::<lsp::notification::DidOpenTextDocument>()
854 .await
855 .text_document;
856 assert_eq!(notification.version, 0);
857}
858
859#[gpui::test]
860async fn test_toggling_enable_language_server(
861 deterministic: Arc<Deterministic>,
862 cx: &mut gpui::TestAppContext,
863) {
864 deterministic.forbid_parking();
865
866 let mut rust = Language::new(
867 LanguageConfig {
868 name: Arc::from("Rust"),
869 path_suffixes: vec!["rs".to_string()],
870 ..Default::default()
871 },
872 None,
873 );
874 let mut fake_rust_servers = rust
875 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
876 name: "rust-lsp",
877 ..Default::default()
878 }))
879 .await;
880 let mut js = Language::new(
881 LanguageConfig {
882 name: Arc::from("JavaScript"),
883 path_suffixes: vec!["js".to_string()],
884 ..Default::default()
885 },
886 None,
887 );
888 let mut fake_js_servers = js
889 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
890 name: "js-lsp",
891 ..Default::default()
892 }))
893 .await;
894
895 let fs = FakeFs::new(cx.background());
896 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
897 .await;
898
899 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
900 project.update(cx, |project, _| {
901 project.languages.add(Arc::new(rust));
902 project.languages.add(Arc::new(js));
903 });
904
905 let _rs_buffer = project
906 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
907 .await
908 .unwrap();
909 let _js_buffer = project
910 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
911 .await
912 .unwrap();
913
914 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
915 assert_eq!(
916 fake_rust_server_1
917 .receive_notification::<lsp::notification::DidOpenTextDocument>()
918 .await
919 .text_document
920 .uri
921 .as_str(),
922 "file:///dir/a.rs"
923 );
924
925 let mut fake_js_server = fake_js_servers.next().await.unwrap();
926 assert_eq!(
927 fake_js_server
928 .receive_notification::<lsp::notification::DidOpenTextDocument>()
929 .await
930 .text_document
931 .uri
932 .as_str(),
933 "file:///dir/b.js"
934 );
935
    // Disable the Rust language server, ensuring that only that server is stopped.
937 cx.update(|cx| {
938 cx.update_global(|settings: &mut Settings, _| {
939 settings.language_overrides.insert(
940 Arc::from("Rust"),
941 settings::EditorSettings {
942 enable_language_server: Some(false),
943 ..Default::default()
944 },
945 );
946 })
947 });
948 fake_rust_server_1
949 .receive_notification::<lsp::notification::Exit>()
950 .await;
951
952 // Enable Rust and disable JavaScript language servers, ensuring that the
953 // former gets started again and that the latter stops.
954 cx.update(|cx| {
955 cx.update_global(|settings: &mut Settings, _| {
956 settings.language_overrides.insert(
957 Arc::from("Rust"),
958 settings::EditorSettings {
959 enable_language_server: Some(true),
960 ..Default::default()
961 },
962 );
963 settings.language_overrides.insert(
964 Arc::from("JavaScript"),
965 settings::EditorSettings {
966 enable_language_server: Some(false),
967 ..Default::default()
968 },
969 );
970 })
971 });
972 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
973 assert_eq!(
974 fake_rust_server_2
975 .receive_notification::<lsp::notification::DidOpenTextDocument>()
976 .await
977 .text_document
978 .uri
979 .as_str(),
980 "file:///dir/a.rs"
981 );
982 fake_js_server
983 .receive_notification::<lsp::notification::Exit>()
984 .await;
985}
986
987#[gpui::test]
988async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
989 cx.foreground().forbid_parking();
990
991 let mut language = Language::new(
992 LanguageConfig {
993 name: "Rust".into(),
994 path_suffixes: vec!["rs".to_string()],
995 ..Default::default()
996 },
997 Some(tree_sitter_rust::language()),
998 );
999 let mut fake_servers = language
1000 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1001 disk_based_diagnostics_sources: vec!["disk".into()],
1002 ..Default::default()
1003 }))
1004 .await;
1005
1006 let text = "
1007 fn a() { A }
1008 fn b() { BB }
1009 fn c() { CCC }
1010 "
1011 .unindent();
1012
1013 let fs = FakeFs::new(cx.background());
1014 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1015
1016 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1017 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1018
1019 let buffer = project
1020 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1021 .await
1022 .unwrap();
1023
1024 let mut fake_server = fake_servers.next().await.unwrap();
1025 let open_notification = fake_server
1026 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1027 .await;
1028
    // Edit the buffer, moving its content down.
1030 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1031 let change_notification_1 = fake_server
1032 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1033 .await;
1034 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1035
1036 // Report some diagnostics for the initial version of the buffer
1037 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1038 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1039 version: Some(open_notification.text_document.version),
1040 diagnostics: vec![
1041 lsp::Diagnostic {
1042 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1043 severity: Some(DiagnosticSeverity::ERROR),
1044 message: "undefined variable 'A'".to_string(),
1045 source: Some("disk".to_string()),
1046 ..Default::default()
1047 },
1048 lsp::Diagnostic {
1049 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1050 severity: Some(DiagnosticSeverity::ERROR),
1051 message: "undefined variable 'BB'".to_string(),
1052 source: Some("disk".to_string()),
1053 ..Default::default()
1054 },
1055 lsp::Diagnostic {
1056 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1057 severity: Some(DiagnosticSeverity::ERROR),
1058 source: Some("disk".to_string()),
1059 message: "undefined variable 'CCC'".to_string(),
1060 ..Default::default()
1061 },
1062 ],
1063 });
1064
1065 // The diagnostics have moved down since they were created.
1066 buffer.next_notification(cx).await;
1067 buffer.read_with(cx, |buffer, _| {
1068 assert_eq!(
1069 buffer
1070 .snapshot()
1071 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1072 .collect::<Vec<_>>(),
1073 &[
1074 DiagnosticEntry {
1075 range: Point::new(3, 9)..Point::new(3, 11),
1076 diagnostic: Diagnostic {
1077 severity: DiagnosticSeverity::ERROR,
1078 message: "undefined variable 'BB'".to_string(),
1079 is_disk_based: true,
1080 group_id: 1,
1081 is_primary: true,
1082 ..Default::default()
1083 },
1084 },
1085 DiagnosticEntry {
1086 range: Point::new(4, 9)..Point::new(4, 12),
1087 diagnostic: Diagnostic {
1088 severity: DiagnosticSeverity::ERROR,
1089 message: "undefined variable 'CCC'".to_string(),
1090 is_disk_based: true,
1091 group_id: 2,
1092 is_primary: true,
1093 ..Default::default()
1094 }
1095 }
1096 ]
1097 );
1098 assert_eq!(
1099 chunks_with_diagnostics(buffer, 0..buffer.len()),
1100 [
1101 ("\n\nfn a() { ".to_string(), None),
1102 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1103 (" }\nfn b() { ".to_string(), None),
1104 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1105 (" }\nfn c() { ".to_string(), None),
1106 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1107 (" }\n".to_string(), None),
1108 ]
1109 );
1110 assert_eq!(
1111 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1112 [
1113 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1114 (" }\nfn c() { ".to_string(), None),
1115 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1116 ]
1117 );
1118 });
1119
1120 // Ensure overlapping diagnostics are highlighted correctly.
1121 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1122 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1123 version: Some(open_notification.text_document.version),
1124 diagnostics: vec![
1125 lsp::Diagnostic {
1126 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1127 severity: Some(DiagnosticSeverity::ERROR),
1128 message: "undefined variable 'A'".to_string(),
1129 source: Some("disk".to_string()),
1130 ..Default::default()
1131 },
1132 lsp::Diagnostic {
1133 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1134 severity: Some(DiagnosticSeverity::WARNING),
1135 message: "unreachable statement".to_string(),
1136 source: Some("disk".to_string()),
1137 ..Default::default()
1138 },
1139 ],
1140 });
1141
1142 buffer.next_notification(cx).await;
1143 buffer.read_with(cx, |buffer, _| {
1144 assert_eq!(
1145 buffer
1146 .snapshot()
1147 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1148 .collect::<Vec<_>>(),
1149 &[
1150 DiagnosticEntry {
1151 range: Point::new(2, 9)..Point::new(2, 12),
1152 diagnostic: Diagnostic {
1153 severity: DiagnosticSeverity::WARNING,
1154 message: "unreachable statement".to_string(),
1155 is_disk_based: true,
1156 group_id: 4,
1157 is_primary: true,
1158 ..Default::default()
1159 }
1160 },
1161 DiagnosticEntry {
1162 range: Point::new(2, 9)..Point::new(2, 10),
1163 diagnostic: Diagnostic {
1164 severity: DiagnosticSeverity::ERROR,
1165 message: "undefined variable 'A'".to_string(),
1166 is_disk_based: true,
1167 group_id: 3,
1168 is_primary: true,
1169 ..Default::default()
1170 },
1171 }
1172 ]
1173 );
1174 assert_eq!(
1175 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1176 [
1177 ("fn a() { ".to_string(), None),
1178 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1179 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1180 ("\n".to_string(), None),
1181 ]
1182 );
1183 assert_eq!(
1184 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1185 [
1186 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1187 ("\n".to_string(), None),
1188 ]
1189 );
1190 });
1191
1192 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1193 // changes since the last save.
1194 buffer.update(cx, |buffer, cx| {
1195 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1196 buffer.edit(
1197 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1198 None,
1199 cx,
1200 );
1201 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1202 });
1203 let change_notification_2 = fake_server
1204 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1205 .await;
1206 assert!(
1207 change_notification_2.text_document.version > change_notification_1.text_document.version
1208 );
1209
    // Ensure that diagnostics reported out of order by position are handled correctly.
1211 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1212 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1213 version: Some(change_notification_2.text_document.version),
1214 diagnostics: vec![
1215 lsp::Diagnostic {
1216 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1217 severity: Some(DiagnosticSeverity::ERROR),
1218 message: "undefined variable 'BB'".to_string(),
1219 source: Some("disk".to_string()),
1220 ..Default::default()
1221 },
1222 lsp::Diagnostic {
1223 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1224 severity: Some(DiagnosticSeverity::WARNING),
1225 message: "undefined variable 'A'".to_string(),
1226 source: Some("disk".to_string()),
1227 ..Default::default()
1228 },
1229 ],
1230 });
1231
1232 buffer.next_notification(cx).await;
1233 buffer.read_with(cx, |buffer, _| {
1234 assert_eq!(
1235 buffer
1236 .snapshot()
1237 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1238 .collect::<Vec<_>>(),
1239 &[
1240 DiagnosticEntry {
1241 range: Point::new(2, 21)..Point::new(2, 22),
1242 diagnostic: Diagnostic {
1243 severity: DiagnosticSeverity::WARNING,
1244 message: "undefined variable 'A'".to_string(),
1245 is_disk_based: true,
1246 group_id: 6,
1247 is_primary: true,
1248 ..Default::default()
1249 }
1250 },
1251 DiagnosticEntry {
1252 range: Point::new(3, 9)..Point::new(3, 14),
1253 diagnostic: Diagnostic {
1254 severity: DiagnosticSeverity::ERROR,
1255 message: "undefined variable 'BB'".to_string(),
1256 is_disk_based: true,
1257 group_id: 5,
1258 is_primary: true,
1259 ..Default::default()
1260 },
1261 }
1262 ]
1263 );
1264 });
1265}
1266
1267#[gpui::test]
1268async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1269 cx.foreground().forbid_parking();
1270
1271 let text = concat!(
1272 "let one = ;\n", //
1273 "let two = \n",
1274 "let three = 3;\n",
1275 );
1276
1277 let fs = FakeFs::new(cx.background());
1278 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1279
1280 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1281 let buffer = project
1282 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1283 .await
1284 .unwrap();
1285
1286 project.update(cx, |project, cx| {
1287 project
1288 .update_buffer_diagnostics(
1289 &buffer,
1290 vec![
1291 DiagnosticEntry {
1292 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1293 diagnostic: Diagnostic {
1294 severity: DiagnosticSeverity::ERROR,
1295 message: "syntax error 1".to_string(),
1296 ..Default::default()
1297 },
1298 },
1299 DiagnosticEntry {
1300 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1301 diagnostic: Diagnostic {
1302 severity: DiagnosticSeverity::ERROR,
1303 message: "syntax error 2".to_string(),
1304 ..Default::default()
1305 },
1306 },
1307 ],
1308 None,
1309 cx,
1310 )
1311 .unwrap();
1312 });
1313
1314 // An empty range is extended forward to include the following character.
1315 // At the end of a line, an empty range is extended backward to include
1316 // the preceding character.
1317 buffer.read_with(cx, |buffer, _| {
1318 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1319 assert_eq!(
1320 chunks
1321 .iter()
1322 .map(|(s, d)| (s.as_str(), *d))
1323 .collect::<Vec<_>>(),
1324 &[
1325 ("let one = ", None),
1326 (";", Some(DiagnosticSeverity::ERROR)),
1327 ("\nlet two =", None),
1328 (" ", Some(DiagnosticSeverity::ERROR)),
1329 ("\nlet three = 3;\n", None)
1330 ]
1331 );
1332 });
1333}
1334
1335#[gpui::test]
1336async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
1337 cx.foreground().forbid_parking();
1338
1339 let mut language = Language::new(
1340 LanguageConfig {
1341 name: "Rust".into(),
1342 path_suffixes: vec!["rs".to_string()],
1343 ..Default::default()
1344 },
1345 Some(tree_sitter_rust::language()),
1346 );
1347 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1348
1349 let text = "
1350 fn a() {
1351 f1();
1352 }
1353 fn b() {
1354 f2();
1355 }
1356 fn c() {
1357 f3();
1358 }
1359 "
1360 .unindent();
1361
1362 let fs = FakeFs::new(cx.background());
1363 fs.insert_tree(
1364 "/dir",
1365 json!({
1366 "a.rs": text.clone(),
1367 }),
1368 )
1369 .await;
1370
1371 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1372 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1373 let buffer = project
1374 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1375 .await
1376 .unwrap();
1377
1378 let mut fake_server = fake_servers.next().await.unwrap();
1379 let lsp_document_version = fake_server
1380 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1381 .await
1382 .text_document
1383 .version;
1384
1385 // Simulate editing the buffer after the language server computes some edits.
1386 buffer.update(cx, |buffer, cx| {
1387 buffer.edit(
1388 [(
1389 Point::new(0, 0)..Point::new(0, 0),
1390 "// above first function\n",
1391 )],
1392 None,
1393 cx,
1394 );
1395 buffer.edit(
1396 [(
1397 Point::new(2, 0)..Point::new(2, 0),
1398 " // inside first function\n",
1399 )],
1400 None,
1401 cx,
1402 );
1403 buffer.edit(
1404 [(
1405 Point::new(6, 4)..Point::new(6, 4),
1406 "// inside second function ",
1407 )],
1408 None,
1409 cx,
1410 );
1411
1412 assert_eq!(
1413 buffer.text(),
1414 "
1415 // above first function
1416 fn a() {
1417 // inside first function
1418 f1();
1419 }
1420 fn b() {
1421 // inside second function f2();
1422 }
1423 fn c() {
1424 f3();
1425 }
1426 "
1427 .unindent()
1428 );
1429 });
1430
1431 let edits = project
1432 .update(cx, |project, cx| {
1433 project.edits_from_lsp(
1434 &buffer,
1435 vec![
1436 // replace body of first function
1437 lsp::TextEdit {
1438 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1439 new_text: "
1440 fn a() {
1441 f10();
1442 }
1443 "
1444 .unindent(),
1445 },
1446 // edit inside second function
1447 lsp::TextEdit {
1448 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1449 new_text: "00".into(),
1450 },
1451 // edit inside third function via two distinct edits
1452 lsp::TextEdit {
1453 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1454 new_text: "4000".into(),
1455 },
1456 lsp::TextEdit {
1457 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1458 new_text: "".into(),
1459 },
1460 ],
1461 Some(lsp_document_version),
1462 cx,
1463 )
1464 })
1465 .await
1466 .unwrap();
1467
1468 buffer.update(cx, |buffer, cx| {
1469 for (range, new_text) in edits {
1470 buffer.edit([(range, new_text)], None, cx);
1471 }
1472 assert_eq!(
1473 buffer.text(),
1474 "
1475 // above first function
1476 fn a() {
1477 // inside first function
1478 f10();
1479 }
1480 fn b() {
1481 // inside second function f200();
1482 }
1483 fn c() {
1484 f4000();
1485 }
1486 "
1487 .unindent()
1488 );
1489 });
1490}
1491
1492#[gpui::test]
1493async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1494 cx.foreground().forbid_parking();
1495
1496 let text = "
1497 use a::b;
1498 use a::c;
1499
1500 fn f() {
1501 b();
1502 c();
1503 }
1504 "
1505 .unindent();
1506
1507 let fs = FakeFs::new(cx.background());
1508 fs.insert_tree(
1509 "/dir",
1510 json!({
1511 "a.rs": text.clone(),
1512 }),
1513 )
1514 .await;
1515
1516 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1517 let buffer = project
1518 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1519 .await
1520 .unwrap();
1521
1522 // Simulate the language server sending us a small edit in the form of a very large diff.
1523 // Rust-analyzer does this when performing a merge-imports code action.
1524 let edits = project
1525 .update(cx, |project, cx| {
1526 project.edits_from_lsp(
1527 &buffer,
1528 [
1529 // Replace the first use statement without editing the semicolon.
1530 lsp::TextEdit {
1531 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
1532 new_text: "a::{b, c}".into(),
1533 },
1534 // Reinsert the remainder of the file between the semicolon and the final
1535 // newline of the file.
1536 lsp::TextEdit {
1537 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1538 new_text: "\n\n".into(),
1539 },
1540 lsp::TextEdit {
1541 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1542 new_text: "
1543 fn f() {
1544 b();
1545 c();
1546 }"
1547 .unindent(),
1548 },
1549 // Delete everything after the first newline of the file.
1550 lsp::TextEdit {
1551 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1552 new_text: "".into(),
1553 },
1554 ],
1555 None,
1556 cx,
1557 )
1558 })
1559 .await
1560 .unwrap();
1561
1562 buffer.update(cx, |buffer, cx| {
1563 let edits = edits
1564 .into_iter()
1565 .map(|(range, text)| {
1566 (
1567 range.start.to_point(buffer)..range.end.to_point(buffer),
1568 text,
1569 )
1570 })
1571 .collect::<Vec<_>>();
1572
1573 assert_eq!(
1574 edits,
1575 [
1576 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1577 (Point::new(1, 0)..Point::new(2, 0), "".into())
1578 ]
1579 );
1580
1581 for (range, new_text) in edits {
1582 buffer.edit([(range, new_text)], None, cx);
1583 }
1584 assert_eq!(
1585 buffer.text(),
1586 "
1587 use a::{b, c};
1588
1589 fn f() {
1590 b();
1591 c();
1592 }
1593 "
1594 .unindent()
1595 );
1596 });
1597}
1598
1599#[gpui::test]
1600async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
1601 cx.foreground().forbid_parking();
1602
1603 let text = "
1604 use a::b;
1605 use a::c;
1606
1607 fn f() {
1608 b();
1609 c();
1610 }
1611 "
1612 .unindent();
1613
1614 let fs = FakeFs::new(cx.background());
1615 fs.insert_tree(
1616 "/dir",
1617 json!({
1618 "a.rs": text.clone(),
1619 }),
1620 )
1621 .await;
1622
1623 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1624 let buffer = project
1625 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1626 .await
1627 .unwrap();
1628
1629 // Simulate the language server sending us edits in a non-ordered fashion,
1630 // with ranges sometimes being inverted or pointing to invalid locations.
1631 let edits = project
1632 .update(cx, |project, cx| {
1633 project.edits_from_lsp(
1634 &buffer,
1635 [
1636 lsp::TextEdit {
1637 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1638 new_text: "\n\n".into(),
1639 },
1640 lsp::TextEdit {
1641 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
1642 new_text: "a::{b, c}".into(),
1643 },
1644 lsp::TextEdit {
1645 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
1646 new_text: "".into(),
1647 },
1648 lsp::TextEdit {
1649 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1650 new_text: "
1651 fn f() {
1652 b();
1653 c();
1654 }"
1655 .unindent(),
1656 },
1657 ],
1658 None,
1659 cx,
1660 )
1661 })
1662 .await
1663 .unwrap();
1664
1665 buffer.update(cx, |buffer, cx| {
1666 let edits = edits
1667 .into_iter()
1668 .map(|(range, text)| {
1669 (
1670 range.start.to_point(buffer)..range.end.to_point(buffer),
1671 text,
1672 )
1673 })
1674 .collect::<Vec<_>>();
1675
1676 assert_eq!(
1677 edits,
1678 [
1679 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1680 (Point::new(1, 0)..Point::new(2, 0), "".into())
1681 ]
1682 );
1683
1684 for (range, new_text) in edits {
1685 buffer.edit([(range, new_text)], None, cx);
1686 }
1687 assert_eq!(
1688 buffer.text(),
1689 "
1690 use a::{b, c};
1691
1692 fn f() {
1693 b();
1694 c();
1695 }
1696 "
1697 .unindent()
1698 );
1699 });
1700}
1701
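// Returns the chunks of text in `range`, coalescing adjacent chunks that have the
// same diagnostic severity, so that tests can assert on (text, severity) runs.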
1702fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
1703 buffer: &Buffer,
1704 range: Range<T>,
1705) -> Vec<(String, Option<DiagnosticSeverity>)> {
1706 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
1707 for chunk in buffer.snapshot().chunks(range, true) {
1708 if chunks.last().map_or(false, |prev_chunk| {
1709 prev_chunk.1 == chunk.diagnostic_severity
1710 }) {
1711 chunks.last_mut().unwrap().0.push_str(chunk.text);
1712 } else {
1713 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
1714 }
1715 }
1716 chunks
1717}
1718
1719#[gpui::test(iterations = 10)]
1720async fn test_definition(cx: &mut gpui::TestAppContext) {
1721 let mut language = Language::new(
1722 LanguageConfig {
1723 name: "Rust".into(),
1724 path_suffixes: vec!["rs".to_string()],
1725 ..Default::default()
1726 },
1727 Some(tree_sitter_rust::language()),
1728 );
1729 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1730
1731 let fs = FakeFs::new(cx.background());
1732 fs.insert_tree(
1733 "/dir",
1734 json!({
1735 "a.rs": "const fn a() { A }",
1736 "b.rs": "const y: i32 = crate::a()",
1737 }),
1738 )
1739 .await;
1740
1741 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
1742 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1743
1744 let buffer = project
1745 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1746 .await
1747 .unwrap();
1748
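    // The fake server resolves the definition of the reference in b.rs to the
    // function defined in a.rs, which lies outside the project's worktree.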
1749 let fake_server = fake_servers.next().await.unwrap();
1750 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
1751 let params = params.text_document_position_params;
1752 assert_eq!(
1753 params.text_document.uri.to_file_path().unwrap(),
1754 Path::new("/dir/b.rs"),
1755 );
1756 assert_eq!(params.position, lsp::Position::new(0, 22));
1757
1758 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
1759 lsp::Location::new(
1760 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1761 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1762 ),
1763 )))
1764 });
1765
1766 let mut definitions = project
1767 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
1768 .await
1769 .unwrap();
1770
    // Assert that no additional language server was started.
1772 cx.foreground().run_until_parked();
1773 assert!(fake_servers.try_next().is_err());
1774
1775 assert_eq!(definitions.len(), 1);
1776 let definition = definitions.pop().unwrap();
1777 cx.update(|cx| {
1778 let target_buffer = definition.target.buffer.read(cx);
1779 assert_eq!(
1780 target_buffer
1781 .file()
1782 .unwrap()
1783 .as_local()
1784 .unwrap()
1785 .abs_path(cx),
1786 Path::new("/dir/a.rs"),
1787 );
1788 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
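
        // Resolving the definition created an invisible worktree for a.rs; it
        // should be released once the definition is dropped.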
1789 assert_eq!(
1790 list_worktrees(&project, cx),
1791 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
1792 );
1793
1794 drop(definition);
1795 });
1796 cx.read(|cx| {
1797 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
1798 });
1799
1800 fn list_worktrees<'a>(
1801 project: &'a ModelHandle<Project>,
1802 cx: &'a AppContext,
1803 ) -> Vec<(&'a Path, bool)> {
1804 project
1805 .read(cx)
1806 .worktrees(cx)
1807 .map(|worktree| {
1808 let worktree = worktree.read(cx);
1809 (
1810 worktree.as_local().unwrap().abs_path().as_ref(),
1811 worktree.is_visible(),
1812 )
1813 })
1814 .collect::<Vec<_>>()
1815 }
1816}
1817
1818#[gpui::test]
1819async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
1820 let mut language = Language::new(
1821 LanguageConfig {
1822 name: "TypeScript".into(),
1823 path_suffixes: vec!["ts".to_string()],
1824 ..Default::default()
1825 },
1826 Some(tree_sitter_typescript::language_typescript()),
1827 );
1828 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1829
1830 let fs = FakeFs::new(cx.background());
1831 fs.insert_tree(
1832 "/dir",
1833 json!({
1834 "a.ts": "",
1835 }),
1836 )
1837 .await;
1838
1839 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1840 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1841 let buffer = project
1842 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1843 .await
1844 .unwrap();
1845
1846 let fake_server = fake_language_servers.next().await.unwrap();
1847
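    // The completion items below carry no `textEdit` ranges, so the old range is
    // inferred from the text surrounding the cursor, and the new text comes from
    // `insert_text`, falling back to the item's label.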
1848 let text = "let a = b.fqn";
1849 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1850 let completions = project.update(cx, |project, cx| {
1851 project.completions(&buffer, text.len(), cx)
1852 });
1853
1854 fake_server
1855 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1856 Ok(Some(lsp::CompletionResponse::Array(vec![
1857 lsp::CompletionItem {
1858 label: "fullyQualifiedName?".into(),
1859 insert_text: Some("fullyQualifiedName".into()),
1860 ..Default::default()
1861 },
1862 ])))
1863 })
1864 .next()
1865 .await;
1866 let completions = completions.await.unwrap();
1867 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
1868 assert_eq!(completions.len(), 1);
1869 assert_eq!(completions[0].new_text, "fullyQualifiedName");
1870 assert_eq!(
1871 completions[0].old_range.to_offset(&snapshot),
1872 text.len() - 3..text.len()
1873 );
1874
1875 let text = "let a = \"atoms/cmp\"";
1876 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1877 let completions = project.update(cx, |project, cx| {
1878 project.completions(&buffer, text.len() - 1, cx)
1879 });
1880
1881 fake_server
1882 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1883 Ok(Some(lsp::CompletionResponse::Array(vec![
1884 lsp::CompletionItem {
1885 label: "component".into(),
1886 ..Default::default()
1887 },
1888 ])))
1889 })
1890 .next()
1891 .await;
1892 let completions = completions.await.unwrap();
1893 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
1894 assert_eq!(completions.len(), 1);
1895 assert_eq!(completions[0].new_text, "component");
1896 assert_eq!(
1897 completions[0].old_range.to_offset(&snapshot),
1898 text.len() - 4..text.len() - 1
1899 );
1900}
1901
1902#[gpui::test]
1903async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
1904 let mut language = Language::new(
1905 LanguageConfig {
1906 name: "TypeScript".into(),
1907 path_suffixes: vec!["ts".to_string()],
1908 ..Default::default()
1909 },
1910 Some(tree_sitter_typescript::language_typescript()),
1911 );
1912 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1913
1914 let fs = FakeFs::new(cx.background());
1915 fs.insert_tree(
1916 "/dir",
1917 json!({
1918 "a.ts": "",
1919 }),
1920 )
1921 .await;
1922
1923 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1924 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1925 let buffer = project
1926 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1927 .await
1928 .unwrap();
1929
1930 let fake_server = fake_language_servers.next().await.unwrap();
1931
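    // Carriage returns in the server-provided insert text are normalized to
    // newlines in the resulting completion's `new_text`.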
1932 let text = "let a = b.fqn";
1933 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1934 let completions = project.update(cx, |project, cx| {
1935 project.completions(&buffer, text.len(), cx)
1936 });
1937
1938 fake_server
1939 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1940 Ok(Some(lsp::CompletionResponse::Array(vec![
1941 lsp::CompletionItem {
1942 label: "fullyQualifiedName?".into(),
1943 insert_text: Some("fully\rQualified\r\nName".into()),
1944 ..Default::default()
1945 },
1946 ])))
1947 })
1948 .next()
1949 .await;
1950 let completions = completions.await.unwrap();
1951 assert_eq!(completions.len(), 1);
1952 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
1953}
1954
1955#[gpui::test(iterations = 10)]
1956async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
1957 let mut language = Language::new(
1958 LanguageConfig {
1959 name: "TypeScript".into(),
1960 path_suffixes: vec!["ts".to_string()],
1961 ..Default::default()
1962 },
1963 None,
1964 );
1965 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1966
1967 let fs = FakeFs::new(cx.background());
1968 fs.insert_tree(
1969 "/dir",
1970 json!({
1971 "a.ts": "a",
1972 }),
1973 )
1974 .await;
1975
1976 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1977 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1978 let buffer = project
1979 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1980 .await
1981 .unwrap();
1982
1983 let fake_server = fake_language_servers.next().await.unwrap();
1984
    // The language server returns code actions that contain commands but no edits.
1986 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
1987 fake_server
1988 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
1989 Ok(Some(vec![
1990 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
1991 title: "The code action".into(),
1992 command: Some(lsp::Command {
1993 title: "The command".into(),
1994 command: "_the/command".into(),
1995 arguments: Some(vec![json!("the-argument")]),
1996 }),
1997 ..Default::default()
1998 }),
1999 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2000 title: "two".into(),
2001 ..Default::default()
2002 }),
2003 ]))
2004 })
2005 .next()
2006 .await;
2007
2008 let action = actions.await.unwrap()[0].clone();
2009 let apply = project.update(cx, |project, cx| {
2010 project.apply_code_action(buffer.clone(), action, true, cx)
2011 });
2012
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the given command.
2015 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2016 |action, _| async move { Ok(action) },
2017 );
2018
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2021 fake_server
2022 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2023 let fake = fake_server.clone();
2024 move |params, _| {
2025 assert_eq!(params.command, "_the/command");
2026 let fake = fake.clone();
2027 async move {
2028 fake.server
2029 .request::<lsp::request::ApplyWorkspaceEdit>(
2030 lsp::ApplyWorkspaceEditParams {
2031 label: None,
2032 edit: lsp::WorkspaceEdit {
2033 changes: Some(
2034 [(
2035 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2036 vec![lsp::TextEdit {
2037 range: lsp::Range::new(
2038 lsp::Position::new(0, 0),
2039 lsp::Position::new(0, 0),
2040 ),
2041 new_text: "X".into(),
2042 }],
2043 )]
2044 .into_iter()
2045 .collect(),
2046 ),
2047 ..Default::default()
2048 },
2049 },
2050 )
2051 .await
2052 .unwrap();
2053 Ok(Some(json!(null)))
2054 }
2055 }
2056 })
2057 .next()
2058 .await;
2059
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2062 let transaction = apply.await.unwrap();
2063 assert!(transaction.0.contains_key(&buffer));
2064 buffer.update(cx, |buffer, cx| {
2065 assert_eq!(buffer.text(), "Xa");
2066 buffer.undo(cx);
2067 assert_eq!(buffer.text(), "a");
2068 });
2069}
2070
2071#[gpui::test]
2072async fn test_save_file(cx: &mut gpui::TestAppContext) {
2073 let fs = FakeFs::new(cx.background());
2074 fs.insert_tree(
2075 "/dir",
2076 json!({
2077 "file1": "the old contents",
2078 }),
2079 )
2080 .await;
2081
2082 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2083 let buffer = project
2084 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2085 .await
2086 .unwrap();
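    // Edit the buffer so that its contents no longer match the file on disk.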
2087 buffer.update(cx, |buffer, cx| {
2088 assert_eq!(buffer.text(), "the old contents");
2089 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2090 });
2091
2092 project
2093 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2094 .await
2095 .unwrap();
2096
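    // After saving, the file on disk matches the buffer's contents.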
2097 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2098 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2099}
2100
2101#[gpui::test]
2102async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2103 let fs = FakeFs::new(cx.background());
2104 fs.insert_tree(
2105 "/dir",
2106 json!({
2107 "file1": "the old contents",
2108 }),
2109 )
2110 .await;
2111
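    // Create a project whose worktree root is a single file rather than a directory.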
2112 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2113 let buffer = project
2114 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2115 .await
2116 .unwrap();
2117 buffer.update(cx, |buffer, cx| {
2118 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2119 });
2120
2121 project
2122 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2123 .await
2124 .unwrap();
2125
2126 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2127 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2128}
2129
2130#[gpui::test]
2131async fn test_save_as(cx: &mut gpui::TestAppContext) {
2132 let fs = FakeFs::new(cx.background());
2133 fs.insert_tree("/dir", json!({})).await;
2134
2135 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2136
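    // Register the Rust language so that, after saving, the buffer's language can be detected from its new path.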
2137 let languages = project.read_with(cx, |project, _| project.languages().clone());
2138 languages.register(
2139 "/some/path",
2140 LanguageConfig {
2141 name: "Rust".into(),
2142 path_suffixes: vec!["rs".into()],
2143 ..Default::default()
2144 },
2145 tree_sitter_rust::language(),
2146 None,
2147 |_| Default::default(),
2148 );
2149
2150 let buffer = project.update(cx, |project, cx| {
2151 project.create_buffer("", None, cx).unwrap()
2152 });
2153 buffer.update(cx, |buffer, cx| {
2154 buffer.edit([(0..0, "abc")], None, cx);
2155 assert!(buffer.is_dirty());
2156 assert!(!buffer.has_conflict());
2157 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2158 });
2159 project
2160 .update(cx, |project, cx| {
2161 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2162 })
2163 .await
2164 .unwrap();
2165 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2166
2167 cx.foreground().run_until_parked();
2168 buffer.read_with(cx, |buffer, cx| {
2169 assert_eq!(
2170 buffer.file().unwrap().full_path(cx),
2171 Path::new("dir/file1.rs")
2172 );
2173 assert!(!buffer.is_dirty());
2174 assert!(!buffer.has_conflict());
2175 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2176 });
2177
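    // Opening the buffer's new path returns the same buffer.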
2178 let opened_buffer = project
2179 .update(cx, |project, cx| {
2180 project.open_local_buffer("/dir/file1.rs", cx)
2181 })
2182 .await
2183 .unwrap();
2184 assert_eq!(opened_buffer, buffer);
2185}
2186
2187#[gpui::test(retries = 5)]
2188async fn test_rescan_and_remote_updates(
2189 deterministic: Arc<Deterministic>,
2190 cx: &mut gpui::TestAppContext,
2191) {
2192 let dir = temp_tree(json!({
2193 "a": {
2194 "file1": "",
2195 "file2": "",
2196 "file3": "",
2197 },
2198 "b": {
2199 "c": {
2200 "file4": "",
2201 "file5": "",
2202 }
2203 }
2204 }));
2205
2206 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2207 let rpc = project.read_with(cx, |p, _| p.client.clone());
2208
2209 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2210 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2211 async move { buffer.await.unwrap() }
2212 };
2213 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2214 project.read_with(cx, |project, cx| {
2215 let tree = project.worktrees(cx).next().unwrap();
2216 tree.read(cx)
2217 .entry_for_path(path)
2218 .unwrap_or_else(|| panic!("no entry for path {}", path))
2219 .id
2220 })
2221 };
2222
2223 let buffer2 = buffer_for_path("a/file2", cx).await;
2224 let buffer3 = buffer_for_path("a/file3", cx).await;
2225 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2226 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2227
2228 let file2_id = id_for_path("a/file2", cx);
2229 let file3_id = id_for_path("a/file3", cx);
2230 let file4_id = id_for_path("b/c/file4", cx);
2231
2232 // Create a remote copy of this worktree.
2233 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2234 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
2235 let remote = cx.update(|cx| {
2236 Worktree::remote(
2237 1,
2238 1,
2239 proto::WorktreeMetadata {
2240 id: initial_snapshot.id().to_proto(),
2241 root_name: initial_snapshot.root_name().into(),
2242 abs_path: initial_snapshot
2243 .abs_path()
2244 .as_os_str()
2245 .to_string_lossy()
2246 .into(),
2247 visible: true,
2248 },
2249 rpc.clone(),
2250 cx,
2251 )
2252 });
2253 remote.update(cx, |remote, _| {
2254 let update = initial_snapshot.build_initial_update(1);
2255 remote.as_remote_mut().unwrap().update_from_remote(update);
2256 });
2257 deterministic.run_until_parked();
2258
2259 cx.read(|cx| {
2260 assert!(!buffer2.read(cx).is_dirty());
2261 assert!(!buffer3.read(cx).is_dirty());
2262 assert!(!buffer4.read(cx).is_dirty());
2263 assert!(!buffer5.read(cx).is_dirty());
2264 });
2265
2266 // Rename and delete files and directories.
2267 tree.flush_fs_events(cx).await;
2268 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2269 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2270 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2271 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2272 tree.flush_fs_events(cx).await;
2273
2274 let expected_paths = vec![
2275 "a",
2276 "a/file1",
2277 "a/file2.new",
2278 "b",
2279 "d",
2280 "d/file3",
2281 "d/file4",
2282 ];
2283
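    // After the rescan, entry ids are preserved across renames, open buffers track
    // their files' new paths, and the buffer for the deleted file is marked as deleted.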
2284 cx.read(|app| {
2285 assert_eq!(
2286 tree.read(app)
2287 .paths()
2288 .map(|p| p.to_str().unwrap())
2289 .collect::<Vec<_>>(),
2290 expected_paths
2291 );
2292
2293 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2294 assert_eq!(id_for_path("d/file3", cx), file3_id);
2295 assert_eq!(id_for_path("d/file4", cx), file4_id);
2296
2297 assert_eq!(
2298 buffer2.read(app).file().unwrap().path().as_ref(),
2299 Path::new("a/file2.new")
2300 );
2301 assert_eq!(
2302 buffer3.read(app).file().unwrap().path().as_ref(),
2303 Path::new("d/file3")
2304 );
2305 assert_eq!(
2306 buffer4.read(app).file().unwrap().path().as_ref(),
2307 Path::new("d/file4")
2308 );
2309 assert_eq!(
2310 buffer5.read(app).file().unwrap().path().as_ref(),
2311 Path::new("b/c/file5")
2312 );
2313
2314 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2315 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2316 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2317 assert!(buffer5.read(app).file().unwrap().is_deleted());
2318 });
2319
2320 // Update the remote worktree. Check that it becomes consistent with the
2321 // local worktree.
2322 remote.update(cx, |remote, cx| {
2323 let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
2324 &initial_snapshot,
2325 1,
2326 1,
2327 true,
2328 );
2329 remote.as_remote_mut().unwrap().update_from_remote(update);
2330 });
2331 deterministic.run_until_parked();
2332 remote.read_with(cx, |remote, _| {
2333 assert_eq!(
2334 remote
2335 .paths()
2336 .map(|p| p.to_str().unwrap())
2337 .collect::<Vec<_>>(),
2338 expected_paths
2339 );
2340 });
2341}
2342
2343#[gpui::test(iterations = 10)]
2344async fn test_buffer_identity_across_renames(
2345 deterministic: Arc<Deterministic>,
2346 cx: &mut gpui::TestAppContext,
2347) {
2348 let fs = FakeFs::new(cx.background());
2349 fs.insert_tree(
2350 "/dir",
2351 json!({
2352 "a": {
2353 "file1": "",
2354 }
2355 }),
2356 )
2357 .await;
2358
2359 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2360 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2361 let tree_id = tree.read_with(cx, |tree, _| tree.id());
2362
2363 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2364 project.read_with(cx, |project, cx| {
2365 let tree = project.worktrees(cx).next().unwrap();
2366 tree.read(cx)
2367 .entry_for_path(path)
2368 .unwrap_or_else(|| panic!("no entry for path {}", path))
2369 .id
2370 })
2371 };
2372
2373 let dir_id = id_for_path("a", cx);
2374 let file_id = id_for_path("a/file1", cx);
2375 let buffer = project
2376 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2377 .await
2378 .unwrap();
2379 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2380
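    // Rename the parent directory. The directory's and file's entry ids are preserved,
    // and the open buffer is unaffected.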
2381 project
2382 .update(cx, |project, cx| {
2383 project.rename_entry(dir_id, Path::new("b"), cx)
2384 })
2385 .unwrap()
2386 .await
2387 .unwrap();
2388 deterministic.run_until_parked();
2389 assert_eq!(id_for_path("b", cx), dir_id);
2390 assert_eq!(id_for_path("b/file1", cx), file_id);
2391 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2392}
2393
2394#[gpui::test]
2395async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2396 let fs = FakeFs::new(cx.background());
2397 fs.insert_tree(
2398 "/dir",
2399 json!({
2400 "a.txt": "a-contents",
2401 "b.txt": "b-contents",
2402 }),
2403 )
2404 .await;
2405
2406 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2407
2408 // Spawn multiple tasks to open paths, repeating some paths.
2409 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2410 (
2411 p.open_local_buffer("/dir/a.txt", cx),
2412 p.open_local_buffer("/dir/b.txt", cx),
2413 p.open_local_buffer("/dir/a.txt", cx),
2414 )
2415 });
2416
2417 let buffer_a_1 = buffer_a_1.await.unwrap();
2418 let buffer_a_2 = buffer_a_2.await.unwrap();
2419 let buffer_b = buffer_b.await.unwrap();
2420 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2421 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2422
2423 // There is only one buffer per path.
2424 let buffer_a_id = buffer_a_1.id();
2425 assert_eq!(buffer_a_2.id(), buffer_a_id);
2426
2427 // Open the same path again while it is still open.
2428 drop(buffer_a_1);
2429 let buffer_a_3 = project
2430 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2431 .await
2432 .unwrap();
2433
2434 // There's still only one buffer per path.
2435 assert_eq!(buffer_a_3.id(), buffer_a_id);
2436}
2437
2438#[gpui::test]
2439async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2440 let fs = FakeFs::new(cx.background());
2441 fs.insert_tree(
2442 "/dir",
2443 json!({
2444 "file1": "abc",
2445 "file2": "def",
2446 "file3": "ghi",
2447 }),
2448 )
2449 .await;
2450
2451 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2452
2453 let buffer1 = project
2454 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2455 .await
2456 .unwrap();
2457 let events = Rc::new(RefCell::new(Vec::new()));
2458
    // Initially, the buffer isn't dirty.
2460 buffer1.update(cx, |buffer, cx| {
2461 cx.subscribe(&buffer1, {
2462 let events = events.clone();
2463 move |_, _, event, _| match event {
2464 BufferEvent::Operation(_) => {}
2465 _ => events.borrow_mut().push(event.clone()),
2466 }
2467 })
2468 .detach();
2469
2470 assert!(!buffer.is_dirty());
2471 assert!(events.borrow().is_empty());
2472
2473 buffer.edit([(1..2, "")], None, cx);
2474 });
2475
    // After the first edit, the buffer is dirty, and emits a `DirtyChanged` event.
    buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "ac");
2479 assert!(buffer.is_dirty());
2480 assert_eq!(
2481 *events.borrow(),
2482 &[language::Event::Edited, language::Event::DirtyChanged]
2483 );
2484 events.borrow_mut().clear();
2485 buffer.did_save(
2486 buffer.version(),
2487 buffer.as_rope().fingerprint(),
2488 buffer.file().unwrap().mtime(),
2489 cx,
2490 );
2491 });
2492
    // After saving, the buffer is no longer dirty, and emits a `Saved` event.
2494 buffer1.update(cx, |buffer, cx| {
2495 assert!(!buffer.is_dirty());
2496 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2497 events.borrow_mut().clear();
2498
2499 buffer.edit([(1..1, "B")], None, cx);
2500 buffer.edit([(2..2, "D")], None, cx);
2501 });
2502
    // After editing again, the buffer is dirty, and emits another `DirtyChanged` event.
    buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "aBDc");
2506 assert!(buffer.is_dirty());
2507 assert_eq!(
2508 *events.borrow(),
2509 &[
2510 language::Event::Edited,
2511 language::Event::DirtyChanged,
2512 language::Event::Edited,
2513 ],
2514 );
2515 events.borrow_mut().clear();
2516
2517 // After restoring the buffer to its previously-saved state,
2518 // the buffer is not considered dirty anymore.
2519 buffer.edit([(1..3, "")], None, cx);
        assert_eq!(buffer.text(), "ac");
2521 assert!(!buffer.is_dirty());
2522 });
2523
2524 assert_eq!(
2525 *events.borrow(),
2526 &[language::Event::Edited, language::Event::DirtyChanged]
2527 );
2528
2529 // When a file is deleted, the buffer is considered dirty.
2530 let events = Rc::new(RefCell::new(Vec::new()));
2531 let buffer2 = project
2532 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2533 .await
2534 .unwrap();
2535 buffer2.update(cx, |_, cx| {
2536 cx.subscribe(&buffer2, {
2537 let events = events.clone();
2538 move |_, _, event, _| events.borrow_mut().push(event.clone())
2539 })
2540 .detach();
2541 });
2542
2543 fs.remove_file("/dir/file2".as_ref(), Default::default())
2544 .await
2545 .unwrap();
2546 cx.foreground().run_until_parked();
2547 buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
2548 assert_eq!(
2549 *events.borrow(),
2550 &[
2551 language::Event::DirtyChanged,
2552 language::Event::FileHandleChanged
2553 ]
2554 );
2555
    // When a buffer is already dirty when its file is deleted, no `DirtyChanged` event is emitted.
2557 let events = Rc::new(RefCell::new(Vec::new()));
2558 let buffer3 = project
2559 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2560 .await
2561 .unwrap();
2562 buffer3.update(cx, |_, cx| {
2563 cx.subscribe(&buffer3, {
2564 let events = events.clone();
2565 move |_, _, event, _| events.borrow_mut().push(event.clone())
2566 })
2567 .detach();
2568 });
2569
2570 buffer3.update(cx, |buffer, cx| {
2571 buffer.edit([(0..0, "x")], None, cx);
2572 });
2573 events.borrow_mut().clear();
2574 fs.remove_file("/dir/file3".as_ref(), Default::default())
2575 .await
2576 .unwrap();
2577 cx.foreground().run_until_parked();
2578 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2579 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2580}
2581
2582#[gpui::test]
2583async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
2584 let initial_contents = "aaa\nbbbbb\nc\n";
2585 let fs = FakeFs::new(cx.background());
2586 fs.insert_tree(
2587 "/dir",
2588 json!({
2589 "the-file": initial_contents,
2590 }),
2591 )
2592 .await;
2593 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2594 let buffer = project
2595 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
2596 .await
2597 .unwrap();
2598
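    // Create some anchors whose positions should be maintained when the buffer is
    // reloaded from disk.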
2599 let anchors = (0..3)
2600 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
2601 .collect::<Vec<_>>();
2602
2603 // Change the file on disk, adding two new lines of text, and removing
2604 // one line.
2605 buffer.read_with(cx, |buffer, _| {
2606 assert!(!buffer.is_dirty());
2607 assert!(!buffer.has_conflict());
2608 });
2609 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
2610 fs.save(
2611 "/dir/the-file".as_ref(),
2612 &new_contents.into(),
2613 LineEnding::Unix,
2614 )
2615 .await
2616 .unwrap();
2617
2618 // Because the buffer was not modified, it is reloaded from disk. Its
2619 // contents are edited according to the diff between the old and new
2620 // file contents.
2621 cx.foreground().run_until_parked();
2622 buffer.update(cx, |buffer, _| {
2623 assert_eq!(buffer.text(), new_contents);
2624 assert!(!buffer.is_dirty());
2625 assert!(!buffer.has_conflict());
2626
2627 let anchor_positions = anchors
2628 .iter()
2629 .map(|anchor| anchor.to_point(&*buffer))
2630 .collect::<Vec<_>>();
2631 assert_eq!(
2632 anchor_positions,
2633 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
2634 );
2635 });
2636
    // Modify the buffer, making it dirty.
2638 buffer.update(cx, |buffer, cx| {
2639 buffer.edit([(0..0, " ")], None, cx);
2640 assert!(buffer.is_dirty());
2641 assert!(!buffer.has_conflict());
2642 });
2643
2644 // Change the file on disk again, adding blank lines to the beginning.
2645 fs.save(
2646 "/dir/the-file".as_ref(),
2647 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
2648 LineEnding::Unix,
2649 )
2650 .await
2651 .unwrap();
2652
2653 // Because the buffer is modified, it doesn't reload from disk, but is
2654 // marked as having a conflict.
2655 cx.foreground().run_until_parked();
2656 buffer.read_with(cx, |buffer, _| {
2657 assert!(buffer.has_conflict());
2658 });
2659}
2660
2661#[gpui::test]
2662async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
2663 let fs = FakeFs::new(cx.background());
2664 fs.insert_tree(
2665 "/dir",
2666 json!({
2667 "file1": "a\nb\nc\n",
2668 "file2": "one\r\ntwo\r\nthree\r\n",
2669 }),
2670 )
2671 .await;
2672
2673 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2674 let buffer1 = project
2675 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2676 .await
2677 .unwrap();
2678 let buffer2 = project
2679 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2680 .await
2681 .unwrap();
2682
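    // Line endings are detected when the files are loaded, and the buffers' text is
    // normalized to `\n`.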
2683 buffer1.read_with(cx, |buffer, _| {
2684 assert_eq!(buffer.text(), "a\nb\nc\n");
2685 assert_eq!(buffer.line_ending(), LineEnding::Unix);
2686 });
2687 buffer2.read_with(cx, |buffer, _| {
2688 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
2689 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2690 });
2691
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
2694 fs.save(
2695 "/dir/file1".as_ref(),
2696 &"aaa\nb\nc\n".into(),
2697 LineEnding::Windows,
2698 )
2699 .await
2700 .unwrap();
2701 cx.foreground().run_until_parked();
2702 buffer1.read_with(cx, |buffer, _| {
2703 assert_eq!(buffer.text(), "aaa\nb\nc\n");
2704 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2705 });
2706
    // Save a file with Windows line endings. The file is written correctly.
2708 buffer2.update(cx, |buffer, cx| {
2709 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
2710 });
2711 project
2712 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
2713 .await
2714 .unwrap();
2715 assert_eq!(
2716 fs.load("/dir/file2".as_ref()).await.unwrap(),
2717 "one\r\ntwo\r\nthree\r\nfour\r\n",
2718 );
2719}
2720
2721#[gpui::test]
2722async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
2723 cx.foreground().forbid_parking();
2724
2725 let fs = FakeFs::new(cx.background());
2726 fs.insert_tree(
2727 "/the-dir",
2728 json!({
2729 "a.rs": "
2730 fn foo(mut v: Vec<usize>) {
2731 for x in &v {
2732 v.push(1);
2733 }
2734 }
2735 "
2736 .unindent(),
2737 }),
2738 )
2739 .await;
2740
2741 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
2742 let buffer = project
2743 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
2744 .await
2745 .unwrap();
2746
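    // Simulate the language server publishing diagnostics whose related information
    // refers back and forth between two primary diagnostics.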
2747 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
2748 let message = lsp::PublishDiagnosticsParams {
2749 uri: buffer_uri.clone(),
2750 diagnostics: vec![
2751 lsp::Diagnostic {
2752 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2753 severity: Some(DiagnosticSeverity::WARNING),
2754 message: "error 1".to_string(),
2755 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2756 location: lsp::Location {
2757 uri: buffer_uri.clone(),
2758 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2759 },
2760 message: "error 1 hint 1".to_string(),
2761 }]),
2762 ..Default::default()
2763 },
2764 lsp::Diagnostic {
2765 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2766 severity: Some(DiagnosticSeverity::HINT),
2767 message: "error 1 hint 1".to_string(),
2768 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2769 location: lsp::Location {
2770 uri: buffer_uri.clone(),
2771 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2772 },
2773 message: "original diagnostic".to_string(),
2774 }]),
2775 ..Default::default()
2776 },
2777 lsp::Diagnostic {
2778 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2779 severity: Some(DiagnosticSeverity::ERROR),
2780 message: "error 2".to_string(),
2781 related_information: Some(vec![
2782 lsp::DiagnosticRelatedInformation {
2783 location: lsp::Location {
2784 uri: buffer_uri.clone(),
2785 range: lsp::Range::new(
2786 lsp::Position::new(1, 13),
2787 lsp::Position::new(1, 15),
2788 ),
2789 },
2790 message: "error 2 hint 1".to_string(),
2791 },
2792 lsp::DiagnosticRelatedInformation {
2793 location: lsp::Location {
2794 uri: buffer_uri.clone(),
2795 range: lsp::Range::new(
2796 lsp::Position::new(1, 13),
2797 lsp::Position::new(1, 15),
2798 ),
2799 },
2800 message: "error 2 hint 2".to_string(),
2801 },
2802 ]),
2803 ..Default::default()
2804 },
2805 lsp::Diagnostic {
2806 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2807 severity: Some(DiagnosticSeverity::HINT),
2808 message: "error 2 hint 1".to_string(),
2809 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2810 location: lsp::Location {
2811 uri: buffer_uri.clone(),
2812 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2813 },
2814 message: "original diagnostic".to_string(),
2815 }]),
2816 ..Default::default()
2817 },
2818 lsp::Diagnostic {
2819 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2820 severity: Some(DiagnosticSeverity::HINT),
2821 message: "error 2 hint 2".to_string(),
2822 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2823 location: lsp::Location {
2824 uri: buffer_uri,
2825 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2826 },
2827 message: "original diagnostic".to_string(),
2828 }]),
2829 ..Default::default()
2830 },
2831 ],
2832 version: None,
2833 };
2834
2835 project
2836 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
2837 .unwrap();
2838 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2839
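    // Each diagnostic and the hints derived from its related information share a
    // group id, with the original diagnostic marked as the group's primary entry.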
2840 assert_eq!(
2841 buffer
2842 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2843 .collect::<Vec<_>>(),
2844 &[
2845 DiagnosticEntry {
2846 range: Point::new(1, 8)..Point::new(1, 9),
2847 diagnostic: Diagnostic {
2848 severity: DiagnosticSeverity::WARNING,
2849 message: "error 1".to_string(),
2850 group_id: 1,
2851 is_primary: true,
2852 ..Default::default()
2853 }
2854 },
2855 DiagnosticEntry {
2856 range: Point::new(1, 8)..Point::new(1, 9),
2857 diagnostic: Diagnostic {
2858 severity: DiagnosticSeverity::HINT,
2859 message: "error 1 hint 1".to_string(),
2860 group_id: 1,
2861 is_primary: false,
2862 ..Default::default()
2863 }
2864 },
2865 DiagnosticEntry {
2866 range: Point::new(1, 13)..Point::new(1, 15),
2867 diagnostic: Diagnostic {
2868 severity: DiagnosticSeverity::HINT,
2869 message: "error 2 hint 1".to_string(),
2870 group_id: 0,
2871 is_primary: false,
2872 ..Default::default()
2873 }
2874 },
2875 DiagnosticEntry {
2876 range: Point::new(1, 13)..Point::new(1, 15),
2877 diagnostic: Diagnostic {
2878 severity: DiagnosticSeverity::HINT,
2879 message: "error 2 hint 2".to_string(),
2880 group_id: 0,
2881 is_primary: false,
2882 ..Default::default()
2883 }
2884 },
2885 DiagnosticEntry {
2886 range: Point::new(2, 8)..Point::new(2, 17),
2887 diagnostic: Diagnostic {
2888 severity: DiagnosticSeverity::ERROR,
2889 message: "error 2".to_string(),
2890 group_id: 0,
2891 is_primary: true,
2892 ..Default::default()
2893 }
2894 }
2895 ]
2896 );
2897
2898 assert_eq!(
2899 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
2900 &[
2901 DiagnosticEntry {
2902 range: Point::new(1, 13)..Point::new(1, 15),
2903 diagnostic: Diagnostic {
2904 severity: DiagnosticSeverity::HINT,
2905 message: "error 2 hint 1".to_string(),
2906 group_id: 0,
2907 is_primary: false,
2908 ..Default::default()
2909 }
2910 },
2911 DiagnosticEntry {
2912 range: Point::new(1, 13)..Point::new(1, 15),
2913 diagnostic: Diagnostic {
2914 severity: DiagnosticSeverity::HINT,
2915 message: "error 2 hint 2".to_string(),
2916 group_id: 0,
2917 is_primary: false,
2918 ..Default::default()
2919 }
2920 },
2921 DiagnosticEntry {
2922 range: Point::new(2, 8)..Point::new(2, 17),
2923 diagnostic: Diagnostic {
2924 severity: DiagnosticSeverity::ERROR,
2925 message: "error 2".to_string(),
2926 group_id: 0,
2927 is_primary: true,
2928 ..Default::default()
2929 }
2930 }
2931 ]
2932 );
2933
2934 assert_eq!(
2935 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
2936 &[
2937 DiagnosticEntry {
2938 range: Point::new(1, 8)..Point::new(1, 9),
2939 diagnostic: Diagnostic {
2940 severity: DiagnosticSeverity::WARNING,
2941 message: "error 1".to_string(),
2942 group_id: 1,
2943 is_primary: true,
2944 ..Default::default()
2945 }
2946 },
2947 DiagnosticEntry {
2948 range: Point::new(1, 8)..Point::new(1, 9),
2949 diagnostic: Diagnostic {
2950 severity: DiagnosticSeverity::HINT,
2951 message: "error 1 hint 1".to_string(),
2952 group_id: 1,
2953 is_primary: false,
2954 ..Default::default()
2955 }
2956 },
2957 ]
2958 );
2959}
2960
2961#[gpui::test]
2962async fn test_rename(cx: &mut gpui::TestAppContext) {
2963 cx.foreground().forbid_parking();
2964
2965 let mut language = Language::new(
2966 LanguageConfig {
2967 name: "Rust".into(),
2968 path_suffixes: vec!["rs".to_string()],
2969 ..Default::default()
2970 },
2971 Some(tree_sitter_rust::language()),
2972 );
2973 let mut fake_servers = language
2974 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2975 capabilities: lsp::ServerCapabilities {
2976 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
2977 prepare_provider: Some(true),
2978 work_done_progress_options: Default::default(),
2979 })),
2980 ..Default::default()
2981 },
2982 ..Default::default()
2983 }))
2984 .await;
2985
2986 let fs = FakeFs::new(cx.background());
2987 fs.insert_tree(
2988 "/dir",
2989 json!({
2990 "one.rs": "const ONE: usize = 1;",
2991 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
2992 }),
2993 )
2994 .await;
2995
2996 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2997 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2998 let buffer = project
2999 .update(cx, |project, cx| {
3000 project.open_local_buffer("/dir/one.rs", cx)
3001 })
3002 .await
3003 .unwrap();
3004
3005 let fake_server = fake_servers.next().await.unwrap();
3006
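    // Prepare the rename. The language server responds with the range of the symbol
    // that can be renamed.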
3007 let response = project.update(cx, |project, cx| {
3008 project.prepare_rename(buffer.clone(), 7, cx)
3009 });
3010 fake_server
3011 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3012 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3013 assert_eq!(params.position, lsp::Position::new(0, 7));
3014 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3015 lsp::Position::new(0, 6),
3016 lsp::Position::new(0, 9),
3017 ))))
3018 })
3019 .next()
3020 .await
3021 .unwrap();
3022 let range = response.await.unwrap().unwrap();
3023 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
3024 assert_eq!(range, 6..9);
3025
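    // Perform the rename. The resulting project transaction contains the edits to both files.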
3026 let response = project.update(cx, |project, cx| {
3027 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3028 });
3029 fake_server
3030 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3031 assert_eq!(
3032 params.text_document_position.text_document.uri.as_str(),
3033 "file:///dir/one.rs"
3034 );
3035 assert_eq!(
3036 params.text_document_position.position,
3037 lsp::Position::new(0, 7)
3038 );
3039 assert_eq!(params.new_name, "THREE");
3040 Ok(Some(lsp::WorkspaceEdit {
3041 changes: Some(
3042 [
3043 (
3044 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3045 vec![lsp::TextEdit::new(
3046 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3047 "THREE".to_string(),
3048 )],
3049 ),
3050 (
3051 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3052 vec![
3053 lsp::TextEdit::new(
3054 lsp::Range::new(
3055 lsp::Position::new(0, 24),
3056 lsp::Position::new(0, 27),
3057 ),
3058 "THREE".to_string(),
3059 ),
3060 lsp::TextEdit::new(
3061 lsp::Range::new(
3062 lsp::Position::new(0, 35),
3063 lsp::Position::new(0, 38),
3064 ),
3065 "THREE".to_string(),
3066 ),
3067 ],
3068 ),
3069 ]
3070 .into_iter()
3071 .collect(),
3072 ),
3073 ..Default::default()
3074 }))
3075 })
3076 .next()
3077 .await
3078 .unwrap();
3079 let mut transaction = response.await.unwrap().0;
3080 assert_eq!(transaction.len(), 2);
3081 assert_eq!(
3082 transaction
3083 .remove_entry(&buffer)
3084 .unwrap()
3085 .0
3086 .read_with(cx, |buffer, _| buffer.text()),
3087 "const THREE: usize = 1;"
3088 );
3089 assert_eq!(
3090 transaction
3091 .into_keys()
3092 .next()
3093 .unwrap()
3094 .read_with(cx, |buffer, _| buffer.text()),
3095 "const TWO: usize = one::THREE + one::THREE;"
3096 );
3097}
3098
3099#[gpui::test]
3100async fn test_search(cx: &mut gpui::TestAppContext) {
3101 let fs = FakeFs::new(cx.background());
3102 fs.insert_tree(
3103 "/dir",
3104 json!({
3105 "one.rs": "const ONE: usize = 1;",
3106 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3107 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3108 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3109 }),
3110 )
3111 .await;
3112 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3113 assert_eq!(
3114 search(&project, SearchQuery::text("TWO", false, true), cx)
3115 .await
3116 .unwrap(),
3117 HashMap::from_iter([
3118 ("two.rs".to_string(), vec![6..9]),
3119 ("three.rs".to_string(), vec![37..40])
3120 ])
3121 );
3122
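    // Make an unsaved edit to one of the buffers. Subsequent searches reflect the
    // buffer's in-memory contents.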
3123 let buffer_4 = project
3124 .update(cx, |project, cx| {
3125 project.open_local_buffer("/dir/four.rs", cx)
3126 })
3127 .await
3128 .unwrap();
3129 buffer_4.update(cx, |buffer, cx| {
3130 let text = "two::TWO";
3131 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3132 });
3133
3134 assert_eq!(
3135 search(&project, SearchQuery::text("TWO", false, true), cx)
3136 .await
3137 .unwrap(),
3138 HashMap::from_iter([
3139 ("two.rs".to_string(), vec![6..9]),
3140 ("three.rs".to_string(), vec![37..40]),
3141 ("four.rs".to_string(), vec![25..28, 36..39])
3142 ])
3143 );
3144
3145 async fn search(
3146 project: &ModelHandle<Project>,
3147 query: SearchQuery,
3148 cx: &mut gpui::TestAppContext,
3149 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
3150 let results = project
3151 .update(cx, |project, cx| project.search(query, cx))
3152 .await?;
3153
3154 Ok(results
3155 .into_iter()
3156 .map(|(buffer, ranges)| {
3157 buffer.read_with(cx, |buffer, _| {
3158 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3159 let ranges = ranges
3160 .into_iter()
3161 .map(|range| range.to_offset(buffer))
3162 .collect::<Vec<_>>();
3163 (path, ranges)
3164 })
3165 })
3166 .collect())
3167 }
3168}