1use crate::{worktree::WorktreeHandle, Event, *};
2use fs::LineEnding;
3use fs::{FakeFs, RealFs};
4use futures::{future, StreamExt};
5use gpui::{executor::Deterministic, test::subscribe};
6use language::{
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use serde_json::json;
12use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
13use unindent::Unindent as _;
14use util::{assert_set_eq, test::temp_tree};
15
16#[gpui::test]
17async fn test_symlinks(cx: &mut gpui::TestAppContext) {
18 let dir = temp_tree(json!({
19 "root": {
20 "apple": "",
21 "banana": {
22 "carrot": {
23 "date": "",
24 "endive": "",
25 }
26 },
27 "fennel": {
28 "grape": "",
29 }
30 }
31 }));
32
33 let root_link_path = dir.path().join("root_link");
34 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
35 unix::fs::symlink(
36 &dir.path().join("root/fennel"),
37 &dir.path().join("root/finnochio"),
38 )
39 .unwrap();
40
41 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
42 project.read_with(cx, |project, cx| {
43 let tree = project.worktrees(cx).next().unwrap().read(cx);
44 assert_eq!(tree.file_count(), 5);
45 assert_eq!(
46 tree.inode_for_path("fennel/grape"),
47 tree.inode_for_path("finnochio/grape")
48 );
49 });
50}
51
52#[gpui::test]
53async fn test_managing_language_servers(
54 deterministic: Arc<Deterministic>,
55 cx: &mut gpui::TestAppContext,
56) {
57 cx.foreground().forbid_parking();
58
59 let mut rust_language = Language::new(
60 LanguageConfig {
61 name: "Rust".into(),
62 path_suffixes: vec!["rs".to_string()],
63 ..Default::default()
64 },
65 Some(tree_sitter_rust::language()),
66 );
67 let mut json_language = Language::new(
68 LanguageConfig {
69 name: "JSON".into(),
70 path_suffixes: vec!["json".to_string()],
71 ..Default::default()
72 },
73 None,
74 );
75 let mut fake_rust_servers = rust_language
76 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
77 name: "the-rust-language-server",
78 capabilities: lsp::ServerCapabilities {
79 completion_provider: Some(lsp::CompletionOptions {
80 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
81 ..Default::default()
82 }),
83 ..Default::default()
84 },
85 ..Default::default()
86 }))
87 .await;
88 let mut fake_json_servers = json_language
89 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
90 name: "the-json-language-server",
91 capabilities: lsp::ServerCapabilities {
92 completion_provider: Some(lsp::CompletionOptions {
93 trigger_characters: Some(vec![":".to_string()]),
94 ..Default::default()
95 }),
96 ..Default::default()
97 },
98 ..Default::default()
99 }))
100 .await;
101
102 let fs = FakeFs::new(cx.background());
103 fs.insert_tree(
104 "/the-root",
105 json!({
106 "test.rs": "const A: i32 = 1;",
107 "test2.rs": "",
108 "Cargo.toml": "a = 1",
109 "package.json": "{\"a\": 1}",
110 }),
111 )
112 .await;
113
114 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
115
116 // Open a buffer without an associated language server.
117 let toml_buffer = project
118 .update(cx, |project, cx| {
119 project.open_local_buffer("/the-root/Cargo.toml", cx)
120 })
121 .await
122 .unwrap();
123
124 // Open a buffer with an associated language server before the language for it has been loaded.
125 let rust_buffer = project
126 .update(cx, |project, cx| {
127 project.open_local_buffer("/the-root/test.rs", cx)
128 })
129 .await
130 .unwrap();
131 rust_buffer.read_with(cx, |buffer, _| {
132 assert_eq!(buffer.language().map(|l| l.name()), None);
133 });
134
135 // Now we add the languages to the project, and ensure they get assigned to all
136 // the relevant open buffers.
137 project.update(cx, |project, _| {
138 project.languages.add(Arc::new(json_language));
139 project.languages.add(Arc::new(rust_language));
140 });
141 deterministic.run_until_parked();
142 rust_buffer.read_with(cx, |buffer, _| {
143 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
144 });
145
146 // A server is started up, and it is notified about Rust files.
147 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
148 assert_eq!(
149 fake_rust_server
150 .receive_notification::<lsp::notification::DidOpenTextDocument>()
151 .await
152 .text_document,
153 lsp::TextDocumentItem {
154 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
155 version: 0,
156 text: "const A: i32 = 1;".to_string(),
157 language_id: Default::default()
158 }
159 );
160
161 // The buffer is configured based on the language server's capabilities.
162 rust_buffer.read_with(cx, |buffer, _| {
163 assert_eq!(
164 buffer.completion_triggers(),
165 &[".".to_string(), "::".to_string()]
166 );
167 });
168 toml_buffer.read_with(cx, |buffer, _| {
169 assert!(buffer.completion_triggers().is_empty());
170 });
171
172 // Edit a buffer. The changes are reported to the language server.
173 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
174 assert_eq!(
175 fake_rust_server
176 .receive_notification::<lsp::notification::DidChangeTextDocument>()
177 .await
178 .text_document,
179 lsp::VersionedTextDocumentIdentifier::new(
180 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
181 1
182 )
183 );
184
185 // Open a third buffer with a different associated language server.
186 let json_buffer = project
187 .update(cx, |project, cx| {
188 project.open_local_buffer("/the-root/package.json", cx)
189 })
190 .await
191 .unwrap();
192
193 // A json language server is started up and is only notified about the json buffer.
194 let mut fake_json_server = fake_json_servers.next().await.unwrap();
195 assert_eq!(
196 fake_json_server
197 .receive_notification::<lsp::notification::DidOpenTextDocument>()
198 .await
199 .text_document,
200 lsp::TextDocumentItem {
201 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
202 version: 0,
203 text: "{\"a\": 1}".to_string(),
204 language_id: Default::default()
205 }
206 );
207
208 // This buffer is configured based on the second language server's
209 // capabilities.
210 json_buffer.read_with(cx, |buffer, _| {
211 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
212 });
213
214 // When opening another buffer whose language server is already running,
215 // it is also configured based on the existing language server's capabilities.
216 let rust_buffer2 = project
217 .update(cx, |project, cx| {
218 project.open_local_buffer("/the-root/test2.rs", cx)
219 })
220 .await
221 .unwrap();
222 rust_buffer2.read_with(cx, |buffer, _| {
223 assert_eq!(
224 buffer.completion_triggers(),
225 &[".".to_string(), "::".to_string()]
226 );
227 });
228
229 // Changes are reported only to servers matching the buffer's language.
230 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
231 rust_buffer2.update(cx, |buffer, cx| {
232 buffer.edit([(0..0, "let x = 1;")], None, cx)
233 });
234 assert_eq!(
235 fake_rust_server
236 .receive_notification::<lsp::notification::DidChangeTextDocument>()
237 .await
238 .text_document,
239 lsp::VersionedTextDocumentIdentifier::new(
240 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
241 1
242 )
243 );
244
245 // Save notifications are reported to all servers.
246 project
247 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
248 .await
249 .unwrap();
250 assert_eq!(
251 fake_rust_server
252 .receive_notification::<lsp::notification::DidSaveTextDocument>()
253 .await
254 .text_document,
255 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
256 );
257 assert_eq!(
258 fake_json_server
259 .receive_notification::<lsp::notification::DidSaveTextDocument>()
260 .await
261 .text_document,
262 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
263 );
264
265 // Renames are reported only to servers matching the buffer's language.
266 fs.rename(
267 Path::new("/the-root/test2.rs"),
268 Path::new("/the-root/test3.rs"),
269 Default::default(),
270 )
271 .await
272 .unwrap();
273 assert_eq!(
274 fake_rust_server
275 .receive_notification::<lsp::notification::DidCloseTextDocument>()
276 .await
277 .text_document,
278 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
279 );
280 assert_eq!(
281 fake_rust_server
282 .receive_notification::<lsp::notification::DidOpenTextDocument>()
283 .await
284 .text_document,
285 lsp::TextDocumentItem {
286 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
287 version: 0,
288 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
289 language_id: Default::default()
290 },
291 );
292
293 rust_buffer2.update(cx, |buffer, cx| {
294 buffer.update_diagnostics(
295 DiagnosticSet::from_sorted_entries(
296 vec![DiagnosticEntry {
297 diagnostic: Default::default(),
298 range: Anchor::MIN..Anchor::MAX,
299 }],
300 &buffer.snapshot(),
301 ),
302 cx,
303 );
304 assert_eq!(
305 buffer
306 .snapshot()
307 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
308 .count(),
309 1
310 );
311 });
312
313 // When the rename changes the extension of the file, the buffer gets closed on the old
314 // language server and gets opened on the new one.
315 fs.rename(
316 Path::new("/the-root/test3.rs"),
317 Path::new("/the-root/test3.json"),
318 Default::default(),
319 )
320 .await
321 .unwrap();
322 assert_eq!(
323 fake_rust_server
324 .receive_notification::<lsp::notification::DidCloseTextDocument>()
325 .await
326 .text_document,
327 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
328 );
329 assert_eq!(
330 fake_json_server
331 .receive_notification::<lsp::notification::DidOpenTextDocument>()
332 .await
333 .text_document,
334 lsp::TextDocumentItem {
335 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
336 version: 0,
337 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
338 language_id: Default::default()
339 },
340 );
341
342 // We clear the diagnostics, since the language has changed.
343 rust_buffer2.read_with(cx, |buffer, _| {
344 assert_eq!(
345 buffer
346 .snapshot()
347 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
348 .count(),
349 0
350 );
351 });
352
353 // The renamed file's version resets after changing language server.
354 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
355 assert_eq!(
356 fake_json_server
357 .receive_notification::<lsp::notification::DidChangeTextDocument>()
358 .await
359 .text_document,
360 lsp::VersionedTextDocumentIdentifier::new(
361 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
362 1
363 )
364 );
365
366 // Restart language servers
367 project.update(cx, |project, cx| {
368 project.restart_language_servers_for_buffers(
369 vec![rust_buffer.clone(), json_buffer.clone()],
370 cx,
371 );
372 });
373
374 let mut rust_shutdown_requests = fake_rust_server
375 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
376 let mut json_shutdown_requests = fake_json_server
377 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
378 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
379
380 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
381 let mut fake_json_server = fake_json_servers.next().await.unwrap();
382
383 // Ensure rust document is reopened in new rust language server
384 assert_eq!(
385 fake_rust_server
386 .receive_notification::<lsp::notification::DidOpenTextDocument>()
387 .await
388 .text_document,
389 lsp::TextDocumentItem {
390 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
391 version: 1,
392 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
393 language_id: Default::default()
394 }
395 );
396
397 // Ensure json documents are reopened in new json language server
398 assert_set_eq!(
399 [
400 fake_json_server
401 .receive_notification::<lsp::notification::DidOpenTextDocument>()
402 .await
403 .text_document,
404 fake_json_server
405 .receive_notification::<lsp::notification::DidOpenTextDocument>()
406 .await
407 .text_document,
408 ],
409 [
410 lsp::TextDocumentItem {
411 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
412 version: 0,
413 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
414 language_id: Default::default()
415 },
416 lsp::TextDocumentItem {
417 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
418 version: 1,
419 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
420 language_id: Default::default()
421 }
422 ]
423 );
424
425 // Close notifications are reported only to servers matching the buffer's language.
426 cx.update(|_| drop(json_buffer));
427 let close_message = lsp::DidCloseTextDocumentParams {
428 text_document: lsp::TextDocumentIdentifier::new(
429 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
430 ),
431 };
432 assert_eq!(
433 fake_json_server
434 .receive_notification::<lsp::notification::DidCloseTextDocument>()
435 .await,
436 close_message,
437 );
438}
439
440#[gpui::test]
441async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
442 cx.foreground().forbid_parking();
443
444 let fs = FakeFs::new(cx.background());
445 fs.insert_tree(
446 "/dir",
447 json!({
448 "a.rs": "let a = 1;",
449 "b.rs": "let b = 2;"
450 }),
451 )
452 .await;
453
454 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
455
456 let buffer_a = project
457 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
458 .await
459 .unwrap();
460 let buffer_b = project
461 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
462 .await
463 .unwrap();
464
465 project.update(cx, |project, cx| {
466 project
467 .update_diagnostics(
468 0,
469 lsp::PublishDiagnosticsParams {
470 uri: Url::from_file_path("/dir/a.rs").unwrap(),
471 version: None,
472 diagnostics: vec![lsp::Diagnostic {
473 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
474 severity: Some(lsp::DiagnosticSeverity::ERROR),
475 message: "error 1".to_string(),
476 ..Default::default()
477 }],
478 },
479 &[],
480 cx,
481 )
482 .unwrap();
483 project
484 .update_diagnostics(
485 0,
486 lsp::PublishDiagnosticsParams {
487 uri: Url::from_file_path("/dir/b.rs").unwrap(),
488 version: None,
489 diagnostics: vec![lsp::Diagnostic {
490 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
491 severity: Some(lsp::DiagnosticSeverity::WARNING),
492 message: "error 2".to_string(),
493 ..Default::default()
494 }],
495 },
496 &[],
497 cx,
498 )
499 .unwrap();
500 });
501
502 buffer_a.read_with(cx, |buffer, _| {
503 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
504 assert_eq!(
505 chunks
506 .iter()
507 .map(|(s, d)| (s.as_str(), *d))
508 .collect::<Vec<_>>(),
509 &[
510 ("let ", None),
511 ("a", Some(DiagnosticSeverity::ERROR)),
512 (" = 1;", None),
513 ]
514 );
515 });
516 buffer_b.read_with(cx, |buffer, _| {
517 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
518 assert_eq!(
519 chunks
520 .iter()
521 .map(|(s, d)| (s.as_str(), *d))
522 .collect::<Vec<_>>(),
523 &[
524 ("let ", None),
525 ("b", Some(DiagnosticSeverity::WARNING)),
526 (" = 2;", None),
527 ]
528 );
529 });
530}
531
532#[gpui::test]
533async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
534 cx.foreground().forbid_parking();
535
536 let fs = FakeFs::new(cx.background());
537 fs.insert_tree(
538 "/root",
539 json!({
540 "dir": {
541 "a.rs": "let a = 1;",
542 },
543 "other.rs": "let b = c;"
544 }),
545 )
546 .await;
547
548 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
549
550 let (worktree, _) = project
551 .update(cx, |project, cx| {
552 project.find_or_create_local_worktree("/root/other.rs", false, cx)
553 })
554 .await
555 .unwrap();
556 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
557
558 project.update(cx, |project, cx| {
559 project
560 .update_diagnostics(
561 0,
562 lsp::PublishDiagnosticsParams {
563 uri: Url::from_file_path("/root/other.rs").unwrap(),
564 version: None,
565 diagnostics: vec![lsp::Diagnostic {
566 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
567 severity: Some(lsp::DiagnosticSeverity::ERROR),
568 message: "unknown variable 'c'".to_string(),
569 ..Default::default()
570 }],
571 },
572 &[],
573 cx,
574 )
575 .unwrap();
576 });
577
578 let buffer = project
579 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
580 .await
581 .unwrap();
582 buffer.read_with(cx, |buffer, _| {
583 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
584 assert_eq!(
585 chunks
586 .iter()
587 .map(|(s, d)| (s.as_str(), *d))
588 .collect::<Vec<_>>(),
589 &[
590 ("let b = ", None),
591 ("c", Some(DiagnosticSeverity::ERROR)),
592 (";", None),
593 ]
594 );
595 });
596
597 project.read_with(cx, |project, cx| {
598 assert_eq!(project.diagnostic_summaries(cx).next(), None);
599 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
600 });
601}
602
603#[gpui::test]
604async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
605 cx.foreground().forbid_parking();
606
607 let progress_token = "the-progress-token";
608 let mut language = Language::new(
609 LanguageConfig {
610 name: "Rust".into(),
611 path_suffixes: vec!["rs".to_string()],
612 ..Default::default()
613 },
614 Some(tree_sitter_rust::language()),
615 );
616 let mut fake_servers = language
617 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
618 disk_based_diagnostics_progress_token: Some(progress_token.into()),
619 disk_based_diagnostics_sources: vec!["disk".into()],
620 ..Default::default()
621 }))
622 .await;
623
624 let fs = FakeFs::new(cx.background());
625 fs.insert_tree(
626 "/dir",
627 json!({
628 "a.rs": "fn a() { A }",
629 "b.rs": "const y: i32 = 1",
630 }),
631 )
632 .await;
633
634 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
635 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
636 let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
637
638 // Cause worktree to start the fake language server
639 let _buffer = project
640 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
641 .await
642 .unwrap();
643
644 let mut events = subscribe(&project, cx);
645
646 let fake_server = fake_servers.next().await.unwrap();
647 fake_server
648 .start_progress(format!("{}/0", progress_token))
649 .await;
650 assert_eq!(
651 events.next().await.unwrap(),
652 Event::DiskBasedDiagnosticsStarted {
653 language_server_id: 0,
654 }
655 );
656
657 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
658 uri: Url::from_file_path("/dir/a.rs").unwrap(),
659 version: None,
660 diagnostics: vec![lsp::Diagnostic {
661 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
662 severity: Some(lsp::DiagnosticSeverity::ERROR),
663 message: "undefined variable 'A'".to_string(),
664 ..Default::default()
665 }],
666 });
667 assert_eq!(
668 events.next().await.unwrap(),
669 Event::DiagnosticsUpdated {
670 language_server_id: 0,
671 path: (worktree_id, Path::new("a.rs")).into()
672 }
673 );
674
675 fake_server.end_progress(format!("{}/0", progress_token));
676 assert_eq!(
677 events.next().await.unwrap(),
678 Event::DiskBasedDiagnosticsFinished {
679 language_server_id: 0
680 }
681 );
682
683 let buffer = project
684 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
685 .await
686 .unwrap();
687
688 buffer.read_with(cx, |buffer, _| {
689 let snapshot = buffer.snapshot();
690 let diagnostics = snapshot
691 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
692 .collect::<Vec<_>>();
693 assert_eq!(
694 diagnostics,
695 &[DiagnosticEntry {
696 range: Point::new(0, 9)..Point::new(0, 10),
697 diagnostic: Diagnostic {
698 severity: lsp::DiagnosticSeverity::ERROR,
699 message: "undefined variable 'A'".to_string(),
700 group_id: 0,
701 is_primary: true,
702 ..Default::default()
703 }
704 }]
705 )
706 });
707
708 // Ensure publishing empty diagnostics twice only results in one update event.
709 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
710 uri: Url::from_file_path("/dir/a.rs").unwrap(),
711 version: None,
712 diagnostics: Default::default(),
713 });
714 assert_eq!(
715 events.next().await.unwrap(),
716 Event::DiagnosticsUpdated {
717 language_server_id: 0,
718 path: (worktree_id, Path::new("a.rs")).into()
719 }
720 );
721
722 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
723 uri: Url::from_file_path("/dir/a.rs").unwrap(),
724 version: None,
725 diagnostics: Default::default(),
726 });
727 cx.foreground().run_until_parked();
728 assert_eq!(futures::poll!(events.next()), Poll::Pending);
729}
730
731#[gpui::test]
732async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
733 cx.foreground().forbid_parking();
734
735 let progress_token = "the-progress-token";
736 let mut language = Language::new(
737 LanguageConfig {
738 path_suffixes: vec!["rs".to_string()],
739 ..Default::default()
740 },
741 None,
742 );
743 let mut fake_servers = language
744 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
745 disk_based_diagnostics_sources: vec!["disk".into()],
746 disk_based_diagnostics_progress_token: Some(progress_token.into()),
747 ..Default::default()
748 }))
749 .await;
750
751 let fs = FakeFs::new(cx.background());
752 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
753
754 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
755 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
756
757 let buffer = project
758 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
759 .await
760 .unwrap();
761
762 // Simulate diagnostics starting to update.
763 let fake_server = fake_servers.next().await.unwrap();
764 fake_server.start_progress(progress_token).await;
765
766 // Restart the server before the diagnostics finish updating.
767 project.update(cx, |project, cx| {
768 project.restart_language_servers_for_buffers([buffer], cx);
769 });
770 let mut events = subscribe(&project, cx);
771
772 // Simulate the newly started server sending more diagnostics.
773 let fake_server = fake_servers.next().await.unwrap();
774 fake_server.start_progress(progress_token).await;
775 assert_eq!(
776 events.next().await.unwrap(),
777 Event::DiskBasedDiagnosticsStarted {
778 language_server_id: 1
779 }
780 );
781 project.read_with(cx, |project, _| {
782 assert_eq!(
783 project
784 .language_servers_running_disk_based_diagnostics()
785 .collect::<Vec<_>>(),
786 [1]
787 );
788 });
789
790 // All diagnostics are considered done, despite the old server's diagnostic
791 // task never completing.
792 fake_server.end_progress(progress_token);
793 assert_eq!(
794 events.next().await.unwrap(),
795 Event::DiskBasedDiagnosticsFinished {
796 language_server_id: 1
797 }
798 );
799 project.read_with(cx, |project, _| {
800 assert_eq!(
801 project
802 .language_servers_running_disk_based_diagnostics()
803 .collect::<Vec<_>>(),
804 [0; 0]
805 );
806 });
807}
808
809#[gpui::test]
810async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
811 cx.foreground().forbid_parking();
812
813 let mut language = Language::new(
814 LanguageConfig {
815 path_suffixes: vec!["rs".to_string()],
816 ..Default::default()
817 },
818 None,
819 );
820 let mut fake_servers = language
821 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
822 name: "the-lsp",
823 ..Default::default()
824 }))
825 .await;
826
827 let fs = FakeFs::new(cx.background());
828 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
829
830 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
831 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
832
833 let buffer = project
834 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
835 .await
836 .unwrap();
837
838 // Before restarting the server, report diagnostics with an unknown buffer version.
839 let fake_server = fake_servers.next().await.unwrap();
840 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
841 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
842 version: Some(10000),
843 diagnostics: Vec::new(),
844 });
845 cx.foreground().run_until_parked();
846
847 project.update(cx, |project, cx| {
848 project.restart_language_servers_for_buffers([buffer.clone()], cx);
849 });
850 let mut fake_server = fake_servers.next().await.unwrap();
851 let notification = fake_server
852 .receive_notification::<lsp::notification::DidOpenTextDocument>()
853 .await
854 .text_document;
855 assert_eq!(notification.version, 0);
856}
857
858#[gpui::test]
859async fn test_toggling_enable_language_server(
860 deterministic: Arc<Deterministic>,
861 cx: &mut gpui::TestAppContext,
862) {
863 deterministic.forbid_parking();
864
865 let mut rust = Language::new(
866 LanguageConfig {
867 name: Arc::from("Rust"),
868 path_suffixes: vec!["rs".to_string()],
869 ..Default::default()
870 },
871 None,
872 );
873 let mut fake_rust_servers = rust
874 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
875 name: "rust-lsp",
876 ..Default::default()
877 }))
878 .await;
879 let mut js = Language::new(
880 LanguageConfig {
881 name: Arc::from("JavaScript"),
882 path_suffixes: vec!["js".to_string()],
883 ..Default::default()
884 },
885 None,
886 );
887 let mut fake_js_servers = js
888 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
889 name: "js-lsp",
890 ..Default::default()
891 }))
892 .await;
893
894 let fs = FakeFs::new(cx.background());
895 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
896 .await;
897
898 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
899 project.update(cx, |project, _| {
900 project.languages.add(Arc::new(rust));
901 project.languages.add(Arc::new(js));
902 });
903
904 let _rs_buffer = project
905 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
906 .await
907 .unwrap();
908 let _js_buffer = project
909 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
910 .await
911 .unwrap();
912
913 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
914 assert_eq!(
915 fake_rust_server_1
916 .receive_notification::<lsp::notification::DidOpenTextDocument>()
917 .await
918 .text_document
919 .uri
920 .as_str(),
921 "file:///dir/a.rs"
922 );
923
924 let mut fake_js_server = fake_js_servers.next().await.unwrap();
925 assert_eq!(
926 fake_js_server
927 .receive_notification::<lsp::notification::DidOpenTextDocument>()
928 .await
929 .text_document
930 .uri
931 .as_str(),
932 "file:///dir/b.js"
933 );
934
935 // Disable Rust language server, ensuring only that server gets stopped.
936 cx.update(|cx| {
937 cx.update_global(|settings: &mut Settings, _| {
938 settings.language_overrides.insert(
939 Arc::from("Rust"),
940 settings::EditorSettings {
941 enable_language_server: Some(false),
942 ..Default::default()
943 },
944 );
945 })
946 });
947 fake_rust_server_1
948 .receive_notification::<lsp::notification::Exit>()
949 .await;
950
951 // Enable Rust and disable JavaScript language servers, ensuring that the
952 // former gets started again and that the latter stops.
953 cx.update(|cx| {
954 cx.update_global(|settings: &mut Settings, _| {
955 settings.language_overrides.insert(
956 Arc::from("Rust"),
957 settings::EditorSettings {
958 enable_language_server: Some(true),
959 ..Default::default()
960 },
961 );
962 settings.language_overrides.insert(
963 Arc::from("JavaScript"),
964 settings::EditorSettings {
965 enable_language_server: Some(false),
966 ..Default::default()
967 },
968 );
969 })
970 });
971 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
972 assert_eq!(
973 fake_rust_server_2
974 .receive_notification::<lsp::notification::DidOpenTextDocument>()
975 .await
976 .text_document
977 .uri
978 .as_str(),
979 "file:///dir/a.rs"
980 );
981 fake_js_server
982 .receive_notification::<lsp::notification::Exit>()
983 .await;
984}
985
986#[gpui::test]
987async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
988 cx.foreground().forbid_parking();
989
990 let mut language = Language::new(
991 LanguageConfig {
992 name: "Rust".into(),
993 path_suffixes: vec!["rs".to_string()],
994 ..Default::default()
995 },
996 Some(tree_sitter_rust::language()),
997 );
998 let mut fake_servers = language
999 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1000 disk_based_diagnostics_sources: vec!["disk".into()],
1001 ..Default::default()
1002 }))
1003 .await;
1004
1005 let text = "
1006 fn a() { A }
1007 fn b() { BB }
1008 fn c() { CCC }
1009 "
1010 .unindent();
1011
1012 let fs = FakeFs::new(cx.background());
1013 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1014
1015 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1016 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1017
1018 let buffer = project
1019 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1020 .await
1021 .unwrap();
1022
1023 let mut fake_server = fake_servers.next().await.unwrap();
1024 let open_notification = fake_server
1025 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1026 .await;
1027
1028 // Edit the buffer, moving the content down
1029 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1030 let change_notification_1 = fake_server
1031 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1032 .await;
1033 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1034
1035 // Report some diagnostics for the initial version of the buffer
1036 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1037 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1038 version: Some(open_notification.text_document.version),
1039 diagnostics: vec![
1040 lsp::Diagnostic {
1041 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1042 severity: Some(DiagnosticSeverity::ERROR),
1043 message: "undefined variable 'A'".to_string(),
1044 source: Some("disk".to_string()),
1045 ..Default::default()
1046 },
1047 lsp::Diagnostic {
1048 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1049 severity: Some(DiagnosticSeverity::ERROR),
1050 message: "undefined variable 'BB'".to_string(),
1051 source: Some("disk".to_string()),
1052 ..Default::default()
1053 },
1054 lsp::Diagnostic {
1055 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1056 severity: Some(DiagnosticSeverity::ERROR),
1057 source: Some("disk".to_string()),
1058 message: "undefined variable 'CCC'".to_string(),
1059 ..Default::default()
1060 },
1061 ],
1062 });
1063
1064 // The diagnostics have moved down since they were created.
1065 buffer.next_notification(cx).await;
1066 buffer.read_with(cx, |buffer, _| {
1067 assert_eq!(
1068 buffer
1069 .snapshot()
1070 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1071 .collect::<Vec<_>>(),
1072 &[
1073 DiagnosticEntry {
1074 range: Point::new(3, 9)..Point::new(3, 11),
1075 diagnostic: Diagnostic {
1076 severity: DiagnosticSeverity::ERROR,
1077 message: "undefined variable 'BB'".to_string(),
1078 is_disk_based: true,
1079 group_id: 1,
1080 is_primary: true,
1081 ..Default::default()
1082 },
1083 },
1084 DiagnosticEntry {
1085 range: Point::new(4, 9)..Point::new(4, 12),
1086 diagnostic: Diagnostic {
1087 severity: DiagnosticSeverity::ERROR,
1088 message: "undefined variable 'CCC'".to_string(),
1089 is_disk_based: true,
1090 group_id: 2,
1091 is_primary: true,
1092 ..Default::default()
1093 }
1094 }
1095 ]
1096 );
1097 assert_eq!(
1098 chunks_with_diagnostics(buffer, 0..buffer.len()),
1099 [
1100 ("\n\nfn a() { ".to_string(), None),
1101 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1102 (" }\nfn b() { ".to_string(), None),
1103 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1104 (" }\nfn c() { ".to_string(), None),
1105 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1106 (" }\n".to_string(), None),
1107 ]
1108 );
1109 assert_eq!(
1110 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1111 [
1112 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1113 (" }\nfn c() { ".to_string(), None),
1114 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1115 ]
1116 );
1117 });
1118
1119 // Ensure overlapping diagnostics are highlighted correctly.
1120 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1121 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1122 version: Some(open_notification.text_document.version),
1123 diagnostics: vec![
1124 lsp::Diagnostic {
1125 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1126 severity: Some(DiagnosticSeverity::ERROR),
1127 message: "undefined variable 'A'".to_string(),
1128 source: Some("disk".to_string()),
1129 ..Default::default()
1130 },
1131 lsp::Diagnostic {
1132 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1133 severity: Some(DiagnosticSeverity::WARNING),
1134 message: "unreachable statement".to_string(),
1135 source: Some("disk".to_string()),
1136 ..Default::default()
1137 },
1138 ],
1139 });
1140
1141 buffer.next_notification(cx).await;
1142 buffer.read_with(cx, |buffer, _| {
1143 assert_eq!(
1144 buffer
1145 .snapshot()
1146 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1147 .collect::<Vec<_>>(),
1148 &[
1149 DiagnosticEntry {
1150 range: Point::new(2, 9)..Point::new(2, 12),
1151 diagnostic: Diagnostic {
1152 severity: DiagnosticSeverity::WARNING,
1153 message: "unreachable statement".to_string(),
1154 is_disk_based: true,
1155 group_id: 4,
1156 is_primary: true,
1157 ..Default::default()
1158 }
1159 },
1160 DiagnosticEntry {
1161 range: Point::new(2, 9)..Point::new(2, 10),
1162 diagnostic: Diagnostic {
1163 severity: DiagnosticSeverity::ERROR,
1164 message: "undefined variable 'A'".to_string(),
1165 is_disk_based: true,
1166 group_id: 3,
1167 is_primary: true,
1168 ..Default::default()
1169 },
1170 }
1171 ]
1172 );
1173 assert_eq!(
1174 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1175 [
1176 ("fn a() { ".to_string(), None),
1177 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1178 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1179 ("\n".to_string(), None),
1180 ]
1181 );
1182 assert_eq!(
1183 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1184 [
1185 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1186 ("\n".to_string(), None),
1187 ]
1188 );
1189 });
1190
1191 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1192 // changes since the last save.
1193 buffer.update(cx, |buffer, cx| {
1194 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1195 buffer.edit(
1196 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1197 None,
1198 cx,
1199 );
1200 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1201 });
1202 let change_notification_2 = fake_server
1203 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1204 .await;
1205 assert!(
1206 change_notification_2.text_document.version > change_notification_1.text_document.version
1207 );
1208
1209 // Handle out-of-order diagnostics
1210 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1211 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1212 version: Some(change_notification_2.text_document.version),
1213 diagnostics: vec![
1214 lsp::Diagnostic {
1215 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1216 severity: Some(DiagnosticSeverity::ERROR),
1217 message: "undefined variable 'BB'".to_string(),
1218 source: Some("disk".to_string()),
1219 ..Default::default()
1220 },
1221 lsp::Diagnostic {
1222 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1223 severity: Some(DiagnosticSeverity::WARNING),
1224 message: "undefined variable 'A'".to_string(),
1225 source: Some("disk".to_string()),
1226 ..Default::default()
1227 },
1228 ],
1229 });
1230
1231 buffer.next_notification(cx).await;
1232 buffer.read_with(cx, |buffer, _| {
1233 assert_eq!(
1234 buffer
1235 .snapshot()
1236 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1237 .collect::<Vec<_>>(),
1238 &[
1239 DiagnosticEntry {
1240 range: Point::new(2, 21)..Point::new(2, 22),
1241 diagnostic: Diagnostic {
1242 severity: DiagnosticSeverity::WARNING,
1243 message: "undefined variable 'A'".to_string(),
1244 is_disk_based: true,
1245 group_id: 6,
1246 is_primary: true,
1247 ..Default::default()
1248 }
1249 },
1250 DiagnosticEntry {
1251 range: Point::new(3, 9)..Point::new(3, 14),
1252 diagnostic: Diagnostic {
1253 severity: DiagnosticSeverity::ERROR,
1254 message: "undefined variable 'BB'".to_string(),
1255 is_disk_based: true,
1256 group_id: 5,
1257 is_primary: true,
1258 ..Default::default()
1259 },
1260 }
1261 ]
1262 );
1263 });
1264}
1265
1266#[gpui::test]
1267async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1268 cx.foreground().forbid_parking();
1269
1270 let text = concat!(
1271 "let one = ;\n", //
1272 "let two = \n",
1273 "let three = 3;\n",
1274 );
1275
1276 let fs = FakeFs::new(cx.background());
1277 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1278
1279 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1280 let buffer = project
1281 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1282 .await
1283 .unwrap();
1284
1285 project.update(cx, |project, cx| {
1286 project
1287 .update_buffer_diagnostics(
1288 &buffer,
1289 vec![
1290 DiagnosticEntry {
1291 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1292 diagnostic: Diagnostic {
1293 severity: DiagnosticSeverity::ERROR,
1294 message: "syntax error 1".to_string(),
1295 ..Default::default()
1296 },
1297 },
1298 DiagnosticEntry {
1299 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1300 diagnostic: Diagnostic {
1301 severity: DiagnosticSeverity::ERROR,
1302 message: "syntax error 2".to_string(),
1303 ..Default::default()
1304 },
1305 },
1306 ],
1307 None,
1308 cx,
1309 )
1310 .unwrap();
1311 });
1312
1313 // An empty range is extended forward to include the following character.
1314 // At the end of a line, an empty range is extended backward to include
1315 // the preceding character.
1316 buffer.read_with(cx, |buffer, _| {
1317 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1318 assert_eq!(
1319 chunks
1320 .iter()
1321 .map(|(s, d)| (s.as_str(), *d))
1322 .collect::<Vec<_>>(),
1323 &[
1324 ("let one = ", None),
1325 (";", Some(DiagnosticSeverity::ERROR)),
1326 ("\nlet two =", None),
1327 (" ", Some(DiagnosticSeverity::ERROR)),
1328 ("\nlet three = 3;\n", None)
1329 ]
1330 );
1331 });
1332}
1333
1334#[gpui::test]
1335async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
1336 cx.foreground().forbid_parking();
1337
1338 let mut language = Language::new(
1339 LanguageConfig {
1340 name: "Rust".into(),
1341 path_suffixes: vec!["rs".to_string()],
1342 ..Default::default()
1343 },
1344 Some(tree_sitter_rust::language()),
1345 );
1346 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1347
1348 let text = "
1349 fn a() {
1350 f1();
1351 }
1352 fn b() {
1353 f2();
1354 }
1355 fn c() {
1356 f3();
1357 }
1358 "
1359 .unindent();
1360
1361 let fs = FakeFs::new(cx.background());
1362 fs.insert_tree(
1363 "/dir",
1364 json!({
1365 "a.rs": text.clone(),
1366 }),
1367 )
1368 .await;
1369
1370 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1371 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1372 let buffer = project
1373 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1374 .await
1375 .unwrap();
1376
1377 let mut fake_server = fake_servers.next().await.unwrap();
1378 let lsp_document_version = fake_server
1379 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1380 .await
1381 .text_document
1382 .version;
1383
1384 // Simulate editing the buffer after the language server computes some edits.
1385 buffer.update(cx, |buffer, cx| {
1386 buffer.edit(
1387 [(
1388 Point::new(0, 0)..Point::new(0, 0),
1389 "// above first function\n",
1390 )],
1391 None,
1392 cx,
1393 );
1394 buffer.edit(
1395 [(
1396 Point::new(2, 0)..Point::new(2, 0),
1397 " // inside first function\n",
1398 )],
1399 None,
1400 cx,
1401 );
1402 buffer.edit(
1403 [(
1404 Point::new(6, 4)..Point::new(6, 4),
1405 "// inside second function ",
1406 )],
1407 None,
1408 cx,
1409 );
1410
1411 assert_eq!(
1412 buffer.text(),
1413 "
1414 // above first function
1415 fn a() {
1416 // inside first function
1417 f1();
1418 }
1419 fn b() {
1420 // inside second function f2();
1421 }
1422 fn c() {
1423 f3();
1424 }
1425 "
1426 .unindent()
1427 );
1428 });
1429
1430 let edits = project
1431 .update(cx, |project, cx| {
1432 project.edits_from_lsp(
1433 &buffer,
1434 vec![
1435 // replace body of first function
1436 lsp::TextEdit {
1437 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1438 new_text: "
1439 fn a() {
1440 f10();
1441 }
1442 "
1443 .unindent(),
1444 },
1445 // edit inside second function
1446 lsp::TextEdit {
1447 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1448 new_text: "00".into(),
1449 },
1450 // edit inside third function via two distinct edits
1451 lsp::TextEdit {
1452 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1453 new_text: "4000".into(),
1454 },
1455 lsp::TextEdit {
1456 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1457 new_text: "".into(),
1458 },
1459 ],
1460 Some(lsp_document_version),
1461 cx,
1462 )
1463 })
1464 .await
1465 .unwrap();
1466
1467 buffer.update(cx, |buffer, cx| {
1468 for (range, new_text) in edits {
1469 buffer.edit([(range, new_text)], None, cx);
1470 }
1471 assert_eq!(
1472 buffer.text(),
1473 "
1474 // above first function
1475 fn a() {
1476 // inside first function
1477 f10();
1478 }
1479 fn b() {
1480 // inside second function f200();
1481 }
1482 fn c() {
1483 f4000();
1484 }
1485 "
1486 .unindent()
1487 );
1488 });
1489}
1490
1491#[gpui::test]
1492async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1493 cx.foreground().forbid_parking();
1494
1495 let text = "
1496 use a::b;
1497 use a::c;
1498
1499 fn f() {
1500 b();
1501 c();
1502 }
1503 "
1504 .unindent();
1505
1506 let fs = FakeFs::new(cx.background());
1507 fs.insert_tree(
1508 "/dir",
1509 json!({
1510 "a.rs": text.clone(),
1511 }),
1512 )
1513 .await;
1514
1515 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1516 let buffer = project
1517 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1518 .await
1519 .unwrap();
1520
1521 // Simulate the language server sending us a small edit in the form of a very large diff.
1522 // Rust-analyzer does this when performing a merge-imports code action.
1523 let edits = project
1524 .update(cx, |project, cx| {
1525 project.edits_from_lsp(
1526 &buffer,
1527 [
1528 // Replace the first use statement without editing the semicolon.
1529 lsp::TextEdit {
1530 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
1531 new_text: "a::{b, c}".into(),
1532 },
1533 // Reinsert the remainder of the file between the semicolon and the final
1534 // newline of the file.
1535 lsp::TextEdit {
1536 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1537 new_text: "\n\n".into(),
1538 },
1539 lsp::TextEdit {
1540 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1541 new_text: "
1542 fn f() {
1543 b();
1544 c();
1545 }"
1546 .unindent(),
1547 },
1548 // Delete everything after the first newline of the file.
1549 lsp::TextEdit {
1550 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1551 new_text: "".into(),
1552 },
1553 ],
1554 None,
1555 cx,
1556 )
1557 })
1558 .await
1559 .unwrap();
1560
1561 buffer.update(cx, |buffer, cx| {
1562 let edits = edits
1563 .into_iter()
1564 .map(|(range, text)| {
1565 (
1566 range.start.to_point(buffer)..range.end.to_point(buffer),
1567 text,
1568 )
1569 })
1570 .collect::<Vec<_>>();
1571
1572 assert_eq!(
1573 edits,
1574 [
1575 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1576 (Point::new(1, 0)..Point::new(2, 0), "".into())
1577 ]
1578 );
1579
1580 for (range, new_text) in edits {
1581 buffer.edit([(range, new_text)], None, cx);
1582 }
1583 assert_eq!(
1584 buffer.text(),
1585 "
1586 use a::{b, c};
1587
1588 fn f() {
1589 b();
1590 c();
1591 }
1592 "
1593 .unindent()
1594 );
1595 });
1596}
1597
1598#[gpui::test]
1599async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
1600 cx.foreground().forbid_parking();
1601
1602 let text = "
1603 use a::b;
1604 use a::c;
1605
1606 fn f() {
1607 b();
1608 c();
1609 }
1610 "
1611 .unindent();
1612
1613 let fs = FakeFs::new(cx.background());
1614 fs.insert_tree(
1615 "/dir",
1616 json!({
1617 "a.rs": text.clone(),
1618 }),
1619 )
1620 .await;
1621
1622 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1623 let buffer = project
1624 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1625 .await
1626 .unwrap();
1627
1628 // Simulate the language server sending us edits in a non-ordered fashion,
1629 // with ranges sometimes being inverted or pointing to invalid locations.
1630 let edits = project
1631 .update(cx, |project, cx| {
1632 project.edits_from_lsp(
1633 &buffer,
1634 [
1635 lsp::TextEdit {
1636 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1637 new_text: "\n\n".into(),
1638 },
1639 lsp::TextEdit {
1640 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
1641 new_text: "a::{b, c}".into(),
1642 },
1643 lsp::TextEdit {
1644 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
1645 new_text: "".into(),
1646 },
1647 lsp::TextEdit {
1648 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1649 new_text: "
1650 fn f() {
1651 b();
1652 c();
1653 }"
1654 .unindent(),
1655 },
1656 ],
1657 None,
1658 cx,
1659 )
1660 })
1661 .await
1662 .unwrap();
1663
1664 buffer.update(cx, |buffer, cx| {
1665 let edits = edits
1666 .into_iter()
1667 .map(|(range, text)| {
1668 (
1669 range.start.to_point(buffer)..range.end.to_point(buffer),
1670 text,
1671 )
1672 })
1673 .collect::<Vec<_>>();
1674
1675 assert_eq!(
1676 edits,
1677 [
1678 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1679 (Point::new(1, 0)..Point::new(2, 0), "".into())
1680 ]
1681 );
1682
1683 for (range, new_text) in edits {
1684 buffer.edit([(range, new_text)], None, cx);
1685 }
1686 assert_eq!(
1687 buffer.text(),
1688 "
1689 use a::{b, c};
1690
1691 fn f() {
1692 b();
1693 c();
1694 }
1695 "
1696 .unindent()
1697 );
1698 });
1699}
1700
1701fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
1702 buffer: &Buffer,
1703 range: Range<T>,
1704) -> Vec<(String, Option<DiagnosticSeverity>)> {
1705 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
1706 for chunk in buffer.snapshot().chunks(range, true) {
1707 if chunks.last().map_or(false, |prev_chunk| {
1708 prev_chunk.1 == chunk.diagnostic_severity
1709 }) {
1710 chunks.last_mut().unwrap().0.push_str(chunk.text);
1711 } else {
1712 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
1713 }
1714 }
1715 chunks
1716}
1717
1718#[gpui::test(iterations = 10)]
1719async fn test_definition(cx: &mut gpui::TestAppContext) {
1720 let mut language = Language::new(
1721 LanguageConfig {
1722 name: "Rust".into(),
1723 path_suffixes: vec!["rs".to_string()],
1724 ..Default::default()
1725 },
1726 Some(tree_sitter_rust::language()),
1727 );
1728 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1729
1730 let fs = FakeFs::new(cx.background());
1731 fs.insert_tree(
1732 "/dir",
1733 json!({
1734 "a.rs": "const fn a() { A }",
1735 "b.rs": "const y: i32 = crate::a()",
1736 }),
1737 )
1738 .await;
1739
1740 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
1741 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1742
1743 let buffer = project
1744 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1745 .await
1746 .unwrap();
1747
1748 let fake_server = fake_servers.next().await.unwrap();
1749 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
1750 let params = params.text_document_position_params;
1751 assert_eq!(
1752 params.text_document.uri.to_file_path().unwrap(),
1753 Path::new("/dir/b.rs"),
1754 );
1755 assert_eq!(params.position, lsp::Position::new(0, 22));
1756
1757 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
1758 lsp::Location::new(
1759 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1760 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1761 ),
1762 )))
1763 });
1764
1765 let mut definitions = project
1766 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
1767 .await
1768 .unwrap();
1769
1770 // Assert no new language server started
1771 cx.foreground().run_until_parked();
1772 assert!(fake_servers.try_next().is_err());
1773
1774 assert_eq!(definitions.len(), 1);
1775 let definition = definitions.pop().unwrap();
1776 cx.update(|cx| {
1777 let target_buffer = definition.target.buffer.read(cx);
1778 assert_eq!(
1779 target_buffer
1780 .file()
1781 .unwrap()
1782 .as_local()
1783 .unwrap()
1784 .abs_path(cx),
1785 Path::new("/dir/a.rs"),
1786 );
1787 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
1788 assert_eq!(
1789 list_worktrees(&project, cx),
1790 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
1791 );
1792
1793 drop(definition);
1794 });
1795 cx.read(|cx| {
1796 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
1797 });
1798
1799 fn list_worktrees<'a>(
1800 project: &'a ModelHandle<Project>,
1801 cx: &'a AppContext,
1802 ) -> Vec<(&'a Path, bool)> {
1803 project
1804 .read(cx)
1805 .worktrees(cx)
1806 .map(|worktree| {
1807 let worktree = worktree.read(cx);
1808 (
1809 worktree.as_local().unwrap().abs_path().as_ref(),
1810 worktree.is_visible(),
1811 )
1812 })
1813 .collect::<Vec<_>>()
1814 }
1815}
1816
1817#[gpui::test]
1818async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
1819 let mut language = Language::new(
1820 LanguageConfig {
1821 name: "TypeScript".into(),
1822 path_suffixes: vec!["ts".to_string()],
1823 ..Default::default()
1824 },
1825 Some(tree_sitter_typescript::language_typescript()),
1826 );
1827 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1828
1829 let fs = FakeFs::new(cx.background());
1830 fs.insert_tree(
1831 "/dir",
1832 json!({
1833 "a.ts": "",
1834 }),
1835 )
1836 .await;
1837
1838 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1839 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1840 let buffer = project
1841 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1842 .await
1843 .unwrap();
1844
1845 let fake_server = fake_language_servers.next().await.unwrap();
1846
1847 let text = "let a = b.fqn";
1848 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1849 let completions = project.update(cx, |project, cx| {
1850 project.completions(&buffer, text.len(), cx)
1851 });
1852
1853 fake_server
1854 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1855 Ok(Some(lsp::CompletionResponse::Array(vec![
1856 lsp::CompletionItem {
1857 label: "fullyQualifiedName?".into(),
1858 insert_text: Some("fullyQualifiedName".into()),
1859 ..Default::default()
1860 },
1861 ])))
1862 })
1863 .next()
1864 .await;
1865 let completions = completions.await.unwrap();
1866 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
1867 assert_eq!(completions.len(), 1);
1868 assert_eq!(completions[0].new_text, "fullyQualifiedName");
1869 assert_eq!(
1870 completions[0].old_range.to_offset(&snapshot),
1871 text.len() - 3..text.len()
1872 );
1873
1874 let text = "let a = \"atoms/cmp\"";
1875 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1876 let completions = project.update(cx, |project, cx| {
1877 project.completions(&buffer, text.len() - 1, cx)
1878 });
1879
1880 fake_server
1881 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1882 Ok(Some(lsp::CompletionResponse::Array(vec![
1883 lsp::CompletionItem {
1884 label: "component".into(),
1885 ..Default::default()
1886 },
1887 ])))
1888 })
1889 .next()
1890 .await;
1891 let completions = completions.await.unwrap();
1892 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
1893 assert_eq!(completions.len(), 1);
1894 assert_eq!(completions[0].new_text, "component");
1895 assert_eq!(
1896 completions[0].old_range.to_offset(&snapshot),
1897 text.len() - 4..text.len() - 1
1898 );
1899}
1900
1901#[gpui::test]
1902async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
1903 let mut language = Language::new(
1904 LanguageConfig {
1905 name: "TypeScript".into(),
1906 path_suffixes: vec!["ts".to_string()],
1907 ..Default::default()
1908 },
1909 Some(tree_sitter_typescript::language_typescript()),
1910 );
1911 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1912
1913 let fs = FakeFs::new(cx.background());
1914 fs.insert_tree(
1915 "/dir",
1916 json!({
1917 "a.ts": "",
1918 }),
1919 )
1920 .await;
1921
1922 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1923 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1924 let buffer = project
1925 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1926 .await
1927 .unwrap();
1928
1929 let fake_server = fake_language_servers.next().await.unwrap();
1930
1931 let text = "let a = b.fqn";
1932 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1933 let completions = project.update(cx, |project, cx| {
1934 project.completions(&buffer, text.len(), cx)
1935 });
1936
1937 fake_server
1938 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1939 Ok(Some(lsp::CompletionResponse::Array(vec![
1940 lsp::CompletionItem {
1941 label: "fullyQualifiedName?".into(),
1942 insert_text: Some("fully\rQualified\r\nName".into()),
1943 ..Default::default()
1944 },
1945 ])))
1946 })
1947 .next()
1948 .await;
1949 let completions = completions.await.unwrap();
1950 assert_eq!(completions.len(), 1);
1951 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
1952}
1953
1954#[gpui::test(iterations = 10)]
1955async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
1956 let mut language = Language::new(
1957 LanguageConfig {
1958 name: "TypeScript".into(),
1959 path_suffixes: vec!["ts".to_string()],
1960 ..Default::default()
1961 },
1962 None,
1963 );
1964 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1965
1966 let fs = FakeFs::new(cx.background());
1967 fs.insert_tree(
1968 "/dir",
1969 json!({
1970 "a.ts": "a",
1971 }),
1972 )
1973 .await;
1974
1975 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1976 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1977 let buffer = project
1978 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1979 .await
1980 .unwrap();
1981
1982 let fake_server = fake_language_servers.next().await.unwrap();
1983
1984 // Language server returns code actions that contain commands, and not edits.
1985 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
1986 fake_server
1987 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
1988 Ok(Some(vec![
1989 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
1990 title: "The code action".into(),
1991 command: Some(lsp::Command {
1992 title: "The command".into(),
1993 command: "_the/command".into(),
1994 arguments: Some(vec![json!("the-argument")]),
1995 }),
1996 ..Default::default()
1997 }),
1998 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
1999 title: "two".into(),
2000 ..Default::default()
2001 }),
2002 ]))
2003 })
2004 .next()
2005 .await;
2006
2007 let action = actions.await.unwrap()[0].clone();
2008 let apply = project.update(cx, |project, cx| {
2009 project.apply_code_action(buffer.clone(), action, true, cx)
2010 });
2011
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the given command.
2014 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2015 |action, _| async move { Ok(action) },
2016 );
2017
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;
2058
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2061 let transaction = apply.await.unwrap();
2062 assert!(transaction.0.contains_key(&buffer));
2063 buffer.update(cx, |buffer, cx| {
2064 assert_eq!(buffer.text(), "Xa");
2065 buffer.undo(cx);
2066 assert_eq!(buffer.text(), "a");
2067 });
2068}
2069
2070#[gpui::test]
2071async fn test_save_file(cx: &mut gpui::TestAppContext) {
2072 let fs = FakeFs::new(cx.background());
2073 fs.insert_tree(
2074 "/dir",
2075 json!({
2076 "file1": "the old contents",
2077 }),
2078 )
2079 .await;
2080
2081 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2082 let buffer = project
2083 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2084 .await
2085 .unwrap();
2086 buffer.update(cx, |buffer, cx| {
2087 assert_eq!(buffer.text(), "the old contents");
2088 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2089 });
2090
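    // The edit above inserts "a line of text.\n" (16 bytes) 10 * 1024 times, so the
    // buffer now holds roughly 160 KiB, presumably to exercise saving a file of
    // non-trivial size.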
2091 project
2092 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2093 .await
2094 .unwrap();
2095
2096 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2097 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2098}
2099
2100#[gpui::test]
2101async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2102 let fs = FakeFs::new(cx.background());
2103 fs.insert_tree(
2104 "/dir",
2105 json!({
2106 "file1": "the old contents",
2107 }),
2108 )
2109 .await;
2110
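    // Open a worktree whose root is a single file rather than a directory.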
2111 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2112 let buffer = project
2113 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2114 .await
2115 .unwrap();
2116 buffer.update(cx, |buffer, cx| {
2117 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2118 });
2119
2120 project
2121 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2122 .await
2123 .unwrap();
2124
2125 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2126 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2127}
2128
2129#[gpui::test]
2130async fn test_save_as(cx: &mut gpui::TestAppContext) {
2131 let fs = FakeFs::new(cx.background());
2132 fs.insert_tree("/dir", json!({})).await;
2133
2134 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2135
2136 let languages = project.read_with(cx, |project, _| project.languages().clone());
2137 languages.register(
2138 "/some/path",
2139 LanguageConfig {
2140 name: "Rust".into(),
2141 path_suffixes: vec!["rs".into()],
2142 ..Default::default()
2143 },
2144 tree_sitter_rust::language(),
2145 None,
2146 |_| Default::default(),
2147 );
2148
2149 let buffer = project.update(cx, |project, cx| {
2150 project.create_buffer("", None, cx).unwrap()
2151 });
2152 buffer.update(cx, |buffer, cx| {
2153 buffer.edit([(0..0, "abc")], None, cx);
2154 assert!(buffer.is_dirty());
2155 assert!(!buffer.has_conflict());
2156 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2157 });
2158 project
2159 .update(cx, |project, cx| {
2160 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2161 })
2162 .await
2163 .unwrap();
2164 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2165
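    // After the save, the buffer should reflect its new path, be clean, and have its
    // language re-detected from the `.rs` extension.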
2166 cx.foreground().run_until_parked();
2167 buffer.read_with(cx, |buffer, cx| {
2168 assert_eq!(
2169 buffer.file().unwrap().full_path(cx),
2170 Path::new("dir/file1.rs")
2171 );
2172 assert!(!buffer.is_dirty());
2173 assert!(!buffer.has_conflict());
2174 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2175 });
2176
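    // Opening the just-saved path should return the same buffer rather than creating a new one.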
2177 let opened_buffer = project
2178 .update(cx, |project, cx| {
2179 project.open_local_buffer("/dir/file1.rs", cx)
2180 })
2181 .await
2182 .unwrap();
2183 assert_eq!(opened_buffer, buffer);
2184}
2185
2186#[gpui::test(retries = 5)]
2187async fn test_rescan_and_remote_updates(
2188 deterministic: Arc<Deterministic>,
2189 cx: &mut gpui::TestAppContext,
2190) {
2191 let dir = temp_tree(json!({
2192 "a": {
2193 "file1": "",
2194 "file2": "",
2195 "file3": "",
2196 },
2197 "b": {
2198 "c": {
2199 "file4": "",
2200 "file5": "",
2201 }
2202 }
2203 }));
2204
2205 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2206 let rpc = project.read_with(cx, |p, _| p.client.clone());
2207
2208 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2209 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2210 async move { buffer.await.unwrap() }
2211 };
2212 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2213 project.read_with(cx, |project, cx| {
2214 let tree = project.worktrees(cx).next().unwrap();
2215 tree.read(cx)
2216 .entry_for_path(path)
2217 .unwrap_or_else(|| panic!("no entry for path {}", path))
2218 .id
2219 })
2220 };
2221
2222 let buffer2 = buffer_for_path("a/file2", cx).await;
2223 let buffer3 = buffer_for_path("a/file3", cx).await;
2224 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2225 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2226
2227 let file2_id = id_for_path("a/file2", cx);
2228 let file3_id = id_for_path("a/file3", cx);
2229 let file4_id = id_for_path("b/c/file4", cx);
2230
2231 // Create a remote copy of this worktree.
2232 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2233 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
2234 let remote = cx.update(|cx| {
2235 Worktree::remote(
2236 1,
2237 1,
2238 proto::WorktreeMetadata {
2239 id: initial_snapshot.id().to_proto(),
2240 root_name: initial_snapshot.root_name().into(),
2241 abs_path: initial_snapshot
2242 .abs_path()
2243 .as_os_str()
2244 .to_string_lossy()
2245 .into(),
2246 visible: true,
2247 },
2248 rpc.clone(),
2249 cx,
2250 )
2251 });
2252 remote.update(cx, |remote, _| {
2253 let update = initial_snapshot.build_initial_update(1);
2254 remote.as_remote_mut().unwrap().update_from_remote(update);
2255 });
2256 deterministic.run_until_parked();
2257
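    // None of the open buffers were edited, so they should all still be clean.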
2258 cx.read(|cx| {
2259 assert!(!buffer2.read(cx).is_dirty());
2260 assert!(!buffer3.read(cx).is_dirty());
2261 assert!(!buffer4.read(cx).is_dirty());
2262 assert!(!buffer5.read(cx).is_dirty());
2263 });
2264
2265 // Rename and delete files and directories.
2266 tree.flush_fs_events(cx).await;
2267 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2268 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2269 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2270 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2271 tree.flush_fs_events(cx).await;
2272
2273 let expected_paths = vec![
2274 "a",
2275 "a/file1",
2276 "a/file2.new",
2277 "b",
2278 "d",
2279 "d/file3",
2280 "d/file4",
2281 ];
2282
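    // Entries keep their ids across renames, open buffers follow their files to the
    // new paths, and the buffer whose file was removed is marked as deleted.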
2283 cx.read(|app| {
2284 assert_eq!(
2285 tree.read(app)
2286 .paths()
2287 .map(|p| p.to_str().unwrap())
2288 .collect::<Vec<_>>(),
2289 expected_paths
2290 );
2291
2292 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2293 assert_eq!(id_for_path("d/file3", cx), file3_id);
2294 assert_eq!(id_for_path("d/file4", cx), file4_id);
2295
2296 assert_eq!(
2297 buffer2.read(app).file().unwrap().path().as_ref(),
2298 Path::new("a/file2.new")
2299 );
2300 assert_eq!(
2301 buffer3.read(app).file().unwrap().path().as_ref(),
2302 Path::new("d/file3")
2303 );
2304 assert_eq!(
2305 buffer4.read(app).file().unwrap().path().as_ref(),
2306 Path::new("d/file4")
2307 );
2308 assert_eq!(
2309 buffer5.read(app).file().unwrap().path().as_ref(),
2310 Path::new("b/c/file5")
2311 );
2312
2313 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2314 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2315 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2316 assert!(buffer5.read(app).file().unwrap().is_deleted());
2317 });
2318
2319 // Update the remote worktree. Check that it becomes consistent with the
2320 // local worktree.
2321 remote.update(cx, |remote, cx| {
2322 let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
2323 &initial_snapshot,
2324 1,
2325 1,
2326 true,
2327 );
2328 remote.as_remote_mut().unwrap().update_from_remote(update);
2329 });
2330 deterministic.run_until_parked();
2331 remote.read_with(cx, |remote, _| {
2332 assert_eq!(
2333 remote
2334 .paths()
2335 .map(|p| p.to_str().unwrap())
2336 .collect::<Vec<_>>(),
2337 expected_paths
2338 );
2339 });
2340}
2341
2342#[gpui::test(iterations = 10)]
2343async fn test_buffer_identity_across_renames(
2344 deterministic: Arc<Deterministic>,
2345 cx: &mut gpui::TestAppContext,
2346) {
2347 let fs = FakeFs::new(cx.background());
2348 fs.insert_tree(
2349 "/dir",
2350 json!({
2351 "a": {
2352 "file1": "",
2353 }
2354 }),
2355 )
2356 .await;
2357
2358 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2359 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2360 let tree_id = tree.read_with(cx, |tree, _| tree.id());
2361
2362 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2363 project.read_with(cx, |project, cx| {
2364 let tree = project.worktrees(cx).next().unwrap();
2365 tree.read(cx)
2366 .entry_for_path(path)
2367 .unwrap_or_else(|| panic!("no entry for path {}", path))
2368 .id
2369 })
2370 };
2371
2372 let dir_id = id_for_path("a", cx);
2373 let file_id = id_for_path("a/file1", cx);
2374 let buffer = project
2375 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2376 .await
2377 .unwrap();
2378 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2379
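    // Rename the parent directory. Entry ids and the already-open buffer should be preserved.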
2380 project
2381 .update(cx, |project, cx| {
2382 project.rename_entry(dir_id, Path::new("b"), cx)
2383 })
2384 .unwrap()
2385 .await
2386 .unwrap();
2387 deterministic.run_until_parked();
2388 assert_eq!(id_for_path("b", cx), dir_id);
2389 assert_eq!(id_for_path("b/file1", cx), file_id);
2390 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2391}
2392
2393#[gpui::test]
2394async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2395 let fs = FakeFs::new(cx.background());
2396 fs.insert_tree(
2397 "/dir",
2398 json!({
2399 "a.txt": "a-contents",
2400 "b.txt": "b-contents",
2401 }),
2402 )
2403 .await;
2404
2405 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2406
2407 // Spawn multiple tasks to open paths, repeating some paths.
2408 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2409 (
2410 p.open_local_buffer("/dir/a.txt", cx),
2411 p.open_local_buffer("/dir/b.txt", cx),
2412 p.open_local_buffer("/dir/a.txt", cx),
2413 )
2414 });
2415
2416 let buffer_a_1 = buffer_a_1.await.unwrap();
2417 let buffer_a_2 = buffer_a_2.await.unwrap();
2418 let buffer_b = buffer_b.await.unwrap();
2419 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2420 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2421
2422 // There is only one buffer per path.
2423 let buffer_a_id = buffer_a_1.id();
2424 assert_eq!(buffer_a_2.id(), buffer_a_id);
2425
    // Drop one handle to the buffer, then open the same path again while another handle keeps it alive.
2427 drop(buffer_a_1);
2428 let buffer_a_3 = project
2429 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2430 .await
2431 .unwrap();
2432
2433 // There's still only one buffer per path.
2434 assert_eq!(buffer_a_3.id(), buffer_a_id);
2435}
2436
2437#[gpui::test]
2438async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2439 let fs = FakeFs::new(cx.background());
2440 fs.insert_tree(
2441 "/dir",
2442 json!({
2443 "file1": "abc",
2444 "file2": "def",
2445 "file3": "ghi",
2446 }),
2447 )
2448 .await;
2449
2450 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2451
2452 let buffer1 = project
2453 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2454 .await
2455 .unwrap();
2456 let events = Rc::new(RefCell::new(Vec::new()));
2457
2458 // initially, the buffer isn't dirty.
2459 buffer1.update(cx, |buffer, cx| {
2460 cx.subscribe(&buffer1, {
2461 let events = events.clone();
2462 move |_, _, event, _| match event {
2463 BufferEvent::Operation(_) => {}
2464 _ => events.borrow_mut().push(event.clone()),
2465 }
2466 })
2467 .detach();
2468
2469 assert!(!buffer.is_dirty());
2470 assert!(events.borrow().is_empty());
2471
2472 buffer.edit([(1..2, "")], None, cx);
2473 });
2474
    // after the first edit, the buffer is dirty, and emits an edited event followed by a dirty-changed event.
2476 buffer1.update(cx, |buffer, cx| {
2477 assert!(buffer.text() == "ac");
2478 assert!(buffer.is_dirty());
2479 assert_eq!(
2480 *events.borrow(),
2481 &[language::Event::Edited, language::Event::DirtyChanged]
2482 );
2483 events.borrow_mut().clear();
2484 buffer.did_save(
2485 buffer.version(),
2486 buffer.as_rope().fingerprint(),
2487 buffer.file().unwrap().mtime(),
2488 cx,
2489 );
2490 });
2491
2492 // after saving, the buffer is not dirty, and emits a saved event.
2493 buffer1.update(cx, |buffer, cx| {
2494 assert!(!buffer.is_dirty());
2495 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2496 events.borrow_mut().clear();
2497
2498 buffer.edit([(1..1, "B")], None, cx);
2499 buffer.edit([(2..2, "D")], None, cx);
2500 });
2501
    // after editing again, the buffer is dirty, and emits further edited events and another dirty-changed event.
2503 buffer1.update(cx, |buffer, cx| {
2504 assert!(buffer.text() == "aBDc");
2505 assert!(buffer.is_dirty());
2506 assert_eq!(
2507 *events.borrow(),
2508 &[
2509 language::Event::Edited,
2510 language::Event::DirtyChanged,
2511 language::Event::Edited,
2512 ],
2513 );
2514 events.borrow_mut().clear();
2515
2516 // After restoring the buffer to its previously-saved state,
2517 // the buffer is not considered dirty anymore.
2518 buffer.edit([(1..3, "")], None, cx);
2519 assert!(buffer.text() == "ac");
2520 assert!(!buffer.is_dirty());
2521 });
2522
2523 assert_eq!(
2524 *events.borrow(),
2525 &[language::Event::Edited, language::Event::DirtyChanged]
2526 );
2527
2528 // When a file is deleted, the buffer is considered dirty.
2529 let events = Rc::new(RefCell::new(Vec::new()));
2530 let buffer2 = project
2531 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2532 .await
2533 .unwrap();
2534 buffer2.update(cx, |_, cx| {
2535 cx.subscribe(&buffer2, {
2536 let events = events.clone();
2537 move |_, _, event, _| events.borrow_mut().push(event.clone())
2538 })
2539 .detach();
2540 });
2541
2542 fs.remove_file("/dir/file2".as_ref(), Default::default())
2543 .await
2544 .unwrap();
2545 cx.foreground().run_until_parked();
2546 buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
2547 assert_eq!(
2548 *events.borrow(),
2549 &[
2550 language::Event::DirtyChanged,
2551 language::Event::FileHandleChanged
2552 ]
2553 );
2554
    // When a file that is already dirty is deleted, no additional dirty-changed event is emitted.
2556 let events = Rc::new(RefCell::new(Vec::new()));
2557 let buffer3 = project
2558 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2559 .await
2560 .unwrap();
2561 buffer3.update(cx, |_, cx| {
2562 cx.subscribe(&buffer3, {
2563 let events = events.clone();
2564 move |_, _, event, _| events.borrow_mut().push(event.clone())
2565 })
2566 .detach();
2567 });
2568
2569 buffer3.update(cx, |buffer, cx| {
2570 buffer.edit([(0..0, "x")], None, cx);
2571 });
2572 events.borrow_mut().clear();
2573 fs.remove_file("/dir/file3".as_ref(), Default::default())
2574 .await
2575 .unwrap();
2576 cx.foreground().run_until_parked();
2577 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2578 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2579}
2580
2581#[gpui::test]
2582async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
2583 let initial_contents = "aaa\nbbbbb\nc\n";
2584 let fs = FakeFs::new(cx.background());
2585 fs.insert_tree(
2586 "/dir",
2587 json!({
2588 "the-file": initial_contents,
2589 }),
2590 )
2591 .await;
2592 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2593 let buffer = project
2594 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
2595 .await
2596 .unwrap();
2597
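    // Place an anchor at column 1 of each of the first three rows so we can verify
    // that anchors keep their logical positions across the reload diff below.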
2598 let anchors = (0..3)
2599 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
2600 .collect::<Vec<_>>();
2601
2602 // Change the file on disk, adding two new lines of text, and removing
2603 // one line.
2604 buffer.read_with(cx, |buffer, _| {
2605 assert!(!buffer.is_dirty());
2606 assert!(!buffer.has_conflict());
2607 });
2608 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
2609 fs.save(
2610 "/dir/the-file".as_ref(),
2611 &new_contents.into(),
2612 LineEnding::Unix,
2613 )
2614 .await
2615 .unwrap();
2616
2617 // Because the buffer was not modified, it is reloaded from disk. Its
2618 // contents are edited according to the diff between the old and new
2619 // file contents.
2620 cx.foreground().run_until_parked();
2621 buffer.update(cx, |buffer, _| {
2622 assert_eq!(buffer.text(), new_contents);
2623 assert!(!buffer.is_dirty());
2624 assert!(!buffer.has_conflict());
2625
2626 let anchor_positions = anchors
2627 .iter()
2628 .map(|anchor| anchor.to_point(&*buffer))
2629 .collect::<Vec<_>>();
2630 assert_eq!(
2631 anchor_positions,
2632 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
2633 );
2634 });
2635
    // Modify the buffer.
2637 buffer.update(cx, |buffer, cx| {
2638 buffer.edit([(0..0, " ")], None, cx);
2639 assert!(buffer.is_dirty());
2640 assert!(!buffer.has_conflict());
2641 });
2642
2643 // Change the file on disk again, adding blank lines to the beginning.
2644 fs.save(
2645 "/dir/the-file".as_ref(),
2646 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
2647 LineEnding::Unix,
2648 )
2649 .await
2650 .unwrap();
2651
2652 // Because the buffer is modified, it doesn't reload from disk, but is
2653 // marked as having a conflict.
2654 cx.foreground().run_until_parked();
2655 buffer.read_with(cx, |buffer, _| {
2656 assert!(buffer.has_conflict());
2657 });
2658}
2659
2660#[gpui::test]
2661async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
2662 let fs = FakeFs::new(cx.background());
2663 fs.insert_tree(
2664 "/dir",
2665 json!({
2666 "file1": "a\nb\nc\n",
2667 "file2": "one\r\ntwo\r\nthree\r\n",
2668 }),
2669 )
2670 .await;
2671
2672 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2673 let buffer1 = project
2674 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2675 .await
2676 .unwrap();
2677 let buffer2 = project
2678 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2679 .await
2680 .unwrap();
2681
2682 buffer1.read_with(cx, |buffer, _| {
2683 assert_eq!(buffer.text(), "a\nb\nc\n");
2684 assert_eq!(buffer.line_ending(), LineEnding::Unix);
2685 });
2686 buffer2.read_with(cx, |buffer, _| {
2687 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
2688 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2689 });
2690
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
2693 fs.save(
2694 "/dir/file1".as_ref(),
2695 &"aaa\nb\nc\n".into(),
2696 LineEnding::Windows,
2697 )
2698 .await
2699 .unwrap();
2700 cx.foreground().run_until_parked();
2701 buffer1.read_with(cx, |buffer, _| {
2702 assert_eq!(buffer.text(), "aaa\nb\nc\n");
2703 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2704 });
2705
    // Save a file with Windows line endings. The file is written back to disk with CRLF line endings.
2707 buffer2.update(cx, |buffer, cx| {
2708 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
2709 });
2710 project
2711 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
2712 .await
2713 .unwrap();
2714 assert_eq!(
2715 fs.load("/dir/file2".as_ref()).await.unwrap(),
2716 "one\r\ntwo\r\nthree\r\nfour\r\n",
2717 );
2718}
2719
2720#[gpui::test]
2721async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
2722 cx.foreground().forbid_parking();
2723
2724 let fs = FakeFs::new(cx.background());
2725 fs.insert_tree(
2726 "/the-dir",
2727 json!({
2728 "a.rs": "
2729 fn foo(mut v: Vec<usize>) {
2730 for x in &v {
2731 v.push(1);
2732 }
2733 }
2734 "
2735 .unindent(),
2736 }),
2737 )
2738 .await;
2739
2740 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
2741 let buffer = project
2742 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
2743 .await
2744 .unwrap();
2745
2746 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
2747 let message = lsp::PublishDiagnosticsParams {
2748 uri: buffer_uri.clone(),
2749 diagnostics: vec![
2750 lsp::Diagnostic {
2751 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2752 severity: Some(DiagnosticSeverity::WARNING),
2753 message: "error 1".to_string(),
2754 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2755 location: lsp::Location {
2756 uri: buffer_uri.clone(),
2757 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2758 },
2759 message: "error 1 hint 1".to_string(),
2760 }]),
2761 ..Default::default()
2762 },
2763 lsp::Diagnostic {
2764 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2765 severity: Some(DiagnosticSeverity::HINT),
2766 message: "error 1 hint 1".to_string(),
2767 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2768 location: lsp::Location {
2769 uri: buffer_uri.clone(),
2770 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2771 },
2772 message: "original diagnostic".to_string(),
2773 }]),
2774 ..Default::default()
2775 },
2776 lsp::Diagnostic {
2777 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2778 severity: Some(DiagnosticSeverity::ERROR),
2779 message: "error 2".to_string(),
2780 related_information: Some(vec![
2781 lsp::DiagnosticRelatedInformation {
2782 location: lsp::Location {
2783 uri: buffer_uri.clone(),
2784 range: lsp::Range::new(
2785 lsp::Position::new(1, 13),
2786 lsp::Position::new(1, 15),
2787 ),
2788 },
2789 message: "error 2 hint 1".to_string(),
2790 },
2791 lsp::DiagnosticRelatedInformation {
2792 location: lsp::Location {
2793 uri: buffer_uri.clone(),
2794 range: lsp::Range::new(
2795 lsp::Position::new(1, 13),
2796 lsp::Position::new(1, 15),
2797 ),
2798 },
2799 message: "error 2 hint 2".to_string(),
2800 },
2801 ]),
2802 ..Default::default()
2803 },
2804 lsp::Diagnostic {
2805 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2806 severity: Some(DiagnosticSeverity::HINT),
2807 message: "error 2 hint 1".to_string(),
2808 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2809 location: lsp::Location {
2810 uri: buffer_uri.clone(),
2811 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2812 },
2813 message: "original diagnostic".to_string(),
2814 }]),
2815 ..Default::default()
2816 },
2817 lsp::Diagnostic {
2818 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2819 severity: Some(DiagnosticSeverity::HINT),
2820 message: "error 2 hint 2".to_string(),
2821 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2822 location: lsp::Location {
2823 uri: buffer_uri,
2824 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2825 },
2826 message: "original diagnostic".to_string(),
2827 }]),
2828 ..Default::default()
2829 },
2830 ],
2831 version: None,
2832 };
2833
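    // Each primary diagnostic is grouped with the hints listed in its related
    // information: "error 1" and its hint form group 0, while "error 2" and its
    // two hints form group 1.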
2834 project
2835 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
2836 .unwrap();
2837 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2838
2839 assert_eq!(
2840 buffer
2841 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2842 .collect::<Vec<_>>(),
2843 &[
2844 DiagnosticEntry {
2845 range: Point::new(1, 8)..Point::new(1, 9),
2846 diagnostic: Diagnostic {
2847 severity: DiagnosticSeverity::WARNING,
2848 message: "error 1".to_string(),
2849 group_id: 0,
2850 is_primary: true,
2851 ..Default::default()
2852 }
2853 },
2854 DiagnosticEntry {
2855 range: Point::new(1, 8)..Point::new(1, 9),
2856 diagnostic: Diagnostic {
2857 severity: DiagnosticSeverity::HINT,
2858 message: "error 1 hint 1".to_string(),
2859 group_id: 0,
2860 is_primary: false,
2861 ..Default::default()
2862 }
2863 },
2864 DiagnosticEntry {
2865 range: Point::new(1, 13)..Point::new(1, 15),
2866 diagnostic: Diagnostic {
2867 severity: DiagnosticSeverity::HINT,
2868 message: "error 2 hint 1".to_string(),
2869 group_id: 1,
2870 is_primary: false,
2871 ..Default::default()
2872 }
2873 },
2874 DiagnosticEntry {
2875 range: Point::new(1, 13)..Point::new(1, 15),
2876 diagnostic: Diagnostic {
2877 severity: DiagnosticSeverity::HINT,
2878 message: "error 2 hint 2".to_string(),
2879 group_id: 1,
2880 is_primary: false,
2881 ..Default::default()
2882 }
2883 },
2884 DiagnosticEntry {
2885 range: Point::new(2, 8)..Point::new(2, 17),
2886 diagnostic: Diagnostic {
2887 severity: DiagnosticSeverity::ERROR,
2888 message: "error 2".to_string(),
2889 group_id: 1,
2890 is_primary: true,
2891 ..Default::default()
2892 }
2893 }
2894 ]
2895 );
2896
2897 assert_eq!(
2898 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
2899 &[
2900 DiagnosticEntry {
2901 range: Point::new(1, 8)..Point::new(1, 9),
2902 diagnostic: Diagnostic {
2903 severity: DiagnosticSeverity::WARNING,
2904 message: "error 1".to_string(),
2905 group_id: 0,
2906 is_primary: true,
2907 ..Default::default()
2908 }
2909 },
2910 DiagnosticEntry {
2911 range: Point::new(1, 8)..Point::new(1, 9),
2912 diagnostic: Diagnostic {
2913 severity: DiagnosticSeverity::HINT,
2914 message: "error 1 hint 1".to_string(),
2915 group_id: 0,
2916 is_primary: false,
2917 ..Default::default()
2918 }
2919 },
2920 ]
2921 );
2922 assert_eq!(
2923 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
2924 &[
2925 DiagnosticEntry {
2926 range: Point::new(1, 13)..Point::new(1, 15),
2927 diagnostic: Diagnostic {
2928 severity: DiagnosticSeverity::HINT,
2929 message: "error 2 hint 1".to_string(),
2930 group_id: 1,
2931 is_primary: false,
2932 ..Default::default()
2933 }
2934 },
2935 DiagnosticEntry {
2936 range: Point::new(1, 13)..Point::new(1, 15),
2937 diagnostic: Diagnostic {
2938 severity: DiagnosticSeverity::HINT,
2939 message: "error 2 hint 2".to_string(),
2940 group_id: 1,
2941 is_primary: false,
2942 ..Default::default()
2943 }
2944 },
2945 DiagnosticEntry {
2946 range: Point::new(2, 8)..Point::new(2, 17),
2947 diagnostic: Diagnostic {
2948 severity: DiagnosticSeverity::ERROR,
2949 message: "error 2".to_string(),
2950 group_id: 1,
2951 is_primary: true,
2952 ..Default::default()
2953 }
2954 }
2955 ]
2956 );
2957}
2958
2959#[gpui::test]
2960async fn test_rename(cx: &mut gpui::TestAppContext) {
2961 cx.foreground().forbid_parking();
2962
2963 let mut language = Language::new(
2964 LanguageConfig {
2965 name: "Rust".into(),
2966 path_suffixes: vec!["rs".to_string()],
2967 ..Default::default()
2968 },
2969 Some(tree_sitter_rust::language()),
2970 );
2971 let mut fake_servers = language
2972 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2973 capabilities: lsp::ServerCapabilities {
2974 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
2975 prepare_provider: Some(true),
2976 work_done_progress_options: Default::default(),
2977 })),
2978 ..Default::default()
2979 },
2980 ..Default::default()
2981 }))
2982 .await;
2983
2984 let fs = FakeFs::new(cx.background());
2985 fs.insert_tree(
2986 "/dir",
2987 json!({
2988 "one.rs": "const ONE: usize = 1;",
2989 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
2990 }),
2991 )
2992 .await;
2993
2994 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2995 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2996 let buffer = project
2997 .update(cx, |project, cx| {
2998 project.open_local_buffer("/dir/one.rs", cx)
2999 })
3000 .await
3001 .unwrap();
3002
3003 let fake_server = fake_servers.next().await.unwrap();
3004
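    // First ask the server which range at offset 7 (inside "ONE") can be renamed.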
3005 let response = project.update(cx, |project, cx| {
3006 project.prepare_rename(buffer.clone(), 7, cx)
3007 });
3008 fake_server
3009 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3010 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3011 assert_eq!(params.position, lsp::Position::new(0, 7));
3012 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3013 lsp::Position::new(0, 6),
3014 lsp::Position::new(0, 9),
3015 ))))
3016 })
3017 .next()
3018 .await
3019 .unwrap();
3020 let range = response.await.unwrap().unwrap();
3021 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
3022 assert_eq!(range, 6..9);
3023
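    // Perform the rename. The server's workspace edit touches both one.rs and two.rs,
    // so the resulting project transaction should contain two buffers.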
3024 let response = project.update(cx, |project, cx| {
3025 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3026 });
3027 fake_server
3028 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3029 assert_eq!(
3030 params.text_document_position.text_document.uri.as_str(),
3031 "file:///dir/one.rs"
3032 );
3033 assert_eq!(
3034 params.text_document_position.position,
3035 lsp::Position::new(0, 7)
3036 );
3037 assert_eq!(params.new_name, "THREE");
3038 Ok(Some(lsp::WorkspaceEdit {
3039 changes: Some(
3040 [
3041 (
3042 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3043 vec![lsp::TextEdit::new(
3044 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3045 "THREE".to_string(),
3046 )],
3047 ),
3048 (
3049 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3050 vec![
3051 lsp::TextEdit::new(
3052 lsp::Range::new(
3053 lsp::Position::new(0, 24),
3054 lsp::Position::new(0, 27),
3055 ),
3056 "THREE".to_string(),
3057 ),
3058 lsp::TextEdit::new(
3059 lsp::Range::new(
3060 lsp::Position::new(0, 35),
3061 lsp::Position::new(0, 38),
3062 ),
3063 "THREE".to_string(),
3064 ),
3065 ],
3066 ),
3067 ]
3068 .into_iter()
3069 .collect(),
3070 ),
3071 ..Default::default()
3072 }))
3073 })
3074 .next()
3075 .await
3076 .unwrap();
3077 let mut transaction = response.await.unwrap().0;
3078 assert_eq!(transaction.len(), 2);
3079 assert_eq!(
3080 transaction
3081 .remove_entry(&buffer)
3082 .unwrap()
3083 .0
3084 .read_with(cx, |buffer, _| buffer.text()),
3085 "const THREE: usize = 1;"
3086 );
3087 assert_eq!(
3088 transaction
3089 .into_keys()
3090 .next()
3091 .unwrap()
3092 .read_with(cx, |buffer, _| buffer.text()),
3093 "const TWO: usize = one::THREE + one::THREE;"
3094 );
3095}
3096
3097#[gpui::test]
3098async fn test_search(cx: &mut gpui::TestAppContext) {
3099 let fs = FakeFs::new(cx.background());
3100 fs.insert_tree(
3101 "/dir",
3102 json!({
3103 "one.rs": "const ONE: usize = 1;",
3104 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3105 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3106 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3107 }),
3108 )
3109 .await;
3110 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3111 assert_eq!(
3112 search(&project, SearchQuery::text("TWO", false, true), cx)
3113 .await
3114 .unwrap(),
3115 HashMap::from_iter([
3116 ("two.rs".to_string(), vec![6..9]),
3117 ("three.rs".to_string(), vec![37..40])
3118 ])
3119 );
3120
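    // Edit an open buffer so that it now contains matches. The search results should
    // reflect the unsaved, in-memory contents rather than what is on disk.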
3121 let buffer_4 = project
3122 .update(cx, |project, cx| {
3123 project.open_local_buffer("/dir/four.rs", cx)
3124 })
3125 .await
3126 .unwrap();
3127 buffer_4.update(cx, |buffer, cx| {
3128 let text = "two::TWO";
3129 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3130 });
3131
3132 assert_eq!(
3133 search(&project, SearchQuery::text("TWO", false, true), cx)
3134 .await
3135 .unwrap(),
3136 HashMap::from_iter([
3137 ("two.rs".to_string(), vec![6..9]),
3138 ("three.rs".to_string(), vec![37..40]),
3139 ("four.rs".to_string(), vec![25..28, 36..39])
3140 ])
3141 );
3142
3143 async fn search(
3144 project: &ModelHandle<Project>,
3145 query: SearchQuery,
3146 cx: &mut gpui::TestAppContext,
3147 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
3148 let results = project
3149 .update(cx, |project, cx| project.search(query, cx))
3150 .await?;
3151
3152 Ok(results
3153 .into_iter()
3154 .map(|(buffer, ranges)| {
3155 buffer.read_with(cx, |buffer, _| {
3156 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3157 let ranges = ranges
3158 .into_iter()
3159 .map(|range| range.to_offset(buffer))
3160 .collect::<Vec<_>>();
3161 (path, ranges)
3162 })
3163 })
3164 .collect())
3165 }
3166}