1use crate::{worktree::WorktreeHandle, Event, *};
2use fs::LineEnding;
3use fs::{FakeFs, RealFs};
4use futures::{future, StreamExt};
5use gpui::{executor::Deterministic, test::subscribe};
6use language::{
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use serde_json::json;
12use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
13use unindent::Unindent as _;
14use util::{assert_set_eq, test::temp_tree};
15
16#[gpui::test]
17async fn test_symlinks(cx: &mut gpui::TestAppContext) {
18 let dir = temp_tree(json!({
19 "root": {
20 "apple": "",
21 "banana": {
22 "carrot": {
23 "date": "",
24 "endive": "",
25 }
26 },
27 "fennel": {
28 "grape": "",
29 }
30 }
31 }));
32
33 let root_link_path = dir.path().join("root_link");
34 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
35 unix::fs::symlink(
36 &dir.path().join("root/fennel"),
37 &dir.path().join("root/finnochio"),
38 )
39 .unwrap();
40
41 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
42 project.read_with(cx, |project, cx| {
43 let tree = project.worktrees(cx).next().unwrap().read(cx);
44 assert_eq!(tree.file_count(), 5);
45 assert_eq!(
46 tree.inode_for_path("fennel/grape"),
47 tree.inode_for_path("finnochio/grape")
48 );
49 });
50}
51
// End-to-end check of language-server lifecycle management: servers are
// started lazily per language, receive open/change/save/close notifications
// only for buffers of their language, track file renames (including renames
// that change a buffer's language), and are restarted or stopped on demand.
#[gpui::test]
async fn test_managing_language_servers(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    cx.foreground().forbid_parking();

    let mut rust_language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut json_language = Language::new(
        LanguageConfig {
            name: "JSON".into(),
            path_suffixes: vec!["json".to_string()],
            ..Default::default()
        },
        None,
    );
    // Each language gets its own fake LSP adapter; the streams yield a new
    // fake server instance every time the project (re)starts that server.
    let mut fake_rust_servers = rust_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;
    let mut fake_json_servers = json_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    // No language is assigned yet, since the registry is still empty.
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(json_language));
        project.languages.add(Arc::new(rust_language));
    });
    deterministic.run_until_parked();
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: Default::default()
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so it gets no completion triggers.
    toml_buffer.read_with(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: Default::default()
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    // The TOML edit below must produce no notification; receiving the rust
    // notification for test2.rs right after proves it was skipped.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    toml_buffer
        .update(cx, |buffer, cx| buffer.save(cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    // A rename within the same language surfaces as a close of the old path
    // followed by an open of the new one.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // Seed one diagnostic on the buffer so we can verify below that it is
    // cleared when the buffer's language changes.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers are asked to shut down before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 1,
            text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is unspecified, hence the set
    // comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 1,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
439
440#[gpui::test]
441async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
442 cx.foreground().forbid_parking();
443
444 let fs = FakeFs::new(cx.background());
445 fs.insert_tree(
446 "/dir",
447 json!({
448 "a.rs": "let a = 1;",
449 "b.rs": "let b = 2;"
450 }),
451 )
452 .await;
453
454 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
455
456 let buffer_a = project
457 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
458 .await
459 .unwrap();
460 let buffer_b = project
461 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
462 .await
463 .unwrap();
464
465 project.update(cx, |project, cx| {
466 project
467 .update_diagnostics(
468 0,
469 lsp::PublishDiagnosticsParams {
470 uri: Url::from_file_path("/dir/a.rs").unwrap(),
471 version: None,
472 diagnostics: vec![lsp::Diagnostic {
473 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
474 severity: Some(lsp::DiagnosticSeverity::ERROR),
475 message: "error 1".to_string(),
476 ..Default::default()
477 }],
478 },
479 &[],
480 cx,
481 )
482 .unwrap();
483 project
484 .update_diagnostics(
485 0,
486 lsp::PublishDiagnosticsParams {
487 uri: Url::from_file_path("/dir/b.rs").unwrap(),
488 version: None,
489 diagnostics: vec![lsp::Diagnostic {
490 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
491 severity: Some(lsp::DiagnosticSeverity::WARNING),
492 message: "error 2".to_string(),
493 ..Default::default()
494 }],
495 },
496 &[],
497 cx,
498 )
499 .unwrap();
500 });
501
502 buffer_a.read_with(cx, |buffer, _| {
503 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
504 assert_eq!(
505 chunks
506 .iter()
507 .map(|(s, d)| (s.as_str(), *d))
508 .collect::<Vec<_>>(),
509 &[
510 ("let ", None),
511 ("a", Some(DiagnosticSeverity::ERROR)),
512 (" = 1;", None),
513 ]
514 );
515 });
516 buffer_b.read_with(cx, |buffer, _| {
517 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
518 assert_eq!(
519 chunks
520 .iter()
521 .map(|(s, d)| (s.as_str(), *d))
522 .collect::<Vec<_>>(),
523 &[
524 ("let ", None),
525 ("b", Some(DiagnosticSeverity::WARNING)),
526 (" = 2;", None),
527 ]
528 );
529 });
530}
531
532#[gpui::test]
533async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
534 cx.foreground().forbid_parking();
535
536 let fs = FakeFs::new(cx.background());
537 fs.insert_tree(
538 "/root",
539 json!({
540 "dir": {
541 "a.rs": "let a = 1;",
542 },
543 "other.rs": "let b = c;"
544 }),
545 )
546 .await;
547
548 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
549
550 let (worktree, _) = project
551 .update(cx, |project, cx| {
552 project.find_or_create_local_worktree("/root/other.rs", false, cx)
553 })
554 .await
555 .unwrap();
556 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
557
558 project.update(cx, |project, cx| {
559 project
560 .update_diagnostics(
561 0,
562 lsp::PublishDiagnosticsParams {
563 uri: Url::from_file_path("/root/other.rs").unwrap(),
564 version: None,
565 diagnostics: vec![lsp::Diagnostic {
566 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
567 severity: Some(lsp::DiagnosticSeverity::ERROR),
568 message: "unknown variable 'c'".to_string(),
569 ..Default::default()
570 }],
571 },
572 &[],
573 cx,
574 )
575 .unwrap();
576 });
577
578 let buffer = project
579 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
580 .await
581 .unwrap();
582 buffer.read_with(cx, |buffer, _| {
583 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
584 assert_eq!(
585 chunks
586 .iter()
587 .map(|(s, d)| (s.as_str(), *d))
588 .collect::<Vec<_>>(),
589 &[
590 ("let b = ", None),
591 ("c", Some(DiagnosticSeverity::ERROR)),
592 (";", None),
593 ]
594 );
595 });
596
597 project.read_with(cx, |project, cx| {
598 assert_eq!(project.diagnostic_summaries(cx).next(), None);
599 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
600 });
601}
602
// Verifies that LSP progress notifications carrying the adapter's
// disk-based-diagnostics token are translated into project
// DiskBasedDiagnosticsStarted/Finished events, that published diagnostics emit
// DiagnosticsUpdated, and that re-publishing an identical empty set emits no
// duplicate event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Subscribe before triggering progress so no event is missed.
    let mut events = subscribe(&project, cx);

    let fake_server = fake_servers.next().await.unwrap();
    // Progress tokens are suffixed per run; only the prefix must match the
    // adapter's configured token.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: 0,
        }
    );

    // Publishing diagnostics mid-progress emits a DiagnosticsUpdated event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: 0,
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: 0
        }
    );

    // Opening the diagnosed file afterwards still surfaces the diagnostic,
    // positioned where the server reported it.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.read_with(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: 0,
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second identical (empty) publish is a no-op: after the executor
    // quiesces, no further event is pending.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.foreground().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
730
731#[gpui::test]
732async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
733 cx.foreground().forbid_parking();
734
735 let progress_token = "the-progress-token";
736 let mut language = Language::new(
737 LanguageConfig {
738 path_suffixes: vec!["rs".to_string()],
739 ..Default::default()
740 },
741 None,
742 );
743 let mut fake_servers = language
744 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
745 disk_based_diagnostics_sources: vec!["disk".into()],
746 disk_based_diagnostics_progress_token: Some(progress_token.into()),
747 ..Default::default()
748 }))
749 .await;
750
751 let fs = FakeFs::new(cx.background());
752 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
753
754 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
755 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
756
757 let buffer = project
758 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
759 .await
760 .unwrap();
761
762 // Simulate diagnostics starting to update.
763 let fake_server = fake_servers.next().await.unwrap();
764 fake_server.start_progress(progress_token).await;
765
766 // Restart the server before the diagnostics finish updating.
767 project.update(cx, |project, cx| {
768 project.restart_language_servers_for_buffers([buffer], cx);
769 });
770 let mut events = subscribe(&project, cx);
771
772 // Simulate the newly started server sending more diagnostics.
773 let fake_server = fake_servers.next().await.unwrap();
774 fake_server.start_progress(progress_token).await;
775 assert_eq!(
776 events.next().await.unwrap(),
777 Event::DiskBasedDiagnosticsStarted {
778 language_server_id: 1
779 }
780 );
781 project.read_with(cx, |project, _| {
782 assert_eq!(
783 project
784 .language_servers_running_disk_based_diagnostics()
785 .collect::<Vec<_>>(),
786 [1]
787 );
788 });
789
790 // All diagnostics are considered done, despite the old server's diagnostic
791 // task never completing.
792 fake_server.end_progress(progress_token);
793 assert_eq!(
794 events.next().await.unwrap(),
795 Event::DiskBasedDiagnosticsFinished {
796 language_server_id: 1
797 }
798 );
799 project.read_with(cx, |project, _| {
800 assert_eq!(
801 project
802 .language_servers_running_disk_based_diagnostics()
803 .collect::<Vec<_>>(),
804 [0; 0]
805 );
806 });
807}
808
809#[gpui::test]
810async fn test_toggling_enable_language_server(
811 deterministic: Arc<Deterministic>,
812 cx: &mut gpui::TestAppContext,
813) {
814 deterministic.forbid_parking();
815
816 let mut rust = Language::new(
817 LanguageConfig {
818 name: Arc::from("Rust"),
819 path_suffixes: vec!["rs".to_string()],
820 ..Default::default()
821 },
822 None,
823 );
824 let mut fake_rust_servers = rust
825 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
826 name: "rust-lsp",
827 ..Default::default()
828 }))
829 .await;
830 let mut js = Language::new(
831 LanguageConfig {
832 name: Arc::from("JavaScript"),
833 path_suffixes: vec!["js".to_string()],
834 ..Default::default()
835 },
836 None,
837 );
838 let mut fake_js_servers = js
839 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
840 name: "js-lsp",
841 ..Default::default()
842 }))
843 .await;
844
845 let fs = FakeFs::new(cx.background());
846 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
847 .await;
848
849 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
850 project.update(cx, |project, _| {
851 project.languages.add(Arc::new(rust));
852 project.languages.add(Arc::new(js));
853 });
854
855 let _rs_buffer = project
856 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
857 .await
858 .unwrap();
859 let _js_buffer = project
860 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
861 .await
862 .unwrap();
863
864 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
865 assert_eq!(
866 fake_rust_server_1
867 .receive_notification::<lsp::notification::DidOpenTextDocument>()
868 .await
869 .text_document
870 .uri
871 .as_str(),
872 "file:///dir/a.rs"
873 );
874
875 let mut fake_js_server = fake_js_servers.next().await.unwrap();
876 assert_eq!(
877 fake_js_server
878 .receive_notification::<lsp::notification::DidOpenTextDocument>()
879 .await
880 .text_document
881 .uri
882 .as_str(),
883 "file:///dir/b.js"
884 );
885
886 // Disable Rust language server, ensuring only that server gets stopped.
887 cx.update(|cx| {
888 cx.update_global(|settings: &mut Settings, _| {
889 settings.language_overrides.insert(
890 Arc::from("Rust"),
891 settings::EditorSettings {
892 enable_language_server: Some(false),
893 ..Default::default()
894 },
895 );
896 })
897 });
898 fake_rust_server_1
899 .receive_notification::<lsp::notification::Exit>()
900 .await;
901
902 // Enable Rust and disable JavaScript language servers, ensuring that the
903 // former gets started again and that the latter stops.
904 cx.update(|cx| {
905 cx.update_global(|settings: &mut Settings, _| {
906 settings.language_overrides.insert(
907 Arc::from("Rust"),
908 settings::EditorSettings {
909 enable_language_server: Some(true),
910 ..Default::default()
911 },
912 );
913 settings.language_overrides.insert(
914 Arc::from("JavaScript"),
915 settings::EditorSettings {
916 enable_language_server: Some(false),
917 ..Default::default()
918 },
919 );
920 })
921 });
922 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
923 assert_eq!(
924 fake_rust_server_2
925 .receive_notification::<lsp::notification::DidOpenTextDocument>()
926 .await
927 .text_document
928 .uri
929 .as_str(),
930 "file:///dir/a.rs"
931 );
932 fake_js_server
933 .receive_notification::<lsp::notification::Exit>()
934 .await;
935}
936
937#[gpui::test]
938async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
939 cx.foreground().forbid_parking();
940
941 let mut language = Language::new(
942 LanguageConfig {
943 name: "Rust".into(),
944 path_suffixes: vec!["rs".to_string()],
945 ..Default::default()
946 },
947 Some(tree_sitter_rust::language()),
948 );
949 let mut fake_servers = language
950 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
951 disk_based_diagnostics_sources: vec!["disk".into()],
952 ..Default::default()
953 }))
954 .await;
955
956 let text = "
957 fn a() { A }
958 fn b() { BB }
959 fn c() { CCC }
960 "
961 .unindent();
962
963 let fs = FakeFs::new(cx.background());
964 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
965
966 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
967 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
968
969 let buffer = project
970 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
971 .await
972 .unwrap();
973
974 let mut fake_server = fake_servers.next().await.unwrap();
975 let open_notification = fake_server
976 .receive_notification::<lsp::notification::DidOpenTextDocument>()
977 .await;
978
979 // Edit the buffer, moving the content down
980 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
981 let change_notification_1 = fake_server
982 .receive_notification::<lsp::notification::DidChangeTextDocument>()
983 .await;
984 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
985
986 // Report some diagnostics for the initial version of the buffer
987 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
988 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
989 version: Some(open_notification.text_document.version),
990 diagnostics: vec![
991 lsp::Diagnostic {
992 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
993 severity: Some(DiagnosticSeverity::ERROR),
994 message: "undefined variable 'A'".to_string(),
995 source: Some("disk".to_string()),
996 ..Default::default()
997 },
998 lsp::Diagnostic {
999 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1000 severity: Some(DiagnosticSeverity::ERROR),
1001 message: "undefined variable 'BB'".to_string(),
1002 source: Some("disk".to_string()),
1003 ..Default::default()
1004 },
1005 lsp::Diagnostic {
1006 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1007 severity: Some(DiagnosticSeverity::ERROR),
1008 source: Some("disk".to_string()),
1009 message: "undefined variable 'CCC'".to_string(),
1010 ..Default::default()
1011 },
1012 ],
1013 });
1014
1015 // The diagnostics have moved down since they were created.
1016 buffer.next_notification(cx).await;
1017 buffer.read_with(cx, |buffer, _| {
1018 assert_eq!(
1019 buffer
1020 .snapshot()
1021 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1022 .collect::<Vec<_>>(),
1023 &[
1024 DiagnosticEntry {
1025 range: Point::new(3, 9)..Point::new(3, 11),
1026 diagnostic: Diagnostic {
1027 severity: DiagnosticSeverity::ERROR,
1028 message: "undefined variable 'BB'".to_string(),
1029 is_disk_based: true,
1030 group_id: 1,
1031 is_primary: true,
1032 ..Default::default()
1033 },
1034 },
1035 DiagnosticEntry {
1036 range: Point::new(4, 9)..Point::new(4, 12),
1037 diagnostic: Diagnostic {
1038 severity: DiagnosticSeverity::ERROR,
1039 message: "undefined variable 'CCC'".to_string(),
1040 is_disk_based: true,
1041 group_id: 2,
1042 is_primary: true,
1043 ..Default::default()
1044 }
1045 }
1046 ]
1047 );
1048 assert_eq!(
1049 chunks_with_diagnostics(buffer, 0..buffer.len()),
1050 [
1051 ("\n\nfn a() { ".to_string(), None),
1052 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1053 (" }\nfn b() { ".to_string(), None),
1054 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1055 (" }\nfn c() { ".to_string(), None),
1056 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1057 (" }\n".to_string(), None),
1058 ]
1059 );
1060 assert_eq!(
1061 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1062 [
1063 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1064 (" }\nfn c() { ".to_string(), None),
1065 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1066 ]
1067 );
1068 });
1069
1070 // Ensure overlapping diagnostics are highlighted correctly.
1071 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1072 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1073 version: Some(open_notification.text_document.version),
1074 diagnostics: vec![
1075 lsp::Diagnostic {
1076 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1077 severity: Some(DiagnosticSeverity::ERROR),
1078 message: "undefined variable 'A'".to_string(),
1079 source: Some("disk".to_string()),
1080 ..Default::default()
1081 },
1082 lsp::Diagnostic {
1083 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1084 severity: Some(DiagnosticSeverity::WARNING),
1085 message: "unreachable statement".to_string(),
1086 source: Some("disk".to_string()),
1087 ..Default::default()
1088 },
1089 ],
1090 });
1091
1092 buffer.next_notification(cx).await;
1093 buffer.read_with(cx, |buffer, _| {
1094 assert_eq!(
1095 buffer
1096 .snapshot()
1097 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1098 .collect::<Vec<_>>(),
1099 &[
1100 DiagnosticEntry {
1101 range: Point::new(2, 9)..Point::new(2, 12),
1102 diagnostic: Diagnostic {
1103 severity: DiagnosticSeverity::WARNING,
1104 message: "unreachable statement".to_string(),
1105 is_disk_based: true,
1106 group_id: 4,
1107 is_primary: true,
1108 ..Default::default()
1109 }
1110 },
1111 DiagnosticEntry {
1112 range: Point::new(2, 9)..Point::new(2, 10),
1113 diagnostic: Diagnostic {
1114 severity: DiagnosticSeverity::ERROR,
1115 message: "undefined variable 'A'".to_string(),
1116 is_disk_based: true,
1117 group_id: 3,
1118 is_primary: true,
1119 ..Default::default()
1120 },
1121 }
1122 ]
1123 );
1124 assert_eq!(
1125 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1126 [
1127 ("fn a() { ".to_string(), None),
1128 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1129 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1130 ("\n".to_string(), None),
1131 ]
1132 );
1133 assert_eq!(
1134 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1135 [
1136 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1137 ("\n".to_string(), None),
1138 ]
1139 );
1140 });
1141
1142 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1143 // changes since the last save.
1144 buffer.update(cx, |buffer, cx| {
1145 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1146 buffer.edit(
1147 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1148 None,
1149 cx,
1150 );
1151 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1152 });
1153 let change_notification_2 = fake_server
1154 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1155 .await;
1156 assert!(
1157 change_notification_2.text_document.version > change_notification_1.text_document.version
1158 );
1159
1160 // Handle out-of-order diagnostics
1161 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1162 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1163 version: Some(change_notification_2.text_document.version),
1164 diagnostics: vec![
1165 lsp::Diagnostic {
1166 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1167 severity: Some(DiagnosticSeverity::ERROR),
1168 message: "undefined variable 'BB'".to_string(),
1169 source: Some("disk".to_string()),
1170 ..Default::default()
1171 },
1172 lsp::Diagnostic {
1173 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1174 severity: Some(DiagnosticSeverity::WARNING),
1175 message: "undefined variable 'A'".to_string(),
1176 source: Some("disk".to_string()),
1177 ..Default::default()
1178 },
1179 ],
1180 });
1181
1182 buffer.next_notification(cx).await;
1183 buffer.read_with(cx, |buffer, _| {
1184 assert_eq!(
1185 buffer
1186 .snapshot()
1187 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1188 .collect::<Vec<_>>(),
1189 &[
1190 DiagnosticEntry {
1191 range: Point::new(2, 21)..Point::new(2, 22),
1192 diagnostic: Diagnostic {
1193 severity: DiagnosticSeverity::WARNING,
1194 message: "undefined variable 'A'".to_string(),
1195 is_disk_based: true,
1196 group_id: 6,
1197 is_primary: true,
1198 ..Default::default()
1199 }
1200 },
1201 DiagnosticEntry {
1202 range: Point::new(3, 9)..Point::new(3, 14),
1203 diagnostic: Diagnostic {
1204 severity: DiagnosticSeverity::ERROR,
1205 message: "undefined variable 'BB'".to_string(),
1206 is_disk_based: true,
1207 group_id: 5,
1208 is_primary: true,
1209 ..Default::default()
1210 },
1211 }
1212 ]
1213 );
1214 });
1215}
1216
1217#[gpui::test]
1218async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1219 cx.foreground().forbid_parking();
1220
1221 let text = concat!(
1222 "let one = ;\n", //
1223 "let two = \n",
1224 "let three = 3;\n",
1225 );
1226
1227 let fs = FakeFs::new(cx.background());
1228 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1229
1230 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1231 let buffer = project
1232 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1233 .await
1234 .unwrap();
1235
1236 project.update(cx, |project, cx| {
1237 project
1238 .update_buffer_diagnostics(
1239 &buffer,
1240 vec![
1241 DiagnosticEntry {
1242 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1243 diagnostic: Diagnostic {
1244 severity: DiagnosticSeverity::ERROR,
1245 message: "syntax error 1".to_string(),
1246 ..Default::default()
1247 },
1248 },
1249 DiagnosticEntry {
1250 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1251 diagnostic: Diagnostic {
1252 severity: DiagnosticSeverity::ERROR,
1253 message: "syntax error 2".to_string(),
1254 ..Default::default()
1255 },
1256 },
1257 ],
1258 None,
1259 cx,
1260 )
1261 .unwrap();
1262 });
1263
1264 // An empty range is extended forward to include the following character.
1265 // At the end of a line, an empty range is extended backward to include
1266 // the preceding character.
1267 buffer.read_with(cx, |buffer, _| {
1268 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1269 assert_eq!(
1270 chunks
1271 .iter()
1272 .map(|(s, d)| (s.as_str(), *d))
1273 .collect::<Vec<_>>(),
1274 &[
1275 ("let one = ", None),
1276 (";", Some(DiagnosticSeverity::ERROR)),
1277 ("\nlet two =", None),
1278 (" ", Some(DiagnosticSeverity::ERROR)),
1279 ("\nlet three = 3;\n", None)
1280 ]
1281 );
1282 });
1283}
1284
// Verifies that `Project::edits_from_lsp` interprets edits against the document
// version the server last saw (the version captured at didOpen), re-mapping
// their ranges through the buffer edits made since then before applying them.
#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the document version the server received when the buffer was opened.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f1();
            }
            fn b() {
                // inside second function f2();
            }
            fn c() {
                f3();
            }
            "
            .unindent()
        );
    });

    // The edit positions below are expressed in the coordinates of the
    // original (pre-edit) document version captured above.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the user's intervening edits
    // (the inserted comments) while carrying out the server's intended changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f10();
            }
            fn b() {
                // inside second function f200();
            }
            fn c() {
                f4000();
            }
            "
            .unindent()
        );
    });
}
1441
// Verifies that a large LSP diff which deletes and re-inserts most of the file
// is minimized into just the small set of edits that actually change text.
#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The large diff above is distilled down to just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        // Applying the minimized edits yields the intended merged-imports result.
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
1548
// Verifies that malformed LSP edits — out of order, with inverted ranges, or
// pointing past the end of the file — are normalized and minimized into the
// same well-formed edits a correct diff would produce.
#[gpui::test]
async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // An inverted range: end precedes start.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // A range extending far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The resulting edits come back sorted, clipped to the buffer, and minimized.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
1651
1652fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
1653 buffer: &Buffer,
1654 range: Range<T>,
1655) -> Vec<(String, Option<DiagnosticSeverity>)> {
1656 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
1657 for chunk in buffer.snapshot().chunks(range, true) {
1658 if chunks.last().map_or(false, |prev_chunk| {
1659 prev_chunk.1 == chunk.diagnostic_severity
1660 }) {
1661 chunks.last_mut().unwrap().0.push_str(chunk.text);
1662 } else {
1663 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
1664 }
1665 }
1666 chunks
1667}
1668
// Verifies go-to-definition when the target lies in a file outside the current
// worktree: the target file is added to the project as a new, non-visible
// worktree, which is released once the returned definition — the last handle
// keeping it alive — is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside the worktree.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server resolves the requested position in b.rs to a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.foreground().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // The target file was added as a second, invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree it was keeping alive.
    cx.read(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Lists each of the project's worktrees as (absolute root path, visibility).
    fn list_worktrees<'a>(
        project: &'a ModelHandle<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
1767
// Verifies completion items that arrive without an explicit edit range: the
// range of text to replace must be inferred from the content surrounding the
// completion position.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing after a partial identifier. `new_text` comes from the
    // item's `insert_text`, and the replaced range covers the partial word "fqn".
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers "fqn" — the three characters before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal. The replaced range covers the
    // "cmp" segment before the cursor, excluding the closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // With no `insert_text` either, `new_text` falls back to the label.
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
1851
// Verifies that carriage returns ("\r" and "\r\n") in a completion item's
// `insert_text` are normalized to plain "\n" in the resulting completion.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    // Respond with an insert_text containing both bare "\r" and "\r\n".
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both carriage-return forms were converted to plain newlines.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
1904
// Verifies applying a code action that carries only a command, not edits:
// resolving the action leaves it edit-less, so the command is executed; the
// server then delivers the real edits via a `workspace/applyEdit` request, and
// those edits end up in the returned project transaction (and are undoable).
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    command: Some(lsp::Command {
                        title: "The command".into(),
                        command: "_the/command".into(),
                        arguments: Some(vec![json!("the-argument")]),
                    }),
                    ..Default::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..Default::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action — the one carrying the command.
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |action, _| async move { Ok(action) },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The applied edit participates in the buffer's undo history.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2020
2021#[gpui::test]
2022async fn test_save_file(cx: &mut gpui::TestAppContext) {
2023 let fs = FakeFs::new(cx.background());
2024 fs.insert_tree(
2025 "/dir",
2026 json!({
2027 "file1": "the old contents",
2028 }),
2029 )
2030 .await;
2031
2032 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2033 let buffer = project
2034 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2035 .await
2036 .unwrap();
2037 buffer
2038 .update(cx, |buffer, cx| {
2039 assert_eq!(buffer.text(), "the old contents");
2040 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2041 buffer.save(cx)
2042 })
2043 .await
2044 .unwrap();
2045
2046 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2047 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2048}
2049
2050#[gpui::test]
2051async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2052 let fs = FakeFs::new(cx.background());
2053 fs.insert_tree(
2054 "/dir",
2055 json!({
2056 "file1": "the old contents",
2057 }),
2058 )
2059 .await;
2060
2061 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2062 let buffer = project
2063 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2064 .await
2065 .unwrap();
2066 buffer
2067 .update(cx, |buffer, cx| {
2068 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2069 buffer.save(cx)
2070 })
2071 .await
2072 .unwrap();
2073
2074 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2075 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2076}
2077
2078#[gpui::test]
2079async fn test_save_as(cx: &mut gpui::TestAppContext) {
2080 let fs = FakeFs::new(cx.background());
2081 fs.insert_tree("/dir", json!({})).await;
2082
2083 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2084 let buffer = project.update(cx, |project, cx| {
2085 project.create_buffer("", None, cx).unwrap()
2086 });
2087 buffer.update(cx, |buffer, cx| {
2088 buffer.edit([(0..0, "abc")], None, cx);
2089 assert!(buffer.is_dirty());
2090 assert!(!buffer.has_conflict());
2091 });
2092 project
2093 .update(cx, |project, cx| {
2094 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
2095 })
2096 .await
2097 .unwrap();
2098 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
2099 buffer.read_with(cx, |buffer, cx| {
2100 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
2101 assert!(!buffer.is_dirty());
2102 assert!(!buffer.has_conflict());
2103 });
2104
2105 let opened_buffer = project
2106 .update(cx, |project, cx| {
2107 project.open_local_buffer("/dir/file1", cx)
2108 })
2109 .await
2110 .unwrap();
2111 assert_eq!(opened_buffer, buffer);
2112}
2113
// Verifies that after files and directories are renamed and deleted on disk,
// the local worktree's rescan preserves entry ids and re-points open buffers,
// and that a remote replica of the worktree converges to the same set of
// paths when it receives the incremental update.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
    let rpc = project.read_with(cx, |p, _| p.client.clone());

    // Opens a buffer for a path relative to the temp worktree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Looks up the stable entry id for a worktree path, panicking if absent.
    let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
        project.read_with(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
    let remote = cx.update(|cx| {
        Worktree::remote(
            1,
            1,
            proto::WorktreeMetadata {
                id: initial_snapshot.id().to_proto(),
                root_name: initial_snapshot.root_name().into(),
                abs_path: initial_snapshot.abs_path().as_os_str().as_bytes().to_vec(),
                visible: true,
            },
            rpc.clone(),
            cx,
        )
    });
    // Seed the replica with the full initial state of the local worktree.
    remote.update(cx, |remote, _| {
        let update = initial_snapshot.build_initial_update(1);
        remote.as_remote_mut().unwrap().update_from_remote(update);
    });
    deterministic.run_until_parked();

    // All buffers start out clean.
    cx.read(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.read(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );

        // Entry ids are preserved across renames, even across directory moves.
        assert_eq!(id_for_path("a/file2.new", cx), file2_id);
        assert_eq!(id_for_path("d/file3", cx), file3_id);
        assert_eq!(id_for_path("d/file4", cx), file4_id);

        // Open buffers now report the files' new paths.
        assert_eq!(
            buffer2.read(app).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(app).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(app).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(app).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        // Only the removed file's buffer is marked deleted.
        assert!(!buffer2.read(app).file().unwrap().is_deleted());
        assert!(!buffer3.read(app).file().unwrap().is_deleted());
        assert!(!buffer4.read(app).file().unwrap().is_deleted());
        assert!(buffer5.read(app).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    remote.update(cx, |remote, cx| {
        let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
            &initial_snapshot,
            1,
            1,
            true,
        );
        remote.as_remote_mut().unwrap().update_from_remote(update);
    });
    deterministic.run_until_parked();
    remote.read_with(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
2265
2266#[gpui::test(iterations = 10)]
2267async fn test_buffer_identity_across_renames(
2268 deterministic: Arc<Deterministic>,
2269 cx: &mut gpui::TestAppContext,
2270) {
2271 let fs = FakeFs::new(cx.background());
2272 fs.insert_tree(
2273 "/dir",
2274 json!({
2275 "a": {
2276 "file1": "",
2277 }
2278 }),
2279 )
2280 .await;
2281
2282 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2283 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2284 let tree_id = tree.read_with(cx, |tree, _| tree.id());
2285
2286 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2287 project.read_with(cx, |project, cx| {
2288 let tree = project.worktrees(cx).next().unwrap();
2289 tree.read(cx)
2290 .entry_for_path(path)
2291 .unwrap_or_else(|| panic!("no entry for path {}", path))
2292 .id
2293 })
2294 };
2295
2296 let dir_id = id_for_path("a", cx);
2297 let file_id = id_for_path("a/file1", cx);
2298 let buffer = project
2299 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2300 .await
2301 .unwrap();
2302 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2303
2304 project
2305 .update(cx, |project, cx| {
2306 project.rename_entry(dir_id, Path::new("b"), cx)
2307 })
2308 .unwrap()
2309 .await
2310 .unwrap();
2311 deterministic.run_until_parked();
2312 assert_eq!(id_for_path("b", cx), dir_id);
2313 assert_eq!(id_for_path("b/file1", cx), file_id);
2314 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2315}
2316
2317#[gpui::test]
2318async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2319 let fs = FakeFs::new(cx.background());
2320 fs.insert_tree(
2321 "/dir",
2322 json!({
2323 "a.txt": "a-contents",
2324 "b.txt": "b-contents",
2325 }),
2326 )
2327 .await;
2328
2329 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2330
2331 // Spawn multiple tasks to open paths, repeating some paths.
2332 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2333 (
2334 p.open_local_buffer("/dir/a.txt", cx),
2335 p.open_local_buffer("/dir/b.txt", cx),
2336 p.open_local_buffer("/dir/a.txt", cx),
2337 )
2338 });
2339
2340 let buffer_a_1 = buffer_a_1.await.unwrap();
2341 let buffer_a_2 = buffer_a_2.await.unwrap();
2342 let buffer_b = buffer_b.await.unwrap();
2343 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2344 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2345
2346 // There is only one buffer per path.
2347 let buffer_a_id = buffer_a_1.id();
2348 assert_eq!(buffer_a_2.id(), buffer_a_id);
2349
2350 // Open the same path again while it is still open.
2351 drop(buffer_a_1);
2352 let buffer_a_3 = project
2353 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2354 .await
2355 .unwrap();
2356
2357 // There's still only one buffer per path.
2358 assert_eq!(buffer_a_3.id(), buffer_a_id);
2359}
2360
2361#[gpui::test]
2362async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2363 let fs = FakeFs::new(cx.background());
2364 fs.insert_tree(
2365 "/dir",
2366 json!({
2367 "file1": "abc",
2368 "file2": "def",
2369 "file3": "ghi",
2370 }),
2371 )
2372 .await;
2373
2374 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2375
2376 let buffer1 = project
2377 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2378 .await
2379 .unwrap();
2380 let events = Rc::new(RefCell::new(Vec::new()));
2381
2382 // initially, the buffer isn't dirty.
2383 buffer1.update(cx, |buffer, cx| {
2384 cx.subscribe(&buffer1, {
2385 let events = events.clone();
2386 move |_, _, event, _| match event {
2387 BufferEvent::Operation(_) => {}
2388 _ => events.borrow_mut().push(event.clone()),
2389 }
2390 })
2391 .detach();
2392
2393 assert!(!buffer.is_dirty());
2394 assert!(events.borrow().is_empty());
2395
2396 buffer.edit([(1..2, "")], None, cx);
2397 });
2398
2399 // after the first edit, the buffer is dirty, and emits a dirtied event.
2400 buffer1.update(cx, |buffer, cx| {
2401 assert!(buffer.text() == "ac");
2402 assert!(buffer.is_dirty());
2403 assert_eq!(
2404 *events.borrow(),
2405 &[language::Event::Edited, language::Event::DirtyChanged]
2406 );
2407 events.borrow_mut().clear();
2408 buffer.did_save(
2409 buffer.version(),
2410 buffer.as_rope().fingerprint(),
2411 buffer.file().unwrap().mtime(),
2412 None,
2413 cx,
2414 );
2415 });
2416
2417 // after saving, the buffer is not dirty, and emits a saved event.
2418 buffer1.update(cx, |buffer, cx| {
2419 assert!(!buffer.is_dirty());
2420 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2421 events.borrow_mut().clear();
2422
2423 buffer.edit([(1..1, "B")], None, cx);
2424 buffer.edit([(2..2, "D")], None, cx);
2425 });
2426
2427 // after editing again, the buffer is dirty, and emits another dirty event.
2428 buffer1.update(cx, |buffer, cx| {
2429 assert!(buffer.text() == "aBDc");
2430 assert!(buffer.is_dirty());
2431 assert_eq!(
2432 *events.borrow(),
2433 &[
2434 language::Event::Edited,
2435 language::Event::DirtyChanged,
2436 language::Event::Edited,
2437 ],
2438 );
2439 events.borrow_mut().clear();
2440
2441 // After restoring the buffer to its previously-saved state,
2442 // the buffer is not considered dirty anymore.
2443 buffer.edit([(1..3, "")], None, cx);
2444 assert!(buffer.text() == "ac");
2445 assert!(!buffer.is_dirty());
2446 });
2447
2448 assert_eq!(
2449 *events.borrow(),
2450 &[language::Event::Edited, language::Event::DirtyChanged]
2451 );
2452
2453 // When a file is deleted, the buffer is considered dirty.
2454 let events = Rc::new(RefCell::new(Vec::new()));
2455 let buffer2 = project
2456 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2457 .await
2458 .unwrap();
2459 buffer2.update(cx, |_, cx| {
2460 cx.subscribe(&buffer2, {
2461 let events = events.clone();
2462 move |_, _, event, _| events.borrow_mut().push(event.clone())
2463 })
2464 .detach();
2465 });
2466
2467 fs.remove_file("/dir/file2".as_ref(), Default::default())
2468 .await
2469 .unwrap();
2470 cx.foreground().run_until_parked();
2471 buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
2472 assert_eq!(
2473 *events.borrow(),
2474 &[
2475 language::Event::DirtyChanged,
2476 language::Event::FileHandleChanged
2477 ]
2478 );
2479
2480 // When a file is already dirty when deleted, we don't emit a Dirtied event.
2481 let events = Rc::new(RefCell::new(Vec::new()));
2482 let buffer3 = project
2483 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2484 .await
2485 .unwrap();
2486 buffer3.update(cx, |_, cx| {
2487 cx.subscribe(&buffer3, {
2488 let events = events.clone();
2489 move |_, _, event, _| events.borrow_mut().push(event.clone())
2490 })
2491 .detach();
2492 });
2493
2494 buffer3.update(cx, |buffer, cx| {
2495 buffer.edit([(0..0, "x")], None, cx);
2496 });
2497 events.borrow_mut().clear();
2498 fs.remove_file("/dir/file3".as_ref(), Default::default())
2499 .await
2500 .unwrap();
2501 cx.foreground().run_until_parked();
2502 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2503 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2504}
2505
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how a buffer reacts to its file changing on disk: a clean
    // buffer reloads (preserving anchors via a diff-based edit), while a
    // dirty buffer keeps its contents and is flagged as conflicted.
    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Capture anchors at column 1 of each of the first three rows, before
    // the on-disk contents change.
    let anchors = (0..3)
        .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.read_with(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.foreground().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the lines they were attached to: the old
        // rows 0 and 1 are now rows 1 and 3; the anchor on the removed "c"
        // line landed at (3, 5).
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
2584
2585#[gpui::test]
2586async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
2587 let fs = FakeFs::new(cx.background());
2588 fs.insert_tree(
2589 "/dir",
2590 json!({
2591 "file1": "a\nb\nc\n",
2592 "file2": "one\r\ntwo\r\nthree\r\n",
2593 }),
2594 )
2595 .await;
2596
2597 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2598 let buffer1 = project
2599 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2600 .await
2601 .unwrap();
2602 let buffer2 = project
2603 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2604 .await
2605 .unwrap();
2606
2607 buffer1.read_with(cx, |buffer, _| {
2608 assert_eq!(buffer.text(), "a\nb\nc\n");
2609 assert_eq!(buffer.line_ending(), LineEnding::Unix);
2610 });
2611 buffer2.read_with(cx, |buffer, _| {
2612 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
2613 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2614 });
2615
2616 // Change a file's line endings on disk from unix to windows. The buffer's
2617 // state updates correctly.
2618 fs.save(
2619 "/dir/file1".as_ref(),
2620 &"aaa\nb\nc\n".into(),
2621 LineEnding::Windows,
2622 )
2623 .await
2624 .unwrap();
2625 cx.foreground().run_until_parked();
2626 buffer1.read_with(cx, |buffer, _| {
2627 assert_eq!(buffer.text(), "aaa\nb\nc\n");
2628 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2629 });
2630
2631 // Save a file with windows line endings. The file is written correctly.
2632 buffer2
2633 .update(cx, |buffer, cx| {
2634 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
2635 buffer.save(cx)
2636 })
2637 .await
2638 .unwrap();
2639 assert_eq!(
2640 fs.load("/dir/file2".as_ref()).await.unwrap(),
2641 "one\r\ntwo\r\nthree\r\nfour\r\n",
2642 );
2643}
2644
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP diagnostics whose related-information links point at
    // one another are grouped: each group gets one primary entry plus its
    // hint entries, all sharing a group_id.
    cx.foreground().forbid_parking();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Two logical diagnostics ("error 1" and "error 2"), each accompanied by
    // HINT-severity entries that cross-reference the primary via
    // related_information.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
        .unwrap();
    let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    // All entries over the whole buffer, ordered by position: group 0 is
    // "error 1" + its hint; group 1 is "error 2" + its two hints.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Fetching a single group by id returns only that group's entries.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );
}
2883
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Verifies prepare_rename and perform_rename against a fake LSP server:
    // the prepared range is converted to buffer offsets, and the returned
    // workspace edit is applied across multiple buffers.
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    // Advertise rename support (including prepareRename) in the fake
    // server's capabilities.
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Kick off prepare_rename, then install the server-side handler before
    // awaiting the response.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    // The LSP range (0,6)..(0,9) maps to offsets 6..9 — the "ONE" token.
    let range = response.await.unwrap().unwrap();
    let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the fake server responds with edits to both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each affected buffer to its undo data; both
    // buffers received the rename edits.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .read_with(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .read_with(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3021
3022#[gpui::test]
3023async fn test_search(cx: &mut gpui::TestAppContext) {
3024 let fs = FakeFs::new(cx.background());
3025 fs.insert_tree(
3026 "/dir",
3027 json!({
3028 "one.rs": "const ONE: usize = 1;",
3029 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3030 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3031 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3032 }),
3033 )
3034 .await;
3035 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3036 assert_eq!(
3037 search(&project, SearchQuery::text("TWO", false, true), cx)
3038 .await
3039 .unwrap(),
3040 HashMap::from_iter([
3041 ("two.rs".to_string(), vec![6..9]),
3042 ("three.rs".to_string(), vec![37..40])
3043 ])
3044 );
3045
3046 let buffer_4 = project
3047 .update(cx, |project, cx| {
3048 project.open_local_buffer("/dir/four.rs", cx)
3049 })
3050 .await
3051 .unwrap();
3052 buffer_4.update(cx, |buffer, cx| {
3053 let text = "two::TWO";
3054 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3055 });
3056
3057 assert_eq!(
3058 search(&project, SearchQuery::text("TWO", false, true), cx)
3059 .await
3060 .unwrap(),
3061 HashMap::from_iter([
3062 ("two.rs".to_string(), vec![6..9]),
3063 ("three.rs".to_string(), vec![37..40]),
3064 ("four.rs".to_string(), vec![25..28, 36..39])
3065 ])
3066 );
3067
3068 async fn search(
3069 project: &ModelHandle<Project>,
3070 query: SearchQuery,
3071 cx: &mut gpui::TestAppContext,
3072 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
3073 let results = project
3074 .update(cx, |project, cx| project.search(query, cx))
3075 .await?;
3076
3077 Ok(results
3078 .into_iter()
3079 .map(|(buffer, ranges)| {
3080 buffer.read_with(cx, |buffer, _| {
3081 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3082 let ranges = ranges
3083 .into_iter()
3084 .map(|range| range.to_offset(buffer))
3085 .collect::<Vec<_>>();
3086 (path, ranges)
3087 })
3088 })
3089 .collect())
3090 }
3091}