1use crate::{worktree::WorktreeHandle, Event, *};
2use fs::LineEnding;
3use fs::{FakeFs, RealFs};
4use futures::{future, StreamExt};
5use gpui::{executor::Deterministic, test::subscribe};
6use language::{
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use serde_json::json;
12use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
13use unindent::Unindent as _;
14use util::{assert_set_eq, test::temp_tree};
15
16#[gpui::test]
17async fn test_symlinks(cx: &mut gpui::TestAppContext) {
18 let dir = temp_tree(json!({
19 "root": {
20 "apple": "",
21 "banana": {
22 "carrot": {
23 "date": "",
24 "endive": "",
25 }
26 },
27 "fennel": {
28 "grape": "",
29 }
30 }
31 }));
32
33 let root_link_path = dir.path().join("root_link");
34 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
35 unix::fs::symlink(
36 &dir.path().join("root/fennel"),
37 &dir.path().join("root/finnochio"),
38 )
39 .unwrap();
40
41 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
42 project.read_with(cx, |project, cx| {
43 let tree = project.worktrees(cx).next().unwrap().read(cx);
44 assert_eq!(tree.file_count(), 5);
45 assert_eq!(
46 tree.inode_for_path("fennel/grape"),
47 tree.inode_for_path("finnochio/grape")
48 );
49 });
50}
51
52#[gpui::test]
53async fn test_managing_language_servers(
54 deterministic: Arc<Deterministic>,
55 cx: &mut gpui::TestAppContext,
56) {
57 cx.foreground().forbid_parking();
58
59 let mut rust_language = Language::new(
60 LanguageConfig {
61 name: "Rust".into(),
62 path_suffixes: vec!["rs".to_string()],
63 ..Default::default()
64 },
65 Some(tree_sitter_rust::language()),
66 );
67 let mut json_language = Language::new(
68 LanguageConfig {
69 name: "JSON".into(),
70 path_suffixes: vec!["json".to_string()],
71 ..Default::default()
72 },
73 None,
74 );
75 let mut fake_rust_servers = rust_language
76 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
77 name: "the-rust-language-server",
78 capabilities: lsp::ServerCapabilities {
79 completion_provider: Some(lsp::CompletionOptions {
80 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
81 ..Default::default()
82 }),
83 ..Default::default()
84 },
85 ..Default::default()
86 }))
87 .await;
88 let mut fake_json_servers = json_language
89 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
90 name: "the-json-language-server",
91 capabilities: lsp::ServerCapabilities {
92 completion_provider: Some(lsp::CompletionOptions {
93 trigger_characters: Some(vec![":".to_string()]),
94 ..Default::default()
95 }),
96 ..Default::default()
97 },
98 ..Default::default()
99 }))
100 .await;
101
102 let fs = FakeFs::new(cx.background());
103 fs.insert_tree(
104 "/the-root",
105 json!({
106 "test.rs": "const A: i32 = 1;",
107 "test2.rs": "",
108 "Cargo.toml": "a = 1",
109 "package.json": "{\"a\": 1}",
110 }),
111 )
112 .await;
113
114 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
115
116 // Open a buffer without an associated language server.
117 let toml_buffer = project
118 .update(cx, |project, cx| {
119 project.open_local_buffer("/the-root/Cargo.toml", cx)
120 })
121 .await
122 .unwrap();
123
124 // Open a buffer with an associated language server before the language for it has been loaded.
125 let rust_buffer = project
126 .update(cx, |project, cx| {
127 project.open_local_buffer("/the-root/test.rs", cx)
128 })
129 .await
130 .unwrap();
131 rust_buffer.read_with(cx, |buffer, _| {
132 assert_eq!(buffer.language().map(|l| l.name()), None);
133 });
134
135 // Now we add the languages to the project, and ensure they get assigned to all
136 // the relevant open buffers.
137 project.update(cx, |project, _| {
138 project.languages.add(Arc::new(json_language));
139 project.languages.add(Arc::new(rust_language));
140 });
141 deterministic.run_until_parked();
142 rust_buffer.read_with(cx, |buffer, _| {
143 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
144 });
145
146 // A server is started up, and it is notified about Rust files.
147 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
148 assert_eq!(
149 fake_rust_server
150 .receive_notification::<lsp::notification::DidOpenTextDocument>()
151 .await
152 .text_document,
153 lsp::TextDocumentItem {
154 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
155 version: 0,
156 text: "const A: i32 = 1;".to_string(),
157 language_id: Default::default()
158 }
159 );
160
161 // The buffer is configured based on the language server's capabilities.
162 rust_buffer.read_with(cx, |buffer, _| {
163 assert_eq!(
164 buffer.completion_triggers(),
165 &[".".to_string(), "::".to_string()]
166 );
167 });
168 toml_buffer.read_with(cx, |buffer, _| {
169 assert!(buffer.completion_triggers().is_empty());
170 });
171
172 // Edit a buffer. The changes are reported to the language server.
173 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
174 assert_eq!(
175 fake_rust_server
176 .receive_notification::<lsp::notification::DidChangeTextDocument>()
177 .await
178 .text_document,
179 lsp::VersionedTextDocumentIdentifier::new(
180 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
181 1
182 )
183 );
184
185 // Open a third buffer with a different associated language server.
186 let json_buffer = project
187 .update(cx, |project, cx| {
188 project.open_local_buffer("/the-root/package.json", cx)
189 })
190 .await
191 .unwrap();
192
193 // A json language server is started up and is only notified about the json buffer.
194 let mut fake_json_server = fake_json_servers.next().await.unwrap();
195 assert_eq!(
196 fake_json_server
197 .receive_notification::<lsp::notification::DidOpenTextDocument>()
198 .await
199 .text_document,
200 lsp::TextDocumentItem {
201 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
202 version: 0,
203 text: "{\"a\": 1}".to_string(),
204 language_id: Default::default()
205 }
206 );
207
208 // This buffer is configured based on the second language server's
209 // capabilities.
210 json_buffer.read_with(cx, |buffer, _| {
211 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
212 });
213
214 // When opening another buffer whose language server is already running,
215 // it is also configured based on the existing language server's capabilities.
216 let rust_buffer2 = project
217 .update(cx, |project, cx| {
218 project.open_local_buffer("/the-root/test2.rs", cx)
219 })
220 .await
221 .unwrap();
222 rust_buffer2.read_with(cx, |buffer, _| {
223 assert_eq!(
224 buffer.completion_triggers(),
225 &[".".to_string(), "::".to_string()]
226 );
227 });
228
229 // Changes are reported only to servers matching the buffer's language.
230 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
231 rust_buffer2.update(cx, |buffer, cx| {
232 buffer.edit([(0..0, "let x = 1;")], None, cx)
233 });
234 assert_eq!(
235 fake_rust_server
236 .receive_notification::<lsp::notification::DidChangeTextDocument>()
237 .await
238 .text_document,
239 lsp::VersionedTextDocumentIdentifier::new(
240 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
241 1
242 )
243 );
244
245 // Save notifications are reported to all servers.
246 cx.update(|cx| Project::save_buffer(toml_buffer, cx))
247 .await
248 .unwrap();
249 assert_eq!(
250 fake_rust_server
251 .receive_notification::<lsp::notification::DidSaveTextDocument>()
252 .await
253 .text_document,
254 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
255 );
256 assert_eq!(
257 fake_json_server
258 .receive_notification::<lsp::notification::DidSaveTextDocument>()
259 .await
260 .text_document,
261 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
262 );
263
264 // Renames are reported only to servers matching the buffer's language.
265 fs.rename(
266 Path::new("/the-root/test2.rs"),
267 Path::new("/the-root/test3.rs"),
268 Default::default(),
269 )
270 .await
271 .unwrap();
272 assert_eq!(
273 fake_rust_server
274 .receive_notification::<lsp::notification::DidCloseTextDocument>()
275 .await
276 .text_document,
277 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
278 );
279 assert_eq!(
280 fake_rust_server
281 .receive_notification::<lsp::notification::DidOpenTextDocument>()
282 .await
283 .text_document,
284 lsp::TextDocumentItem {
285 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
286 version: 0,
287 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
288 language_id: Default::default()
289 },
290 );
291
292 rust_buffer2.update(cx, |buffer, cx| {
293 buffer.update_diagnostics(
294 DiagnosticSet::from_sorted_entries(
295 vec![DiagnosticEntry {
296 diagnostic: Default::default(),
297 range: Anchor::MIN..Anchor::MAX,
298 }],
299 &buffer.snapshot(),
300 ),
301 cx,
302 );
303 assert_eq!(
304 buffer
305 .snapshot()
306 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
307 .count(),
308 1
309 );
310 });
311
312 // When the rename changes the extension of the file, the buffer gets closed on the old
313 // language server and gets opened on the new one.
314 fs.rename(
315 Path::new("/the-root/test3.rs"),
316 Path::new("/the-root/test3.json"),
317 Default::default(),
318 )
319 .await
320 .unwrap();
321 assert_eq!(
322 fake_rust_server
323 .receive_notification::<lsp::notification::DidCloseTextDocument>()
324 .await
325 .text_document,
326 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
327 );
328 assert_eq!(
329 fake_json_server
330 .receive_notification::<lsp::notification::DidOpenTextDocument>()
331 .await
332 .text_document,
333 lsp::TextDocumentItem {
334 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
335 version: 0,
336 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
337 language_id: Default::default()
338 },
339 );
340
341 // We clear the diagnostics, since the language has changed.
342 rust_buffer2.read_with(cx, |buffer, _| {
343 assert_eq!(
344 buffer
345 .snapshot()
346 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
347 .count(),
348 0
349 );
350 });
351
352 // The renamed file's version resets after changing language server.
353 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
354 assert_eq!(
355 fake_json_server
356 .receive_notification::<lsp::notification::DidChangeTextDocument>()
357 .await
358 .text_document,
359 lsp::VersionedTextDocumentIdentifier::new(
360 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
361 1
362 )
363 );
364
365 // Restart language servers
366 project.update(cx, |project, cx| {
367 project.restart_language_servers_for_buffers(
368 vec![rust_buffer.clone(), json_buffer.clone()],
369 cx,
370 );
371 });
372
373 let mut rust_shutdown_requests = fake_rust_server
374 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
375 let mut json_shutdown_requests = fake_json_server
376 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
377 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
378
379 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
380 let mut fake_json_server = fake_json_servers.next().await.unwrap();
381
382 // Ensure rust document is reopened in new rust language server
383 assert_eq!(
384 fake_rust_server
385 .receive_notification::<lsp::notification::DidOpenTextDocument>()
386 .await
387 .text_document,
388 lsp::TextDocumentItem {
389 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
390 version: 1,
391 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
392 language_id: Default::default()
393 }
394 );
395
396 // Ensure json documents are reopened in new json language server
397 assert_set_eq!(
398 [
399 fake_json_server
400 .receive_notification::<lsp::notification::DidOpenTextDocument>()
401 .await
402 .text_document,
403 fake_json_server
404 .receive_notification::<lsp::notification::DidOpenTextDocument>()
405 .await
406 .text_document,
407 ],
408 [
409 lsp::TextDocumentItem {
410 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
411 version: 0,
412 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
413 language_id: Default::default()
414 },
415 lsp::TextDocumentItem {
416 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
417 version: 1,
418 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
419 language_id: Default::default()
420 }
421 ]
422 );
423
424 // Close notifications are reported only to servers matching the buffer's language.
425 cx.update(|_| drop(json_buffer));
426 let close_message = lsp::DidCloseTextDocumentParams {
427 text_document: lsp::TextDocumentIdentifier::new(
428 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
429 ),
430 };
431 assert_eq!(
432 fake_json_server
433 .receive_notification::<lsp::notification::DidCloseTextDocument>()
434 .await,
435 close_message,
436 );
437}
438
439#[gpui::test]
440async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
441 cx.foreground().forbid_parking();
442
443 let fs = FakeFs::new(cx.background());
444 fs.insert_tree(
445 "/dir",
446 json!({
447 "a.rs": "let a = 1;",
448 "b.rs": "let b = 2;"
449 }),
450 )
451 .await;
452
453 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
454
455 let buffer_a = project
456 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
457 .await
458 .unwrap();
459 let buffer_b = project
460 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
461 .await
462 .unwrap();
463
464 project.update(cx, |project, cx| {
465 project
466 .update_diagnostics(
467 0,
468 lsp::PublishDiagnosticsParams {
469 uri: Url::from_file_path("/dir/a.rs").unwrap(),
470 version: None,
471 diagnostics: vec![lsp::Diagnostic {
472 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
473 severity: Some(lsp::DiagnosticSeverity::ERROR),
474 message: "error 1".to_string(),
475 ..Default::default()
476 }],
477 },
478 &[],
479 cx,
480 )
481 .unwrap();
482 project
483 .update_diagnostics(
484 0,
485 lsp::PublishDiagnosticsParams {
486 uri: Url::from_file_path("/dir/b.rs").unwrap(),
487 version: None,
488 diagnostics: vec![lsp::Diagnostic {
489 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
490 severity: Some(lsp::DiagnosticSeverity::WARNING),
491 message: "error 2".to_string(),
492 ..Default::default()
493 }],
494 },
495 &[],
496 cx,
497 )
498 .unwrap();
499 });
500
501 buffer_a.read_with(cx, |buffer, _| {
502 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
503 assert_eq!(
504 chunks
505 .iter()
506 .map(|(s, d)| (s.as_str(), *d))
507 .collect::<Vec<_>>(),
508 &[
509 ("let ", None),
510 ("a", Some(DiagnosticSeverity::ERROR)),
511 (" = 1;", None),
512 ]
513 );
514 });
515 buffer_b.read_with(cx, |buffer, _| {
516 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
517 assert_eq!(
518 chunks
519 .iter()
520 .map(|(s, d)| (s.as_str(), *d))
521 .collect::<Vec<_>>(),
522 &[
523 ("let ", None),
524 ("b", Some(DiagnosticSeverity::WARNING)),
525 (" = 2;", None),
526 ]
527 );
528 });
529}
530
531#[gpui::test]
532async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
533 cx.foreground().forbid_parking();
534
535 let fs = FakeFs::new(cx.background());
536 fs.insert_tree(
537 "/root",
538 json!({
539 "dir": {
540 "a.rs": "let a = 1;",
541 },
542 "other.rs": "let b = c;"
543 }),
544 )
545 .await;
546
547 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
548
549 let (worktree, _) = project
550 .update(cx, |project, cx| {
551 project.find_or_create_local_worktree("/root/other.rs", false, cx)
552 })
553 .await
554 .unwrap();
555 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
556
557 project.update(cx, |project, cx| {
558 project
559 .update_diagnostics(
560 0,
561 lsp::PublishDiagnosticsParams {
562 uri: Url::from_file_path("/root/other.rs").unwrap(),
563 version: None,
564 diagnostics: vec![lsp::Diagnostic {
565 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
566 severity: Some(lsp::DiagnosticSeverity::ERROR),
567 message: "unknown variable 'c'".to_string(),
568 ..Default::default()
569 }],
570 },
571 &[],
572 cx,
573 )
574 .unwrap();
575 });
576
577 let buffer = project
578 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
579 .await
580 .unwrap();
581 buffer.read_with(cx, |buffer, _| {
582 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
583 assert_eq!(
584 chunks
585 .iter()
586 .map(|(s, d)| (s.as_str(), *d))
587 .collect::<Vec<_>>(),
588 &[
589 ("let b = ", None),
590 ("c", Some(DiagnosticSeverity::ERROR)),
591 (";", None),
592 ]
593 );
594 });
595
596 project.read_with(cx, |project, cx| {
597 assert_eq!(project.diagnostic_summaries(cx).next(), None);
598 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
599 });
600}
601
602#[gpui::test]
603async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
604 cx.foreground().forbid_parking();
605
606 let progress_token = "the-progress-token";
607 let mut language = Language::new(
608 LanguageConfig {
609 name: "Rust".into(),
610 path_suffixes: vec!["rs".to_string()],
611 ..Default::default()
612 },
613 Some(tree_sitter_rust::language()),
614 );
615 let mut fake_servers = language
616 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
617 disk_based_diagnostics_progress_token: Some(progress_token.into()),
618 disk_based_diagnostics_sources: vec!["disk".into()],
619 ..Default::default()
620 }))
621 .await;
622
623 let fs = FakeFs::new(cx.background());
624 fs.insert_tree(
625 "/dir",
626 json!({
627 "a.rs": "fn a() { A }",
628 "b.rs": "const y: i32 = 1",
629 }),
630 )
631 .await;
632
633 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
634 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
635 let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
636
637 // Cause worktree to start the fake language server
638 let _buffer = project
639 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
640 .await
641 .unwrap();
642
643 let mut events = subscribe(&project, cx);
644
645 let fake_server = fake_servers.next().await.unwrap();
646 fake_server
647 .start_progress(format!("{}/0", progress_token))
648 .await;
649 assert_eq!(
650 events.next().await.unwrap(),
651 Event::DiskBasedDiagnosticsStarted {
652 language_server_id: 0,
653 }
654 );
655
656 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
657 uri: Url::from_file_path("/dir/a.rs").unwrap(),
658 version: None,
659 diagnostics: vec![lsp::Diagnostic {
660 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
661 severity: Some(lsp::DiagnosticSeverity::ERROR),
662 message: "undefined variable 'A'".to_string(),
663 ..Default::default()
664 }],
665 });
666 assert_eq!(
667 events.next().await.unwrap(),
668 Event::DiagnosticsUpdated {
669 language_server_id: 0,
670 path: (worktree_id, Path::new("a.rs")).into()
671 }
672 );
673
674 fake_server.end_progress(format!("{}/0", progress_token));
675 assert_eq!(
676 events.next().await.unwrap(),
677 Event::DiskBasedDiagnosticsFinished {
678 language_server_id: 0
679 }
680 );
681
682 let buffer = project
683 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
684 .await
685 .unwrap();
686
687 buffer.read_with(cx, |buffer, _| {
688 let snapshot = buffer.snapshot();
689 let diagnostics = snapshot
690 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
691 .collect::<Vec<_>>();
692 assert_eq!(
693 diagnostics,
694 &[DiagnosticEntry {
695 range: Point::new(0, 9)..Point::new(0, 10),
696 diagnostic: Diagnostic {
697 severity: lsp::DiagnosticSeverity::ERROR,
698 message: "undefined variable 'A'".to_string(),
699 group_id: 0,
700 is_primary: true,
701 ..Default::default()
702 }
703 }]
704 )
705 });
706
707 // Ensure publishing empty diagnostics twice only results in one update event.
708 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
709 uri: Url::from_file_path("/dir/a.rs").unwrap(),
710 version: None,
711 diagnostics: Default::default(),
712 });
713 assert_eq!(
714 events.next().await.unwrap(),
715 Event::DiagnosticsUpdated {
716 language_server_id: 0,
717 path: (worktree_id, Path::new("a.rs")).into()
718 }
719 );
720
721 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
722 uri: Url::from_file_path("/dir/a.rs").unwrap(),
723 version: None,
724 diagnostics: Default::default(),
725 });
726 cx.foreground().run_until_parked();
727 assert_eq!(futures::poll!(events.next()), Poll::Pending);
728}
729
730#[gpui::test]
731async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
732 cx.foreground().forbid_parking();
733
734 let progress_token = "the-progress-token";
735 let mut language = Language::new(
736 LanguageConfig {
737 path_suffixes: vec!["rs".to_string()],
738 ..Default::default()
739 },
740 None,
741 );
742 let mut fake_servers = language
743 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
744 disk_based_diagnostics_sources: vec!["disk".into()],
745 disk_based_diagnostics_progress_token: Some(progress_token.into()),
746 ..Default::default()
747 }))
748 .await;
749
750 let fs = FakeFs::new(cx.background());
751 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
752
753 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
754 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
755
756 let buffer = project
757 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
758 .await
759 .unwrap();
760
761 // Simulate diagnostics starting to update.
762 let fake_server = fake_servers.next().await.unwrap();
763 fake_server.start_progress(progress_token).await;
764
765 // Restart the server before the diagnostics finish updating.
766 project.update(cx, |project, cx| {
767 project.restart_language_servers_for_buffers([buffer], cx);
768 });
769 let mut events = subscribe(&project, cx);
770
771 // Simulate the newly started server sending more diagnostics.
772 let fake_server = fake_servers.next().await.unwrap();
773 fake_server.start_progress(progress_token).await;
774 assert_eq!(
775 events.next().await.unwrap(),
776 Event::DiskBasedDiagnosticsStarted {
777 language_server_id: 1
778 }
779 );
780 project.read_with(cx, |project, _| {
781 assert_eq!(
782 project
783 .language_servers_running_disk_based_diagnostics()
784 .collect::<Vec<_>>(),
785 [1]
786 );
787 });
788
789 // All diagnostics are considered done, despite the old server's diagnostic
790 // task never completing.
791 fake_server.end_progress(progress_token);
792 assert_eq!(
793 events.next().await.unwrap(),
794 Event::DiskBasedDiagnosticsFinished {
795 language_server_id: 1
796 }
797 );
798 project.read_with(cx, |project, _| {
799 assert_eq!(
800 project
801 .language_servers_running_disk_based_diagnostics()
802 .collect::<Vec<_>>(),
803 [0; 0]
804 );
805 });
806}
807
808#[gpui::test]
809async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
810 cx.foreground().forbid_parking();
811
812 let mut language = Language::new(
813 LanguageConfig {
814 path_suffixes: vec!["rs".to_string()],
815 ..Default::default()
816 },
817 None,
818 );
819 let mut fake_servers = language
820 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
821 name: "the-lsp",
822 ..Default::default()
823 }))
824 .await;
825
826 let fs = FakeFs::new(cx.background());
827 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
828
829 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
830 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
831
832 let buffer = project
833 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
834 .await
835 .unwrap();
836
837 // Before restarting the server, report diagnostics with an unknown buffer version.
838 let fake_server = fake_servers.next().await.unwrap();
839 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
840 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
841 version: Some(10000),
842 diagnostics: Vec::new(),
843 });
844 cx.foreground().run_until_parked();
845
846 project.update(cx, |project, cx| {
847 project.restart_language_servers_for_buffers([buffer.clone()], cx);
848 });
849 let mut fake_server = fake_servers.next().await.unwrap();
850 let notification = fake_server
851 .receive_notification::<lsp::notification::DidOpenTextDocument>()
852 .await
853 .text_document;
854 assert_eq!(notification.version, 0);
855}
856
857#[gpui::test]
858async fn test_toggling_enable_language_server(
859 deterministic: Arc<Deterministic>,
860 cx: &mut gpui::TestAppContext,
861) {
862 deterministic.forbid_parking();
863
864 let mut rust = Language::new(
865 LanguageConfig {
866 name: Arc::from("Rust"),
867 path_suffixes: vec!["rs".to_string()],
868 ..Default::default()
869 },
870 None,
871 );
872 let mut fake_rust_servers = rust
873 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
874 name: "rust-lsp",
875 ..Default::default()
876 }))
877 .await;
878 let mut js = Language::new(
879 LanguageConfig {
880 name: Arc::from("JavaScript"),
881 path_suffixes: vec!["js".to_string()],
882 ..Default::default()
883 },
884 None,
885 );
886 let mut fake_js_servers = js
887 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
888 name: "js-lsp",
889 ..Default::default()
890 }))
891 .await;
892
893 let fs = FakeFs::new(cx.background());
894 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
895 .await;
896
897 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
898 project.update(cx, |project, _| {
899 project.languages.add(Arc::new(rust));
900 project.languages.add(Arc::new(js));
901 });
902
903 let _rs_buffer = project
904 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
905 .await
906 .unwrap();
907 let _js_buffer = project
908 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
909 .await
910 .unwrap();
911
912 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
913 assert_eq!(
914 fake_rust_server_1
915 .receive_notification::<lsp::notification::DidOpenTextDocument>()
916 .await
917 .text_document
918 .uri
919 .as_str(),
920 "file:///dir/a.rs"
921 );
922
923 let mut fake_js_server = fake_js_servers.next().await.unwrap();
924 assert_eq!(
925 fake_js_server
926 .receive_notification::<lsp::notification::DidOpenTextDocument>()
927 .await
928 .text_document
929 .uri
930 .as_str(),
931 "file:///dir/b.js"
932 );
933
934 // Disable Rust language server, ensuring only that server gets stopped.
935 cx.update(|cx| {
936 cx.update_global(|settings: &mut Settings, _| {
937 settings.language_overrides.insert(
938 Arc::from("Rust"),
939 settings::EditorSettings {
940 enable_language_server: Some(false),
941 ..Default::default()
942 },
943 );
944 })
945 });
946 fake_rust_server_1
947 .receive_notification::<lsp::notification::Exit>()
948 .await;
949
950 // Enable Rust and disable JavaScript language servers, ensuring that the
951 // former gets started again and that the latter stops.
952 cx.update(|cx| {
953 cx.update_global(|settings: &mut Settings, _| {
954 settings.language_overrides.insert(
955 Arc::from("Rust"),
956 settings::EditorSettings {
957 enable_language_server: Some(true),
958 ..Default::default()
959 },
960 );
961 settings.language_overrides.insert(
962 Arc::from("JavaScript"),
963 settings::EditorSettings {
964 enable_language_server: Some(false),
965 ..Default::default()
966 },
967 );
968 })
969 });
970 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
971 assert_eq!(
972 fake_rust_server_2
973 .receive_notification::<lsp::notification::DidOpenTextDocument>()
974 .await
975 .text_document
976 .uri
977 .as_str(),
978 "file:///dir/a.rs"
979 );
980 fake_js_server
981 .receive_notification::<lsp::notification::Exit>()
982 .await;
983}
984
985#[gpui::test]
986async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
987 cx.foreground().forbid_parking();
988
989 let mut language = Language::new(
990 LanguageConfig {
991 name: "Rust".into(),
992 path_suffixes: vec!["rs".to_string()],
993 ..Default::default()
994 },
995 Some(tree_sitter_rust::language()),
996 );
997 let mut fake_servers = language
998 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
999 disk_based_diagnostics_sources: vec!["disk".into()],
1000 ..Default::default()
1001 }))
1002 .await;
1003
1004 let text = "
1005 fn a() { A }
1006 fn b() { BB }
1007 fn c() { CCC }
1008 "
1009 .unindent();
1010
1011 let fs = FakeFs::new(cx.background());
1012 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1013
1014 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1015 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1016
1017 let buffer = project
1018 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1019 .await
1020 .unwrap();
1021
1022 let mut fake_server = fake_servers.next().await.unwrap();
1023 let open_notification = fake_server
1024 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1025 .await;
1026
1027 // Edit the buffer, moving the content down
1028 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1029 let change_notification_1 = fake_server
1030 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1031 .await;
1032 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1033
1034 // Report some diagnostics for the initial version of the buffer
1035 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1036 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1037 version: Some(open_notification.text_document.version),
1038 diagnostics: vec![
1039 lsp::Diagnostic {
1040 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1041 severity: Some(DiagnosticSeverity::ERROR),
1042 message: "undefined variable 'A'".to_string(),
1043 source: Some("disk".to_string()),
1044 ..Default::default()
1045 },
1046 lsp::Diagnostic {
1047 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1048 severity: Some(DiagnosticSeverity::ERROR),
1049 message: "undefined variable 'BB'".to_string(),
1050 source: Some("disk".to_string()),
1051 ..Default::default()
1052 },
1053 lsp::Diagnostic {
1054 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1055 severity: Some(DiagnosticSeverity::ERROR),
1056 source: Some("disk".to_string()),
1057 message: "undefined variable 'CCC'".to_string(),
1058 ..Default::default()
1059 },
1060 ],
1061 });
1062
1063 // The diagnostics have moved down since they were created.
1064 buffer.next_notification(cx).await;
1065 buffer.read_with(cx, |buffer, _| {
1066 assert_eq!(
1067 buffer
1068 .snapshot()
1069 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1070 .collect::<Vec<_>>(),
1071 &[
1072 DiagnosticEntry {
1073 range: Point::new(3, 9)..Point::new(3, 11),
1074 diagnostic: Diagnostic {
1075 severity: DiagnosticSeverity::ERROR,
1076 message: "undefined variable 'BB'".to_string(),
1077 is_disk_based: true,
1078 group_id: 1,
1079 is_primary: true,
1080 ..Default::default()
1081 },
1082 },
1083 DiagnosticEntry {
1084 range: Point::new(4, 9)..Point::new(4, 12),
1085 diagnostic: Diagnostic {
1086 severity: DiagnosticSeverity::ERROR,
1087 message: "undefined variable 'CCC'".to_string(),
1088 is_disk_based: true,
1089 group_id: 2,
1090 is_primary: true,
1091 ..Default::default()
1092 }
1093 }
1094 ]
1095 );
1096 assert_eq!(
1097 chunks_with_diagnostics(buffer, 0..buffer.len()),
1098 [
1099 ("\n\nfn a() { ".to_string(), None),
1100 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1101 (" }\nfn b() { ".to_string(), None),
1102 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1103 (" }\nfn c() { ".to_string(), None),
1104 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1105 (" }\n".to_string(), None),
1106 ]
1107 );
1108 assert_eq!(
1109 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1110 [
1111 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1112 (" }\nfn c() { ".to_string(), None),
1113 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1114 ]
1115 );
1116 });
1117
1118 // Ensure overlapping diagnostics are highlighted correctly.
1119 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1120 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1121 version: Some(open_notification.text_document.version),
1122 diagnostics: vec![
1123 lsp::Diagnostic {
1124 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1125 severity: Some(DiagnosticSeverity::ERROR),
1126 message: "undefined variable 'A'".to_string(),
1127 source: Some("disk".to_string()),
1128 ..Default::default()
1129 },
1130 lsp::Diagnostic {
1131 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1132 severity: Some(DiagnosticSeverity::WARNING),
1133 message: "unreachable statement".to_string(),
1134 source: Some("disk".to_string()),
1135 ..Default::default()
1136 },
1137 ],
1138 });
1139
1140 buffer.next_notification(cx).await;
1141 buffer.read_with(cx, |buffer, _| {
1142 assert_eq!(
1143 buffer
1144 .snapshot()
1145 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1146 .collect::<Vec<_>>(),
1147 &[
1148 DiagnosticEntry {
1149 range: Point::new(2, 9)..Point::new(2, 12),
1150 diagnostic: Diagnostic {
1151 severity: DiagnosticSeverity::WARNING,
1152 message: "unreachable statement".to_string(),
1153 is_disk_based: true,
1154 group_id: 4,
1155 is_primary: true,
1156 ..Default::default()
1157 }
1158 },
1159 DiagnosticEntry {
1160 range: Point::new(2, 9)..Point::new(2, 10),
1161 diagnostic: Diagnostic {
1162 severity: DiagnosticSeverity::ERROR,
1163 message: "undefined variable 'A'".to_string(),
1164 is_disk_based: true,
1165 group_id: 3,
1166 is_primary: true,
1167 ..Default::default()
1168 },
1169 }
1170 ]
1171 );
1172 assert_eq!(
1173 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1174 [
1175 ("fn a() { ".to_string(), None),
1176 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1177 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1178 ("\n".to_string(), None),
1179 ]
1180 );
1181 assert_eq!(
1182 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1183 [
1184 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1185 ("\n".to_string(), None),
1186 ]
1187 );
1188 });
1189
1190 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1191 // changes since the last save.
1192 buffer.update(cx, |buffer, cx| {
1193 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1194 buffer.edit(
1195 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1196 None,
1197 cx,
1198 );
1199 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1200 });
1201 let change_notification_2 = fake_server
1202 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1203 .await;
1204 assert!(
1205 change_notification_2.text_document.version > change_notification_1.text_document.version
1206 );
1207
1208 // Handle out-of-order diagnostics
1209 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1210 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1211 version: Some(change_notification_2.text_document.version),
1212 diagnostics: vec![
1213 lsp::Diagnostic {
1214 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1215 severity: Some(DiagnosticSeverity::ERROR),
1216 message: "undefined variable 'BB'".to_string(),
1217 source: Some("disk".to_string()),
1218 ..Default::default()
1219 },
1220 lsp::Diagnostic {
1221 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1222 severity: Some(DiagnosticSeverity::WARNING),
1223 message: "undefined variable 'A'".to_string(),
1224 source: Some("disk".to_string()),
1225 ..Default::default()
1226 },
1227 ],
1228 });
1229
1230 buffer.next_notification(cx).await;
1231 buffer.read_with(cx, |buffer, _| {
1232 assert_eq!(
1233 buffer
1234 .snapshot()
1235 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1236 .collect::<Vec<_>>(),
1237 &[
1238 DiagnosticEntry {
1239 range: Point::new(2, 21)..Point::new(2, 22),
1240 diagnostic: Diagnostic {
1241 severity: DiagnosticSeverity::WARNING,
1242 message: "undefined variable 'A'".to_string(),
1243 is_disk_based: true,
1244 group_id: 6,
1245 is_primary: true,
1246 ..Default::default()
1247 }
1248 },
1249 DiagnosticEntry {
1250 range: Point::new(3, 9)..Point::new(3, 14),
1251 diagnostic: Diagnostic {
1252 severity: DiagnosticSeverity::ERROR,
1253 message: "undefined variable 'BB'".to_string(),
1254 is_disk_based: true,
1255 group_id: 5,
1256 is_primary: true,
1257 ..Default::default()
1258 },
1259 }
1260 ]
1261 );
1262 });
1263}
1264
1265#[gpui::test]
1266async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1267 cx.foreground().forbid_parking();
1268
1269 let text = concat!(
1270 "let one = ;\n", //
1271 "let two = \n",
1272 "let three = 3;\n",
1273 );
1274
1275 let fs = FakeFs::new(cx.background());
1276 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1277
1278 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1279 let buffer = project
1280 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1281 .await
1282 .unwrap();
1283
1284 project.update(cx, |project, cx| {
1285 project
1286 .update_buffer_diagnostics(
1287 &buffer,
1288 vec![
1289 DiagnosticEntry {
1290 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1291 diagnostic: Diagnostic {
1292 severity: DiagnosticSeverity::ERROR,
1293 message: "syntax error 1".to_string(),
1294 ..Default::default()
1295 },
1296 },
1297 DiagnosticEntry {
1298 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1299 diagnostic: Diagnostic {
1300 severity: DiagnosticSeverity::ERROR,
1301 message: "syntax error 2".to_string(),
1302 ..Default::default()
1303 },
1304 },
1305 ],
1306 None,
1307 cx,
1308 )
1309 .unwrap();
1310 });
1311
1312 // An empty range is extended forward to include the following character.
1313 // At the end of a line, an empty range is extended backward to include
1314 // the preceding character.
1315 buffer.read_with(cx, |buffer, _| {
1316 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1317 assert_eq!(
1318 chunks
1319 .iter()
1320 .map(|(s, d)| (s.as_str(), *d))
1321 .collect::<Vec<_>>(),
1322 &[
1323 ("let one = ", None),
1324 (";", Some(DiagnosticSeverity::ERROR)),
1325 ("\nlet two =", None),
1326 (" ", Some(DiagnosticSeverity::ERROR)),
1327 ("\nlet three = 3;\n", None)
1328 ]
1329 );
1330 });
1331}
1332
1333#[gpui::test]
1334async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
1335 cx.foreground().forbid_parking();
1336
1337 let mut language = Language::new(
1338 LanguageConfig {
1339 name: "Rust".into(),
1340 path_suffixes: vec!["rs".to_string()],
1341 ..Default::default()
1342 },
1343 Some(tree_sitter_rust::language()),
1344 );
1345 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1346
1347 let text = "
1348 fn a() {
1349 f1();
1350 }
1351 fn b() {
1352 f2();
1353 }
1354 fn c() {
1355 f3();
1356 }
1357 "
1358 .unindent();
1359
1360 let fs = FakeFs::new(cx.background());
1361 fs.insert_tree(
1362 "/dir",
1363 json!({
1364 "a.rs": text.clone(),
1365 }),
1366 )
1367 .await;
1368
1369 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1370 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1371 let buffer = project
1372 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1373 .await
1374 .unwrap();
1375
1376 let mut fake_server = fake_servers.next().await.unwrap();
1377 let lsp_document_version = fake_server
1378 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1379 .await
1380 .text_document
1381 .version;
1382
1383 // Simulate editing the buffer after the language server computes some edits.
1384 buffer.update(cx, |buffer, cx| {
1385 buffer.edit(
1386 [(
1387 Point::new(0, 0)..Point::new(0, 0),
1388 "// above first function\n",
1389 )],
1390 None,
1391 cx,
1392 );
1393 buffer.edit(
1394 [(
1395 Point::new(2, 0)..Point::new(2, 0),
1396 " // inside first function\n",
1397 )],
1398 None,
1399 cx,
1400 );
1401 buffer.edit(
1402 [(
1403 Point::new(6, 4)..Point::new(6, 4),
1404 "// inside second function ",
1405 )],
1406 None,
1407 cx,
1408 );
1409
1410 assert_eq!(
1411 buffer.text(),
1412 "
1413 // above first function
1414 fn a() {
1415 // inside first function
1416 f1();
1417 }
1418 fn b() {
1419 // inside second function f2();
1420 }
1421 fn c() {
1422 f3();
1423 }
1424 "
1425 .unindent()
1426 );
1427 });
1428
1429 let edits = project
1430 .update(cx, |project, cx| {
1431 project.edits_from_lsp(
1432 &buffer,
1433 vec![
1434 // replace body of first function
1435 lsp::TextEdit {
1436 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1437 new_text: "
1438 fn a() {
1439 f10();
1440 }
1441 "
1442 .unindent(),
1443 },
1444 // edit inside second function
1445 lsp::TextEdit {
1446 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1447 new_text: "00".into(),
1448 },
1449 // edit inside third function via two distinct edits
1450 lsp::TextEdit {
1451 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1452 new_text: "4000".into(),
1453 },
1454 lsp::TextEdit {
1455 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1456 new_text: "".into(),
1457 },
1458 ],
1459 Some(lsp_document_version),
1460 cx,
1461 )
1462 })
1463 .await
1464 .unwrap();
1465
1466 buffer.update(cx, |buffer, cx| {
1467 for (range, new_text) in edits {
1468 buffer.edit([(range, new_text)], None, cx);
1469 }
1470 assert_eq!(
1471 buffer.text(),
1472 "
1473 // above first function
1474 fn a() {
1475 // inside first function
1476 f10();
1477 }
1478 fn b() {
1479 // inside second function f200();
1480 }
1481 fn c() {
1482 f4000();
1483 }
1484 "
1485 .unindent()
1486 );
1487 });
1488}
1489
1490#[gpui::test]
1491async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1492 cx.foreground().forbid_parking();
1493
1494 let text = "
1495 use a::b;
1496 use a::c;
1497
1498 fn f() {
1499 b();
1500 c();
1501 }
1502 "
1503 .unindent();
1504
1505 let fs = FakeFs::new(cx.background());
1506 fs.insert_tree(
1507 "/dir",
1508 json!({
1509 "a.rs": text.clone(),
1510 }),
1511 )
1512 .await;
1513
1514 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1515 let buffer = project
1516 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1517 .await
1518 .unwrap();
1519
1520 // Simulate the language server sending us a small edit in the form of a very large diff.
1521 // Rust-analyzer does this when performing a merge-imports code action.
1522 let edits = project
1523 .update(cx, |project, cx| {
1524 project.edits_from_lsp(
1525 &buffer,
1526 [
1527 // Replace the first use statement without editing the semicolon.
1528 lsp::TextEdit {
1529 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
1530 new_text: "a::{b, c}".into(),
1531 },
1532 // Reinsert the remainder of the file between the semicolon and the final
1533 // newline of the file.
1534 lsp::TextEdit {
1535 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1536 new_text: "\n\n".into(),
1537 },
1538 lsp::TextEdit {
1539 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1540 new_text: "
1541 fn f() {
1542 b();
1543 c();
1544 }"
1545 .unindent(),
1546 },
1547 // Delete everything after the first newline of the file.
1548 lsp::TextEdit {
1549 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1550 new_text: "".into(),
1551 },
1552 ],
1553 None,
1554 cx,
1555 )
1556 })
1557 .await
1558 .unwrap();
1559
1560 buffer.update(cx, |buffer, cx| {
1561 let edits = edits
1562 .into_iter()
1563 .map(|(range, text)| {
1564 (
1565 range.start.to_point(buffer)..range.end.to_point(buffer),
1566 text,
1567 )
1568 })
1569 .collect::<Vec<_>>();
1570
1571 assert_eq!(
1572 edits,
1573 [
1574 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1575 (Point::new(1, 0)..Point::new(2, 0), "".into())
1576 ]
1577 );
1578
1579 for (range, new_text) in edits {
1580 buffer.edit([(range, new_text)], None, cx);
1581 }
1582 assert_eq!(
1583 buffer.text(),
1584 "
1585 use a::{b, c};
1586
1587 fn f() {
1588 b();
1589 c();
1590 }
1591 "
1592 .unindent()
1593 );
1594 });
1595}
1596
1597#[gpui::test]
1598async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
1599 cx.foreground().forbid_parking();
1600
1601 let text = "
1602 use a::b;
1603 use a::c;
1604
1605 fn f() {
1606 b();
1607 c();
1608 }
1609 "
1610 .unindent();
1611
1612 let fs = FakeFs::new(cx.background());
1613 fs.insert_tree(
1614 "/dir",
1615 json!({
1616 "a.rs": text.clone(),
1617 }),
1618 )
1619 .await;
1620
1621 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1622 let buffer = project
1623 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1624 .await
1625 .unwrap();
1626
1627 // Simulate the language server sending us edits in a non-ordered fashion,
1628 // with ranges sometimes being inverted or pointing to invalid locations.
1629 let edits = project
1630 .update(cx, |project, cx| {
1631 project.edits_from_lsp(
1632 &buffer,
1633 [
1634 lsp::TextEdit {
1635 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1636 new_text: "\n\n".into(),
1637 },
1638 lsp::TextEdit {
1639 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
1640 new_text: "a::{b, c}".into(),
1641 },
1642 lsp::TextEdit {
1643 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
1644 new_text: "".into(),
1645 },
1646 lsp::TextEdit {
1647 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1648 new_text: "
1649 fn f() {
1650 b();
1651 c();
1652 }"
1653 .unindent(),
1654 },
1655 ],
1656 None,
1657 cx,
1658 )
1659 })
1660 .await
1661 .unwrap();
1662
1663 buffer.update(cx, |buffer, cx| {
1664 let edits = edits
1665 .into_iter()
1666 .map(|(range, text)| {
1667 (
1668 range.start.to_point(buffer)..range.end.to_point(buffer),
1669 text,
1670 )
1671 })
1672 .collect::<Vec<_>>();
1673
1674 assert_eq!(
1675 edits,
1676 [
1677 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
1678 (Point::new(1, 0)..Point::new(2, 0), "".into())
1679 ]
1680 );
1681
1682 for (range, new_text) in edits {
1683 buffer.edit([(range, new_text)], None, cx);
1684 }
1685 assert_eq!(
1686 buffer.text(),
1687 "
1688 use a::{b, c};
1689
1690 fn f() {
1691 b();
1692 c();
1693 }
1694 "
1695 .unindent()
1696 );
1697 });
1698}
1699
1700fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
1701 buffer: &Buffer,
1702 range: Range<T>,
1703) -> Vec<(String, Option<DiagnosticSeverity>)> {
1704 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
1705 for chunk in buffer.snapshot().chunks(range, true) {
1706 if chunks.last().map_or(false, |prev_chunk| {
1707 prev_chunk.1 == chunk.diagnostic_severity
1708 }) {
1709 chunks.last_mut().unwrap().0.push_str(chunk.text);
1710 } else {
1711 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
1712 }
1713 }
1714 chunks
1715}
1716
1717#[gpui::test(iterations = 10)]
1718async fn test_definition(cx: &mut gpui::TestAppContext) {
1719 let mut language = Language::new(
1720 LanguageConfig {
1721 name: "Rust".into(),
1722 path_suffixes: vec!["rs".to_string()],
1723 ..Default::default()
1724 },
1725 Some(tree_sitter_rust::language()),
1726 );
1727 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1728
1729 let fs = FakeFs::new(cx.background());
1730 fs.insert_tree(
1731 "/dir",
1732 json!({
1733 "a.rs": "const fn a() { A }",
1734 "b.rs": "const y: i32 = crate::a()",
1735 }),
1736 )
1737 .await;
1738
1739 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
1740 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1741
1742 let buffer = project
1743 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1744 .await
1745 .unwrap();
1746
1747 let fake_server = fake_servers.next().await.unwrap();
1748 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
1749 let params = params.text_document_position_params;
1750 assert_eq!(
1751 params.text_document.uri.to_file_path().unwrap(),
1752 Path::new("/dir/b.rs"),
1753 );
1754 assert_eq!(params.position, lsp::Position::new(0, 22));
1755
1756 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
1757 lsp::Location::new(
1758 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1759 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1760 ),
1761 )))
1762 });
1763
1764 let mut definitions = project
1765 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
1766 .await
1767 .unwrap();
1768
1769 // Assert no new language server started
1770 cx.foreground().run_until_parked();
1771 assert!(fake_servers.try_next().is_err());
1772
1773 assert_eq!(definitions.len(), 1);
1774 let definition = definitions.pop().unwrap();
1775 cx.update(|cx| {
1776 let target_buffer = definition.target.buffer.read(cx);
1777 assert_eq!(
1778 target_buffer
1779 .file()
1780 .unwrap()
1781 .as_local()
1782 .unwrap()
1783 .abs_path(cx),
1784 Path::new("/dir/a.rs"),
1785 );
1786 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
1787 assert_eq!(
1788 list_worktrees(&project, cx),
1789 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
1790 );
1791
1792 drop(definition);
1793 });
1794 cx.read(|cx| {
1795 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
1796 });
1797
1798 fn list_worktrees<'a>(
1799 project: &'a ModelHandle<Project>,
1800 cx: &'a AppContext,
1801 ) -> Vec<(&'a Path, bool)> {
1802 project
1803 .read(cx)
1804 .worktrees(cx)
1805 .map(|worktree| {
1806 let worktree = worktree.read(cx);
1807 (
1808 worktree.as_local().unwrap().abs_path().as_ref(),
1809 worktree.is_visible(),
1810 )
1811 })
1812 .collect::<Vec<_>>()
1813 }
1814}
1815
1816#[gpui::test]
1817async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
1818 let mut language = Language::new(
1819 LanguageConfig {
1820 name: "TypeScript".into(),
1821 path_suffixes: vec!["ts".to_string()],
1822 ..Default::default()
1823 },
1824 Some(tree_sitter_typescript::language_typescript()),
1825 );
1826 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1827
1828 let fs = FakeFs::new(cx.background());
1829 fs.insert_tree(
1830 "/dir",
1831 json!({
1832 "a.ts": "",
1833 }),
1834 )
1835 .await;
1836
1837 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1838 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1839 let buffer = project
1840 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1841 .await
1842 .unwrap();
1843
1844 let fake_server = fake_language_servers.next().await.unwrap();
1845
1846 let text = "let a = b.fqn";
1847 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1848 let completions = project.update(cx, |project, cx| {
1849 project.completions(&buffer, text.len(), cx)
1850 });
1851
1852 fake_server
1853 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1854 Ok(Some(lsp::CompletionResponse::Array(vec![
1855 lsp::CompletionItem {
1856 label: "fullyQualifiedName?".into(),
1857 insert_text: Some("fullyQualifiedName".into()),
1858 ..Default::default()
1859 },
1860 ])))
1861 })
1862 .next()
1863 .await;
1864 let completions = completions.await.unwrap();
1865 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
1866 assert_eq!(completions.len(), 1);
1867 assert_eq!(completions[0].new_text, "fullyQualifiedName");
1868 assert_eq!(
1869 completions[0].old_range.to_offset(&snapshot),
1870 text.len() - 3..text.len()
1871 );
1872
1873 let text = "let a = \"atoms/cmp\"";
1874 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1875 let completions = project.update(cx, |project, cx| {
1876 project.completions(&buffer, text.len() - 1, cx)
1877 });
1878
1879 fake_server
1880 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1881 Ok(Some(lsp::CompletionResponse::Array(vec![
1882 lsp::CompletionItem {
1883 label: "component".into(),
1884 ..Default::default()
1885 },
1886 ])))
1887 })
1888 .next()
1889 .await;
1890 let completions = completions.await.unwrap();
1891 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
1892 assert_eq!(completions.len(), 1);
1893 assert_eq!(completions[0].new_text, "component");
1894 assert_eq!(
1895 completions[0].old_range.to_offset(&snapshot),
1896 text.len() - 4..text.len() - 1
1897 );
1898}
1899
1900#[gpui::test]
1901async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
1902 let mut language = Language::new(
1903 LanguageConfig {
1904 name: "TypeScript".into(),
1905 path_suffixes: vec!["ts".to_string()],
1906 ..Default::default()
1907 },
1908 Some(tree_sitter_typescript::language_typescript()),
1909 );
1910 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1911
1912 let fs = FakeFs::new(cx.background());
1913 fs.insert_tree(
1914 "/dir",
1915 json!({
1916 "a.ts": "",
1917 }),
1918 )
1919 .await;
1920
1921 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1922 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1923 let buffer = project
1924 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1925 .await
1926 .unwrap();
1927
1928 let fake_server = fake_language_servers.next().await.unwrap();
1929
1930 let text = "let a = b.fqn";
1931 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1932 let completions = project.update(cx, |project, cx| {
1933 project.completions(&buffer, text.len(), cx)
1934 });
1935
1936 fake_server
1937 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1938 Ok(Some(lsp::CompletionResponse::Array(vec![
1939 lsp::CompletionItem {
1940 label: "fullyQualifiedName?".into(),
1941 insert_text: Some("fully\rQualified\r\nName".into()),
1942 ..Default::default()
1943 },
1944 ])))
1945 })
1946 .next()
1947 .await;
1948 let completions = completions.await.unwrap();
1949 assert_eq!(completions.len(), 1);
1950 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
1951}
1952
1953#[gpui::test(iterations = 10)]
1954async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
1955 let mut language = Language::new(
1956 LanguageConfig {
1957 name: "TypeScript".into(),
1958 path_suffixes: vec!["ts".to_string()],
1959 ..Default::default()
1960 },
1961 None,
1962 );
1963 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1964
1965 let fs = FakeFs::new(cx.background());
1966 fs.insert_tree(
1967 "/dir",
1968 json!({
1969 "a.ts": "a",
1970 }),
1971 )
1972 .await;
1973
1974 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1975 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1976 let buffer = project
1977 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1978 .await
1979 .unwrap();
1980
1981 let fake_server = fake_language_servers.next().await.unwrap();
1982
1983 // Language server returns code actions that contain commands, and not edits.
1984 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
1985 fake_server
1986 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
1987 Ok(Some(vec![
1988 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
1989 title: "The code action".into(),
1990 command: Some(lsp::Command {
1991 title: "The command".into(),
1992 command: "_the/command".into(),
1993 arguments: Some(vec![json!("the-argument")]),
1994 }),
1995 ..Default::default()
1996 }),
1997 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
1998 title: "two".into(),
1999 ..Default::default()
2000 }),
2001 ]))
2002 })
2003 .next()
2004 .await;
2005
2006 let action = actions.await.unwrap()[0].clone();
2007 let apply = project.update(cx, |project, cx| {
2008 project.apply_code_action(buffer.clone(), action, true, cx)
2009 });
2010
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the action's command instead.
2013 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2014 |action, _| async move { Ok(action) },
2015 );
2016
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2019 fake_server
2020 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2021 let fake = fake_server.clone();
2022 move |params, _| {
2023 assert_eq!(params.command, "_the/command");
2024 let fake = fake.clone();
2025 async move {
2026 fake.server
2027 .request::<lsp::request::ApplyWorkspaceEdit>(
2028 lsp::ApplyWorkspaceEditParams {
2029 label: None,
2030 edit: lsp::WorkspaceEdit {
2031 changes: Some(
2032 [(
2033 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2034 vec![lsp::TextEdit {
2035 range: lsp::Range::new(
2036 lsp::Position::new(0, 0),
2037 lsp::Position::new(0, 0),
2038 ),
2039 new_text: "X".into(),
2040 }],
2041 )]
2042 .into_iter()
2043 .collect(),
2044 ),
2045 ..Default::default()
2046 },
2047 },
2048 )
2049 .await
2050 .unwrap();
2051 Ok(Some(json!(null)))
2052 }
2053 }
2054 })
2055 .next()
2056 .await;
2057
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2060 let transaction = apply.await.unwrap();
2061 assert!(transaction.0.contains_key(&buffer));
2062 buffer.update(cx, |buffer, cx| {
2063 assert_eq!(buffer.text(), "Xa");
2064 buffer.undo(cx);
2065 assert_eq!(buffer.text(), "a");
2066 });
2067}
2068
2069#[gpui::test]
2070async fn test_save_file(cx: &mut gpui::TestAppContext) {
2071 let fs = FakeFs::new(cx.background());
2072 fs.insert_tree(
2073 "/dir",
2074 json!({
2075 "file1": "the old contents",
2076 }),
2077 )
2078 .await;
2079
2080 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2081 let buffer = project
2082 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2083 .await
2084 .unwrap();
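    // Edit the buffer with a large insertion, then save it and verify that the
    // file on disk matches the buffer's contents.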
2085 buffer.update(cx, |buffer, cx| {
2086 assert_eq!(buffer.text(), "the old contents");
2087 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2088 });
2089
2090 cx.update(|cx| Project::save_buffer(buffer.clone(), cx))
2091 .await
2092 .unwrap();
2093
2094 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2095 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2096}
2097
2098#[gpui::test]
2099async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2100 let fs = FakeFs::new(cx.background());
2101 fs.insert_tree(
2102 "/dir",
2103 json!({
2104 "file1": "the old contents",
2105 }),
2106 )
2107 .await;
2108
2109 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2110 let buffer = project
2111 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2112 .await
2113 .unwrap();
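    // Edit the buffer, then save it through its single-file worktree and verify
    // that the file on disk matches the buffer's contents.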
2114 buffer.update(cx, |buffer, cx| {
2115 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2116 });
2117
2118 cx.update(|cx| Project::save_buffer(buffer.clone(), cx))
2119 .await
2120 .unwrap();
2121
2122 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2123 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2124}
2125
2126#[gpui::test]
2127async fn test_save_as(cx: &mut gpui::TestAppContext) {
2128 let fs = FakeFs::new(cx.background());
2129 fs.insert_tree("/dir", json!({})).await;
2130
2131 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2132
2133 let languages = project.read_with(cx, |project, _| project.languages().clone());
2134 languages.register(
2135 "/some/path",
2136 LanguageConfig {
2137 name: "Rust".into(),
2138 path_suffixes: vec!["rs".into()],
2139 ..Default::default()
2140 },
2141 tree_sitter_rust::language(),
2142 None,
2143 |_| Default::default(),
2144 );
2145
2146 let buffer = project.update(cx, |project, cx| {
2147 project.create_buffer("", None, cx).unwrap()
2148 });
2149 buffer.update(cx, |buffer, cx| {
2150 buffer.edit([(0..0, "abc")], None, cx);
2151 assert!(buffer.is_dirty());
2152 assert!(!buffer.has_conflict());
2153 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2154 });
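    // Save the untitled plain-text buffer to a path with a `.rs` extension.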
2155 project
2156 .update(cx, |project, cx| {
2157 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2158 })
2159 .await
2160 .unwrap();
2161 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2162
2163 cx.foreground().run_until_parked();
2164 buffer.read_with(cx, |buffer, cx| {
2165 assert_eq!(
2166 buffer.file().unwrap().full_path(cx),
2167 Path::new("dir/file1.rs")
2168 );
2169 assert!(!buffer.is_dirty());
2170 assert!(!buffer.has_conflict());
2171 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2172 });
2173
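    // Re-opening the saved path yields the same buffer.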
2174 let opened_buffer = project
2175 .update(cx, |project, cx| {
2176 project.open_local_buffer("/dir/file1.rs", cx)
2177 })
2178 .await
2179 .unwrap();
2180 assert_eq!(opened_buffer, buffer);
2181}
2182
2183#[gpui::test(retries = 5)]
2184async fn test_rescan_and_remote_updates(
2185 deterministic: Arc<Deterministic>,
2186 cx: &mut gpui::TestAppContext,
2187) {
2188 let dir = temp_tree(json!({
2189 "a": {
2190 "file1": "",
2191 "file2": "",
2192 "file3": "",
2193 },
2194 "b": {
2195 "c": {
2196 "file4": "",
2197 "file5": "",
2198 }
2199 }
2200 }));
2201
2202 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2203 let rpc = project.read_with(cx, |p, _| p.client.clone());
2204
2205 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2206 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2207 async move { buffer.await.unwrap() }
2208 };
2209 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2210 project.read_with(cx, |project, cx| {
2211 let tree = project.worktrees(cx).next().unwrap();
2212 tree.read(cx)
2213 .entry_for_path(path)
2214 .unwrap_or_else(|| panic!("no entry for path {}", path))
2215 .id
2216 })
2217 };
2218
2219 let buffer2 = buffer_for_path("a/file2", cx).await;
2220 let buffer3 = buffer_for_path("a/file3", cx).await;
2221 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2222 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2223
2224 let file2_id = id_for_path("a/file2", cx);
2225 let file3_id = id_for_path("a/file3", cx);
2226 let file4_id = id_for_path("b/c/file4", cx);
2227
2228 // Create a remote copy of this worktree.
2229 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2230 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
2231 let remote = cx.update(|cx| {
2232 Worktree::remote(
2233 1,
2234 1,
2235 proto::WorktreeMetadata {
2236 id: initial_snapshot.id().to_proto(),
2237 root_name: initial_snapshot.root_name().into(),
2238 abs_path: initial_snapshot
2239 .abs_path()
2240 .as_os_str()
2241 .to_string_lossy()
2242 .into(),
2243 visible: true,
2244 },
2245 rpc.clone(),
2246 cx,
2247 )
2248 });
2249 remote.update(cx, |remote, _| {
2250 let update = initial_snapshot.build_initial_update(1);
2251 remote.as_remote_mut().unwrap().update_from_remote(update);
2252 });
2253 deterministic.run_until_parked();
2254
2255 cx.read(|cx| {
2256 assert!(!buffer2.read(cx).is_dirty());
2257 assert!(!buffer3.read(cx).is_dirty());
2258 assert!(!buffer4.read(cx).is_dirty());
2259 assert!(!buffer5.read(cx).is_dirty());
2260 });
2261
2262 // Rename and delete files and directories.
2263 tree.flush_fs_events(cx).await;
2264 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2265 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2266 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2267 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2268 tree.flush_fs_events(cx).await;
2269
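    // The local worktree now reflects the renames and deletions, and the open
    // buffers' paths and deletion states are updated accordingly.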
2270 let expected_paths = vec![
2271 "a",
2272 "a/file1",
2273 "a/file2.new",
2274 "b",
2275 "d",
2276 "d/file3",
2277 "d/file4",
2278 ];
2279
2280 cx.read(|app| {
2281 assert_eq!(
2282 tree.read(app)
2283 .paths()
2284 .map(|p| p.to_str().unwrap())
2285 .collect::<Vec<_>>(),
2286 expected_paths
2287 );
2288
2289 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2290 assert_eq!(id_for_path("d/file3", cx), file3_id);
2291 assert_eq!(id_for_path("d/file4", cx), file4_id);
2292
2293 assert_eq!(
2294 buffer2.read(app).file().unwrap().path().as_ref(),
2295 Path::new("a/file2.new")
2296 );
2297 assert_eq!(
2298 buffer3.read(app).file().unwrap().path().as_ref(),
2299 Path::new("d/file3")
2300 );
2301 assert_eq!(
2302 buffer4.read(app).file().unwrap().path().as_ref(),
2303 Path::new("d/file4")
2304 );
2305 assert_eq!(
2306 buffer5.read(app).file().unwrap().path().as_ref(),
2307 Path::new("b/c/file5")
2308 );
2309
2310 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2311 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2312 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2313 assert!(buffer5.read(app).file().unwrap().is_deleted());
2314 });
2315
2316 // Update the remote worktree. Check that it becomes consistent with the
2317 // local worktree.
2318 remote.update(cx, |remote, cx| {
2319 let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
2320 &initial_snapshot,
2321 1,
2322 1,
2323 true,
2324 );
2325 remote.as_remote_mut().unwrap().update_from_remote(update);
2326 });
2327 deterministic.run_until_parked();
2328 remote.read_with(cx, |remote, _| {
2329 assert_eq!(
2330 remote
2331 .paths()
2332 .map(|p| p.to_str().unwrap())
2333 .collect::<Vec<_>>(),
2334 expected_paths
2335 );
2336 });
2337}
2338
2339#[gpui::test(iterations = 10)]
2340async fn test_buffer_identity_across_renames(
2341 deterministic: Arc<Deterministic>,
2342 cx: &mut gpui::TestAppContext,
2343) {
2344 let fs = FakeFs::new(cx.background());
2345 fs.insert_tree(
2346 "/dir",
2347 json!({
2348 "a": {
2349 "file1": "",
2350 }
2351 }),
2352 )
2353 .await;
2354
2355 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2356 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2357 let tree_id = tree.read_with(cx, |tree, _| tree.id());
2358
2359 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2360 project.read_with(cx, |project, cx| {
2361 let tree = project.worktrees(cx).next().unwrap();
2362 tree.read(cx)
2363 .entry_for_path(path)
2364 .unwrap_or_else(|| panic!("no entry for path {}", path))
2365 .id
2366 })
2367 };
2368
2369 let dir_id = id_for_path("a", cx);
2370 let file_id = id_for_path("a/file1", cx);
2371 let buffer = project
2372 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2373 .await
2374 .unwrap();
2375 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2376
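    // Rename the parent directory. The entry ids and the open buffer are preserved.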
2377 project
2378 .update(cx, |project, cx| {
2379 project.rename_entry(dir_id, Path::new("b"), cx)
2380 })
2381 .unwrap()
2382 .await
2383 .unwrap();
2384 deterministic.run_until_parked();
2385 assert_eq!(id_for_path("b", cx), dir_id);
2386 assert_eq!(id_for_path("b/file1", cx), file_id);
2387 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2388}
2389
2390#[gpui::test]
2391async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2392 let fs = FakeFs::new(cx.background());
2393 fs.insert_tree(
2394 "/dir",
2395 json!({
2396 "a.txt": "a-contents",
2397 "b.txt": "b-contents",
2398 }),
2399 )
2400 .await;
2401
2402 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2403
2404 // Spawn multiple tasks to open paths, repeating some paths.
2405 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2406 (
2407 p.open_local_buffer("/dir/a.txt", cx),
2408 p.open_local_buffer("/dir/b.txt", cx),
2409 p.open_local_buffer("/dir/a.txt", cx),
2410 )
2411 });
2412
2413 let buffer_a_1 = buffer_a_1.await.unwrap();
2414 let buffer_a_2 = buffer_a_2.await.unwrap();
2415 let buffer_b = buffer_b.await.unwrap();
2416 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2417 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2418
2419 // There is only one buffer per path.
2420 let buffer_a_id = buffer_a_1.id();
2421 assert_eq!(buffer_a_2.id(), buffer_a_id);
2422
2423 // Open the same path again while it is still open.
2424 drop(buffer_a_1);
2425 let buffer_a_3 = project
2426 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2427 .await
2428 .unwrap();
2429
2430 // There's still only one buffer per path.
2431 assert_eq!(buffer_a_3.id(), buffer_a_id);
2432}
2433
2434#[gpui::test]
2435async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2436 let fs = FakeFs::new(cx.background());
2437 fs.insert_tree(
2438 "/dir",
2439 json!({
2440 "file1": "abc",
2441 "file2": "def",
2442 "file3": "ghi",
2443 }),
2444 )
2445 .await;
2446
2447 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2448
2449 let buffer1 = project
2450 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2451 .await
2452 .unwrap();
2453 let events = Rc::new(RefCell::new(Vec::new()));
2454
    // Initially, the buffer isn't dirty.
2456 buffer1.update(cx, |buffer, cx| {
2457 cx.subscribe(&buffer1, {
2458 let events = events.clone();
2459 move |_, _, event, _| match event {
2460 BufferEvent::Operation(_) => {}
2461 _ => events.borrow_mut().push(event.clone()),
2462 }
2463 })
2464 .detach();
2465
2466 assert!(!buffer.is_dirty());
2467 assert!(events.borrow().is_empty());
2468
2469 buffer.edit([(1..2, "")], None, cx);
2470 });
2471
    // After the first edit, the buffer is dirty and emits a `DirtyChanged` event.
    buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "ac");
2475 assert!(buffer.is_dirty());
2476 assert_eq!(
2477 *events.borrow(),
2478 &[language::Event::Edited, language::Event::DirtyChanged]
2479 );
2480 events.borrow_mut().clear();
2481 buffer.did_save(
2482 buffer.version(),
2483 buffer.as_rope().fingerprint(),
2484 buffer.file().unwrap().mtime(),
2485 cx,
2486 );
2487 });
2488
    // After saving, the buffer is no longer dirty, and it emits a `Saved` event.
2490 buffer1.update(cx, |buffer, cx| {
2491 assert!(!buffer.is_dirty());
2492 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2493 events.borrow_mut().clear();
2494
2495 buffer.edit([(1..1, "B")], None, cx);
2496 buffer.edit([(2..2, "D")], None, cx);
2497 });
2498
    // After editing again, the buffer is dirty and emits `Edited` and `DirtyChanged` events.
    buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "aBDc");
2502 assert!(buffer.is_dirty());
2503 assert_eq!(
2504 *events.borrow(),
2505 &[
2506 language::Event::Edited,
2507 language::Event::DirtyChanged,
2508 language::Event::Edited,
2509 ],
2510 );
2511 events.borrow_mut().clear();
2512
2513 // After restoring the buffer to its previously-saved state,
2514 // the buffer is not considered dirty anymore.
2515 buffer.edit([(1..3, "")], None, cx);
        assert_eq!(buffer.text(), "ac");
2517 assert!(!buffer.is_dirty());
2518 });
2519
2520 assert_eq!(
2521 *events.borrow(),
2522 &[language::Event::Edited, language::Event::DirtyChanged]
2523 );
2524
    // When a file is deleted, its buffer is considered dirty, and it emits both
    // `DirtyChanged` and `FileHandleChanged` events.
2526 let events = Rc::new(RefCell::new(Vec::new()));
2527 let buffer2 = project
2528 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2529 .await
2530 .unwrap();
2531 buffer2.update(cx, |_, cx| {
2532 cx.subscribe(&buffer2, {
2533 let events = events.clone();
2534 move |_, _, event, _| events.borrow_mut().push(event.clone())
2535 })
2536 .detach();
2537 });
2538
2539 fs.remove_file("/dir/file2".as_ref(), Default::default())
2540 .await
2541 .unwrap();
2542 cx.foreground().run_until_parked();
2543 buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
2544 assert_eq!(
2545 *events.borrow(),
2546 &[
2547 language::Event::DirtyChanged,
2548 language::Event::FileHandleChanged
2549 ]
2550 );
2551
    // When a file that is already dirty is deleted, no `DirtyChanged` event is emitted.
2553 let events = Rc::new(RefCell::new(Vec::new()));
2554 let buffer3 = project
2555 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2556 .await
2557 .unwrap();
2558 buffer3.update(cx, |_, cx| {
2559 cx.subscribe(&buffer3, {
2560 let events = events.clone();
2561 move |_, _, event, _| events.borrow_mut().push(event.clone())
2562 })
2563 .detach();
2564 });
2565
2566 buffer3.update(cx, |buffer, cx| {
2567 buffer.edit([(0..0, "x")], None, cx);
2568 });
2569 events.borrow_mut().clear();
2570 fs.remove_file("/dir/file3".as_ref(), Default::default())
2571 .await
2572 .unwrap();
2573 cx.foreground().run_until_parked();
2574 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2575 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2576}
2577
2578#[gpui::test]
2579async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
2580 let initial_contents = "aaa\nbbbbb\nc\n";
2581 let fs = FakeFs::new(cx.background());
2582 fs.insert_tree(
2583 "/dir",
2584 json!({
2585 "the-file": initial_contents,
2586 }),
2587 )
2588 .await;
2589 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2590 let buffer = project
2591 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
2592 .await
2593 .unwrap();
2594
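    // Create an anchor near the start of each of the first three lines so their
    // positions can be checked after the file is reloaded from disk.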
2595 let anchors = (0..3)
2596 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
2597 .collect::<Vec<_>>();
2598
2599 // Change the file on disk, adding two new lines of text, and removing
2600 // one line.
2601 buffer.read_with(cx, |buffer, _| {
2602 assert!(!buffer.is_dirty());
2603 assert!(!buffer.has_conflict());
2604 });
2605 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
2606 fs.save(
2607 "/dir/the-file".as_ref(),
2608 &new_contents.into(),
2609 LineEnding::Unix,
2610 )
2611 .await
2612 .unwrap();
2613
2614 // Because the buffer was not modified, it is reloaded from disk. Its
2615 // contents are edited according to the diff between the old and new
2616 // file contents.
2617 cx.foreground().run_until_parked();
2618 buffer.update(cx, |buffer, _| {
2619 assert_eq!(buffer.text(), new_contents);
2620 assert!(!buffer.is_dirty());
2621 assert!(!buffer.has_conflict());
2622
2623 let anchor_positions = anchors
2624 .iter()
2625 .map(|anchor| anchor.to_point(&*buffer))
2626 .collect::<Vec<_>>();
2627 assert_eq!(
2628 anchor_positions,
2629 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
2630 );
2631 });
2632
    // Modify the buffer, making it dirty.
2634 buffer.update(cx, |buffer, cx| {
2635 buffer.edit([(0..0, " ")], None, cx);
2636 assert!(buffer.is_dirty());
2637 assert!(!buffer.has_conflict());
2638 });
2639
2640 // Change the file on disk again, adding blank lines to the beginning.
2641 fs.save(
2642 "/dir/the-file".as_ref(),
2643 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
2644 LineEnding::Unix,
2645 )
2646 .await
2647 .unwrap();
2648
2649 // Because the buffer is modified, it doesn't reload from disk, but is
2650 // marked as having a conflict.
2651 cx.foreground().run_until_parked();
2652 buffer.read_with(cx, |buffer, _| {
2653 assert!(buffer.has_conflict());
2654 });
2655}
2656
2657#[gpui::test]
2658async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
2659 let fs = FakeFs::new(cx.background());
2660 fs.insert_tree(
2661 "/dir",
2662 json!({
2663 "file1": "a\nb\nc\n",
2664 "file2": "one\r\ntwo\r\nthree\r\n",
2665 }),
2666 )
2667 .await;
2668
2669 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2670 let buffer1 = project
2671 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2672 .await
2673 .unwrap();
2674 let buffer2 = project
2675 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2676 .await
2677 .unwrap();
2678
2679 buffer1.read_with(cx, |buffer, _| {
2680 assert_eq!(buffer.text(), "a\nb\nc\n");
2681 assert_eq!(buffer.line_ending(), LineEnding::Unix);
2682 });
2683 buffer2.read_with(cx, |buffer, _| {
2684 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
2685 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2686 });
2687
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
2690 fs.save(
2691 "/dir/file1".as_ref(),
2692 &"aaa\nb\nc\n".into(),
2693 LineEnding::Windows,
2694 )
2695 .await
2696 .unwrap();
2697 cx.foreground().run_until_parked();
2698 buffer1.read_with(cx, |buffer, _| {
2699 assert_eq!(buffer.text(), "aaa\nb\nc\n");
2700 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2701 });
2702
    // Save a buffer with Windows line endings. The file is written to disk with CRLF line endings.
2704 buffer2.update(cx, |buffer, cx| {
2705 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
2706 });
2707 cx.update(|cx| Project::save_buffer(buffer2, cx))
2708 .await
2709 .unwrap();
2710 assert_eq!(
2711 fs.load("/dir/file2".as_ref()).await.unwrap(),
2712 "one\r\ntwo\r\nthree\r\nfour\r\n",
2713 );
2714}
2715
2716#[gpui::test]
2717async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
2718 cx.foreground().forbid_parking();
2719
2720 let fs = FakeFs::new(cx.background());
2721 fs.insert_tree(
2722 "/the-dir",
2723 json!({
2724 "a.rs": "
2725 fn foo(mut v: Vec<usize>) {
2726 for x in &v {
2727 v.push(1);
2728 }
2729 }
2730 "
2731 .unindent(),
2732 }),
2733 )
2734 .await;
2735
2736 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
2737 let buffer = project
2738 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
2739 .await
2740 .unwrap();
2741
2742 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
2743 let message = lsp::PublishDiagnosticsParams {
2744 uri: buffer_uri.clone(),
2745 diagnostics: vec![
2746 lsp::Diagnostic {
2747 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2748 severity: Some(DiagnosticSeverity::WARNING),
2749 message: "error 1".to_string(),
2750 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2751 location: lsp::Location {
2752 uri: buffer_uri.clone(),
2753 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2754 },
2755 message: "error 1 hint 1".to_string(),
2756 }]),
2757 ..Default::default()
2758 },
2759 lsp::Diagnostic {
2760 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2761 severity: Some(DiagnosticSeverity::HINT),
2762 message: "error 1 hint 1".to_string(),
2763 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2764 location: lsp::Location {
2765 uri: buffer_uri.clone(),
2766 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
2767 },
2768 message: "original diagnostic".to_string(),
2769 }]),
2770 ..Default::default()
2771 },
2772 lsp::Diagnostic {
2773 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2774 severity: Some(DiagnosticSeverity::ERROR),
2775 message: "error 2".to_string(),
2776 related_information: Some(vec![
2777 lsp::DiagnosticRelatedInformation {
2778 location: lsp::Location {
2779 uri: buffer_uri.clone(),
2780 range: lsp::Range::new(
2781 lsp::Position::new(1, 13),
2782 lsp::Position::new(1, 15),
2783 ),
2784 },
2785 message: "error 2 hint 1".to_string(),
2786 },
2787 lsp::DiagnosticRelatedInformation {
2788 location: lsp::Location {
2789 uri: buffer_uri.clone(),
2790 range: lsp::Range::new(
2791 lsp::Position::new(1, 13),
2792 lsp::Position::new(1, 15),
2793 ),
2794 },
2795 message: "error 2 hint 2".to_string(),
2796 },
2797 ]),
2798 ..Default::default()
2799 },
2800 lsp::Diagnostic {
2801 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2802 severity: Some(DiagnosticSeverity::HINT),
2803 message: "error 2 hint 1".to_string(),
2804 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2805 location: lsp::Location {
2806 uri: buffer_uri.clone(),
2807 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2808 },
2809 message: "original diagnostic".to_string(),
2810 }]),
2811 ..Default::default()
2812 },
2813 lsp::Diagnostic {
2814 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
2815 severity: Some(DiagnosticSeverity::HINT),
2816 message: "error 2 hint 2".to_string(),
2817 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
2818 location: lsp::Location {
2819 uri: buffer_uri,
2820 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
2821 },
2822 message: "original diagnostic".to_string(),
2823 }]),
2824 ..Default::default()
2825 },
2826 ],
2827 version: None,
2828 };
2829
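    // Publish the diagnostics. Related diagnostics are grouped with their primary
    // diagnostic under a shared group id.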
2830 project
2831 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
2832 .unwrap();
2833 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2834
2835 assert_eq!(
2836 buffer
2837 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2838 .collect::<Vec<_>>(),
2839 &[
2840 DiagnosticEntry {
2841 range: Point::new(1, 8)..Point::new(1, 9),
2842 diagnostic: Diagnostic {
2843 severity: DiagnosticSeverity::WARNING,
2844 message: "error 1".to_string(),
2845 group_id: 0,
2846 is_primary: true,
2847 ..Default::default()
2848 }
2849 },
2850 DiagnosticEntry {
2851 range: Point::new(1, 8)..Point::new(1, 9),
2852 diagnostic: Diagnostic {
2853 severity: DiagnosticSeverity::HINT,
2854 message: "error 1 hint 1".to_string(),
2855 group_id: 0,
2856 is_primary: false,
2857 ..Default::default()
2858 }
2859 },
2860 DiagnosticEntry {
2861 range: Point::new(1, 13)..Point::new(1, 15),
2862 diagnostic: Diagnostic {
2863 severity: DiagnosticSeverity::HINT,
2864 message: "error 2 hint 1".to_string(),
2865 group_id: 1,
2866 is_primary: false,
2867 ..Default::default()
2868 }
2869 },
2870 DiagnosticEntry {
2871 range: Point::new(1, 13)..Point::new(1, 15),
2872 diagnostic: Diagnostic {
2873 severity: DiagnosticSeverity::HINT,
2874 message: "error 2 hint 2".to_string(),
2875 group_id: 1,
2876 is_primary: false,
2877 ..Default::default()
2878 }
2879 },
2880 DiagnosticEntry {
2881 range: Point::new(2, 8)..Point::new(2, 17),
2882 diagnostic: Diagnostic {
2883 severity: DiagnosticSeverity::ERROR,
2884 message: "error 2".to_string(),
2885 group_id: 1,
2886 is_primary: true,
2887 ..Default::default()
2888 }
2889 }
2890 ]
2891 );
2892
2893 assert_eq!(
2894 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
2895 &[
2896 DiagnosticEntry {
2897 range: Point::new(1, 8)..Point::new(1, 9),
2898 diagnostic: Diagnostic {
2899 severity: DiagnosticSeverity::WARNING,
2900 message: "error 1".to_string(),
2901 group_id: 0,
2902 is_primary: true,
2903 ..Default::default()
2904 }
2905 },
2906 DiagnosticEntry {
2907 range: Point::new(1, 8)..Point::new(1, 9),
2908 diagnostic: Diagnostic {
2909 severity: DiagnosticSeverity::HINT,
2910 message: "error 1 hint 1".to_string(),
2911 group_id: 0,
2912 is_primary: false,
2913 ..Default::default()
2914 }
2915 },
2916 ]
2917 );
2918 assert_eq!(
2919 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
2920 &[
2921 DiagnosticEntry {
2922 range: Point::new(1, 13)..Point::new(1, 15),
2923 diagnostic: Diagnostic {
2924 severity: DiagnosticSeverity::HINT,
2925 message: "error 2 hint 1".to_string(),
2926 group_id: 1,
2927 is_primary: false,
2928 ..Default::default()
2929 }
2930 },
2931 DiagnosticEntry {
2932 range: Point::new(1, 13)..Point::new(1, 15),
2933 diagnostic: Diagnostic {
2934 severity: DiagnosticSeverity::HINT,
2935 message: "error 2 hint 2".to_string(),
2936 group_id: 1,
2937 is_primary: false,
2938 ..Default::default()
2939 }
2940 },
2941 DiagnosticEntry {
2942 range: Point::new(2, 8)..Point::new(2, 17),
2943 diagnostic: Diagnostic {
2944 severity: DiagnosticSeverity::ERROR,
2945 message: "error 2".to_string(),
2946 group_id: 1,
2947 is_primary: true,
2948 ..Default::default()
2949 }
2950 }
2951 ]
2952 );
2953}
2954
2955#[gpui::test]
2956async fn test_rename(cx: &mut gpui::TestAppContext) {
2957 cx.foreground().forbid_parking();
2958
2959 let mut language = Language::new(
2960 LanguageConfig {
2961 name: "Rust".into(),
2962 path_suffixes: vec!["rs".to_string()],
2963 ..Default::default()
2964 },
2965 Some(tree_sitter_rust::language()),
2966 );
2967 let mut fake_servers = language
2968 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2969 capabilities: lsp::ServerCapabilities {
2970 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
2971 prepare_provider: Some(true),
2972 work_done_progress_options: Default::default(),
2973 })),
2974 ..Default::default()
2975 },
2976 ..Default::default()
2977 }))
2978 .await;
2979
2980 let fs = FakeFs::new(cx.background());
2981 fs.insert_tree(
2982 "/dir",
2983 json!({
2984 "one.rs": "const ONE: usize = 1;",
2985 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
2986 }),
2987 )
2988 .await;
2989
2990 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2991 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2992 let buffer = project
2993 .update(cx, |project, cx| {
2994 project.open_local_buffer("/dir/one.rs", cx)
2995 })
2996 .await
2997 .unwrap();
2998
2999 let fake_server = fake_servers.next().await.unwrap();
3000
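    // Prepare the rename at the cursor position. The server responds with the
    // range of the symbol to be renamed.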
3001 let response = project.update(cx, |project, cx| {
3002 project.prepare_rename(buffer.clone(), 7, cx)
3003 });
3004 fake_server
3005 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3006 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3007 assert_eq!(params.position, lsp::Position::new(0, 7));
3008 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3009 lsp::Position::new(0, 6),
3010 lsp::Position::new(0, 9),
3011 ))))
3012 })
3013 .next()
3014 .await
3015 .unwrap();
3016 let range = response.await.unwrap().unwrap();
3017 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
3018 assert_eq!(range, 6..9);
3019
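    // Perform the rename. The server returns a workspace edit spanning both files.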
3020 let response = project.update(cx, |project, cx| {
3021 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3022 });
3023 fake_server
3024 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3025 assert_eq!(
3026 params.text_document_position.text_document.uri.as_str(),
3027 "file:///dir/one.rs"
3028 );
3029 assert_eq!(
3030 params.text_document_position.position,
3031 lsp::Position::new(0, 7)
3032 );
3033 assert_eq!(params.new_name, "THREE");
3034 Ok(Some(lsp::WorkspaceEdit {
3035 changes: Some(
3036 [
3037 (
3038 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3039 vec![lsp::TextEdit::new(
3040 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3041 "THREE".to_string(),
3042 )],
3043 ),
3044 (
3045 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3046 vec![
3047 lsp::TextEdit::new(
3048 lsp::Range::new(
3049 lsp::Position::new(0, 24),
3050 lsp::Position::new(0, 27),
3051 ),
3052 "THREE".to_string(),
3053 ),
3054 lsp::TextEdit::new(
3055 lsp::Range::new(
3056 lsp::Position::new(0, 35),
3057 lsp::Position::new(0, 38),
3058 ),
3059 "THREE".to_string(),
3060 ),
3061 ],
3062 ),
3063 ]
3064 .into_iter()
3065 .collect(),
3066 ),
3067 ..Default::default()
3068 }))
3069 })
3070 .next()
3071 .await
3072 .unwrap();
3073 let mut transaction = response.await.unwrap().0;
3074 assert_eq!(transaction.len(), 2);
3075 assert_eq!(
3076 transaction
3077 .remove_entry(&buffer)
3078 .unwrap()
3079 .0
3080 .read_with(cx, |buffer, _| buffer.text()),
3081 "const THREE: usize = 1;"
3082 );
3083 assert_eq!(
3084 transaction
3085 .into_keys()
3086 .next()
3087 .unwrap()
3088 .read_with(cx, |buffer, _| buffer.text()),
3089 "const TWO: usize = one::THREE + one::THREE;"
3090 );
3091}
3092
3093#[gpui::test]
3094async fn test_search(cx: &mut gpui::TestAppContext) {
3095 let fs = FakeFs::new(cx.background());
3096 fs.insert_tree(
3097 "/dir",
3098 json!({
3099 "one.rs": "const ONE: usize = 1;",
3100 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3101 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3102 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3103 }),
3104 )
3105 .await;
3106 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
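    // Initially, the search results come from scanning the files on disk.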
3107 assert_eq!(
3108 search(&project, SearchQuery::text("TWO", false, true), cx)
3109 .await
3110 .unwrap(),
3111 HashMap::from_iter([
3112 ("two.rs".to_string(), vec![6..9]),
3113 ("three.rs".to_string(), vec![37..40])
3114 ])
3115 );
3116
3117 let buffer_4 = project
3118 .update(cx, |project, cx| {
3119 project.open_local_buffer("/dir/four.rs", cx)
3120 })
3121 .await
3122 .unwrap();
3123 buffer_4.update(cx, |buffer, cx| {
3124 let text = "two::TWO";
3125 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3126 });
3127
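    // The search results also reflect unsaved edits in open buffers.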
3128 assert_eq!(
3129 search(&project, SearchQuery::text("TWO", false, true), cx)
3130 .await
3131 .unwrap(),
3132 HashMap::from_iter([
3133 ("two.rs".to_string(), vec![6..9]),
3134 ("three.rs".to_string(), vec![37..40]),
3135 ("four.rs".to_string(), vec![25..28, 36..39])
3136 ])
3137 );
3138
3139 async fn search(
3140 project: &ModelHandle<Project>,
3141 query: SearchQuery,
3142 cx: &mut gpui::TestAppContext,
3143 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
3144 let results = project
3145 .update(cx, |project, cx| project.search(query, cx))
3146 .await?;
3147
3148 Ok(results
3149 .into_iter()
3150 .map(|(buffer, ranges)| {
3151 buffer.read_with(cx, |buffer, _| {
3152 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3153 let ranges = ranges
3154 .into_iter()
3155 .map(|range| range.to_offset(buffer))
3156 .collect::<Vec<_>>();
3157 (path, ranges)
3158 })
3159 })
3160 .collect())
3161 }
3162}