1use crate::{worktree::WorktreeHandle, Event, *};
2use fs::LineEnding;
3use fs::{FakeFs, RealFs};
4use futures::{future, StreamExt};
5use gpui::{executor::Deterministic, test::subscribe};
6use language::{
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use serde_json::json;
12use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
13use unindent::Unindent as _;
14use util::{assert_set_eq, test::temp_tree};
15
16#[gpui::test]
17async fn test_symlinks(cx: &mut gpui::TestAppContext) {
18 let dir = temp_tree(json!({
19 "root": {
20 "apple": "",
21 "banana": {
22 "carrot": {
23 "date": "",
24 "endive": "",
25 }
26 },
27 "fennel": {
28 "grape": "",
29 }
30 }
31 }));
32
33 let root_link_path = dir.path().join("root_link");
34 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
35 unix::fs::symlink(
36 &dir.path().join("root/fennel"),
37 &dir.path().join("root/finnochio"),
38 )
39 .unwrap();
40
41 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
42 project.read_with(cx, |project, cx| {
43 let tree = project.worktrees(cx).next().unwrap().read(cx);
44 assert_eq!(tree.file_count(), 5);
45 assert_eq!(
46 tree.inode_for_path("fennel/grape"),
47 tree.inode_for_path("finnochio/grape")
48 );
49 });
50}
51
// End-to-end test of language-server lifecycle management in a local project:
// lazy server startup on first relevant buffer open, capability-based buffer
// configuration, routing of didChange/didSave/didOpen/didClose notifications
// to the server matching each buffer's language, re-routing when a rename
// changes a file's extension (and thus its language), server restarts, and
// didClose when a buffer is dropped.
#[gpui::test]
async fn test_managing_language_servers(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    cx.foreground().forbid_parking();

    let mut rust_language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut json_language = Language::new(
        LanguageConfig {
            name: "JSON".into(),
            path_suffixes: vec!["json".to_string()],
            ..Default::default()
        },
        None,
    );
    // Each fake adapter advertises distinct completion trigger characters so
    // the test can verify which server configured which buffer.
    let mut fake_rust_servers = rust_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;
    let mut fake_json_servers = json_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(json_language));
        project.languages.add(Arc::new(rust_language));
    });
    deterministic.run_until_parked();
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: Default::default()
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so it gets no completion triggers.
    toml_buffer.read_with(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    // The document version increments to 1 with the first change.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: Default::default()
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    // The TOML edit below must produce no notification on either server; the
    // next message the Rust server sees is the change for test2.rs.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    toml_buffer
        .update(cx, |buffer, cx| buffer.save(cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    // A same-extension rename is modeled as didClose(old) + didOpen(new).
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // Seed a diagnostic on the buffer so we can verify below that it is
    // cleared when the buffer's language changes.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    // (First change after reopening is version 1, not a continuation of the
    // old server's version sequence.)
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 1,
            text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two didOpen notifications is unspecified, hence the
    // set comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 1,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
439
440#[gpui::test]
441async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
442 cx.foreground().forbid_parking();
443
444 let fs = FakeFs::new(cx.background());
445 fs.insert_tree(
446 "/dir",
447 json!({
448 "a.rs": "let a = 1;",
449 "b.rs": "let b = 2;"
450 }),
451 )
452 .await;
453
454 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
455
456 let buffer_a = project
457 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
458 .await
459 .unwrap();
460 let buffer_b = project
461 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
462 .await
463 .unwrap();
464
465 project.update(cx, |project, cx| {
466 project
467 .update_diagnostics(
468 0,
469 lsp::PublishDiagnosticsParams {
470 uri: Url::from_file_path("/dir/a.rs").unwrap(),
471 version: None,
472 diagnostics: vec![lsp::Diagnostic {
473 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
474 severity: Some(lsp::DiagnosticSeverity::ERROR),
475 message: "error 1".to_string(),
476 ..Default::default()
477 }],
478 },
479 &[],
480 cx,
481 )
482 .unwrap();
483 project
484 .update_diagnostics(
485 0,
486 lsp::PublishDiagnosticsParams {
487 uri: Url::from_file_path("/dir/b.rs").unwrap(),
488 version: None,
489 diagnostics: vec![lsp::Diagnostic {
490 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
491 severity: Some(lsp::DiagnosticSeverity::WARNING),
492 message: "error 2".to_string(),
493 ..Default::default()
494 }],
495 },
496 &[],
497 cx,
498 )
499 .unwrap();
500 });
501
502 buffer_a.read_with(cx, |buffer, _| {
503 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
504 assert_eq!(
505 chunks
506 .iter()
507 .map(|(s, d)| (s.as_str(), *d))
508 .collect::<Vec<_>>(),
509 &[
510 ("let ", None),
511 ("a", Some(DiagnosticSeverity::ERROR)),
512 (" = 1;", None),
513 ]
514 );
515 });
516 buffer_b.read_with(cx, |buffer, _| {
517 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
518 assert_eq!(
519 chunks
520 .iter()
521 .map(|(s, d)| (s.as_str(), *d))
522 .collect::<Vec<_>>(),
523 &[
524 ("let ", None),
525 ("b", Some(DiagnosticSeverity::WARNING)),
526 (" = 2;", None),
527 ]
528 );
529 });
530}
531
532#[gpui::test]
533async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
534 cx.foreground().forbid_parking();
535
536 let fs = FakeFs::new(cx.background());
537 fs.insert_tree(
538 "/root",
539 json!({
540 "dir": {
541 "a.rs": "let a = 1;",
542 },
543 "other.rs": "let b = c;"
544 }),
545 )
546 .await;
547
548 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
549
550 let (worktree, _) = project
551 .update(cx, |project, cx| {
552 project.find_or_create_local_worktree("/root/other.rs", false, cx)
553 })
554 .await
555 .unwrap();
556 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
557
558 project.update(cx, |project, cx| {
559 project
560 .update_diagnostics(
561 0,
562 lsp::PublishDiagnosticsParams {
563 uri: Url::from_file_path("/root/other.rs").unwrap(),
564 version: None,
565 diagnostics: vec![lsp::Diagnostic {
566 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
567 severity: Some(lsp::DiagnosticSeverity::ERROR),
568 message: "unknown variable 'c'".to_string(),
569 ..Default::default()
570 }],
571 },
572 &[],
573 cx,
574 )
575 .unwrap();
576 });
577
578 let buffer = project
579 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
580 .await
581 .unwrap();
582 buffer.read_with(cx, |buffer, _| {
583 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
584 assert_eq!(
585 chunks
586 .iter()
587 .map(|(s, d)| (s.as_str(), *d))
588 .collect::<Vec<_>>(),
589 &[
590 ("let b = ", None),
591 ("c", Some(DiagnosticSeverity::ERROR)),
592 (";", None),
593 ]
594 );
595 });
596
597 project.read_with(cx, |project, cx| {
598 assert_eq!(project.diagnostic_summaries(cx).next(), None);
599 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
600 });
601}
602
603#[gpui::test]
604async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
605 cx.foreground().forbid_parking();
606
607 let progress_token = "the-progress-token";
608 let mut language = Language::new(
609 LanguageConfig {
610 name: "Rust".into(),
611 path_suffixes: vec!["rs".to_string()],
612 ..Default::default()
613 },
614 Some(tree_sitter_rust::language()),
615 );
616 let mut fake_servers = language
617 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
618 disk_based_diagnostics_progress_token: Some(progress_token.into()),
619 disk_based_diagnostics_sources: vec!["disk".into()],
620 ..Default::default()
621 }))
622 .await;
623
624 let fs = FakeFs::new(cx.background());
625 fs.insert_tree(
626 "/dir",
627 json!({
628 "a.rs": "fn a() { A }",
629 "b.rs": "const y: i32 = 1",
630 }),
631 )
632 .await;
633
634 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
635 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
636 let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
637
638 // Cause worktree to start the fake language server
639 let _buffer = project
640 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
641 .await
642 .unwrap();
643
644 let mut events = subscribe(&project, cx);
645
646 let fake_server = fake_servers.next().await.unwrap();
647 fake_server
648 .start_progress(format!("{}/0", progress_token))
649 .await;
650 assert_eq!(
651 events.next().await.unwrap(),
652 Event::DiskBasedDiagnosticsStarted {
653 language_server_id: 0,
654 }
655 );
656
657 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
658 uri: Url::from_file_path("/dir/a.rs").unwrap(),
659 version: None,
660 diagnostics: vec![lsp::Diagnostic {
661 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
662 severity: Some(lsp::DiagnosticSeverity::ERROR),
663 message: "undefined variable 'A'".to_string(),
664 ..Default::default()
665 }],
666 });
667 assert_eq!(
668 events.next().await.unwrap(),
669 Event::DiagnosticsUpdated {
670 language_server_id: 0,
671 path: (worktree_id, Path::new("a.rs")).into()
672 }
673 );
674
675 fake_server.end_progress(format!("{}/0", progress_token));
676 assert_eq!(
677 events.next().await.unwrap(),
678 Event::DiskBasedDiagnosticsFinished {
679 language_server_id: 0
680 }
681 );
682
683 let buffer = project
684 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
685 .await
686 .unwrap();
687
688 buffer.read_with(cx, |buffer, _| {
689 let snapshot = buffer.snapshot();
690 let diagnostics = snapshot
691 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
692 .collect::<Vec<_>>();
693 assert_eq!(
694 diagnostics,
695 &[DiagnosticEntry {
696 range: Point::new(0, 9)..Point::new(0, 10),
697 diagnostic: Diagnostic {
698 severity: lsp::DiagnosticSeverity::ERROR,
699 message: "undefined variable 'A'".to_string(),
700 group_id: 0,
701 is_primary: true,
702 ..Default::default()
703 }
704 }]
705 )
706 });
707
708 // Ensure publishing empty diagnostics twice only results in one update event.
709 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
710 uri: Url::from_file_path("/dir/a.rs").unwrap(),
711 version: None,
712 diagnostics: Default::default(),
713 });
714 assert_eq!(
715 events.next().await.unwrap(),
716 Event::DiagnosticsUpdated {
717 language_server_id: 0,
718 path: (worktree_id, Path::new("a.rs")).into()
719 }
720 );
721
722 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
723 uri: Url::from_file_path("/dir/a.rs").unwrap(),
724 version: None,
725 diagnostics: Default::default(),
726 });
727 cx.foreground().run_until_parked();
728 assert_eq!(futures::poll!(events.next()), Poll::Pending);
729}
730
731#[gpui::test]
732async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
733 cx.foreground().forbid_parking();
734
735 let progress_token = "the-progress-token";
736 let mut language = Language::new(
737 LanguageConfig {
738 path_suffixes: vec!["rs".to_string()],
739 ..Default::default()
740 },
741 None,
742 );
743 let mut fake_servers = language
744 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
745 disk_based_diagnostics_sources: vec!["disk".into()],
746 disk_based_diagnostics_progress_token: Some(progress_token.into()),
747 ..Default::default()
748 }))
749 .await;
750
751 let fs = FakeFs::new(cx.background());
752 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
753
754 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
755 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
756
757 let buffer = project
758 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
759 .await
760 .unwrap();
761
762 // Simulate diagnostics starting to update.
763 let fake_server = fake_servers.next().await.unwrap();
764 fake_server.start_progress(progress_token).await;
765
766 // Restart the server before the diagnostics finish updating.
767 project.update(cx, |project, cx| {
768 project.restart_language_servers_for_buffers([buffer], cx);
769 });
770 let mut events = subscribe(&project, cx);
771
772 // Simulate the newly started server sending more diagnostics.
773 let fake_server = fake_servers.next().await.unwrap();
774 fake_server.start_progress(progress_token).await;
775 assert_eq!(
776 events.next().await.unwrap(),
777 Event::DiskBasedDiagnosticsStarted {
778 language_server_id: 1
779 }
780 );
781 project.read_with(cx, |project, _| {
782 assert_eq!(
783 project
784 .language_servers_running_disk_based_diagnostics()
785 .collect::<Vec<_>>(),
786 [1]
787 );
788 });
789
790 // All diagnostics are considered done, despite the old server's diagnostic
791 // task never completing.
792 fake_server.end_progress(progress_token);
793 assert_eq!(
794 events.next().await.unwrap(),
795 Event::DiskBasedDiagnosticsFinished {
796 language_server_id: 1
797 }
798 );
799 project.read_with(cx, |project, _| {
800 assert_eq!(
801 project
802 .language_servers_running_disk_based_diagnostics()
803 .collect::<Vec<_>>(),
804 [0; 0]
805 );
806 });
807}
808
809#[gpui::test]
810async fn test_toggling_enable_language_server(
811 deterministic: Arc<Deterministic>,
812 cx: &mut gpui::TestAppContext,
813) {
814 deterministic.forbid_parking();
815
816 let mut rust = Language::new(
817 LanguageConfig {
818 name: Arc::from("Rust"),
819 path_suffixes: vec!["rs".to_string()],
820 ..Default::default()
821 },
822 None,
823 );
824 let mut fake_rust_servers = rust
825 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
826 name: "rust-lsp",
827 ..Default::default()
828 }))
829 .await;
830 let mut js = Language::new(
831 LanguageConfig {
832 name: Arc::from("JavaScript"),
833 path_suffixes: vec!["js".to_string()],
834 ..Default::default()
835 },
836 None,
837 );
838 let mut fake_js_servers = js
839 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
840 name: "js-lsp",
841 ..Default::default()
842 }))
843 .await;
844
845 let fs = FakeFs::new(cx.background());
846 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
847 .await;
848
849 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
850 project.update(cx, |project, _| {
851 project.languages.add(Arc::new(rust));
852 project.languages.add(Arc::new(js));
853 });
854
855 let _rs_buffer = project
856 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
857 .await
858 .unwrap();
859 let _js_buffer = project
860 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
861 .await
862 .unwrap();
863
864 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
865 assert_eq!(
866 fake_rust_server_1
867 .receive_notification::<lsp::notification::DidOpenTextDocument>()
868 .await
869 .text_document
870 .uri
871 .as_str(),
872 "file:///dir/a.rs"
873 );
874
875 let mut fake_js_server = fake_js_servers.next().await.unwrap();
876 assert_eq!(
877 fake_js_server
878 .receive_notification::<lsp::notification::DidOpenTextDocument>()
879 .await
880 .text_document
881 .uri
882 .as_str(),
883 "file:///dir/b.js"
884 );
885
886 // Disable Rust language server, ensuring only that server gets stopped.
887 cx.update(|cx| {
888 cx.update_global(|settings: &mut Settings, _| {
889 settings.language_overrides.insert(
890 Arc::from("Rust"),
891 settings::EditorSettings {
892 enable_language_server: Some(false),
893 ..Default::default()
894 },
895 );
896 })
897 });
898 fake_rust_server_1
899 .receive_notification::<lsp::notification::Exit>()
900 .await;
901
902 // Enable Rust and disable JavaScript language servers, ensuring that the
903 // former gets started again and that the latter stops.
904 cx.update(|cx| {
905 cx.update_global(|settings: &mut Settings, _| {
906 settings.language_overrides.insert(
907 Arc::from("Rust"),
908 settings::EditorSettings {
909 enable_language_server: Some(true),
910 ..Default::default()
911 },
912 );
913 settings.language_overrides.insert(
914 Arc::from("JavaScript"),
915 settings::EditorSettings {
916 enable_language_server: Some(false),
917 ..Default::default()
918 },
919 );
920 })
921 });
922 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
923 assert_eq!(
924 fake_rust_server_2
925 .receive_notification::<lsp::notification::DidOpenTextDocument>()
926 .await
927 .text_document
928 .uri
929 .as_str(),
930 "file:///dir/a.rs"
931 );
932 fake_js_server
933 .receive_notification::<lsp::notification::Exit>()
934 .await;
935}
936
937#[gpui::test]
938async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
939 cx.foreground().forbid_parking();
940
941 let mut language = Language::new(
942 LanguageConfig {
943 name: "Rust".into(),
944 path_suffixes: vec!["rs".to_string()],
945 ..Default::default()
946 },
947 Some(tree_sitter_rust::language()),
948 );
949 let mut fake_servers = language
950 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
951 disk_based_diagnostics_sources: vec!["disk".into()],
952 ..Default::default()
953 }))
954 .await;
955
956 let text = "
957 fn a() { A }
958 fn b() { BB }
959 fn c() { CCC }
960 "
961 .unindent();
962
963 let fs = FakeFs::new(cx.background());
964 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
965
966 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
967 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
968
969 let buffer = project
970 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
971 .await
972 .unwrap();
973
974 let mut fake_server = fake_servers.next().await.unwrap();
975 let open_notification = fake_server
976 .receive_notification::<lsp::notification::DidOpenTextDocument>()
977 .await;
978
979 // Edit the buffer, moving the content down
980 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
981 let change_notification_1 = fake_server
982 .receive_notification::<lsp::notification::DidChangeTextDocument>()
983 .await;
984 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
985
986 // Report some diagnostics for the initial version of the buffer
987 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
988 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
989 version: Some(open_notification.text_document.version),
990 diagnostics: vec![
991 lsp::Diagnostic {
992 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
993 severity: Some(DiagnosticSeverity::ERROR),
994 message: "undefined variable 'A'".to_string(),
995 source: Some("disk".to_string()),
996 ..Default::default()
997 },
998 lsp::Diagnostic {
999 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1000 severity: Some(DiagnosticSeverity::ERROR),
1001 message: "undefined variable 'BB'".to_string(),
1002 source: Some("disk".to_string()),
1003 ..Default::default()
1004 },
1005 lsp::Diagnostic {
1006 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1007 severity: Some(DiagnosticSeverity::ERROR),
1008 source: Some("disk".to_string()),
1009 message: "undefined variable 'CCC'".to_string(),
1010 ..Default::default()
1011 },
1012 ],
1013 });
1014
1015 // The diagnostics have moved down since they were created.
1016 buffer.next_notification(cx).await;
1017 buffer.read_with(cx, |buffer, _| {
1018 assert_eq!(
1019 buffer
1020 .snapshot()
1021 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1022 .collect::<Vec<_>>(),
1023 &[
1024 DiagnosticEntry {
1025 range: Point::new(3, 9)..Point::new(3, 11),
1026 diagnostic: Diagnostic {
1027 severity: DiagnosticSeverity::ERROR,
1028 message: "undefined variable 'BB'".to_string(),
1029 is_disk_based: true,
1030 group_id: 1,
1031 is_primary: true,
1032 ..Default::default()
1033 },
1034 },
1035 DiagnosticEntry {
1036 range: Point::new(4, 9)..Point::new(4, 12),
1037 diagnostic: Diagnostic {
1038 severity: DiagnosticSeverity::ERROR,
1039 message: "undefined variable 'CCC'".to_string(),
1040 is_disk_based: true,
1041 group_id: 2,
1042 is_primary: true,
1043 ..Default::default()
1044 }
1045 }
1046 ]
1047 );
1048 assert_eq!(
1049 chunks_with_diagnostics(buffer, 0..buffer.len()),
1050 [
1051 ("\n\nfn a() { ".to_string(), None),
1052 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1053 (" }\nfn b() { ".to_string(), None),
1054 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1055 (" }\nfn c() { ".to_string(), None),
1056 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1057 (" }\n".to_string(), None),
1058 ]
1059 );
1060 assert_eq!(
1061 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1062 [
1063 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1064 (" }\nfn c() { ".to_string(), None),
1065 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1066 ]
1067 );
1068 });
1069
1070 // Ensure overlapping diagnostics are highlighted correctly.
1071 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1072 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1073 version: Some(open_notification.text_document.version),
1074 diagnostics: vec![
1075 lsp::Diagnostic {
1076 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1077 severity: Some(DiagnosticSeverity::ERROR),
1078 message: "undefined variable 'A'".to_string(),
1079 source: Some("disk".to_string()),
1080 ..Default::default()
1081 },
1082 lsp::Diagnostic {
1083 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1084 severity: Some(DiagnosticSeverity::WARNING),
1085 message: "unreachable statement".to_string(),
1086 source: Some("disk".to_string()),
1087 ..Default::default()
1088 },
1089 ],
1090 });
1091
1092 buffer.next_notification(cx).await;
1093 buffer.read_with(cx, |buffer, _| {
1094 assert_eq!(
1095 buffer
1096 .snapshot()
1097 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1098 .collect::<Vec<_>>(),
1099 &[
1100 DiagnosticEntry {
1101 range: Point::new(2, 9)..Point::new(2, 12),
1102 diagnostic: Diagnostic {
1103 severity: DiagnosticSeverity::WARNING,
1104 message: "unreachable statement".to_string(),
1105 is_disk_based: true,
1106 group_id: 4,
1107 is_primary: true,
1108 ..Default::default()
1109 }
1110 },
1111 DiagnosticEntry {
1112 range: Point::new(2, 9)..Point::new(2, 10),
1113 diagnostic: Diagnostic {
1114 severity: DiagnosticSeverity::ERROR,
1115 message: "undefined variable 'A'".to_string(),
1116 is_disk_based: true,
1117 group_id: 3,
1118 is_primary: true,
1119 ..Default::default()
1120 },
1121 }
1122 ]
1123 );
1124 assert_eq!(
1125 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1126 [
1127 ("fn a() { ".to_string(), None),
1128 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1129 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1130 ("\n".to_string(), None),
1131 ]
1132 );
1133 assert_eq!(
1134 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1135 [
1136 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1137 ("\n".to_string(), None),
1138 ]
1139 );
1140 });
1141
1142 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1143 // changes since the last save.
1144 buffer.update(cx, |buffer, cx| {
1145 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1146 buffer.edit(
1147 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1148 None,
1149 cx,
1150 );
1151 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1152 });
1153 let change_notification_2 = fake_server
1154 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1155 .await;
1156 assert!(
1157 change_notification_2.text_document.version > change_notification_1.text_document.version
1158 );
1159
1160 // Handle out-of-order diagnostics
1161 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1162 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1163 version: Some(change_notification_2.text_document.version),
1164 diagnostics: vec![
1165 lsp::Diagnostic {
1166 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1167 severity: Some(DiagnosticSeverity::ERROR),
1168 message: "undefined variable 'BB'".to_string(),
1169 source: Some("disk".to_string()),
1170 ..Default::default()
1171 },
1172 lsp::Diagnostic {
1173 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1174 severity: Some(DiagnosticSeverity::WARNING),
1175 message: "undefined variable 'A'".to_string(),
1176 source: Some("disk".to_string()),
1177 ..Default::default()
1178 },
1179 ],
1180 });
1181
1182 buffer.next_notification(cx).await;
1183 buffer.read_with(cx, |buffer, _| {
1184 assert_eq!(
1185 buffer
1186 .snapshot()
1187 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1188 .collect::<Vec<_>>(),
1189 &[
1190 DiagnosticEntry {
1191 range: Point::new(2, 21)..Point::new(2, 22),
1192 diagnostic: Diagnostic {
1193 severity: DiagnosticSeverity::WARNING,
1194 message: "undefined variable 'A'".to_string(),
1195 is_disk_based: true,
1196 group_id: 6,
1197 is_primary: true,
1198 ..Default::default()
1199 }
1200 },
1201 DiagnosticEntry {
1202 range: Point::new(3, 9)..Point::new(3, 14),
1203 diagnostic: Diagnostic {
1204 severity: DiagnosticSeverity::ERROR,
1205 message: "undefined variable 'BB'".to_string(),
1206 is_disk_based: true,
1207 group_id: 5,
1208 is_primary: true,
1209 ..Default::default()
1210 },
1211 }
1212 ]
1213 );
1214 });
1215}
1216
1217#[gpui::test]
1218async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1219 cx.foreground().forbid_parking();
1220
1221 let text = concat!(
1222 "let one = ;\n", //
1223 "let two = \n",
1224 "let three = 3;\n",
1225 );
1226
1227 let fs = FakeFs::new(cx.background());
1228 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1229
1230 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1231 let buffer = project
1232 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1233 .await
1234 .unwrap();
1235
1236 project.update(cx, |project, cx| {
1237 project
1238 .update_buffer_diagnostics(
1239 &buffer,
1240 vec![
1241 DiagnosticEntry {
1242 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1243 diagnostic: Diagnostic {
1244 severity: DiagnosticSeverity::ERROR,
1245 message: "syntax error 1".to_string(),
1246 ..Default::default()
1247 },
1248 },
1249 DiagnosticEntry {
1250 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1251 diagnostic: Diagnostic {
1252 severity: DiagnosticSeverity::ERROR,
1253 message: "syntax error 2".to_string(),
1254 ..Default::default()
1255 },
1256 },
1257 ],
1258 None,
1259 cx,
1260 )
1261 .unwrap();
1262 });
1263
1264 // An empty range is extended forward to include the following character.
1265 // At the end of a line, an empty range is extended backward to include
1266 // the preceding character.
1267 buffer.read_with(cx, |buffer, _| {
1268 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1269 assert_eq!(
1270 chunks
1271 .iter()
1272 .map(|(s, d)| (s.as_str(), *d))
1273 .collect::<Vec<_>>(),
1274 &[
1275 ("let one = ", None),
1276 (";", Some(DiagnosticSeverity::ERROR)),
1277 ("\nlet two =", None),
1278 (" ", Some(DiagnosticSeverity::ERROR)),
1279 ("\nlet three = 3;\n", None)
1280 ]
1281 );
1282 });
1283}
1284
// Verifies that `Project::edits_from_lsp` correctly interprets edits that a
// language server computed against an OLDER version of the document: the user
// keeps editing after the server snapshots the buffer, so the server's edits
// (expressed in old coordinates) must be re-anchored onto the current text
// without clobbering the user's interleaved changes.
#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the document version the server observed on open; the LSP edits
    // below will be expressed against this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        // Insert a new line above the first function (shifts every row down).
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        // Insert a new line inside the first function's body.
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        // Insert text at the start of the second function's call line.
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // These edit ranges are in coordinates of `lsp_document_version`, i.e. the
    // buffer text BEFORE the user edits above were applied.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must land the server's changes in the
    // right places while preserving the user's later edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
1441
// Verifies that `Project::edits_from_lsp` minimizes a large, mostly-redundant
// diff: the server rewrites nearly the whole file, but the computed buffer
// edits should shrink to just the spans whose text actually changed.
#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Only the two genuine changes should survive: the import merge and
        // the deletion of the now-redundant second use statement.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
1548
// Verifies that `Project::edits_from_lsp` tolerates malformed server edits:
// edits delivered out of order, an inverted (end-before-start) range, and a
// range pointing past the end of the file. After normalization and clipping,
// the result should match the well-formed variant of this same diff.
#[gpui::test]
async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end position precedes start position.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Row 99 does not exist; must be clipped to the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Only the two genuine changes survive normalization, in order.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
1651
1652fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
1653 buffer: &Buffer,
1654 range: Range<T>,
1655) -> Vec<(String, Option<DiagnosticSeverity>)> {
1656 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
1657 for chunk in buffer.snapshot().chunks(range, true) {
1658 if chunks.last().map_or(false, |prev_chunk| {
1659 prev_chunk.1 == chunk.diagnostic_severity
1660 }) {
1661 chunks.last_mut().unwrap().0.push_str(chunk.text);
1662 } else {
1663 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
1664 }
1665 }
1666 chunks
1667}
1668
// Verifies go-to-definition across files. The project contains only `b.rs`,
// so jumping to a definition located in `a.rs` must add an invisible worktree
// for the target file, which is released again once the last reference to the
// definition result is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is a sibling file on disk.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server reports that the symbol at offset 22 of b.rs is defined
    // in a.rs, at columns 9..10 of the first line.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.foreground().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // Opening the target added an invisible (`false`) worktree for a.rs.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    // Dropping the definition released the invisible worktree for a.rs.
    cx.read(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Returns each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(
        project: &'a ModelHandle<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
1767
// Verifies completions whose items carry no explicit edit range: the range of
// text to replace must be inferred from the text surrounding the cursor.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing after a partial identifier. The replaced range is
    // expected to cover the 3-character query "fqn" before the cursor.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // `new_text` comes from `insert_text`, not from the label.
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal, one character before the
    // closing quote. The inferred range covers "cmp" but not the quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // With no `insert_text`, the label itself becomes the inserted text.
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
1851
1852#[gpui::test]
1853async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
1854 let mut language = Language::new(
1855 LanguageConfig {
1856 name: "TypeScript".into(),
1857 path_suffixes: vec!["ts".to_string()],
1858 ..Default::default()
1859 },
1860 Some(tree_sitter_typescript::language_typescript()),
1861 );
1862 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
1863
1864 let fs = FakeFs::new(cx.background());
1865 fs.insert_tree(
1866 "/dir",
1867 json!({
1868 "a.ts": "",
1869 }),
1870 )
1871 .await;
1872
1873 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1874 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1875 let buffer = project
1876 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
1877 .await
1878 .unwrap();
1879
1880 let fake_server = fake_language_servers.next().await.unwrap();
1881
1882 let text = "let a = b.fqn";
1883 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
1884 let completions = project.update(cx, |project, cx| {
1885 project.completions(&buffer, text.len(), cx)
1886 });
1887
1888 fake_server
1889 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
1890 Ok(Some(lsp::CompletionResponse::Array(vec![
1891 lsp::CompletionItem {
1892 label: "fullyQualifiedName?".into(),
1893 insert_text: Some("fully\rQualified\r\nName".into()),
1894 ..Default::default()
1895 },
1896 ])))
1897 })
1898 .next()
1899 .await;
1900 let completions = completions.await.unwrap();
1901 assert_eq!(completions.len(), 1);
1902 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
1903}
1904
// Verifies a code action that carries a command instead of edits: resolving
// the action yields no edits, so its command is executed, and the real edits
// arrive via a server-initiated `workspace/applyEdit` request. Those edits
// must end up in the returned project transaction and be undoable as a unit.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    command: Some(lsp::Command {
                        title: "The command".into(),
                        command: "_the/command".into(),
                        arguments: Some(vec![json!("the-argument")]),
                    }),
                    ..Default::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..Default::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying the command).
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |action, _| async move { Ok(action) },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        // The edit landed, and undoing it restores the original text.
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2020
2021#[gpui::test]
2022async fn test_save_file(cx: &mut gpui::TestAppContext) {
2023 let fs = FakeFs::new(cx.background());
2024 fs.insert_tree(
2025 "/dir",
2026 json!({
2027 "file1": "the old contents",
2028 }),
2029 )
2030 .await;
2031
2032 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2033 let buffer = project
2034 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2035 .await
2036 .unwrap();
2037 buffer
2038 .update(cx, |buffer, cx| {
2039 assert_eq!(buffer.text(), "the old contents");
2040 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2041 buffer.save(cx)
2042 })
2043 .await
2044 .unwrap();
2045
2046 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2047 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2048}
2049
2050#[gpui::test]
2051async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2052 let fs = FakeFs::new(cx.background());
2053 fs.insert_tree(
2054 "/dir",
2055 json!({
2056 "file1": "the old contents",
2057 }),
2058 )
2059 .await;
2060
2061 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2062 let buffer = project
2063 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2064 .await
2065 .unwrap();
2066 buffer
2067 .update(cx, |buffer, cx| {
2068 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2069 buffer.save(cx)
2070 })
2071 .await
2072 .unwrap();
2073
2074 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2075 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2076}
2077
2078#[gpui::test]
2079async fn test_save_as(cx: &mut gpui::TestAppContext) {
2080 let fs = FakeFs::new(cx.background());
2081 fs.insert_tree("/dir", json!({})).await;
2082
2083 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2084 let buffer = project.update(cx, |project, cx| {
2085 project.create_buffer("", None, cx).unwrap()
2086 });
2087 buffer.update(cx, |buffer, cx| {
2088 buffer.edit([(0..0, "abc")], None, cx);
2089 assert!(buffer.is_dirty());
2090 assert!(!buffer.has_conflict());
2091 });
2092 project
2093 .update(cx, |project, cx| {
2094 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
2095 })
2096 .await
2097 .unwrap();
2098 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
2099 buffer.read_with(cx, |buffer, cx| {
2100 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
2101 assert!(!buffer.is_dirty());
2102 assert!(!buffer.has_conflict());
2103 });
2104
2105 let opened_buffer = project
2106 .update(cx, |project, cx| {
2107 project.open_local_buffer("/dir/file1", cx)
2108 })
2109 .await
2110 .unwrap();
2111 assert_eq!(opened_buffer, buffer);
2112}
2113
// Mutates the file system under a local worktree (renames, deletions, and a
// moved directory) and checks that entry ids and open buffers keep tracking
// their files. The same changes are then streamed to a remote replica of the
// worktree, which must converge to the same set of paths.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
    let rpc = project.read_with(cx, |p, _| p.client.clone());

    // Opens a buffer for `path`, resolved against the temp dir root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Looks up the stable id of the worktree entry at `path`.
    let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
        project.read_with(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
    let remote = cx.update(|cx| {
        Worktree::remote(
            1,
            1,
            proto::WorktreeMetadata {
                id: initial_snapshot.id().to_proto(),
                root_name: initial_snapshot.root_name().into(),
                abs_path: initial_snapshot
                    .abs_path()
                    .as_os_str()
                    .to_string_lossy()
                    .into(),
                visible: true,
            },
            rpc.clone(),
            cx,
        )
    });
    // Seed the replica with the full initial state of the local worktree.
    remote.update(cx, |remote, _| {
        let update = initial_snapshot.build_initial_update(1);
        remote.as_remote_mut().unwrap().update_from_remote(update);
    });
    deterministic.run_until_parked();

    cx.read(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.read(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );

        // Entry ids survive renames, including renames of ancestor directories.
        assert_eq!(id_for_path("a/file2.new", cx), file2_id);
        assert_eq!(id_for_path("d/file3", cx), file3_id);
        assert_eq!(id_for_path("d/file4", cx), file4_id);

        // Open buffers follow their files to the new paths...
        assert_eq!(
            buffer2.read(app).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(app).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(app).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // ...while a deleted file keeps its last known path.
        assert_eq!(
            buffer5.read(app).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(app).file().unwrap().is_deleted());
        assert!(!buffer3.read(app).file().unwrap().is_deleted());
        assert!(!buffer4.read(app).file().unwrap().is_deleted());
        assert!(buffer5.read(app).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    remote.update(cx, |remote, cx| {
        let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
            &initial_snapshot,
            1,
            1,
            true,
        );
        remote.as_remote_mut().unwrap().update_from_remote(update);
    });
    deterministic.run_until_parked();
    remote.read_with(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
2269
2270#[gpui::test(iterations = 10)]
2271async fn test_buffer_identity_across_renames(
2272 deterministic: Arc<Deterministic>,
2273 cx: &mut gpui::TestAppContext,
2274) {
2275 let fs = FakeFs::new(cx.background());
2276 fs.insert_tree(
2277 "/dir",
2278 json!({
2279 "a": {
2280 "file1": "",
2281 }
2282 }),
2283 )
2284 .await;
2285
2286 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2287 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2288 let tree_id = tree.read_with(cx, |tree, _| tree.id());
2289
2290 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2291 project.read_with(cx, |project, cx| {
2292 let tree = project.worktrees(cx).next().unwrap();
2293 tree.read(cx)
2294 .entry_for_path(path)
2295 .unwrap_or_else(|| panic!("no entry for path {}", path))
2296 .id
2297 })
2298 };
2299
2300 let dir_id = id_for_path("a", cx);
2301 let file_id = id_for_path("a/file1", cx);
2302 let buffer = project
2303 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2304 .await
2305 .unwrap();
2306 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2307
2308 project
2309 .update(cx, |project, cx| {
2310 project.rename_entry(dir_id, Path::new("b"), cx)
2311 })
2312 .unwrap()
2313 .await
2314 .unwrap();
2315 deterministic.run_until_parked();
2316 assert_eq!(id_for_path("b", cx), dir_id);
2317 assert_eq!(id_for_path("b/file1", cx), file_id);
2318 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2319}
2320
2321#[gpui::test]
2322async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2323 let fs = FakeFs::new(cx.background());
2324 fs.insert_tree(
2325 "/dir",
2326 json!({
2327 "a.txt": "a-contents",
2328 "b.txt": "b-contents",
2329 }),
2330 )
2331 .await;
2332
2333 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2334
2335 // Spawn multiple tasks to open paths, repeating some paths.
2336 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2337 (
2338 p.open_local_buffer("/dir/a.txt", cx),
2339 p.open_local_buffer("/dir/b.txt", cx),
2340 p.open_local_buffer("/dir/a.txt", cx),
2341 )
2342 });
2343
2344 let buffer_a_1 = buffer_a_1.await.unwrap();
2345 let buffer_a_2 = buffer_a_2.await.unwrap();
2346 let buffer_b = buffer_b.await.unwrap();
2347 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2348 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2349
2350 // There is only one buffer per path.
2351 let buffer_a_id = buffer_a_1.id();
2352 assert_eq!(buffer_a_2.id(), buffer_a_id);
2353
2354 // Open the same path again while it is still open.
2355 drop(buffer_a_1);
2356 let buffer_a_3 = project
2357 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2358 .await
2359 .unwrap();
2360
2361 // There's still only one buffer per path.
2362 assert_eq!(buffer_a_3.id(), buffer_a_id);
2363}
2364
2365#[gpui::test]
2366async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2367 let fs = FakeFs::new(cx.background());
2368 fs.insert_tree(
2369 "/dir",
2370 json!({
2371 "file1": "abc",
2372 "file2": "def",
2373 "file3": "ghi",
2374 }),
2375 )
2376 .await;
2377
2378 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2379
2380 let buffer1 = project
2381 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2382 .await
2383 .unwrap();
2384 let events = Rc::new(RefCell::new(Vec::new()));
2385
2386 // initially, the buffer isn't dirty.
2387 buffer1.update(cx, |buffer, cx| {
2388 cx.subscribe(&buffer1, {
2389 let events = events.clone();
2390 move |_, _, event, _| match event {
2391 BufferEvent::Operation(_) => {}
2392 _ => events.borrow_mut().push(event.clone()),
2393 }
2394 })
2395 .detach();
2396
2397 assert!(!buffer.is_dirty());
2398 assert!(events.borrow().is_empty());
2399
2400 buffer.edit([(1..2, "")], None, cx);
2401 });
2402
2403 // after the first edit, the buffer is dirty, and emits a dirtied event.
2404 buffer1.update(cx, |buffer, cx| {
2405 assert!(buffer.text() == "ac");
2406 assert!(buffer.is_dirty());
2407 assert_eq!(
2408 *events.borrow(),
2409 &[language::Event::Edited, language::Event::DirtyChanged]
2410 );
2411 events.borrow_mut().clear();
2412 buffer.did_save(
2413 buffer.version(),
2414 buffer.as_rope().fingerprint(),
2415 buffer.file().unwrap().mtime(),
2416 None,
2417 cx,
2418 );
2419 });
2420
2421 // after saving, the buffer is not dirty, and emits a saved event.
2422 buffer1.update(cx, |buffer, cx| {
2423 assert!(!buffer.is_dirty());
2424 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2425 events.borrow_mut().clear();
2426
2427 buffer.edit([(1..1, "B")], None, cx);
2428 buffer.edit([(2..2, "D")], None, cx);
2429 });
2430
2431 // after editing again, the buffer is dirty, and emits another dirty event.
2432 buffer1.update(cx, |buffer, cx| {
2433 assert!(buffer.text() == "aBDc");
2434 assert!(buffer.is_dirty());
2435 assert_eq!(
2436 *events.borrow(),
2437 &[
2438 language::Event::Edited,
2439 language::Event::DirtyChanged,
2440 language::Event::Edited,
2441 ],
2442 );
2443 events.borrow_mut().clear();
2444
2445 // After restoring the buffer to its previously-saved state,
2446 // the buffer is not considered dirty anymore.
2447 buffer.edit([(1..3, "")], None, cx);
2448 assert!(buffer.text() == "ac");
2449 assert!(!buffer.is_dirty());
2450 });
2451
2452 assert_eq!(
2453 *events.borrow(),
2454 &[language::Event::Edited, language::Event::DirtyChanged]
2455 );
2456
2457 // When a file is deleted, the buffer is considered dirty.
2458 let events = Rc::new(RefCell::new(Vec::new()));
2459 let buffer2 = project
2460 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2461 .await
2462 .unwrap();
2463 buffer2.update(cx, |_, cx| {
2464 cx.subscribe(&buffer2, {
2465 let events = events.clone();
2466 move |_, _, event, _| events.borrow_mut().push(event.clone())
2467 })
2468 .detach();
2469 });
2470
2471 fs.remove_file("/dir/file2".as_ref(), Default::default())
2472 .await
2473 .unwrap();
2474 cx.foreground().run_until_parked();
2475 buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
2476 assert_eq!(
2477 *events.borrow(),
2478 &[
2479 language::Event::DirtyChanged,
2480 language::Event::FileHandleChanged
2481 ]
2482 );
2483
2484 // When a file is already dirty when deleted, we don't emit a Dirtied event.
2485 let events = Rc::new(RefCell::new(Vec::new()));
2486 let buffer3 = project
2487 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2488 .await
2489 .unwrap();
2490 buffer3.update(cx, |_, cx| {
2491 cx.subscribe(&buffer3, {
2492 let events = events.clone();
2493 move |_, _, event, _| events.borrow_mut().push(event.clone())
2494 })
2495 .detach();
2496 });
2497
2498 buffer3.update(cx, |buffer, cx| {
2499 buffer.edit([(0..0, "x")], None, cx);
2500 });
2501 events.borrow_mut().clear();
2502 fs.remove_file("/dir/file3".as_ref(), Default::default())
2503 .await
2504 .unwrap();
2505 cx.foreground().run_until_parked();
2506 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2507 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2508}
2509
/// Verifies how an open buffer reacts to its backing file changing on disk:
/// a clean buffer is reloaded (with anchors remapped through the diff), while
/// a dirty buffer keeps its edits and is flagged as conflicting.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the first three rows, so we can
    // check that anchors survive the on-disk reload below.
    let anchors = (0..3)
        .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.read_with(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.foreground().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the lines they were attached to.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
2588
2589#[gpui::test]
2590async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
2591 let fs = FakeFs::new(cx.background());
2592 fs.insert_tree(
2593 "/dir",
2594 json!({
2595 "file1": "a\nb\nc\n",
2596 "file2": "one\r\ntwo\r\nthree\r\n",
2597 }),
2598 )
2599 .await;
2600
2601 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2602 let buffer1 = project
2603 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2604 .await
2605 .unwrap();
2606 let buffer2 = project
2607 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2608 .await
2609 .unwrap();
2610
2611 buffer1.read_with(cx, |buffer, _| {
2612 assert_eq!(buffer.text(), "a\nb\nc\n");
2613 assert_eq!(buffer.line_ending(), LineEnding::Unix);
2614 });
2615 buffer2.read_with(cx, |buffer, _| {
2616 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
2617 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2618 });
2619
2620 // Change a file's line endings on disk from unix to windows. The buffer's
2621 // state updates correctly.
2622 fs.save(
2623 "/dir/file1".as_ref(),
2624 &"aaa\nb\nc\n".into(),
2625 LineEnding::Windows,
2626 )
2627 .await
2628 .unwrap();
2629 cx.foreground().run_until_parked();
2630 buffer1.read_with(cx, |buffer, _| {
2631 assert_eq!(buffer.text(), "aaa\nb\nc\n");
2632 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2633 });
2634
2635 // Save a file with windows line endings. The file is written correctly.
2636 buffer2
2637 .update(cx, |buffer, cx| {
2638 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
2639 buffer.save(cx)
2640 })
2641 .await
2642 .unwrap();
2643 assert_eq!(
2644 fs.load("/dir/file2".as_ref()).await.unwrap(),
2645 "one\r\ntwo\r\nthree\r\nfour\r\n",
2646 );
2647}
2648
/// Verifies that LSP diagnostics whose `related_information` entries point at
/// each other are merged into groups: each group has exactly one primary
/// entry, hints are attached to the group of the diagnostic they reference,
/// and `diagnostic_group` returns a group's entries in buffer order.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload with two logical groups:
    // - "error 1" (warning) plus one hint that references it.
    // - "error 2" (error) plus two hints that reference it.
    // The hints carry related information pointing back at the diagnostic
    // they belong to ("original diagnostic"), which drives the grouping.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
        .unwrap();
    let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    // All entries, in buffer order: each carries the group id it was assigned
    // and whether it is the group's primary diagnostic.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
    // Group 1: both hints precede the primary "error 2" in buffer order.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );
}
2887
/// Exercises the LSP rename flow end-to-end against a fake server:
/// `prepare_rename` resolves the symbol's range, then `perform_rename`
/// applies a multi-file `WorkspaceEdit` and returns a transaction covering
/// every edited buffer.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    // Advertise rename support (with prepare) so the project routes rename
    // requests to this fake server.
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // prepare_rename at offset 7 (inside "ONE"); the fake server answers
    // with the symbol's range, which the project converts to buffer offsets.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // perform_rename to "THREE"; the fake server returns a WorkspaceEdit
    // touching both one.rs (the definition) and two.rs (two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction contains one entry per edited buffer: the
    // buffer we renamed in, plus two.rs which was opened to apply the edit.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .read_with(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .read_with(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3025
/// Verifies project-wide text search over both on-disk files and open
/// buffers: unsaved buffer edits are searched in place of the file's
/// on-disk contents.
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    // Whole-word search for "TWO" only matches the identifier itself, not
    // e.g. the "TWO" inside other identifiers.
    assert_eq!(
        search(&project, SearchQuery::text("TWO", false, true), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            ("two.rs".to_string(), vec![6..9]),
            ("three.rs".to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) to introduce two new matches.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/four.rs", cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The search now reflects the unsaved buffer contents of four.rs.
    assert_eq!(
        search(&project, SearchQuery::text("TWO", false, true), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            ("two.rs".to_string(), vec![6..9]),
            ("three.rs".to_string(), vec![37..40]),
            ("four.rs".to_string(), vec![25..28, 36..39])
        ])
    );

    // Runs `query` against `project` and flattens the results into a map
    // from worktree-relative path to the matched offset ranges.
    async fn search(
        project: &ModelHandle<Project>,
        query: SearchQuery,
        cx: &mut gpui::TestAppContext,
    ) -> Result<HashMap<String, Vec<Range<usize>>>> {
        let results = project
            .update(cx, |project, cx| project.search(query, cx))
            .await?;

        Ok(results
            .into_iter()
            .map(|(buffer, ranges)| {
                buffer.read_with(cx, |buffer, _| {
                    let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                    let ranges = ranges
                        .into_iter()
                        .map(|range| range.to_offset(buffer))
                        .collect::<Vec<_>>();
                    (path, ranges)
                })
            })
            .collect())
    }
}