1use super::*;
2use crate::udiff::apply_diff_to_string;
3use client::{UserStore, test::FakeServer};
4use clock::FakeSystemClock;
5use clock::ReplicaId;
6use cloud_api_types::{CreateLlmTokenResponse, LlmToken};
7use cloud_llm_client::{
8 EditPredictionRejectReason, EditPredictionRejection, RejectEditPredictionsBody,
9 predict_edits_v3::{PredictEditsV3Request, PredictEditsV3Response},
10};
11
12use futures::{
13 AsyncReadExt, FutureExt, StreamExt,
14 channel::{mpsc, oneshot},
15};
16use gpui::App;
17use gpui::{
18 Entity, TestAppContext,
19 http_client::{FakeHttpClient, Response},
20};
21use indoc::indoc;
22use language::{
23 Anchor, Buffer, Capability, CursorShape, Diagnostic, DiagnosticEntry, DiagnosticSet,
24 DiagnosticSeverity, Operation, Point, Selection, SelectionGoal,
25};
26use language_model::RefreshLlmTokenListener;
27use lsp::LanguageServerId;
28use parking_lot::Mutex;
29use pretty_assertions::{assert_eq, assert_matches};
30use project::{FakeFs, Project};
31use serde_json::json;
32use settings::SettingsStore;
33use std::{ops::Range, path::Path, sync::Arc, time::Duration};
34use util::{
35 path,
36 test::{TextRangeMarker, marked_text_ranges_by},
37};
38use uuid::Uuid;
39use workspace::{AppState, CollaboratorId, MultiWorkspace};
40use zeta_prompt::ZetaPromptInput;
41
42use crate::{
43 BufferEditPrediction, EDIT_PREDICTION_SETTLED_QUIESCENCE, EditPredictionId,
44 EditPredictionJumpsFeatureFlag, EditPredictionStore, REJECT_REQUEST_DEBOUNCE,
45};
46
// End-to-end check of `prediction_at` state reporting: a prediction whose
// edits target the buffer under the cursor is surfaced as `Local`, while a
// prediction triggered by a diagnostic in a different (unopened) file is
// surfaced as `Jump` — and becomes `Local` once that file's buffer is opened.
#[gpui::test]
async fn test_current_state(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "1.txt": "Hello!\nHow\nBye\n",
            "2.txt": "Hola!\nComo\nAdios\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    // Open 1.txt and mark it active so predictions attach to it.
    let buffer1 = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("/root/1.txt"), cx).unwrap();
            project.set_active_path(Some(path.clone()), cx);
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot1 = buffer1.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot1.anchor_before(language::Point::new(1, 3));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_project(&project, cx);
        ep_store.register_buffer(&buffer1, &project, cx);
    });

    // Prediction for current file

    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer1.clone(), position, cx)
    });
    let (request, respond_tx) = requests.predict.next().await.unwrap();

    // Model responds with a diff that edits the active buffer (1.txt).
    respond_tx
        .send(model_response(
            &request,
            indoc! {r"
            --- a/root/1.txt
            +++ b/root/1.txt
            @@ ... @@
             Hello!
            -How
            +How are you?
             Bye
            "},
        ))
        .unwrap();

    cx.run_until_parked();

    // An edit targeting the cursor's own buffer is reported as `Local`.
    ep_store.update(cx, |ep_store, cx| {
        let prediction = ep_store
            .prediction_at(&buffer1, None, &project, cx)
            .unwrap();
        assert_matches!(prediction, BufferEditPrediction::Local { .. });
    });

    // Discard the prediction so the next request starts from a clean state.
    ep_store.update(cx, |ep_store, cx| {
        ep_store.reject_current_prediction(EditPredictionRejectReason::Discarded, &project, cx);
    });

    // Prediction for diagnostic in another file

    let diagnostic = lsp::Diagnostic {
        range: lsp::Range::new(lsp::Position::new(1, 1), lsp::Position::new(1, 5)),
        severity: Some(lsp::DiagnosticSeverity::ERROR),
        message: "Sentence is incomplete".to_string(),
        ..Default::default()
    };

    // Publishing a diagnostic for 2.txt kicks off a new prediction request.
    project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostics(
                    LanguageServerId(0),
                    lsp::PublishDiagnosticsParams {
                        uri: lsp::Uri::from_file_path(path!("/root/2.txt")).unwrap(),
                        diagnostics: vec![diagnostic],
                        version: None,
                    },
                    None,
                    language::DiagnosticSourceKind::Pushed,
                    &[],
                    cx,
                )
                .unwrap();
        });
    });

    let (request, respond_tx) = requests.predict.next().await.unwrap();
    respond_tx
        .send(model_response(
            &request,
            indoc! {r#"
            --- a/root/2.txt
            +++ b/root/2.txt
            @@ ... @@
             Hola!
            -Como
            +Como estas?
             Adios
            "#},
        ))
        .unwrap();
    cx.run_until_parked();

    // From buffer1's perspective the prediction lives in another file, so it
    // is surfaced as a `Jump` pointing at 2.txt.
    ep_store.update(cx, |ep_store, cx| {
        let prediction = ep_store
            .prediction_at(&buffer1, None, &project, cx)
            .unwrap();
        assert_matches!(
            prediction,
            BufferEditPrediction::Jump { prediction } if prediction.snapshot.file().unwrap().full_path(cx) == Path::new(path!("root/2.txt"))
        );
    });

    let buffer2 = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/2.txt"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    // Once 2.txt is open, the same prediction is `Local` for that buffer.
    ep_store.update(cx, |ep_store, cx| {
        let prediction = ep_store
            .prediction_at(&buffer2, None, &project, cx)
            .unwrap();
        assert_matches!(prediction, BufferEditPrediction::Local { .. });
    });
}
182
// Verifies that diagnostics-driven prediction refreshes are suppressed while
// the workspace is following a collaborator (the agent), and resume once the
// user unfollows.
#[gpui::test]
async fn test_diagnostics_refresh_suppressed_while_following(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);

    // Enable the jumps feature flag so diagnostics can trigger refreshes at all.
    cx.update(|cx| {
        cx.update_flags(
            false,
            vec![EditPredictionJumpsFeatureFlag::NAME.to_string()],
        );
    });

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "1.txt": "Hello!\nHow\nBye\n",
            "2.txt": "Hola!\nComo\nAdios\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    // A workspace is needed so following state can be queried; make sure the
    // global AppState points at the workspace's own app state.
    let app_state = cx.update(|cx| {
        let app_state = AppState::test(cx);
        AppState::set_global(Arc::downgrade(&app_state), cx);
        app_state
    });

    let multi_workspace =
        cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
    let workspace = multi_workspace
        .read_with(cx, |multi_workspace, _| multi_workspace.workspace().clone())
        .unwrap();
    cx.update(|cx| {
        AppState::set_global(Arc::downgrade(workspace.read(cx).app_state()), cx);
    });
    // Keep the initial app state alive for the duration of the test.
    let _ = app_state;

    let buffer1 = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/1.txt"), cx).unwrap();
            project.set_active_path(Some(path.clone()), cx);
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot1 = buffer1.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot1.anchor_before(language::Point::new(1, 3));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_project(&project, cx);
        ep_store.register_buffer(&buffer1, &project, cx);
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer1.clone(), position, cx);
    });

    // Satisfy the initial (cursor-driven) prediction request.
    let (request, respond_tx) = requests.predict.next().await.unwrap();
    respond_tx
        .send(model_response(
            &request,
            indoc! {r"
            --- a/root/1.txt
            +++ b/root/1.txt
            @@ ... @@
             Hello!
            -How
            +How are you?
             Bye
            "},
        ))
        .unwrap();
    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        ep_store.reject_current_prediction(EditPredictionRejectReason::Discarded, &project, cx);
    });

    // Start following the agent before any diagnostics arrive.
    let _ = multi_workspace.update(cx, |multi_workspace, window, cx| {
        multi_workspace.workspace().update(cx, |workspace, cx| {
            workspace.start_following(CollaboratorId::Agent, window, cx);
        });
    });
    cx.run_until_parked();

    let diagnostic = lsp::Diagnostic {
        range: lsp::Range::new(lsp::Position::new(1, 1), lsp::Position::new(1, 5)),
        severity: Some(lsp::DiagnosticSeverity::ERROR),
        message: "Sentence is incomplete".to_string(),
        ..Default::default()
    };

    // Diagnostics published while following must NOT trigger a request.
    project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostics(
                    LanguageServerId(0),
                    lsp::PublishDiagnosticsParams {
                        uri: lsp::Uri::from_file_path(path!("/root/2.txt")).unwrap(),
                        diagnostics: vec![diagnostic.clone()],
                        version: None,
                    },
                    None,
                    language::DiagnosticSourceKind::Pushed,
                    &[],
                    cx,
                )
                .unwrap();
        });
    });

    cx.run_until_parked();
    assert_no_predict_request_ready(&mut requests.predict);

    // Unfollow; subsequent diagnostics should trigger refreshes again.
    let _ = multi_workspace.update(cx, |multi_workspace, window, cx| {
        multi_workspace.workspace().update(cx, |workspace, cx| {
            workspace.unfollow(CollaboratorId::Agent, window, cx);
        });
    });
    cx.run_until_parked();

    project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostics(
                    LanguageServerId(0),
                    lsp::PublishDiagnosticsParams {
                        uri: lsp::Uri::from_file_path(path!("/root/2.txt")).unwrap(),
                        diagnostics: vec![diagnostic],
                        version: None,
                    },
                    None,
                    language::DiagnosticSourceKind::Pushed,
                    &[],
                    cx,
                )
                .unwrap();
        });
    });

    // This time a request is produced; answering it yields a Jump prediction.
    let (request, respond_tx) = requests.predict.next().await.unwrap();
    respond_tx
        .send(model_response(
            &request,
            indoc! {r#"
            --- a/root/2.txt
            +++ b/root/2.txt
            @@ ... @@
             Hola!
            -Como
            +Como estas?
             Adios
            "#},
        ))
        .unwrap();
    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        let prediction = ep_store
            .prediction_at(&buffer1, None, &project, cx)
            .unwrap();
        assert_matches!(
            prediction,
            BufferEditPrediction::Jump { prediction } if prediction.snapshot.file().unwrap().full_path(cx) == Path::new(path!("root/2.txt"))
        );
    });
}
348
// Happy-path request/response round trip: a single prediction request is
// answered with a one-hunk diff, and the resulting prediction contains the
// expected single edit at the expected position.
#[gpui::test]
async fn test_simple_request(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    // Cursor placed at the end of "How" (row 1, column 3).
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    let prediction_task = ep_store.update(cx, |ep_store, cx| {
        ep_store.request_prediction(&project, &buffer, position, Default::default(), cx)
    });

    let (request, respond_tx) = requests.predict.next().await.unwrap();

    // TODO Put back when we have a structured request again
    // assert_eq!(
    //     request.excerpt_path.as_ref(),
    //     Path::new(path!("root/foo.md"))
    // );
    // assert_eq!(
    //     request.cursor_point,
    //     Point {
    //         line: Line(1),
    //         column: 3
    //     }
    // );

    respond_tx
        .send(model_response(
            &request,
            indoc! { r"
            --- a/root/foo.md
            +++ b/root/foo.md
            @@ ... @@
             Hello!
            -How
            +How are you?
             Bye
            "},
        ))
        .unwrap();

    let prediction = prediction_task.await.unwrap().unwrap().prediction.unwrap();

    // The diff replaces "How" with "How are you?", which minimizes to a single
    // insertion of " are you?" at the cursor position.
    assert_eq!(prediction.edits.len(), 1);
    assert_eq!(
        prediction.edits[0].0.to_point(&snapshot).start,
        language::Point::new(1, 3)
    );
    assert_eq!(prediction.edits[0].1.as_ref(), " are you?");
}
415
// Verifies that edits made before a prediction request are reported to the
// model: the prompt sent upstream must contain a unified diff of the recent
// buffer change.
#[gpui::test]
async fn test_request_events(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\n\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    // Register first so the store observes the edit below as history.
    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx);
    });

    // Insert "How" on the empty second line (offset 7 is just after "Hello!\n").
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(7..7, "How")], None, cx);
    });

    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    let prediction_task = ep_store.update(cx, |ep_store, cx| {
        ep_store.request_prediction(&project, &buffer, position, Default::default(), cx)
    });

    let (request, respond_tx) = requests.predict.next().await.unwrap();

    // The prompt must embed the diff of the edit made above.
    let prompt = prompt_from_request(&request);
    assert!(
        prompt.contains(indoc! {"
            --- a/root/foo.md
            +++ b/root/foo.md
            @@ -1,3 +1,3 @@
             Hello!
            -
            +How
             Bye
            "}),
        "{prompt}"
    );

    respond_tx
        .send(model_response(
            &request,
            indoc! {r#"
            --- a/root/foo.md
            +++ b/root/foo.md
            @@ ... @@
             Hello!
            -How
            +How are you?
             Bye
            "#},
        ))
        .unwrap();

    let prediction = prediction_task.await.unwrap().unwrap().prediction.unwrap();

    assert_eq!(prediction.edits.len(), 1);
    assert_eq!(prediction.edits[0].1.as_ref(), " are you?");
}
488
// Verifies time-based splitting of the edit history: two bursts of edits on
// the same line, separated by a pause longer than the grouping threshold,
// must be reported as two distinct history events with separate diffs.
#[gpui::test]
async fn test_edit_history_getter_pause_splits_last_event(cx: &mut TestAppContext) {
    let (ep_store, _requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\n\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx);
    });

    // First burst: insert "How"
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(7..7, "How")], None, cx);
    });

    // Simulate a pause longer than the grouping threshold (e.g. 500ms).
    cx.executor().advance_clock(LAST_CHANGE_GROUPING_TIME * 2);
    cx.run_until_parked();

    // Second burst: append " are you?" immediately after "How" on the same line.
    //
    // Keeping both bursts on the same line ensures the existing line-span coalescing logic
    // groups them into a single `LastEvent`, allowing the pause-split getter to return two diffs.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(10..10, " are you?")], None, cx);
    });

    // A second edit shortly after the first post-pause edit ensures the last edit timestamp is
    // advanced after the pause boundary is recorded, making pause-splitting deterministic.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(19..19, "!")], None, cx);
    });

    // With time-based splitting, there are two distinct events.
    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(events.len(), 2);

    // First event covers only the pre-pause insertion of "How".
    let first_total_edit_range = buffer.read_with(cx, |buffer, _| {
        events[0].total_edit_range.to_point(&buffer.snapshot())
    });
    assert_eq!(first_total_edit_range, Point::new(1, 0)..Point::new(1, 3));

    let zeta_prompt::Event::BufferChange { diff, .. } = events[0].event.as_ref();
    assert_eq!(
        diff.as_str(),
        indoc! {"
            @@ -1,3 +1,3 @@
             Hello!
            -
            +How
             Bye
        "}
    );

    // Second event covers both post-pause edits, merged into one diff.
    let second_total_edit_range = buffer.read_with(cx, |buffer, _| {
        events[1].total_edit_range.to_point(&buffer.snapshot())
    });
    assert_eq!(second_total_edit_range, Point::new(1, 3)..Point::new(1, 13));

    let zeta_prompt::Event::BufferChange { diff, .. } = events[1].event.as_ref();
    assert_eq!(
        diff.as_str(),
        indoc! {"
            @@ -1,3 +1,3 @@
             Hello!
            -How
            +How are you?!
             Bye
        "}
    );
}
577
// Verifies line-distance-based coalescing of edit-history events: edits
// within 8 lines of an existing event's range (above or below) are merged
// into it, while an edit farther away starts a new event.
#[gpui::test]
async fn test_predicted_edits_are_separated_in_edit_history(cx: &mut TestAppContext) {
    let (ep_store, _requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());

    // Create a file with 30 lines to test line-based coalescing
    let content = (1..=30)
        .map(|i| format!("Line {}\n", i))
        .collect::<String>();
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": content
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx);
    });

    // First edit: multi-line edit spanning rows 10-12 (replacing lines 11-13)
    buffer.update(cx, |buffer, cx| {
        let start = Point::new(10, 0).to_offset(buffer);
        let end = Point::new(13, 0).to_offset(buffer);
        buffer.edit(vec![(start..end, "Middle A\nMiddle B\n")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events(&events),
        indoc! {"
            @@ -8,9 +8,8 @@
             Line 8
             Line 9
             Line 10
            -Line 11
            -Line 12
            -Line 13
            +Middle A
            +Middle B
             Line 14
             Line 15
             Line 16
        "},
        "After first edit"
    );

    // Second edit: insert ABOVE the first edit's range (row 5, within 8 lines of row 10)
    // This tests that coalescing considers the START of the existing range
    buffer.update(cx, |buffer, cx| {
        let offset = Point::new(5, 0).to_offset(buffer);
        buffer.edit(vec![(offset..offset, "Above\n")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events(&events),
        indoc! {"
            @@ -3,14 +3,14 @@
             Line 3
             Line 4
             Line 5
            +Above
             Line 6
             Line 7
             Line 8
             Line 9
             Line 10
            -Line 11
            -Line 12
            -Line 13
            +Middle A
            +Middle B
             Line 14
             Line 15
             Line 16
        "},
        "After inserting above (should coalesce)"
    );

    // Third edit: insert BELOW the first edit's range (row 14 in current buffer, within 8 lines of row 12)
    // This tests that coalescing considers the END of the existing range
    buffer.update(cx, |buffer, cx| {
        let offset = Point::new(14, 0).to_offset(buffer);
        buffer.edit(vec![(offset..offset, "Below\n")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events(&events),
        indoc! {"
            @@ -3,15 +3,16 @@
             Line 3
             Line 4
             Line 5
            +Above
             Line 6
             Line 7
             Line 8
             Line 9
             Line 10
            -Line 11
            -Line 12
            -Line 13
            +Middle A
            +Middle B
             Line 14
            +Below
             Line 15
             Line 16
             Line 17
        "},
        "After inserting below (should coalesce)"
    );

    // Fourth edit: insert FAR BELOW (row 25, beyond 8 lines from the current range end ~row 15)
    // This should NOT coalesce - creates a new event
    buffer.update(cx, |buffer, cx| {
        let offset = Point::new(25, 0).to_offset(buffer);
        buffer.edit(vec![(offset..offset, "Far below\n")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    // `render_events` joins the two events' diffs with "\n---\n".
    assert_eq!(
        render_events(&events),
        indoc! {"
            @@ -3,15 +3,16 @@
             Line 3
             Line 4
             Line 5
            +Above
             Line 6
             Line 7
             Line 8
             Line 9
             Line 10
            -Line 11
            -Line 12
            -Line 13
            +Middle A
            +Middle B
             Line 14
            +Below
             Line 15
             Line 16
             Line 17

            ---
            @@ -23,6 +23,7 @@
             Line 22
             Line 23
             Line 24
            +Far below
             Line 25
             Line 26
             Line 27
        "},
        "After inserting far below (should NOT coalesce)"
    );
}
756
757fn render_events(events: &[StoredEvent]) -> String {
758 events
759 .iter()
760 .map(|e| {
761 let zeta_prompt::Event::BufferChange { diff, .. } = e.event.as_ref();
762 diff.as_str()
763 })
764 .collect::<Vec<_>>()
765 .join("\n---\n")
766}
767
768fn render_events_with_predicted(events: &[StoredEvent]) -> Vec<String> {
769 events
770 .iter()
771 .map(|e| {
772 let zeta_prompt::Event::BufferChange {
773 diff, predicted, ..
774 } = e.event.as_ref();
775 let prefix = if *predicted { "predicted" } else { "manual" };
776 format!("{}\n{}", prefix, diff)
777 })
778 .collect()
779}
780
781fn make_collaborator_replica(
782 buffer: &Entity<Buffer>,
783 cx: &mut TestAppContext,
784) -> (Entity<Buffer>, clock::Global) {
785 let (state, version) =
786 buffer.read_with(cx, |buffer, _cx| (buffer.to_proto(_cx), buffer.version()));
787 let collaborator = cx.new(|_cx| {
788 Buffer::from_proto(ReplicaId::new(1), Capability::ReadWrite, state, None).unwrap()
789 });
790 (collaborator, version)
791}
792
793async fn apply_collaborator_edit(
794 collaborator: &Entity<Buffer>,
795 buffer: &Entity<Buffer>,
796 since_version: &mut clock::Global,
797 edit_range: Range<usize>,
798 new_text: &str,
799 cx: &mut TestAppContext,
800) {
801 collaborator.update(cx, |collaborator, cx| {
802 collaborator.edit([(edit_range, new_text)], None, cx);
803 });
804
805 let serialize_task = collaborator.read_with(cx, |collaborator, cx| {
806 collaborator.serialize_ops(Some(since_version.clone()), cx)
807 });
808 let ops = serialize_task.await;
809 *since_version = collaborator.read_with(cx, |collaborator, _cx| collaborator.version());
810
811 buffer.update(cx, |buffer, cx| {
812 buffer.apply_ops(
813 ops.into_iter()
814 .map(|op| language::proto::deserialize_operation(op).unwrap()),
815 cx,
816 );
817 });
818}
819
// Verifies that a collaborator's edit landing close to the local cursor is
// merged into the local edit-history event (and reported as "manual").
#[gpui::test]
async fn test_nearby_collaborator_edits_are_kept_in_history(cx: &mut TestAppContext) {
    let (ep_store, _requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.rs": "line 0\nline 1\nline 2\nline 3\nline 4\nline 5\nline 6\nline 7\nline 8\nline 9\nline 10\nline 11\nline 12\nline 13\nline 14\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.rs"), cx).unwrap();
            project.set_active_path(Some(path.clone()), cx);
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    // Record a cursor position near the top so "nearby" is well-defined.
    let cursor = buffer.read_with(cx, |buffer, _cx| buffer.anchor_before(Point::new(1, 0)));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx);
        let _ = ep_store.prediction_at(&buffer, Some(cursor), &project, cx);
    });

    // Local edit: replace "line 0" on the first line.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(0..6, "LOCAL ZERO")], None, cx);
    });

    let (collaborator, mut collaborator_version) = make_collaborator_replica(&buffer, cx);

    let (line_one_start, line_one_len) = collaborator.read_with(cx, |buffer, _cx| {
        (Point::new(1, 0).to_offset(buffer), buffer.line_len(1))
    });

    // Remote edit: replace line 1, immediately adjacent to the local edit.
    apply_collaborator_edit(
        &collaborator,
        &buffer,
        &mut collaborator_version,
        line_one_start..line_one_start + line_one_len as usize,
        "REMOTE ONE",
        cx,
    )
    .await;

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });

    // Both the local and the nearby remote edit appear in a single event.
    assert_eq!(
        render_events_with_predicted(&events),
        vec![indoc! {"
            manual
            @@ -1,5 +1,5 @@
            -line 0
            -line 1
            +LOCAL ZERO
            +REMOTE ONE
             line 2
             line 3
             line 4
        "}]
    );
}
888
// Verifies that a collaborator's edit far away from the local cursor
// (hundreds of lines below) does NOT enter the local edit history.
#[gpui::test]
async fn test_distant_collaborator_edits_are_omitted_from_history(cx: &mut TestAppContext) {
    let (ep_store, _requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.rs": (0..1000)
                .map(|i| format!("line {i}\n"))
                .collect::<String>()
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.rs"), cx).unwrap();
            project.set_active_path(Some(path.clone()), cx);
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    // Cursor near the top of the 1000-line file.
    let cursor = buffer.read_with(cx, |buffer, _cx| buffer.anchor_before(Point::new(1, 0)));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx);
        let _ = ep_store.prediction_at(&buffer, Some(cursor), &project, cx);
    });

    // Local edit near the cursor: replace "line 0".
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(0..6, "LOCAL ZERO")], None, cx);
    });

    let (collaborator, mut collaborator_version) = make_collaborator_replica(&buffer, cx);

    // Remote edit at line 900 — far from the cursor.
    let far_line_start = buffer.read_with(cx, |buffer, _cx| Point::new(900, 0).to_offset(buffer));

    apply_collaborator_edit(
        &collaborator,
        &buffer,
        &mut collaborator_version,
        far_line_start..far_line_start + 7,
        "REMOTE FAR",
        cx,
    )
    .await;

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });

    // Only the local edit is recorded; the distant remote edit is omitted.
    assert_eq!(
        render_events_with_predicted(&events),
        vec![indoc! {"
            manual
            @@ -1,4 +1,4 @@
            -line 0
            +LOCAL ZERO
             line 1
             line 2
             line 3
        "}]
    );
}
955
// Verifies that a collaborator's edit in a *different* file from the one the
// local user is working in produces no edit-history events at all.
#[gpui::test]
async fn test_irrelevant_collaborator_edits_in_different_files_are_omitted_from_history(
    cx: &mut TestAppContext,
) {
    let (ep_store, _requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.rs": "line 0\nline 1\nline 2\nline 3\n",
            "bar.rs": "line 0\nline 1\nline 2\nline 3\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    // foo.rs is the active file; bar.rs is merely open.
    let foo_buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.rs"), cx).unwrap();
            project.set_active_path(Some(path.clone()), cx);
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let bar_buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/bar.rs"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    let foo_cursor = foo_buffer.read_with(cx, |buffer, _cx| buffer.anchor_before(Point::new(1, 0)));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&foo_buffer, &project, cx);
        ep_store.register_buffer(&bar_buffer, &project, cx);
        let _ = ep_store.prediction_at(&foo_buffer, Some(foo_cursor), &project, cx);
    });

    // Remote edit happens only in bar.rs, which the local user isn't editing.
    let (bar_collaborator, mut bar_version) = make_collaborator_replica(&bar_buffer, cx);

    apply_collaborator_edit(
        &bar_collaborator,
        &bar_buffer,
        &mut bar_version,
        0..6,
        "REMOTE BAR",
        cx,
    )
    .await;

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });

    // No local edits occurred, so the remote-only change leaves history empty.
    assert!(events.is_empty());
}
1014
// Verifies how the `predicted` flag interacts with event coalescing in the
// edit history (cases 1-6 are annotated inline): manual and predicted edits
// never merge while either is the most recent event, but once editing moves
// far away, earlier mixed events at the old location may be merged with the
// result marked "manual" (predicted == all constituents predicted).
#[gpui::test]
async fn test_predicted_flag_coalescing(cx: &mut TestAppContext) {
    let (ep_store, _requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.rs": "line 0\nline 1\nline 2\nline 3\nline 4\nline 5\nline 6\nline 7\nline 8\nline 9\nline 10\nline 11\nline 12\nline 13\nline 14\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.rs"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx);
    });

    // Case 1: Manual edits have `predicted` set to false.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(0..6, "LINE ZERO")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });

    assert_eq!(
        render_events_with_predicted(&events),
        vec![indoc! {"
            manual
            @@ -1,4 +1,4 @@
            -line 0
            +LINE ZERO
             line 1
             line 2
             line 3
        "}]
    );

    // Case 2: Multiple successive manual edits near each other are merged into one
    // event with `predicted` set to false.
    buffer.update(cx, |buffer, cx| {
        let offset = Point::new(1, 0).to_offset(buffer);
        let end = Point::new(1, 6).to_offset(buffer);
        buffer.edit(vec![(offset..end, "LINE ONE")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events_with_predicted(&events),
        vec![indoc! {"
            manual
            @@ -1,5 +1,5 @@
            -line 0
            -line 1
            +LINE ZERO
            +LINE ONE
             line 2
             line 3
             line 4
        "}]
    );

    // Case 3: Accepted predictions have `predicted` set to true.
    // Case 5: A manual edit that follows a predicted edit is not merged with the
    // predicted edit, even if it is nearby.
    //
    // `report_changes_for_buffer(.., true, true, ..)` marks the preceding edit
    // as an accepted prediction.
    ep_store.update(cx, |ep_store, cx| {
        buffer.update(cx, |buffer, cx| {
            let offset = Point::new(2, 0).to_offset(buffer);
            let end = Point::new(2, 6).to_offset(buffer);
            buffer.edit(vec![(offset..end, "LINE TWO")], None, cx);
        });
        ep_store.report_changes_for_buffer(&buffer, &project, true, true, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events_with_predicted(&events),
        vec![
            indoc! {"
                manual
                @@ -1,5 +1,5 @@
                -line 0
                -line 1
                +LINE ZERO
                +LINE ONE
                 line 2
                 line 3
                 line 4
            "},
            indoc! {"
                predicted
                @@ -1,6 +1,6 @@
                 LINE ZERO
                 LINE ONE
                -line 2
                +LINE TWO
                 line 3
                 line 4
                 line 5
            "}
        ]
    );

    // Case 4: Multiple successive accepted predictions near each other are merged
    // into one event with `predicted` set to true.
    ep_store.update(cx, |ep_store, cx| {
        buffer.update(cx, |buffer, cx| {
            let offset = Point::new(3, 0).to_offset(buffer);
            let end = Point::new(3, 6).to_offset(buffer);
            buffer.edit(vec![(offset..end, "LINE THREE")], None, cx);
        });
        ep_store.report_changes_for_buffer(&buffer, &project, true, true, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events_with_predicted(&events),
        vec![
            indoc! {"
                manual
                @@ -1,5 +1,5 @@
                -line 0
                -line 1
                +LINE ZERO
                +LINE ONE
                 line 2
                 line 3
                 line 4
            "},
            indoc! {"
                predicted
                @@ -1,7 +1,7 @@
                 LINE ZERO
                 LINE ONE
                -line 2
                -line 3
                +LINE TWO
                +LINE THREE
                 line 4
                 line 5
                 line 6
            "}
        ]
    );

    // Case 5 (continued): A manual edit that follows a predicted edit is not merged
    // with the predicted edit, even if it is nearby.
    buffer.update(cx, |buffer, cx| {
        let offset = Point::new(4, 0).to_offset(buffer);
        let end = Point::new(4, 6).to_offset(buffer);
        buffer.edit(vec![(offset..end, "LINE FOUR")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events_with_predicted(&events),
        vec![
            indoc! {"
                manual
                @@ -1,5 +1,5 @@
                -line 0
                -line 1
                +LINE ZERO
                +LINE ONE
                 line 2
                 line 3
                 line 4
            "},
            indoc! {"
                predicted
                @@ -1,7 +1,7 @@
                 LINE ZERO
                 LINE ONE
                -line 2
                -line 3
                +LINE TWO
                +LINE THREE
                 line 4
                 line 5
                 line 6
            "},
            indoc! {"
                manual
                @@ -2,7 +2,7 @@
                 LINE ONE
                 LINE TWO
                 LINE THREE
                -line 4
                +LINE FOUR
                 line 5
                 line 6
                 line 7
            "}
        ]
    );

    // Case 6: If we then perform a manual edit at a *different* location (more than
    // 8 lines away), then the edits at the prior location can be merged with each
    // other, even if some are predicted and some are not. `predicted` means all
    // constituent edits were predicted.
    buffer.update(cx, |buffer, cx| {
        let offset = Point::new(14, 0).to_offset(buffer);
        let end = Point::new(14, 7).to_offset(buffer);
        buffer.edit(vec![(offset..end, "LINE FOURTEEN")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events_with_predicted(&events),
        vec![
            indoc! {"
                manual
                @@ -1,8 +1,8 @@
                -line 0
                -line 1
                -line 2
                -line 3
                -line 4
                +LINE ZERO
                +LINE ONE
                +LINE TWO
                +LINE THREE
                +LINE FOUR
                 line 5
                 line 6
                 line 7
            "},
            indoc! {"
                manual
                @@ -12,4 +12,4 @@
                 line 11
                 line 12
                 line 13
                -line 14
                +LINE FOURTEEN
            "}
        ]
    );
}
1273
/// An empty model response must produce no prediction, and the discarded
/// prediction must be reported to the server as rejected with reason `Empty`.
#[gpui::test]
async fn test_empty_prediction(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    // Kick off a prediction request from the cursor position.
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    // Answer with an empty diff; the store should drop the prediction.
    let (request, respond_tx) = requests.predict.next().await.unwrap();
    let response = model_response(&request, "");
    let id = response.request_id.clone();
    respond_tx.send(response).unwrap();

    cx.run_until_parked();

    // No prediction should be available at the buffer.
    ep_store.update(cx, |ep_store, cx| {
        assert!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .is_none()
        );
    });

    // prediction is reported as rejected
    let (reject_request, _) = requests.reject.next().await.unwrap();

    assert_eq!(
        &reject_request.rejections,
        &[EditPredictionRejection {
            request_id: id,
            reason: EditPredictionRejectReason::Empty,
            was_shown: false,
            model_version: None,
            e2e_latency_ms: Some(0),
        }]
    );
}
1330
/// If the buffer changes while a request is in flight such that interpolating
/// the returned diff onto the new buffer state yields no remaining edits, the
/// prediction is dropped and reported with reason `InterpolatedEmpty`.
#[gpui::test]
async fn test_interpolated_empty(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request, respond_tx) = requests.predict.next().await.unwrap();

    // Apply the predicted edit manually before the response arrives, so the
    // interpolated prediction becomes a no-op.
    buffer.update(cx, |buffer, cx| {
        buffer.set_text("Hello!\nHow are you?\nBye", cx);
    });

    let response = model_response(&request, SIMPLE_DIFF);
    let id = response.request_id.clone();
    respond_tx.send(response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        assert!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .is_none()
        );
    });

    // prediction is reported as rejected
    let (reject_request, _) = requests.reject.next().await.unwrap();

    assert_eq!(
        &reject_request.rejections,
        &[EditPredictionRejection {
            request_id: id,
            reason: EditPredictionRejectReason::InterpolatedEmpty,
            was_shown: false,
            model_version: None,
            e2e_latency_ms: Some(0),
        }]
    );
}
1392
/// A minimal unified diff against `root/foo.md` that rewrites the middle line
/// ("How" -> "How are you?"). Shared by several tests as a generic non-empty
/// model prediction.
const SIMPLE_DIFF: &str = indoc! { r"
    --- a/root/foo.md
    +++ b/root/foo.md
    @@ ... @@
     Hello!
    -How
    +How are you?
     Bye
"};
1402
/// A newer prediction of equal quality replaces the current one, and the
/// replaced prediction is reported with reason `Replaced`.
#[gpui::test]
async fn test_replace_current(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request, respond_tx) = requests.predict.next().await.unwrap();
    let first_response = model_response(&request, SIMPLE_DIFF);
    let first_id = first_response.request_id.clone();
    respond_tx.send(first_response).unwrap();

    cx.run_until_parked();

    // The first prediction is now current.
    ep_store.update(cx, |ep_store, cx| {
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            first_id
        );
    });

    // a second request is triggered
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request, respond_tx) = requests.predict.next().await.unwrap();
    let second_response = model_response(&request, SIMPLE_DIFF);
    let second_id = second_response.request_id.clone();
    respond_tx.send(second_response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // second replaces first
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            second_id
        );
    });

    // first is reported as replaced
    let (reject_request, _) = requests.reject.next().await.unwrap();

    assert_eq!(
        &reject_request.rejections,
        &[EditPredictionRejection {
            request_id: first_id,
            reason: EditPredictionRejectReason::Replaced,
            was_shown: false,
            model_version: None,
            e2e_latency_ms: Some(0),
        }]
    );
}
1486
/// If a newer prediction is judged worse than the current one, the current
/// prediction is kept and the newer one is reported with reason
/// `CurrentPreferred`.
#[gpui::test]
async fn test_current_preferred(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request, respond_tx) = requests.predict.next().await.unwrap();
    let first_response = model_response(&request, SIMPLE_DIFF);
    let first_id = first_response.request_id.clone();
    respond_tx.send(first_response).unwrap();

    cx.run_until_parked();

    // The first prediction is now current.
    ep_store.update(cx, |ep_store, cx| {
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            first_id
        );
    });

    // a second request is triggered
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request, respond_tx) = requests.predict.next().await.unwrap();
    // worse than current prediction: a strict prefix of the first edit
    let second_response = model_response(
        &request,
        indoc! { r"
            --- a/root/foo.md
            +++ b/root/foo.md
            @@ ... @@
             Hello!
            -How
            +How are
             Bye
        "},
    );
    let second_id = second_response.request_id.clone();
    respond_tx.send(second_response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // first is preferred over second
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            first_id
        );
    });

    // second is reported as rejected
    let (reject_request, _) = requests.reject.next().await.unwrap();

    assert_eq!(
        &reject_request.rejections,
        &[EditPredictionRejection {
            request_id: second_id,
            reason: EditPredictionRejectReason::CurrentPreferred,
            was_shown: false,
            model_version: None,
            e2e_latency_ms: Some(0),
        }]
    );
}
1582
/// When a later request resolves before an earlier pending one, the earlier
/// request is cancelled: its late response is ignored and it is reported with
/// reason `Canceled` (with no e2e latency recorded).
#[gpui::test]
async fn test_cancel_earlier_pending_requests(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    // start two refresh tasks
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request1, respond_first) = requests.predict.next().await.unwrap();

    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request, respond_second) = requests.predict.next().await.unwrap();

    // wait for throttle
    cx.run_until_parked();

    // second responds first
    let second_response = model_response(&request, SIMPLE_DIFF);
    let second_id = second_response.request_id.clone();
    respond_second.send(second_response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // current prediction is second
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            second_id
        );
    });

    // Now let the (cancelled) first request's response arrive late.
    let first_response = model_response(&request1, SIMPLE_DIFF);
    let first_id = first_response.request_id.clone();
    respond_first.send(first_response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // current prediction is still second, since first was cancelled
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            second_id
        );
    });

    // first is reported as rejected
    let (reject_request, _) = requests.reject.next().await.unwrap();

    cx.run_until_parked();

    assert_eq!(
        &reject_request.rejections,
        &[EditPredictionRejection {
            request_id: first_id,
            reason: EditPredictionRejectReason::Canceled,
            was_shown: false,
            model_version: None,
            e2e_latency_ms: None,
        }]
    );
}
1675
/// With two requests already pending, starting a third cancels the second
/// (most recent pending) request. The first may still complete and become
/// current; the third then replaces the first. The rejection batch reports the
/// second as `Canceled` and the first as `Replaced` with its measured latency.
#[gpui::test]
async fn test_cancel_second_on_third_request(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    // start two refresh tasks
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request1, respond_first) = requests.predict.next().await.unwrap();

    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request2, respond_second) = requests.predict.next().await.unwrap();

    // wait for throttle, so requests are sent
    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // start a third request
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);

        // 2 are pending, so 2nd is cancelled (index 1 in the pending list)
        assert_eq!(
            ep_store
                .get_or_init_project(&project, cx)
                .cancelled_predictions
                .iter()
                .copied()
                .collect::<Vec<_>>(),
            [1]
        );
    });

    // wait for throttle
    cx.run_until_parked();

    let (request3, respond_third) = requests.predict.next().await.unwrap();

    let first_response = model_response(&request1, SIMPLE_DIFF);
    let first_id = first_response.request_id.clone();
    respond_first.send(first_response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // current prediction is first
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            first_id
        );
    });

    // The second request's late response must be ignored.
    let cancelled_response = model_response(&request2, SIMPLE_DIFF);
    let cancelled_id = cancelled_response.request_id.clone();
    respond_second.send(cancelled_response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // current prediction is still first, since second was cancelled
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            first_id
        );
    });

    let third_response = model_response(&request3, SIMPLE_DIFF);
    let third_response_id = third_response.request_id.clone();
    respond_third.send(third_response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // third completes and replaces first
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            third_response_id
        );
    });

    // second is reported as rejected
    let (reject_request, _) = requests.reject.next().await.unwrap();

    cx.run_until_parked();

    assert_eq!(
        &reject_request.rejections,
        &[
            EditPredictionRejection {
                request_id: cancelled_id,
                reason: EditPredictionRejectReason::Canceled,
                was_shown: false,
                model_version: None,
                e2e_latency_ms: None,
            },
            EditPredictionRejection {
                request_id: first_id,
                reason: EditPredictionRejectReason::Replaced,
                was_shown: false,
                model_version: None,
                // 2 throttle waits (for 2nd and 3rd requests) elapsed
                // between this request's start and response.
                e2e_latency_ms: Some(2 * EditPredictionStore::THROTTLE_TIMEOUT.as_millis()),
            }
        ]
    );
}
1817
/// Edit-triggered and jump-(diagnostic-)triggered predictions are throttled
/// independently: a recent edit request does not delay the first jump request
/// and vice versa, while each kind still throttles requests of its own kind.
#[gpui::test]
async fn test_jump_and_edit_throttles_are_independent(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n",
            "bar.md": "Hola!\nComo\nAdios\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            // Mark foo.md as the active file so diagnostic-driven jumps have
            // an active buffer to jump away from.
            project.set_active_path(Some(path.clone()), cx);
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_project(&project, cx);
        ep_store.register_buffer(&buffer, &project, cx);
    });

    // First edit request - no prior edit, so not throttled.
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });
    let (_edit_request, edit_response_tx) = requests.predict.next().await.unwrap();
    edit_response_tx.send(empty_response()).unwrap();
    cx.run_until_parked();

    let diagnostic = lsp::Diagnostic {
        range: lsp::Range::new(lsp::Position::new(1, 1), lsp::Position::new(1, 5)),
        severity: Some(lsp::DiagnosticSeverity::ERROR),
        message: "Sentence is incomplete".to_string(),
        ..Default::default()
    };

    // First jump request triggered by diagnostic event on buffer - no prior jump, so not throttled (independent from edit).
    project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostics(
                    LanguageServerId(0),
                    lsp::PublishDiagnosticsParams {
                        uri: lsp::Uri::from_file_path(path!("/root/bar.md")).unwrap(),
                        diagnostics: vec![diagnostic],
                        version: None,
                    },
                    None,
                    language::DiagnosticSourceKind::Pushed,
                    &[],
                    cx,
                )
                .unwrap();
        });
    });
    let (_jump_request, jump_response_tx) = requests.predict.next().await.unwrap();
    jump_response_tx.send(empty_response()).unwrap();
    cx.run_until_parked();

    // Second edit request - should be throttled by the first edit.
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });
    assert_no_predict_request_ready(&mut requests.predict);

    // Second jump request - should be throttled by the first jump.
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_diagnostics(
            project.clone(),
            DiagnosticSearchScope::Global,
            cx,
        );
    });
    assert_no_predict_request_ready(&mut requests.predict);

    // Wait for both throttles to expire.
    cx.background_executor
        .advance_clock(EditPredictionStore::THROTTLE_TIMEOUT);
    cx.background_executor.run_until_parked();
    cx.run_until_parked();

    // Both requests should now go through.
    let (_request_1, response_tx_1) = requests.predict.next().await.unwrap();
    response_tx_1.send(empty_response()).unwrap();
    cx.run_until_parked();

    let (_request_2, response_tx_2) = requests.predict.next().await.unwrap();
    response_tx_2.send(empty_response()).unwrap();
    cx.run_until_parked();
}
1918
/// Two identical refresh calls issued in the same synchronous frame must be
/// deduplicated down to a single outgoing prediction request.
#[gpui::test]
async fn test_same_frame_duplicate_requests_deduplicated(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    // Enqueue two refresh calls in the same synchronous frame (no yielding).
    // Both `cx.spawn` tasks are created before either executes, so they both
    // capture the same `proceed_count_at_enqueue`. Only the first task should
    // pass the deduplication gate; the second should be skipped.
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    // Let both spawned tasks run to completion (including any throttle waits).
    cx.run_until_parked();

    // Exactly one prediction request should have been sent.
    let (request, respond_tx) = requests.predict.next().await.unwrap();
    respond_tx
        .send(model_response(&request, SIMPLE_DIFF))
        .unwrap();
    cx.run_until_parked();

    // No second request should be pending.
    assert_no_predict_request_ready(&mut requests.predict);
}
1964
/// Exercises batching of rejection reports: debounced flushing, immediate
/// flush when the batch size limit is reached, and retry (carrying over the
/// failed batch) after a request failure.
#[gpui::test]
async fn test_rejections_flushing(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);

    ep_store.update(cx, |ep_store, cx| {
        ep_store.reject_prediction(
            EditPredictionId("test-1".into()),
            EditPredictionRejectReason::Discarded,
            false,
            None,
            None,
            cx,
        );
        ep_store.reject_prediction(
            EditPredictionId("test-2".into()),
            EditPredictionRejectReason::Canceled,
            true,
            None,
            None,
            cx,
        );
    });

    // Nothing is sent until the debounce elapses.
    cx.executor().advance_clock(REJECT_REQUEST_DEBOUNCE);
    cx.run_until_parked();

    let (reject_request, respond_tx) = requests.reject.next().await.unwrap();
    respond_tx.send(()).unwrap();

    // batched
    assert_eq!(reject_request.rejections.len(), 2);
    assert_eq!(
        reject_request.rejections[0],
        EditPredictionRejection {
            request_id: "test-1".to_string(),
            reason: EditPredictionRejectReason::Discarded,
            was_shown: false,
            model_version: None,
            e2e_latency_ms: None
        }
    );
    assert_eq!(
        reject_request.rejections[1],
        EditPredictionRejection {
            request_id: "test-2".to_string(),
            reason: EditPredictionRejectReason::Canceled,
            was_shown: true,
            model_version: None,
            e2e_latency_ms: None
        }
    );

    // Reaching batch size limit sends without debounce
    ep_store.update(cx, |ep_store, cx| {
        for i in 0..70 {
            ep_store.reject_prediction(
                EditPredictionId(format!("batch-{}", i).into()),
                EditPredictionRejectReason::Discarded,
                false,
                None,
                None,
                cx,
            );
        }
    });

    // First MAX/2 items are sent immediately
    cx.run_until_parked();
    let (reject_request, respond_tx) = requests.reject.next().await.unwrap();
    respond_tx.send(()).unwrap();

    assert_eq!(reject_request.rejections.len(), 50);
    assert_eq!(reject_request.rejections[0].request_id, "batch-0");
    assert_eq!(reject_request.rejections[49].request_id, "batch-49");

    // Remaining items are debounced with the next batch
    // NOTE(review): assumes 15s >= REJECT_REQUEST_DEBOUNCE — confirm; consider
    // advancing by the constant itself for robustness against tuning changes.
    cx.executor().advance_clock(Duration::from_secs(15));
    cx.run_until_parked();

    let (reject_request, respond_tx) = requests.reject.next().await.unwrap();
    respond_tx.send(()).unwrap();

    assert_eq!(reject_request.rejections.len(), 20);
    assert_eq!(reject_request.rejections[0].request_id, "batch-50");
    assert_eq!(reject_request.rejections[19].request_id, "batch-69");

    // Request failure
    ep_store.update(cx, |ep_store, cx| {
        ep_store.reject_prediction(
            EditPredictionId("retry-1".into()),
            EditPredictionRejectReason::Discarded,
            false,
            None,
            None,
            cx,
        );
    });

    cx.executor().advance_clock(REJECT_REQUEST_DEBOUNCE);
    cx.run_until_parked();

    let (reject_request, _respond_tx) = requests.reject.next().await.unwrap();
    assert_eq!(reject_request.rejections.len(), 1);
    assert_eq!(reject_request.rejections[0].request_id, "retry-1");
    // Simulate failure: dropping the responder makes the HTTP request error.
    drop(_respond_tx);

    // Add another rejection
    ep_store.update(cx, |ep_store, cx| {
        ep_store.reject_prediction(
            EditPredictionId("retry-2".into()),
            EditPredictionRejectReason::Discarded,
            false,
            None,
            None,
            cx,
        );
    });

    cx.executor().advance_clock(REJECT_REQUEST_DEBOUNCE);
    cx.run_until_parked();

    // Retry should include both the failed item and the new one
    let (reject_request, respond_tx) = requests.reject.next().await.unwrap();
    respond_tx.send(()).unwrap();

    assert_eq!(reject_request.rejections.len(), 2);
    assert_eq!(reject_request.rejections[0].request_id, "retry-1");
    assert_eq!(reject_request.rejections[1].request_id, "retry-2");
}
2095
/// Verifies `zeta::active_buffer_diagnostics`:
/// 1. only diagnostics whose range intersects the search range are returned
///    (the `«»`-marked warning outside the `[]` search range is excluded), and
/// 2. snippet row ranges and in-snippet diagnostic offsets are computed
///    correctly for multiple diagnostics in a plain buffer.
#[gpui::test]
fn test_active_buffer_diagnostics_fetching(cx: &mut TestAppContext) {
    // `«»` marks diagnostic ranges; `[]` marks the search range.
    let diagnostic_marker: TextRangeMarker = ('«', '»').into();
    let search_range_marker: TextRangeMarker = ('[', ']').into();

    let (text, mut ranges) = marked_text_ranges_by(
        indoc! {r#"
            fn alpha() {
                let «first_value» = 1;
            }

            [fn beta() {
                let «second_value» = 2;
                let third_value = second_value + missing_symbol;
            }ˇ]

            fn gamma() {
                let «fourth_value» = missing_other_symbol;
            }
        "#},
        vec![diagnostic_marker.clone(), search_range_marker.clone()],
    );

    let diagnostic_ranges = ranges.remove(&diagnostic_marker).unwrap_or_default();
    let search_ranges = ranges.remove(&search_range_marker).unwrap_or_default();

    let buffer = cx.new(|cx| Buffer::local(&text, cx));

    // Attach one diagnostic per marked range, with varying severities so the
    // filter behavior is observable.
    buffer.update(cx, |buffer, cx| {
        let snapshot = buffer.snapshot();
        let diagnostics = DiagnosticSet::new(
            diagnostic_ranges
                .iter()
                .enumerate()
                .map(|(index, range)| DiagnosticEntry {
                    range: snapshot.offset_to_point_utf16(range.start)
                        ..snapshot.offset_to_point_utf16(range.end),
                    diagnostic: Diagnostic {
                        severity: match index {
                            0 => DiagnosticSeverity::WARNING,
                            1 => DiagnosticSeverity::ERROR,
                            _ => DiagnosticSeverity::HINT,
                        },
                        message: match index {
                            0 => "first warning".to_string(),
                            1 => "second error".to_string(),
                            _ => "third hint".to_string(),
                        },
                        group_id: index + 1,
                        is_primary: true,
                        source_kind: language::DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }),
            &snapshot,
        );
        buffer.update_diagnostics(LanguageServerId(0), diagnostics, cx);
    });

    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let search_range = snapshot.offset_to_point(search_ranges[0].start)
        ..snapshot.offset_to_point(search_ranges[0].end);

    let active_buffer_diagnostics = zeta::active_buffer_diagnostics(&snapshot, search_range, 100);

    // Only the error inside the search range is returned.
    assert_eq!(
        active_buffer_diagnostics,
        vec![zeta_prompt::ActiveBufferDiagnostic {
            severity: Some(1),
            message: "second error".to_string(),
            snippet: text,
            snippet_buffer_row_range: 5..5,
            diagnostic_range_in_snippet: 61..73,
        }]
    );

    // Second buffer: plain lines, diagnostics on rows 0, 2, and 4.
    let buffer = cx.new(|cx| {
        Buffer::local(
            indoc! {"
                one
                two
                three
                four
                five
            "},
            cx,
        )
    });

    buffer.update(cx, |buffer, cx| {
        let snapshot = buffer.snapshot();
        let diagnostics = DiagnosticSet::new(
            vec![
                DiagnosticEntry {
                    range: text::PointUtf16::new(0, 0)..text::PointUtf16::new(0, 3),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "row zero".to_string(),
                        group_id: 1,
                        is_primary: true,
                        source_kind: language::DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: text::PointUtf16::new(2, 0)..text::PointUtf16::new(2, 5),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "row two".to_string(),
                        group_id: 2,
                        is_primary: true,
                        source_kind: language::DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: text::PointUtf16::new(4, 0)..text::PointUtf16::new(4, 4),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::INFORMATION,
                        message: "row four".to_string(),
                        group_id: 3,
                        is_primary: true,
                        source_kind: language::DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
            ],
            &snapshot,
        );
        buffer.update_diagnostics(LanguageServerId(0), diagnostics, cx);
    });

    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());

    // Search rows 2..4: the row-0 error is out of range; rows 2 and 4 match
    // (the range end appears to be treated inclusively at the row level).
    let active_buffer_diagnostics =
        zeta::active_buffer_diagnostics(&snapshot, Point::new(2, 0)..Point::new(4, 0), 100);

    assert_eq!(
        active_buffer_diagnostics
            .iter()
            .map(|diagnostic| (
                diagnostic.severity,
                diagnostic.message.clone(),
                diagnostic.snippet.clone(),
                diagnostic.snippet_buffer_row_range.clone(),
                diagnostic.diagnostic_range_in_snippet.clone(),
            ))
            .collect::<Vec<_>>(),
        vec![
            (
                Some(2),
                "row two".to_string(),
                "one\ntwo\nthree\nfour\nfive\n".to_string(),
                2..2,
                8..13,
            ),
            (
                Some(3),
                "row four".to_string(),
                "one\ntwo\nthree\nfour\nfive\n".to_string(),
                4..4,
                19..23,
            ),
        ]
    );
}
2262
2263// Generate a model response that would apply the given diff to the active file.
2264fn model_response(request: &PredictEditsV3Request, diff_to_apply: &str) -> PredictEditsV3Response {
2265 let editable_range =
2266 zeta_prompt::excerpt_range_for_format(Default::default(), &request.input.excerpt_ranges).1;
2267 let excerpt = request.input.cursor_excerpt[editable_range.clone()].to_string();
2268 let new_excerpt = apply_diff_to_string(diff_to_apply, &excerpt).unwrap();
2269
2270 PredictEditsV3Response {
2271 request_id: Uuid::new_v4().to_string(),
2272 editable_range,
2273 output: new_excerpt,
2274 model_version: None,
2275 }
2276}
2277
2278fn empty_response() -> PredictEditsV3Response {
2279 PredictEditsV3Response {
2280 request_id: Uuid::new_v4().to_string(),
2281 editable_range: 0..0,
2282 output: String::new(),
2283 model_version: None,
2284 }
2285}
2286
2287fn prompt_from_request(request: &PredictEditsV3Request) -> String {
2288 zeta_prompt::format_zeta_prompt(&request.input, zeta_prompt::ZetaFormat::default())
2289 .expect("default zeta prompt formatting should succeed in edit prediction tests")
2290}
2291
2292fn assert_no_predict_request_ready(
2293 requests: &mut mpsc::UnboundedReceiver<(
2294 PredictEditsV3Request,
2295 oneshot::Sender<PredictEditsV3Response>,
2296 )>,
2297) {
2298 if requests.next().now_or_never().flatten().is_some() {
2299 panic!("Unexpected prediction request while throttled.");
2300 }
2301}
2302
/// Receiving ends of the channels the fake HTTP client feeds with
/// intercepted cloud requests, letting tests inspect each request and
/// control when/how it is answered.
struct RequestChannels {
    /// One entry per `/predict_edits/v3` request, paired with the sender
    /// used to deliver its response body.
    predict: mpsc::UnboundedReceiver<(
        PredictEditsV3Request,
        oneshot::Sender<PredictEditsV3Response>,
    )>,
    /// One entry per `/predict_edits/reject` request.
    reject: mpsc::UnboundedReceiver<(RejectEditPredictionsBody, oneshot::Sender<()>)>,
}
2310
/// Install test settings and build an `EditPredictionStore` backed by a fake
/// HTTP client. Intercepted cloud requests are forwarded through the
/// returned `RequestChannels`, so the test decides the response for each
/// predict/reject call; the handler blocks until the test answers.
fn init_test_with_fake_client(
    cx: &mut TestAppContext,
) -> (Entity<EditPredictionStore>, RequestChannels) {
    cx.update(move |cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        zlog::init_test();

        let (predict_req_tx, predict_req_rx) = mpsc::unbounded();
        let (reject_req_tx, reject_req_rx) = mpsc::unbounded();

        let http_client = FakeHttpClient::create({
            move |req| {
                let uri = req.uri().path().to_string();
                let mut body = req.into_body();
                let predict_req_tx = predict_req_tx.clone();
                let reject_req_tx = reject_req_tx.clone();
                async move {
                    let resp = match uri.as_str() {
                        // Token refresh: answer with a canned LLM token.
                        "/client/llm_tokens" => serde_json::to_string(&json!({
                            "token": "test"
                        }))
                        .unwrap(),
                        // Prediction bodies arrive zstd-compressed; decode,
                        // hand the request to the test, and wait for the
                        // test to send a response back over the oneshot.
                        "/predict_edits/v3" => {
                            let mut buf = Vec::new();
                            body.read_to_end(&mut buf).await.ok();
                            let decompressed = zstd::decode_all(&buf[..]).unwrap();
                            let req = serde_json::from_slice(&decompressed).unwrap();

                            let (res_tx, res_rx) = oneshot::channel();
                            predict_req_tx.unbounded_send((req, res_tx)).unwrap();
                            serde_json::to_string(&res_rx.await?).unwrap()
                        }
                        // Rejection reports are plain (uncompressed) JSON.
                        "/predict_edits/reject" => {
                            let mut buf = Vec::new();
                            body.read_to_end(&mut buf).await.ok();
                            let req = serde_json::from_slice(&buf).unwrap();

                            let (res_tx, res_rx) = oneshot::channel();
                            reject_req_tx.unbounded_send((req, res_tx)).unwrap();
                            serde_json::to_string(&res_rx.await?).unwrap()
                        }
                        _ => {
                            panic!("Unexpected path: {}", uri)
                        }
                    };

                    Ok(Response::builder().body(resp.into()).unwrap())
                }
            }
        });

        let client = client::Client::new(Arc::new(FakeSystemClock::new()), http_client, cx);
        // Pretend we are signed in so prediction requests aren't blocked.
        client.cloud_client().set_credentials(1, "test".into());

        let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
        language_model::init(user_store.clone(), client.clone(), cx);
        let ep_store = EditPredictionStore::global(&client, &user_store, cx);

        (
            ep_store,
            RequestChannels {
                predict: predict_req_rx,
                reject: reject_req_rx,
            },
        )
    })
}
2379
#[gpui::test]
async fn test_edit_prediction_basic_interpolation(cx: &mut TestAppContext) {
    // Interpolation remaps a prediction's anchored edits onto the buffer's
    // current contents as the user keeps typing: parts of an edit the user
    // has already typed are dropped, and the whole prediction becomes `None`
    // once the user's typing conflicts with it.
    let buffer = cx.new(|cx| Buffer::local("Lorem ipsum dolor", cx));
    let edits: Arc<[(Range<Anchor>, Arc<str>)]> = cx.update(|cx| {
        to_completion_edits([(2..5, "REM".into()), (9..11, "".into())], &buffer, cx).into()
    });

    let edit_preview = cx
        .read(|cx| buffer.read(cx).preview_edits(edits.clone(), cx))
        .await;

    let prediction = EditPrediction {
        edits,
        cursor_position: None,
        edit_preview,
        buffer: buffer.clone(),
        snapshot: cx.read(|cx| buffer.read(cx).snapshot()),
        id: EditPredictionId("the-id".into()),
        // Placeholder prompt inputs; interpolation doesn't consult them.
        inputs: ZetaPromptInput {
            events: Default::default(),
            related_files: Default::default(),
            active_buffer_diagnostics: vec![],
            cursor_path: Path::new("").into(),
            cursor_excerpt: "".into(),
            cursor_offset_in_excerpt: 0,
            excerpt_start_row: None,
            excerpt_ranges: Default::default(),
            syntax_ranges: None,
            experiment: None,
            in_open_source_repo: false,
            can_collect_data: false,
            repo_url: None,
        },
        model_version: None,
    };

    cx.update(|cx| {
        // Unedited buffer: interpolation returns the original edits.
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(2..5, "REM".into()), (9..11, "".into())]
        );

        // Deleting the replaced span turns the first edit into an insertion
        // and shifts the second edit left by 3.
        buffer.update(cx, |buffer, cx| buffer.edit([(2..5, "")], None, cx));
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(2..2, "REM".into()), (6..8, "".into())]
        );

        // Undo restores the original mapping.
        buffer.update(cx, |buffer, cx| buffer.undo(cx));
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(2..5, "REM".into()), (9..11, "".into())]
        );

        // Typing the prediction's prefix ("R") shrinks the remaining edit.
        buffer.update(cx, |buffer, cx| buffer.edit([(2..5, "R")], None, cx));
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(3..3, "EM".into()), (7..9, "".into())]
        );

        // Typing "E" consumes another predicted character.
        buffer.update(cx, |buffer, cx| buffer.edit([(3..3, "E")], None, cx));
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(4..4, "M".into()), (8..10, "".into())]
        );

        // Typing the final "M" fully satisfies the first edit; only the
        // deletion remains.
        buffer.update(cx, |buffer, cx| buffer.edit([(4..4, "M")], None, cx));
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(9..11, "".into())]
        );

        // Deleting the just-typed "M" brings the insertion back.
        buffer.update(cx, |buffer, cx| buffer.edit([(4..5, "")], None, cx));
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(4..4, "M".into()), (8..10, "".into())]
        );

        // Performing the predicted deletion by hand satisfies that edit.
        buffer.update(cx, |buffer, cx| buffer.edit([(8..10, "")], None, cx));
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(4..4, "M".into())]
        );

        // Deleting across the remaining insertion point conflicts with the
        // prediction, invalidating it entirely.
        buffer.update(cx, |buffer, cx| buffer.edit([(4..6, "")], None, cx));
        assert_eq!(prediction.interpolate(&buffer.read(cx).snapshot()), None);
    })
}
2500
#[gpui::test]
async fn test_clean_up_diff(cx: &mut TestAppContext) {
    // The model's raw output is diffed against the buffer and cleaned up;
    // these cases verify the applied result matches the intended completion.
    init_test(cx);

    // Rename propagation: the model fixes `word` -> `word_1` at two call
    // sites on the same line.
    assert_eq!(
        apply_edit_prediction(
            indoc! {"
                fn main() {
                    let word_1 = \"lorem\";
                    let range = word.len()..word.len();
                }
            "},
            indoc! {"
                fn main() {
                    let word_1 = \"lorem\";
                    let range = word_1.len()..word_1.len();
                }
            "},
            cx,
        )
        .await,
        indoc! {"
            fn main() {
                let word_1 = \"lorem\";
                let range = word_1.len()..word_1.len();
            }
        "},
    );

    // Mid-line extension: the model completes a string literal and adds the
    // missing semicolon.
    assert_eq!(
        apply_edit_prediction(
            indoc! {"
                fn main() {
                    let story = \"the quick\"
                }
            "},
            indoc! {"
                fn main() {
                    let story = \"the quick brown fox jumps over the lazy dog\";
                }
            "},
            cx,
        )
        .await,
        indoc! {"
            fn main() {
                let story = \"the quick brown fox jumps over the lazy dog\";
            }
        "},
    );
}
2552
2553#[gpui::test]
2554async fn test_edit_prediction_end_of_buffer(cx: &mut TestAppContext) {
2555 init_test(cx);
2556
2557 let buffer_content = "lorem\n";
2558 let completion_response = "lorem\nipsum\n";
2559
2560 assert_eq!(
2561 apply_edit_prediction(buffer_content, completion_response, cx).await,
2562 "lorem\nipsum\n"
2563 );
2564}
2565
#[gpui::test]
async fn test_edit_prediction_no_spurious_trailing_newline(cx: &mut TestAppContext) {
    // Test that zeta2's newline normalization logic doesn't insert spurious newlines.
    // When the buffer ends without a trailing newline, but the model returns output
    // with a trailing newline, zeta2 should normalize both sides before diffing
    // so no spurious newline is inserted.
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());

    // Single line buffer with no trailing newline
    fs.insert_tree(
        "/root",
        json!({
            "foo.txt": "hello"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project
                .find_project_path(path!("root/foo.txt"), cx)
                .unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    // Cursor at the end of "hello" (row 0, column 5).
    let position = snapshot.anchor_before(language::Point::new(0, 5));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    // The fake client surfaces the outgoing request so we can reply to it.
    let (request, respond_tx) = requests.predict.next().await.unwrap();

    // Model returns output WITH a trailing newline, even though the buffer doesn't have one.
    // Zeta2 should normalize both sides before diffing, so no spurious newline is inserted.
    let excerpt_length = request.input.cursor_excerpt.len();
    let response = PredictEditsV3Response {
        request_id: Uuid::new_v4().to_string(),
        output: "hello world\n".to_string(),
        editable_range: 0..excerpt_length,
        model_version: None,
    };
    respond_tx.send(response).unwrap();

    cx.run_until_parked();

    // The prediction should insert " world" without adding a newline
    ep_store.update(cx, |ep_store, cx| {
        let prediction = ep_store
            .prediction_at(&buffer, None, &project, cx)
            .expect("should have prediction");
        let edits: Vec<_> = prediction
            .edits
            .iter()
            .map(|(range, text)| {
                let snapshot = buffer.read(cx).snapshot();
                (range.to_offset(&snapshot), text.clone())
            })
            .collect();
        assert_eq!(edits, vec![(5..5, " world".into())]);
    });
}
2633
2634fn init_test(cx: &mut TestAppContext) {
2635 cx.update(|cx| {
2636 let settings_store = SettingsStore::test(cx);
2637 cx.set_global(settings_store);
2638 });
2639}
2640
2641async fn apply_edit_prediction(
2642 buffer_content: &str,
2643 completion_response: &str,
2644 cx: &mut TestAppContext,
2645) -> String {
2646 let fs = project::FakeFs::new(cx.executor());
2647 let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
2648 let buffer = cx.new(|cx| Buffer::local(buffer_content, cx));
2649 let (ep_store, response) = make_test_ep_store(&project, cx).await;
2650 *response.lock() = completion_response.to_string();
2651 let edit_prediction = run_edit_prediction(&buffer, &project, &ep_store, cx).await;
2652 buffer.update(cx, |buffer, cx| {
2653 buffer.edit(edit_prediction.edits.iter().cloned(), None, cx)
2654 });
2655 buffer.read_with(cx, |buffer, _| buffer.text())
2656}
2657
2658async fn run_edit_prediction(
2659 buffer: &Entity<Buffer>,
2660 project: &Entity<Project>,
2661 ep_store: &Entity<EditPredictionStore>,
2662 cx: &mut TestAppContext,
2663) -> EditPrediction {
2664 let cursor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(1, 0)));
2665 ep_store.update(cx, |ep_store, cx| {
2666 ep_store.register_buffer(buffer, &project, cx)
2667 });
2668 cx.background_executor.run_until_parked();
2669 let prediction_task = ep_store.update(cx, |ep_store, cx| {
2670 ep_store.request_prediction(&project, buffer, cursor, Default::default(), cx)
2671 });
2672 prediction_task.await.unwrap().unwrap().prediction.unwrap()
2673}
2674
/// Build an `EditPredictionStore` backed by a fake HTTP endpoint whose
/// `/predict_edits/v3` responses always contain the current contents of the
/// returned `Arc<Mutex<String>>`; tests overwrite that string to script the
/// model's next completion.
async fn make_test_ep_store(
    project: &Entity<Project>,
    cx: &mut TestAppContext,
) -> (Entity<EditPredictionStore>, Arc<Mutex<String>>) {
    let default_response = "hello world\n".to_string();
    let completion_response: Arc<Mutex<String>> = Arc::new(Mutex::new(default_response));
    let http_client = FakeHttpClient::create({
        let completion_response = completion_response.clone();
        // Monotonically increasing id so each fake response is distinct.
        let mut next_request_id = 0;
        move |req| {
            let completion_response = completion_response.clone();
            let method = req.method().clone();
            let uri = req.uri().path().to_string();
            let mut body = req.into_body();
            async move {
                match (method, uri.as_str()) {
                    // Token refresh: answer with a canned LLM token.
                    (Method::POST, "/client/llm_tokens") => Ok(http_client::Response::builder()
                        .status(200)
                        .body(
                            serde_json::to_string(&CreateLlmTokenResponse {
                                token: LlmToken("the-llm-token".to_string()),
                            })
                            .unwrap()
                            .into(),
                        )
                        .unwrap()),
                    // Prediction bodies arrive zstd-compressed; decode and
                    // respond with the currently scripted completion text
                    // spanning the whole cursor excerpt.
                    (Method::POST, "/predict_edits/v3") => {
                        let mut buf = Vec::new();
                        body.read_to_end(&mut buf).await.ok();
                        let decompressed = zstd::decode_all(&buf[..]).unwrap();
                        let req: PredictEditsV3Request =
                            serde_json::from_slice(&decompressed).unwrap();

                        next_request_id += 1;
                        Ok(http_client::Response::builder()
                            .status(200)
                            .body(
                                serde_json::to_string(&PredictEditsV3Response {
                                    request_id: format!("request-{next_request_id}"),
                                    editable_range: 0..req.input.cursor_excerpt.len(),
                                    output: completion_response.lock().clone(),
                                    model_version: None,
                                })
                                .unwrap()
                                .into(),
                            )
                            .unwrap())
                    }
                    _ => Ok(http_client::Response::builder()
                        .status(404)
                        .body("Not Found".to_string().into())
                        .unwrap()),
                }
            }
        }
    });

    let client = cx.update(|cx| Client::new(Arc::new(FakeSystemClock::new()), http_client, cx));
    let user_store = cx.update(|cx| cx.new(|cx| client::UserStore::new(client.clone(), cx)));
    cx.update(|cx| {
        RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
    });
    let _server = FakeServer::for_client(42, &client, cx).await;

    let ep_store = cx.new(|cx| {
        let mut ep_store = EditPredictionStore::new(client, project.read(cx).user_store(), cx);
        ep_store.set_edit_prediction_model(EditPredictionModel::Zeta);

        // Install a license-detection watcher per worktree.
        // NOTE(review): presumably these gate data collection — confirm
        // against EditPredictionStore before relying on this.
        let worktrees = project.read(cx).worktrees(cx).collect::<Vec<_>>();
        for worktree in worktrees {
            let worktree_id = worktree.read(cx).id();
            ep_store
                .get_or_init_project(project, cx)
                .license_detection_watchers
                .entry(worktree_id)
                .or_insert_with(|| Rc::new(LicenseDetectionWatcher::new(&worktree, cx)));
        }

        ep_store
    });

    (ep_store, completion_response)
}
2758
2759fn to_completion_edits(
2760 iterator: impl IntoIterator<Item = (Range<usize>, Arc<str>)>,
2761 buffer: &Entity<Buffer>,
2762 cx: &App,
2763) -> Vec<(Range<Anchor>, Arc<str>)> {
2764 let buffer = buffer.read(cx);
2765 iterator
2766 .into_iter()
2767 .map(|(range, text)| {
2768 (
2769 buffer.anchor_after(range.start)..buffer.anchor_before(range.end),
2770 text,
2771 )
2772 })
2773 .collect()
2774}
2775
2776fn from_completion_edits(
2777 editor_edits: &[(Range<Anchor>, Arc<str>)],
2778 buffer: &Entity<Buffer>,
2779 cx: &App,
2780) -> Vec<(Range<usize>, Arc<str>)> {
2781 let buffer = buffer.read(cx);
2782 editor_edits
2783 .iter()
2784 .map(|(range, text)| {
2785 (
2786 range.start.to_offset(buffer)..range.end.to_offset(buffer),
2787 text.clone(),
2788 )
2789 })
2790 .collect()
2791}
2792
#[gpui::test]
async fn test_unauthenticated_without_custom_url_blocks_prediction_impl(cx: &mut TestAppContext) {
    // Without credentials (every HTTP call 401s) and without a custom
    // prediction URL, requesting a prediction must fail rather than hang or
    // silently succeed.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/project",
        serde_json::json!({
            "main.rs": "fn main() {\n    \n}\n"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;

    // Every request is rejected, simulating a signed-out client.
    let http_client = FakeHttpClient::create(|_req| async move {
        Ok(gpui::http_client::Response::builder()
            .status(401)
            .body("Unauthorized".into())
            .unwrap())
    });

    let client =
        cx.update(|cx| client::Client::new(Arc::new(FakeSystemClock::new()), http_client, cx));
    let user_store = cx.update(|cx| cx.new(|cx| client::UserStore::new(client.clone(), cx)));
    cx.update(|cx| {
        language_model::RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
    });

    let ep_store = cx.new(|cx| EditPredictionStore::new(client, project.read(cx).user_store(), cx));

    let buffer = project
        .update(cx, |project, cx| {
            let path = project
                .find_project_path(path!("/project/main.rs"), cx)
                .unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    // Cursor inside the empty function body on line 1.
    let cursor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(1, 4)));
    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx)
    });
    cx.background_executor.run_until_parked();

    let completion_task = ep_store.update(cx, |ep_store, cx| {
        ep_store.set_edit_prediction_model(EditPredictionModel::Zeta);
        ep_store.request_prediction(&project, &buffer, cursor, Default::default(), cx)
    });

    let result = completion_task.await;
    assert!(
        result.is_err(),
        "Without authentication and without custom URL, prediction should fail"
    );
}
2851
#[gpui::test]
async fn test_diagnostic_jump_excludes_collaborator_regions(cx: &mut TestAppContext) {
    // Simulates a remote collaborator's cursor by applying an
    // `UpdateSelections` operation stamped with a different replica id.
    fn set_collaborator_cursor(buffer: &Entity<Buffer>, row: u32, cx: &mut TestAppContext) {
        let collab_replica = clock::ReplicaId::new(10);
        let anchor = buffer.read_with(cx, |buffer, _| {
            buffer.snapshot().anchor_before(Point::new(row, 0))
        });
        let selections: Arc<[Selection<Anchor>]> = Arc::new([Selection {
            id: 1,
            start: anchor,
            end: anchor,
            reversed: false,
            goal: SelectionGoal::None,
        }]);
        buffer.update(cx, |buffer, cx| {
            buffer.apply_ops(
                [Operation::UpdateSelections {
                    selections,
                    lamport_timestamp: clock::Lamport {
                        replica_id: collab_replica,
                        value: 1,
                    },
                    line_mode: false,
                    cursor_shape: CursorShape::Bar,
                }],
                cx,
            );
        });
    }

    // Pushes one ERROR diagnostic per listed row (columns 0..5) for a file.
    fn publish_diagnostics(
        uri_path: &'static str,
        rows: &[u32],
        project: &Entity<Project>,
        cx: &mut TestAppContext,
    ) {
        let diagnostics: Vec<_> = rows
            .iter()
            .map(|&row| lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(row, 0), lsp::Position::new(row, 5)),
                severity: Some(lsp::DiagnosticSeverity::ERROR),
                message: format!("error at row {row}"),
                ..Default::default()
            })
            .collect();
        project.update(cx, |project, cx| {
            project.lsp_store().update(cx, |lsp_store, cx| {
                lsp_store
                    .update_diagnostics(
                        LanguageServerId(0),
                        lsp::PublishDiagnosticsParams {
                            uri: lsp::Uri::from_file_path(uri_path).expect("invalid uri"),
                            diagnostics,
                            version: None,
                        },
                        None,
                        language::DiagnosticSourceKind::Pushed,
                        &[],
                        cx,
                    )
                    .expect("failed to update diagnostics");
            });
        });
    }

    init_test(cx);

    // 60-line active buffer so diagnostics can be placed far apart.
    let mut lines = String::new();
    for i in 0..60 {
        lines.push_str(&format!("line {i}\n"));
    }

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "active.txt": lines,
            "collab_file.txt": "error here\nsecond line\n",
            "free_file.txt": "another error\nsecond line\n",
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let active_buffer = project
        .update(cx, |project, cx| {
            let path = project
                .find_project_path(path!("/root/active.txt"), cx)
                .expect("active.txt not found");
            project.set_active_path(Some(path.clone()), cx);
            project.open_buffer(path, cx)
        })
        .await
        .expect("failed to open active buffer");

    // Collaborator cursor on row 5; diagnostics on rows 3, 25, and 50.
    set_collaborator_cursor(&active_buffer, 5, cx);

    publish_diagnostics(path!("/root/active.txt"), &[3, 25, 50], &project, cx);

    cx.run_until_parked();

    // Case 1: local cursor at row 25 — the diagnostic near the collaborator
    // (row 3) must be excluded.
    let cursor_point = Point::new(25, 0);
    let empty_search_range: Range<Point> = Default::default();

    let snapshot = active_buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let result = EditPredictionStore::next_diagnostic_location(
        active_buffer.clone(),
        &snapshot,
        empty_search_range.clone(),
        cursor_point,
        &project,
        &mut cx.to_async(),
    )
    .await
    .expect("next_diagnostic_location failed");

    let (result_buffer, result_anchor) = result.expect("expected a diagnostic location");
    assert_eq!(result_buffer.entity_id(), active_buffer.entity_id());
    let result_row = result_buffer.read_with(cx, |buffer, _| {
        result_anchor.to_point(&buffer.snapshot()).row
    });
    assert_ne!(
        result_row, 3,
        "row 3 is near collaborator (row 5) but far from local cursor (row 25), should be excluded"
    );
    assert!(
        result_row == 25 || result_row == 50,
        "expected row 25 or 50, got {result_row}"
    );

    // Case 2: local cursor at row 4 — row 3 is near BOTH cursors, so it is
    // allowed.
    let snapshot_near = active_buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let near_cursor_point = Point::new(4, 0);
    let result_near = EditPredictionStore::next_diagnostic_location(
        active_buffer.clone(),
        &snapshot_near,
        empty_search_range.clone(),
        near_cursor_point,
        &project,
        &mut cx.to_async(),
    )
    .await
    .expect("next_diagnostic_location failed");

    let (_, near_anchor) = result_near.expect("expected a diagnostic location when both are near");
    let near_row =
        active_buffer.read_with(cx, |buffer, _| near_anchor.to_point(&buffer.snapshot()).row);
    assert_eq!(
        near_row, 3,
        "row 3 should be included when local cursor (row 4) is also near the collaborator"
    );

    // Case 3: local cursor at row 50 — the diagnostic right at the cursor
    // and far from the collaborator wins.
    let snapshot_far = active_buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let far_cursor_point = Point::new(50, 0);
    let result_far = EditPredictionStore::next_diagnostic_location(
        active_buffer.clone(),
        &snapshot_far,
        empty_search_range.clone(),
        far_cursor_point,
        &project,
        &mut cx.to_async(),
    )
    .await
    .expect("next_diagnostic_location failed");

    let (_, far_anchor) = result_far.expect("expected a diagnostic location");
    let far_row =
        active_buffer.read_with(cx, |buffer, _| far_anchor.to_point(&buffer.snapshot()).row);
    assert_eq!(
        far_row, 50,
        "row 50 is near local cursor (row 50) and far from collaborator, should be picked"
    );

    // Case 4 (cross-file): with the whole active file searched-and-excluded,
    // the jump must skip the file holding a collaborator cursor.
    publish_diagnostics(path!("/root/collab_file.txt"), &[0], &project, cx);
    publish_diagnostics(path!("/root/free_file.txt"), &[0], &project, cx);
    cx.run_until_parked();

    let collab_buffer = project
        .update(cx, |project, cx| {
            let path = project
                .find_project_path(path!("/root/collab_file.txt"), cx)
                .expect("collab_file.txt not found");
            project.open_buffer(path, cx)
        })
        .await
        .expect("failed to open collab buffer");

    set_collaborator_cursor(&collab_buffer, 0, cx);
    cx.run_until_parked();

    // Search range covering all of active.txt, so no same-file candidate
    // remains and a cross-file jump is forced.
    let no_same_file_search_range = Point::new(0, 0)..Point::new(59, 0);
    let snapshot_cross = active_buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let result_cross = EditPredictionStore::next_diagnostic_location(
        active_buffer.clone(),
        &snapshot_cross,
        no_same_file_search_range,
        Point::new(0, 0),
        &project,
        &mut cx.to_async(),
    )
    .await
    .expect("cross-file next_diagnostic_location failed");

    let (cross_buffer, _) = result_cross.expect("expected a cross-file diagnostic location");
    let cross_path = cross_buffer.read_with(cx, |buffer, cx| {
        buffer
            .file()
            .expect("buffer should have a file")
            .full_path(cx)
    });
    assert_eq!(
        cross_path,
        Path::new(path!("root/free_file.txt")),
        "should skip collab_file.txt (has collaborator) and pick free_file.txt"
    );
}
3067
#[gpui::test]
async fn test_edit_prediction_settled(cx: &mut TestAppContext) {
    // Verifies "settled" bookkeeping: an enqueued prediction fires its
    // settled callback only after its editable region has gone a full
    // quiescence window (Q = EDIT_PREDICTION_SETTLED_QUIESCENCE) without
    // edits, and predictions over disjoint regions settle independently.
    let (ep_store, _requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());

    // Buffer with two clearly separated regions:
    // Region A = lines 0-9 (offsets 0..50)
    // Region B = lines 20-29 (offsets 105..155)
    // A big gap in between so edits in one region never overlap the other.
    let mut content = String::new();
    for i in 0..30 {
        content.push_str(&format!("line {i:02}\n"));
    }

    fs.insert_tree(
        "/root",
        json!({
            "foo.md": content.clone()
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    // (prediction id, settled text) pairs recorded by the callback below.
    type SettledEventRecord = (EditPredictionId, String);
    let settled_events: Arc<Mutex<Vec<SettledEventRecord>>> = Arc::new(Mutex::new(Vec::new()));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx);

        let settled_events = settled_events.clone();
        ep_store.settled_event_callback = Some(Box::new(move |id, text| {
            settled_events.lock().push((id, text));
        }));
    });

    // --- Phase 1: edit in region A and enqueue prediction A ---

    buffer.update(cx, |buffer, cx| {
        // Edit at the start of line 0.
        buffer.edit(vec![(0..0, "ADDED ")], None, cx);
    });
    cx.run_until_parked();

    let snapshot_a = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());

    // Region A: first 10 lines of the buffer.
    let editable_region_a = 0..snapshot_a.point_to_offset(Point::new(10, 0));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.enqueue_settled_prediction(
            EditPredictionId("prediction-a".into()),
            &project,
            &buffer,
            &snapshot_a,
            editable_region_a.clone(),
            None,
            Duration::from_secs(0),
            cx,
        );
    });

    // --- Phase 2: repeatedly edit in region A to keep it unsettled ---

    // Let the worker process the channel message before we start advancing.
    cx.run_until_parked();

    let mut region_a_edit_offset = 5;
    for _ in 0..3 {
        // Edit inside region A (not at the boundary) so `last_edit_at` is
        // updated before the worker's next wake.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(
                vec![(region_a_edit_offset..region_a_edit_offset, "x")],
                None,
                cx,
            );
        });
        region_a_edit_offset += 1;
        cx.run_until_parked();

        cx.executor()
            .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE / 2);
        cx.run_until_parked();
        assert!(
            settled_events.lock().is_empty(),
            "no settled events should fire while region A is still being edited"
        );
    }

    // Still nothing settled.
    assert!(settled_events.lock().is_empty());

    // --- Phase 3: edit in distinct region B, enqueue prediction B ---
    // Advance a small amount so B's quiescence window starts later than A's,
    // but not so much that A settles (A's last edit was at the start of
    // iteration 3, and it needs a full Q to settle).
    cx.executor()
        .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE / 4);
    cx.run_until_parked();
    assert!(settled_events.lock().is_empty());

    let snapshot_b = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let line_20_offset = snapshot_b.point_to_offset(Point::new(20, 0));

    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(line_20_offset..line_20_offset, "NEW ")], None, cx);
    });
    cx.run_until_parked();

    let snapshot_b2 = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let editable_region_b = line_20_offset..snapshot_b2.point_to_offset(Point::new(25, 0));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.enqueue_settled_prediction(
            EditPredictionId("prediction-b".into()),
            &project,
            &buffer,
            &snapshot_b2,
            editable_region_b.clone(),
            None,
            Duration::from_secs(0),
            cx,
        );
    });

    cx.run_until_parked();
    assert!(
        settled_events.lock().is_empty(),
        "neither prediction should have settled yet"
    );

    // --- Phase 4: let enough time pass for region A to settle ---
    // A's last edit was at T_a (during the last loop iteration). The worker is
    // sleeping until T_a + Q. We advance just enough to reach that wake time
    // (Q/4 since we already advanced Q/4 in phase 3 on top of the loop's
    // 3*Q/2). At that point A has been quiet for Q and settles, but B was
    // enqueued only Q/4 ago and stays pending.
    cx.executor()
        .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE / 4);
    cx.run_until_parked();

    {
        let events = settled_events.lock().clone();
        assert_eq!(
            events.len(),
            1,
            "prediction and capture_sample for A should have settled, got: {events:?}"
        );
        assert_eq!(events[0].0, EditPredictionId("prediction-a".into()));
    }

    // --- Phase 5: let more time pass for region B to settle ---
    // B's last edit was Q/4 before A settled. The worker rescheduled to
    // B's last_edit_at + Q, which is 3Q/4 from now.
    cx.executor()
        .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE * 3 / 4);
    cx.run_until_parked();

    {
        let events = settled_events.lock().clone();
        assert_eq!(
            events.len(),
            2,
            "both prediction and capture_sample settled events should be emitted for each request, got: {events:?}"
        );
        assert_eq!(events[1].0, EditPredictionId("prediction-b".into()));
    }
}
3244
// Runs once at binary startup (via `ctor`) so zlog test logging is active
// for every test, including those that don't call `init_test`.
#[ctor::ctor]
fn init_logger() {
    zlog::init_test();
}