1use super::*;
2use crate::udiff::apply_diff_to_string;
3use client::{UserStore, test::FakeServer};
4use clock::FakeSystemClock;
5use clock::ReplicaId;
6use cloud_api_types::{CreateLlmTokenResponse, LlmToken};
7use cloud_llm_client::{
8 EditPredictionRejectReason, EditPredictionRejection, RejectEditPredictionsBody,
9 predict_edits_v3::{PredictEditsV3Request, PredictEditsV3Response},
10};
11use futures::{
12 AsyncReadExt, FutureExt, StreamExt,
13 channel::{mpsc, oneshot},
14};
15use gpui::App;
16use gpui::{
17 Entity, TestAppContext,
18 http_client::{FakeHttpClient, Response},
19};
20use indoc::indoc;
21use language::{
22 Anchor, Buffer, Capability, CursorShape, Diagnostic, DiagnosticEntry, DiagnosticSet,
23 DiagnosticSeverity, Operation, Point, Selection, SelectionGoal,
24};
25use lsp::LanguageServerId;
26use parking_lot::Mutex;
27use pretty_assertions::{assert_eq, assert_matches};
28use project::{FakeFs, Project};
29use serde_json::json;
30use settings::SettingsStore;
31use std::{ops::Range, path::Path, sync::Arc, time::Duration};
32use util::{
33 path,
34 test::{TextRangeMarker, marked_text_ranges_by},
35};
36use uuid::Uuid;
37use zeta_prompt::ZetaPromptInput;
38
39use crate::{
40 BufferEditPrediction, EDIT_PREDICTION_SETTLED_QUIESCENCE, EditPredictionId,
41 EditPredictionStore, REJECT_REQUEST_DEBOUNCE,
42};
43
// Verifies the store's "current prediction" state machine end to end: a
// prediction targeting the active buffer surfaces as `Local`; after it is
// rejected, a diagnostic in another file drives a new prediction that
// surfaces as `Jump` from the active buffer, and as `Local` once the other
// file's buffer is opened.
#[gpui::test]
async fn test_current_state(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "1.txt": "Hello!\nHow\nBye\n",
            "2.txt": "Hola!\nComo\nAdios\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("/root/1.txt"), cx).unwrap();
            project.set_active_path(Some(path.clone()), cx);
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot1 = buffer1.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot1.anchor_before(language::Point::new(1, 3));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_project(&project, cx);
        ep_store.register_buffer(&buffer1, &project, cx);
    });

    // Prediction for current file

    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer1.clone(), position, cx)
    });
    let (request, respond_tx) = requests.predict.next().await.unwrap();

    respond_tx
        .send(model_response(
            &request,
            indoc! {r"
                --- a/root/1.txt
                +++ b/root/1.txt
                @@ ... @@
                 Hello!
                -How
                +How are you?
                 Bye
            "},
        ))
        .unwrap();

    cx.run_until_parked();

    // The edit targets the active buffer, so it is presented in place.
    ep_store.update(cx, |ep_store, cx| {
        let prediction = ep_store
            .prediction_at(&buffer1, None, &project, cx)
            .unwrap();
        assert_matches!(prediction, BufferEditPrediction::Local { .. });
    });

    ep_store.update(cx, |ep_store, cx| {
        ep_store.reject_current_prediction(EditPredictionRejectReason::Discarded, &project, cx);
    });

    // Prediction for diagnostic in another file

    let diagnostic = lsp::Diagnostic {
        range: lsp::Range::new(lsp::Position::new(1, 1), lsp::Position::new(1, 5)),
        severity: Some(lsp::DiagnosticSeverity::ERROR),
        message: "Sentence is incomplete".to_string(),
        ..Default::default()
    };

    project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostics(
                    LanguageServerId(0),
                    lsp::PublishDiagnosticsParams {
                        uri: lsp::Uri::from_file_path(path!("/root/2.txt")).unwrap(),
                        diagnostics: vec![diagnostic],
                        version: None,
                    },
                    None,
                    language::DiagnosticSourceKind::Pushed,
                    &[],
                    cx,
                )
                .unwrap();
        });
    });

    // Publishing the diagnostic triggers another prediction request without an
    // explicit refresh call.
    let (request, respond_tx) = requests.predict.next().await.unwrap();
    respond_tx
        .send(model_response(
            &request,
            indoc! {r#"
                --- a/root/2.txt
                +++ b/root/2.txt
                @@ ... @@
                 Hola!
                -Como
                +Como estas?
                 Adios
            "#},
        ))
        .unwrap();
    cx.run_until_parked();

    // From buffer 1's point of view the prediction lives in another file: `Jump`.
    ep_store.update(cx, |ep_store, cx| {
        let prediction = ep_store
            .prediction_at(&buffer1, None, &project, cx)
            .unwrap();
        assert_matches!(
            prediction,
            BufferEditPrediction::Jump { prediction } if prediction.snapshot.file().unwrap().full_path(cx) == Path::new(path!("root/2.txt"))
        );
    });

    let buffer2 = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/2.txt"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    // Once the target buffer is open, the same prediction reads as `Local` there.
    ep_store.update(cx, |ep_store, cx| {
        let prediction = ep_store
            .prediction_at(&buffer2, None, &project, cx)
            .unwrap();
        assert_matches!(prediction, BufferEditPrediction::Local { .. });
    });
}
179
// Exercises the basic request/response round trip: a prediction request is
// issued, the fake model answers with a unified diff, and the resulting
// prediction carries the expected single edit at the expected position.
#[gpui::test]
async fn test_simple_request(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    let prediction_task = ep_store.update(cx, |ep_store, cx| {
        ep_store.request_prediction(&project, &buffer, position, Default::default(), cx)
    });

    let (request, respond_tx) = requests.predict.next().await.unwrap();

    // TODO Put back when we have a structured request again
    // assert_eq!(
    //     request.excerpt_path.as_ref(),
    //     Path::new(path!("root/foo.md"))
    // );
    // assert_eq!(
    //     request.cursor_point,
    //     Point {
    //         line: Line(1),
    //         column: 3
    //     }
    // );

    respond_tx
        .send(model_response(
            &request,
            indoc! { r"
                --- a/root/foo.md
                +++ b/root/foo.md
                @@ ... @@
                 Hello!
                -How
                +How are you?
                 Bye
            "},
        ))
        .unwrap();

    let prediction = prediction_task.await.unwrap().unwrap().prediction.unwrap();

    // The diff replaces "How" with "How are you?", which interpolates down to a
    // single insertion of " are you?" at the cursor position (1, 3).
    assert_eq!(prediction.edits.len(), 1);
    assert_eq!(
        prediction.edits[0].0.to_point(&snapshot).start,
        language::Point::new(1, 3)
    );
    assert_eq!(prediction.edits[0].1.as_ref(), " are you?");
}
246
// Verifies that recent buffer edits are reported to the model: the prompt sent
// with a prediction request must contain the edit-history diff of the user's
// preceding change.
#[gpui::test]
async fn test_request_events(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\n\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx);
    });

    // Create some history to report: insert "How" on the empty second line.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(7..7, "How")], None, cx);
    });

    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    let prediction_task = ep_store.update(cx, |ep_store, cx| {
        ep_store.request_prediction(&project, &buffer, position, Default::default(), cx)
    });

    let (request, respond_tx) = requests.predict.next().await.unwrap();

    // The prompt must contain the edit-history diff for the insertion above.
    let prompt = prompt_from_request(&request);
    assert!(
        prompt.contains(indoc! {"
            --- a/root/foo.md
            +++ b/root/foo.md
            @@ -1,3 +1,3 @@
             Hello!
            -
            +How
             Bye
        "}),
        "{prompt}"
    );

    respond_tx
        .send(model_response(
            &request,
            indoc! {r#"
                --- a/root/foo.md
                +++ b/root/foo.md
                @@ ... @@
                 Hello!
                -How
                +How are you?
                 Bye
            "#},
        ))
        .unwrap();

    let prediction = prediction_task.await.unwrap().unwrap().prediction.unwrap();

    assert_eq!(prediction.edits.len(), 1);
    assert_eq!(prediction.edits[0].1.as_ref(), " are you?");
}
319
// Verifies that a pause longer than the grouping threshold splits the edit
// history into two distinct events, even when all edits touch the same line
// span (which would otherwise coalesce into one event).
#[gpui::test]
async fn test_edit_history_getter_pause_splits_last_event(cx: &mut TestAppContext) {
    let (ep_store, _requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\n\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx);
    });

    // First burst: insert "How"
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(7..7, "How")], None, cx);
    });

    // Simulate a pause longer than the grouping threshold (e.g. 500ms).
    cx.executor().advance_clock(LAST_CHANGE_GROUPING_TIME * 2);
    cx.run_until_parked();

    // Second burst: append " are you?" immediately after "How" on the same line.
    //
    // Keeping both bursts on the same line ensures the existing line-span coalescing logic
    // groups them into a single `LastEvent`, allowing the pause-split getter to return two diffs.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(10..10, " are you?")], None, cx);
    });

    // A second edit shortly after the first post-pause edit ensures the last edit timestamp is
    // advanced after the pause boundary is recorded, making pause-splitting deterministic.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(19..19, "!")], None, cx);
    });

    // With time-based splitting, there are two distinct events.
    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(events.len(), 2);

    // First event covers only the pre-pause insertion of "How".
    let first_total_edit_range = buffer.read_with(cx, |buffer, _| {
        events[0].total_edit_range.to_point(&buffer.snapshot())
    });
    assert_eq!(first_total_edit_range, Point::new(1, 0)..Point::new(1, 3));

    let zeta_prompt::Event::BufferChange { diff, .. } = events[0].event.as_ref();
    assert_eq!(
        diff.as_str(),
        indoc! {"
            @@ -1,3 +1,3 @@
             Hello!
            -
            +How
             Bye
        "}
    );

    // Second event covers both post-pause edits, merged together.
    let second_total_edit_range = buffer.read_with(cx, |buffer, _| {
        events[1].total_edit_range.to_point(&buffer.snapshot())
    });
    assert_eq!(second_total_edit_range, Point::new(1, 3)..Point::new(1, 13));

    let zeta_prompt::Event::BufferChange { diff, .. } = events[1].event.as_ref();
    assert_eq!(
        diff.as_str(),
        indoc! {"
            @@ -1,3 +1,3 @@
             Hello!
            -How
            +How are you?!
             Bye
        "}
    );
}
408
// Verifies line-distance-based coalescing of edit-history events: edits within
// 8 lines of an existing event's range (above or below) merge into it, while
// edits farther away start a new event.
//
// NOTE(review): the name mentions predicted edits, but the body only exercises
// line-distance coalescing of manual edits — confirm whether the name or the
// body is out of date.
#[gpui::test]
async fn test_predicted_edits_are_separated_in_edit_history(cx: &mut TestAppContext) {
    let (ep_store, _requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());

    // Create a file with 30 lines to test line-based coalescing
    let content = (1..=30)
        .map(|i| format!("Line {}\n", i))
        .collect::<String>();
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": content
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx);
    });

    // First edit: multi-line edit spanning rows 10-12 (replacing lines 11-13)
    buffer.update(cx, |buffer, cx| {
        let start = Point::new(10, 0).to_offset(buffer);
        let end = Point::new(13, 0).to_offset(buffer);
        buffer.edit(vec![(start..end, "Middle A\nMiddle B\n")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events(&events),
        indoc! {"
            @@ -8,9 +8,8 @@
             Line 8
             Line 9
             Line 10
            -Line 11
            -Line 12
            -Line 13
            +Middle A
            +Middle B
             Line 14
             Line 15
             Line 16
        "},
        "After first edit"
    );

    // Second edit: insert ABOVE the first edit's range (row 5, within 8 lines of row 10)
    // This tests that coalescing considers the START of the existing range
    buffer.update(cx, |buffer, cx| {
        let offset = Point::new(5, 0).to_offset(buffer);
        buffer.edit(vec![(offset..offset, "Above\n")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events(&events),
        indoc! {"
            @@ -3,14 +3,14 @@
             Line 3
             Line 4
             Line 5
            +Above
             Line 6
             Line 7
             Line 8
             Line 9
             Line 10
            -Line 11
            -Line 12
            -Line 13
            +Middle A
            +Middle B
             Line 14
             Line 15
             Line 16
        "},
        "After inserting above (should coalesce)"
    );

    // Third edit: insert BELOW the first edit's range (row 14 in current buffer, within 8 lines of row 12)
    // This tests that coalescing considers the END of the existing range
    buffer.update(cx, |buffer, cx| {
        let offset = Point::new(14, 0).to_offset(buffer);
        buffer.edit(vec![(offset..offset, "Below\n")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events(&events),
        indoc! {"
            @@ -3,15 +3,16 @@
             Line 3
             Line 4
             Line 5
            +Above
             Line 6
             Line 7
             Line 8
             Line 9
             Line 10
            -Line 11
            -Line 12
            -Line 13
            +Middle A
            +Middle B
             Line 14
            +Below
             Line 15
             Line 16
             Line 17
        "},
        "After inserting below (should coalesce)"
    );

    // Fourth edit: insert FAR BELOW (row 25, beyond 8 lines from the current range end ~row 15)
    // This should NOT coalesce - creates a new event
    buffer.update(cx, |buffer, cx| {
        let offset = Point::new(25, 0).to_offset(buffer);
        buffer.edit(vec![(offset..offset, "Far below\n")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events(&events),
        indoc! {"
            @@ -3,15 +3,16 @@
             Line 3
             Line 4
             Line 5
            +Above
             Line 6
             Line 7
             Line 8
             Line 9
             Line 10
            -Line 11
            -Line 12
            -Line 13
            +Middle A
            +Middle B
             Line 14
            +Below
             Line 15
             Line 16
             Line 17

            ---
            @@ -23,6 +23,7 @@
             Line 22
             Line 23
             Line 24
            +Far below
             Line 25
             Line 26
             Line 27
        "},
        "After inserting far below (should NOT coalesce)"
    );
}
587
588fn render_events(events: &[StoredEvent]) -> String {
589 events
590 .iter()
591 .map(|e| {
592 let zeta_prompt::Event::BufferChange { diff, .. } = e.event.as_ref();
593 diff.as_str()
594 })
595 .collect::<Vec<_>>()
596 .join("\n---\n")
597}
598
599fn render_events_with_predicted(events: &[StoredEvent]) -> Vec<String> {
600 events
601 .iter()
602 .map(|e| {
603 let zeta_prompt::Event::BufferChange {
604 diff, predicted, ..
605 } = e.event.as_ref();
606 let prefix = if *predicted { "predicted" } else { "manual" };
607 format!("{}\n{}", prefix, diff)
608 })
609 .collect()
610}
611
612fn make_collaborator_replica(
613 buffer: &Entity<Buffer>,
614 cx: &mut TestAppContext,
615) -> (Entity<Buffer>, clock::Global) {
616 let (state, version) =
617 buffer.read_with(cx, |buffer, _cx| (buffer.to_proto(_cx), buffer.version()));
618 let collaborator = cx.new(|_cx| {
619 Buffer::from_proto(ReplicaId::new(1), Capability::ReadWrite, state, None).unwrap()
620 });
621 (collaborator, version)
622}
623
624async fn apply_collaborator_edit(
625 collaborator: &Entity<Buffer>,
626 buffer: &Entity<Buffer>,
627 since_version: &mut clock::Global,
628 edit_range: Range<usize>,
629 new_text: &str,
630 cx: &mut TestAppContext,
631) {
632 collaborator.update(cx, |collaborator, cx| {
633 collaborator.edit([(edit_range, new_text)], None, cx);
634 });
635
636 let serialize_task = collaborator.read_with(cx, |collaborator, cx| {
637 collaborator.serialize_ops(Some(since_version.clone()), cx)
638 });
639 let ops = serialize_task.await;
640 *since_version = collaborator.read_with(cx, |collaborator, _cx| collaborator.version());
641
642 buffer.update(cx, |buffer, cx| {
643 buffer.apply_ops(
644 ops.into_iter()
645 .map(|op| language::proto::deserialize_operation(op).unwrap()),
646 cx,
647 );
648 });
649}
650
// Verifies that a collaborator's edit close to the local user's cursor/edit
// location is merged into the same edit-history event as the local edit.
#[gpui::test]
async fn test_nearby_collaborator_edits_are_kept_in_history(cx: &mut TestAppContext) {
    let (ep_store, _requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.rs": "line 0\nline 1\nline 2\nline 3\nline 4\nline 5\nline 6\nline 7\nline 8\nline 9\nline 10\nline 11\nline 12\nline 13\nline 14\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.rs"), cx).unwrap();
            project.set_active_path(Some(path.clone()), cx);
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    let cursor = buffer.read_with(cx, |buffer, _cx| buffer.anchor_before(Point::new(1, 0)));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx);
        // Record the cursor position so the store knows where the user is working.
        let _ = ep_store.prediction_at(&buffer, Some(cursor), &project, cx);
    });

    // Local edit on line 0.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(0..6, "LOCAL ZERO")], None, cx);
    });

    let (collaborator, mut collaborator_version) = make_collaborator_replica(&buffer, cx);

    let (line_one_start, line_one_len) = collaborator.read_with(cx, |buffer, _cx| {
        (Point::new(1, 0).to_offset(buffer), buffer.line_len(1))
    });

    // Remote edit on line 1, directly adjacent to the local edit.
    apply_collaborator_edit(
        &collaborator,
        &buffer,
        &mut collaborator_version,
        line_one_start..line_one_start + line_one_len as usize,
        "REMOTE ONE",
        cx,
    )
    .await;

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });

    // Both edits appear in a single history event.
    assert_eq!(
        render_events_with_predicted(&events),
        vec![indoc! {"
            manual
            @@ -1,5 +1,5 @@
            -line 0
            -line 1
            +LOCAL ZERO
            +REMOTE ONE
             line 2
             line 3
             line 4
        "}]
    );
}
719
// Verifies that a collaborator's edit far away from the local user's location
// (line 900 vs. the user's cursor at line 1) does not appear in edit history.
#[gpui::test]
async fn test_distant_collaborator_edits_are_omitted_from_history(cx: &mut TestAppContext) {
    let (ep_store, _requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.rs": (0..1000)
                .map(|i| format!("line {i}\n"))
                .collect::<String>()
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.rs"), cx).unwrap();
            project.set_active_path(Some(path.clone()), cx);
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    let cursor = buffer.read_with(cx, |buffer, _cx| buffer.anchor_before(Point::new(1, 0)));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx);
        // Record the cursor position so the store knows where the user is working.
        let _ = ep_store.prediction_at(&buffer, Some(cursor), &project, cx);
    });

    // Local edit near the cursor.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(0..6, "LOCAL ZERO")], None, cx);
    });

    let (collaborator, mut collaborator_version) = make_collaborator_replica(&buffer, cx);

    let far_line_start = buffer.read_with(cx, |buffer, _cx| Point::new(900, 0).to_offset(buffer));

    // Remote edit hundreds of lines away from the cursor.
    apply_collaborator_edit(
        &collaborator,
        &buffer,
        &mut collaborator_version,
        far_line_start..far_line_start + 7,
        "REMOTE FAR",
        cx,
    )
    .await;

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });

    // Only the local edit is recorded; the distant remote edit is omitted.
    assert_eq!(
        render_events_with_predicted(&events),
        vec![indoc! {"
            manual
            @@ -1,4 +1,4 @@
            -line 0
            +LOCAL ZERO
             line 1
             line 2
             line 3
        "}]
    );
}
786
// Verifies that a collaborator's edit in a file the local user is not working
// in produces no edit-history events at all.
#[gpui::test]
async fn test_irrelevant_collaborator_edits_in_different_files_are_omitted_from_history(
    cx: &mut TestAppContext,
) {
    let (ep_store, _requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.rs": "line 0\nline 1\nline 2\nline 3\n",
            "bar.rs": "line 0\nline 1\nline 2\nline 3\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let foo_buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.rs"), cx).unwrap();
            project.set_active_path(Some(path.clone()), cx);
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let bar_buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/bar.rs"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    let foo_cursor = foo_buffer.read_with(cx, |buffer, _cx| buffer.anchor_before(Point::new(1, 0)));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&foo_buffer, &project, cx);
        ep_store.register_buffer(&bar_buffer, &project, cx);
        // The user is working in foo.rs; bar.rs is merely open.
        let _ = ep_store.prediction_at(&foo_buffer, Some(foo_cursor), &project, cx);
    });

    let (bar_collaborator, mut bar_version) = make_collaborator_replica(&bar_buffer, cx);

    // Remote edit in bar.rs, a file the user isn't editing.
    apply_collaborator_edit(
        &bar_collaborator,
        &bar_buffer,
        &mut bar_version,
        0..6,
        "REMOTE BAR",
        cx,
    )
    .await;

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });

    assert!(events.is_empty());
}
845
// Verifies how the `predicted` flag interacts with event coalescing: manual
// edits merge with manual edits, accepted predictions merge with predictions,
// a manual edit never merges into a predicted event while it is the most
// recent one, and once activity moves elsewhere, prior events at the old
// location may merge regardless of flag (with `predicted` meaning *all*
// constituent edits were predicted).
#[gpui::test]
async fn test_predicted_flag_coalescing(cx: &mut TestAppContext) {
    let (ep_store, _requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.rs": "line 0\nline 1\nline 2\nline 3\nline 4\nline 5\nline 6\nline 7\nline 8\nline 9\nline 10\nline 11\nline 12\nline 13\nline 14\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.rs"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx);
    });

    // Case 1: Manual edits have `predicted` set to false.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(0..6, "LINE ZERO")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });

    assert_eq!(
        render_events_with_predicted(&events),
        vec![indoc! {"
            manual
            @@ -1,4 +1,4 @@
            -line 0
            +LINE ZERO
             line 1
             line 2
             line 3
        "}]
    );

    // Case 2: Multiple successive manual edits near each other are merged into one
    // event with `predicted` set to false.
    buffer.update(cx, |buffer, cx| {
        let offset = Point::new(1, 0).to_offset(buffer);
        let end = Point::new(1, 6).to_offset(buffer);
        buffer.edit(vec![(offset..end, "LINE ONE")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events_with_predicted(&events),
        vec![indoc! {"
            manual
            @@ -1,5 +1,5 @@
            -line 0
            -line 1
            +LINE ZERO
            +LINE ONE
             line 2
             line 3
             line 4
        "}]
    );

    // Case 3: Accepted predictions have `predicted` set to true.
    // Case 5: A manual edit that follows a predicted edit is not merged with the
    // predicted edit, even if it is nearby.
    ep_store.update(cx, |ep_store, cx| {
        buffer.update(cx, |buffer, cx| {
            let offset = Point::new(2, 0).to_offset(buffer);
            let end = Point::new(2, 6).to_offset(buffer);
            buffer.edit(vec![(offset..end, "LINE TWO")], None, cx);
        });
        // Report the change as predicted (second `true` flag).
        ep_store.report_changes_for_buffer(&buffer, &project, true, true, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events_with_predicted(&events),
        vec![
            indoc! {"
                manual
                @@ -1,5 +1,5 @@
                -line 0
                -line 1
                +LINE ZERO
                +LINE ONE
                 line 2
                 line 3
                 line 4
            "},
            indoc! {"
                predicted
                @@ -1,6 +1,6 @@
                 LINE ZERO
                 LINE ONE
                -line 2
                +LINE TWO
                 line 3
                 line 4
                 line 5
            "}
        ]
    );

    // Case 4: Multiple successive accepted predictions near each other are merged
    // into one event with `predicted` set to true.
    ep_store.update(cx, |ep_store, cx| {
        buffer.update(cx, |buffer, cx| {
            let offset = Point::new(3, 0).to_offset(buffer);
            let end = Point::new(3, 6).to_offset(buffer);
            buffer.edit(vec![(offset..end, "LINE THREE")], None, cx);
        });
        ep_store.report_changes_for_buffer(&buffer, &project, true, true, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events_with_predicted(&events),
        vec![
            indoc! {"
                manual
                @@ -1,5 +1,5 @@
                -line 0
                -line 1
                +LINE ZERO
                +LINE ONE
                 line 2
                 line 3
                 line 4
            "},
            indoc! {"
                predicted
                @@ -1,7 +1,7 @@
                 LINE ZERO
                 LINE ONE
                -line 2
                -line 3
                +LINE TWO
                +LINE THREE
                 line 4
                 line 5
                 line 6
            "}
        ]
    );

    // Case 5 (continued): A manual edit that follows a predicted edit is not merged
    // with the predicted edit, even if it is nearby.
    buffer.update(cx, |buffer, cx| {
        let offset = Point::new(4, 0).to_offset(buffer);
        let end = Point::new(4, 6).to_offset(buffer);
        buffer.edit(vec![(offset..end, "LINE FOUR")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events_with_predicted(&events),
        vec![
            indoc! {"
                manual
                @@ -1,5 +1,5 @@
                -line 0
                -line 1
                +LINE ZERO
                +LINE ONE
                 line 2
                 line 3
                 line 4
            "},
            indoc! {"
                predicted
                @@ -1,7 +1,7 @@
                 LINE ZERO
                 LINE ONE
                -line 2
                -line 3
                +LINE TWO
                +LINE THREE
                 line 4
                 line 5
                 line 6
            "},
            indoc! {"
                manual
                @@ -2,7 +2,7 @@
                 LINE ONE
                 LINE TWO
                 LINE THREE
                -line 4
                +LINE FOUR
                 line 5
                 line 6
                 line 7
            "}
        ]
    );

    // Case 6: If we then perform a manual edit at a *different* location (more than
    // 8 lines away), then the edits at the prior location can be merged with each
    // other, even if some are predicted and some are not. `predicted` means all
    // constituent edits were predicted.
    buffer.update(cx, |buffer, cx| {
        let offset = Point::new(14, 0).to_offset(buffer);
        let end = Point::new(14, 7).to_offset(buffer);
        buffer.edit(vec![(offset..end, "LINE FOURTEEN")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events_with_predicted(&events),
        vec![
            indoc! {"
                manual
                @@ -1,8 +1,8 @@
                -line 0
                -line 1
                -line 2
                -line 3
                -line 4
                +LINE ZERO
                +LINE ONE
                +LINE TWO
                +LINE THREE
                +LINE FOUR
                 line 5
                 line 6
                 line 7
            "},
            indoc! {"
                manual
                @@ -12,4 +12,4 @@
                 line 11
                 line 12
                 line 13
                -line 14
                +LINE FOURTEEN
            "}
        ]
    );
}
1104
// Verifies that an empty model response yields no prediction and is
// automatically reported back as rejected with reason `Empty`.
#[gpui::test]
async fn test_empty_prediction(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    // Respond with an empty diff: the model predicted no edits.
    let (request, respond_tx) = requests.predict.next().await.unwrap();
    let response = model_response(&request, "");
    let id = response.request_id.clone();
    respond_tx.send(response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        assert!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .is_none()
        );
    });

    // prediction is reported as rejected
    let (reject_request, _) = requests.reject.next().await.unwrap();

    assert_eq!(
        &reject_request.rejections,
        &[EditPredictionRejection {
            request_id: id,
            reason: EditPredictionRejectReason::Empty,
            was_shown: false,
            model_version: None,
        }]
    );
}
1160
// Verifies that a prediction whose edits are entirely consumed by the user's
// own typing (the buffer already matches the predicted text when the response
// arrives) yields no prediction and is reported as rejected with reason
// `InterpolatedEmpty`.
#[gpui::test]
async fn test_interpolated_empty(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request, respond_tx) = requests.predict.next().await.unwrap();

    // Before the response arrives, the user types the predicted text themselves.
    buffer.update(cx, |buffer, cx| {
        buffer.set_text("Hello!\nHow are you?\nBye", cx);
    });

    let response = model_response(&request, SIMPLE_DIFF);
    let id = response.request_id.clone();
    respond_tx.send(response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        assert!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .is_none()
        );
    });

    // prediction is reported as rejected
    let (reject_request, _) = requests.reject.next().await.unwrap();

    assert_eq!(
        &reject_request.rejections,
        &[EditPredictionRejection {
            request_id: id,
            reason: EditPredictionRejectReason::InterpolatedEmpty,
            was_shown: false,
            model_version: None,
        }]
    );
}
1221
// Unified diff shared by several tests below: against the fixture content
// "Hello!\nHow\nBye\n" it rewrites the middle line to "How are you?".
const SIMPLE_DIFF: &str = indoc! { r"
    --- a/root/foo.md
    +++ b/root/foo.md
    @@ ... @@
     Hello!
    -How
    +How are you?
     Bye
"};
1231
#[gpui::test]
async fn test_replace_current(cx: &mut TestAppContext) {
    // When a newer prediction arrives that is at least as good as the current
    // one, it replaces it, and the superseded prediction is reported with
    // reason `Replaced`.
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request, respond_tx) = requests.predict.next().await.unwrap();
    let first_response = model_response(&request, SIMPLE_DIFF);
    let first_id = first_response.request_id.clone();
    respond_tx.send(first_response).unwrap();

    cx.run_until_parked();

    // The first prediction is now current.
    ep_store.update(cx, |ep_store, cx| {
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            first_id
        );
    });

    // a second request is triggered
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request, respond_tx) = requests.predict.next().await.unwrap();
    let second_response = model_response(&request, SIMPLE_DIFF);
    let second_id = second_response.request_id.clone();
    respond_tx.send(second_response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // second replaces first
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            second_id
        );
    });

    // first is reported as replaced
    let (reject_request, _) = requests.reject.next().await.unwrap();

    assert_eq!(
        &reject_request.rejections,
        &[EditPredictionRejection {
            request_id: first_id,
            reason: EditPredictionRejectReason::Replaced,
            was_shown: false,
            model_version: None,
        }]
    );
}
1314
#[gpui::test]
async fn test_current_preferred(cx: &mut TestAppContext) {
    // When a newer prediction is strictly worse than the current one, the
    // current prediction is kept and the newer one is rejected with
    // `CurrentPreferred`.
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request, respond_tx) = requests.predict.next().await.unwrap();
    let first_response = model_response(&request, SIMPLE_DIFF);
    let first_id = first_response.request_id.clone();
    respond_tx.send(first_response).unwrap();

    cx.run_until_parked();

    // The first prediction is now current.
    ep_store.update(cx, |ep_store, cx| {
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            first_id
        );
    });

    // a second request is triggered
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request, respond_tx) = requests.predict.next().await.unwrap();
    // worse than current prediction
    let second_response = model_response(
        &request,
        indoc! { r"
        --- a/root/foo.md
        +++ b/root/foo.md
        @@ ... @@
         Hello!
        -How
        +How are
         Bye
        "},
    );
    let second_id = second_response.request_id.clone();
    respond_tx.send(second_response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // first is preferred over second
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            first_id
        );
    });

    // second is reported as rejected
    let (reject_request, _) = requests.reject.next().await.unwrap();

    assert_eq!(
        &reject_request.rejections,
        &[EditPredictionRejection {
            request_id: second_id,
            reason: EditPredictionRejectReason::CurrentPreferred,
            was_shown: false,
            model_version: None,
        }]
    );
}
1409
#[gpui::test]
async fn test_cancel_earlier_pending_requests(cx: &mut TestAppContext) {
    // If a later request completes before an earlier in-flight one, the earlier
    // request is cancelled: its late response must not displace the newer
    // prediction, and it is reported with `Canceled`.
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    // start two refresh tasks
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request1, respond_first) = requests.predict.next().await.unwrap();

    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request, respond_second) = requests.predict.next().await.unwrap();

    // wait for throttle
    cx.run_until_parked();

    // second responds first
    let second_response = model_response(&request, SIMPLE_DIFF);
    let second_id = second_response.request_id.clone();
    respond_second.send(second_response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // current prediction is second
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            second_id
        );
    });

    // Now the stale first response arrives.
    let first_response = model_response(&request1, SIMPLE_DIFF);
    let first_id = first_response.request_id.clone();
    respond_first.send(first_response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // current prediction is still second, since first was cancelled
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            second_id
        );
    });

    // first is reported as rejected
    let (reject_request, _) = requests.reject.next().await.unwrap();

    cx.run_until_parked();

    assert_eq!(
        &reject_request.rejections,
        &[EditPredictionRejection {
            request_id: first_id,
            reason: EditPredictionRejectReason::Canceled,
            was_shown: false,
            model_version: None,
        }]
    );
}
1501
#[gpui::test]
async fn test_cancel_second_on_third_request(cx: &mut TestAppContext) {
    // With two requests already pending, starting a third cancels the middle
    // one. The first may still complete (and later be replaced by the third);
    // the cancelled second must never become current. Expected rejections:
    // second -> Canceled, first -> Replaced.
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    // start two refresh tasks
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request1, respond_first) = requests.predict.next().await.unwrap();

    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request2, respond_second) = requests.predict.next().await.unwrap();

    // wait for throttle, so requests are sent
    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // start a third request
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);

        // 2 are pending, so 2nd is cancelled
        assert_eq!(
            ep_store
                .get_or_init_project(&project, cx)
                .cancelled_predictions
                .iter()
                .copied()
                .collect::<Vec<_>>(),
            [1]
        );
    });

    // wait for throttle
    cx.run_until_parked();

    let (request3, respond_third) = requests.predict.next().await.unwrap();

    let first_response = model_response(&request1, SIMPLE_DIFF);
    let first_id = first_response.request_id.clone();
    respond_first.send(first_response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // current prediction is first
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            first_id
        );
    });

    // The cancelled second request responds; its result must be ignored.
    let cancelled_response = model_response(&request2, SIMPLE_DIFF);
    let cancelled_id = cancelled_response.request_id.clone();
    respond_second.send(cancelled_response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // current prediction is still first, since second was cancelled
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            first_id
        );
    });

    let third_response = model_response(&request3, SIMPLE_DIFF);
    let third_response_id = third_response.request_id.clone();
    respond_third.send(third_response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // third completes and replaces first
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            third_response_id
        );
    });

    // second is reported as rejected
    let (reject_request, _) = requests.reject.next().await.unwrap();

    cx.run_until_parked();

    assert_eq!(
        &reject_request.rejections,
        &[
            EditPredictionRejection {
                request_id: cancelled_id,
                reason: EditPredictionRejectReason::Canceled,
                was_shown: false,
                model_version: None,
            },
            EditPredictionRejection {
                request_id: first_id,
                reason: EditPredictionRejectReason::Replaced,
                was_shown: false,
                model_version: None,
            }
        ]
    );
}
1639
#[gpui::test]
async fn test_jump_and_edit_throttles_are_independent(cx: &mut TestAppContext) {
    // Edit-triggered and diagnostics("jump")-triggered prediction refreshes
    // each have their own throttle timer: a recent edit request must not delay
    // a jump request and vice versa.
    let (ep_store, mut requests) = init_test_with_fake_client(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n",
            "bar.md": "Hola!\nComo\nAdios\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.set_active_path(Some(path.clone()), cx);
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_project(&project, cx);
        ep_store.register_buffer(&buffer, &project, cx);
    });

    // First edit request - no prior edit, so not throttled.
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });
    let (_edit_request, edit_response_tx) = requests.predict.next().await.unwrap();
    edit_response_tx.send(empty_response()).unwrap();
    cx.run_until_parked();

    let diagnostic = lsp::Diagnostic {
        range: lsp::Range::new(lsp::Position::new(1, 1), lsp::Position::new(1, 5)),
        severity: Some(lsp::DiagnosticSeverity::ERROR),
        message: "Sentence is incomplete".to_string(),
        ..Default::default()
    };

    // First jump request triggered by diagnostic event on buffer - no prior jump, so not throttled (independent from edit).
    project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostics(
                    LanguageServerId(0),
                    lsp::PublishDiagnosticsParams {
                        uri: lsp::Uri::from_file_path(path!("/root/bar.md")).unwrap(),
                        diagnostics: vec![diagnostic],
                        version: None,
                    },
                    None,
                    language::DiagnosticSourceKind::Pushed,
                    &[],
                    cx,
                )
                .unwrap();
        });
    });
    let (_jump_request, jump_response_tx) = requests.predict.next().await.unwrap();
    jump_response_tx.send(empty_response()).unwrap();
    cx.run_until_parked();

    // Second edit request - should be throttled by the first edit.
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });
    assert_no_predict_request_ready(&mut requests.predict);

    // Second jump request - should be throttled by the first jump.
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_diagnostics(
            project.clone(),
            DiagnosticSearchScope::Global,
            cx,
        );
    });
    assert_no_predict_request_ready(&mut requests.predict);

    // Wait for both throttles to expire.
    cx.background_executor
        .advance_clock(EditPredictionStore::THROTTLE_TIMEOUT);
    cx.background_executor.run_until_parked();
    cx.run_until_parked();

    // Both requests should now go through.
    let (_request_1, response_tx_1) = requests.predict.next().await.unwrap();
    response_tx_1.send(empty_response()).unwrap();
    cx.run_until_parked();

    let (_request_2, response_tx_2) = requests.predict.next().await.unwrap();
    response_tx_2.send(empty_response()).unwrap();
    cx.run_until_parked();
}
1740
#[gpui::test]
async fn test_same_frame_duplicate_requests_deduplicated(cx: &mut TestAppContext) {
    // Two refresh calls issued within the same synchronous frame should be
    // collapsed into a single prediction request.
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    // Enqueue two refresh calls in the same synchronous frame (no yielding).
    // Both `cx.spawn` tasks are created before either executes, so they both
    // capture the same `proceed_count_at_enqueue`. Only the first task should
    // pass the deduplication gate; the second should be skipped.
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    // Let both spawned tasks run to completion (including any throttle waits).
    cx.run_until_parked();

    // Exactly one prediction request should have been sent.
    let (request, respond_tx) = requests.predict.next().await.unwrap();
    respond_tx
        .send(model_response(&request, SIMPLE_DIFF))
        .unwrap();
    cx.run_until_parked();

    // No second request should be pending.
    assert_no_predict_request_ready(&mut requests.predict);
}
1786
#[gpui::test]
async fn test_rejections_flushing(cx: &mut TestAppContext) {
    // Exercises the rejection-reporting pipeline: debounced batching, eager
    // flushing when the batch size limit is reached, and retrying failed
    // requests together with newly queued rejections.
    let (ep_store, mut requests) = init_test_with_fake_client(cx);

    ep_store.update(cx, |ep_store, cx| {
        ep_store.reject_prediction(
            EditPredictionId("test-1".into()),
            EditPredictionRejectReason::Discarded,
            false,
            None,
            cx,
        );
        ep_store.reject_prediction(
            EditPredictionId("test-2".into()),
            EditPredictionRejectReason::Canceled,
            true,
            None,
            cx,
        );
    });

    cx.executor().advance_clock(REJECT_REQUEST_DEBOUNCE);
    cx.run_until_parked();

    let (reject_request, respond_tx) = requests.reject.next().await.unwrap();
    respond_tx.send(()).unwrap();

    // batched
    assert_eq!(reject_request.rejections.len(), 2);
    assert_eq!(
        reject_request.rejections[0],
        EditPredictionRejection {
            request_id: "test-1".to_string(),
            reason: EditPredictionRejectReason::Discarded,
            was_shown: false,
            model_version: None,
        }
    );
    assert_eq!(
        reject_request.rejections[1],
        EditPredictionRejection {
            request_id: "test-2".to_string(),
            reason: EditPredictionRejectReason::Canceled,
            was_shown: true,
            model_version: None,
        }
    );

    // Reaching batch size limit sends without debounce
    ep_store.update(cx, |ep_store, cx| {
        for i in 0..70 {
            ep_store.reject_prediction(
                EditPredictionId(format!("batch-{}", i).into()),
                EditPredictionRejectReason::Discarded,
                false,
                None,
                cx,
            );
        }
    });

    // First MAX/2 items are sent immediately
    cx.run_until_parked();
    let (reject_request, respond_tx) = requests.reject.next().await.unwrap();
    respond_tx.send(()).unwrap();

    assert_eq!(reject_request.rejections.len(), 50);
    assert_eq!(reject_request.rejections[0].request_id, "batch-0");
    assert_eq!(reject_request.rejections[49].request_id, "batch-49");

    // Remaining items are debounced with the next batch
    // NOTE(review): advances by a fixed 15s rather than REJECT_REQUEST_DEBOUNCE
    // as elsewhere — presumably a comfortable margin past the debounce; confirm.
    cx.executor().advance_clock(Duration::from_secs(15));
    cx.run_until_parked();

    let (reject_request, respond_tx) = requests.reject.next().await.unwrap();
    respond_tx.send(()).unwrap();

    assert_eq!(reject_request.rejections.len(), 20);
    assert_eq!(reject_request.rejections[0].request_id, "batch-50");
    assert_eq!(reject_request.rejections[19].request_id, "batch-69");

    // Request failure
    ep_store.update(cx, |ep_store, cx| {
        ep_store.reject_prediction(
            EditPredictionId("retry-1".into()),
            EditPredictionRejectReason::Discarded,
            false,
            None,
            cx,
        );
    });

    cx.executor().advance_clock(REJECT_REQUEST_DEBOUNCE);
    cx.run_until_parked();

    let (reject_request, _respond_tx) = requests.reject.next().await.unwrap();
    assert_eq!(reject_request.rejections.len(), 1);
    assert_eq!(reject_request.rejections[0].request_id, "retry-1");
    // Simulate failure
    drop(_respond_tx);

    // Add another rejection
    ep_store.update(cx, |ep_store, cx| {
        ep_store.reject_prediction(
            EditPredictionId("retry-2".into()),
            EditPredictionRejectReason::Discarded,
            false,
            None,
            cx,
        );
    });

    cx.executor().advance_clock(REJECT_REQUEST_DEBOUNCE);
    cx.run_until_parked();

    // Retry should include both the failed item and the new one
    let (reject_request, respond_tx) = requests.reject.next().await.unwrap();
    respond_tx.send(()).unwrap();

    assert_eq!(reject_request.rejections.len(), 2);
    assert_eq!(reject_request.rejections[0].request_id, "retry-1");
    assert_eq!(reject_request.rejections[1].request_id, "retry-2");
}
1910
#[gpui::test]
fn test_active_buffer_diagnostics_fetching(cx: &mut TestAppContext) {
    // Verifies `zeta::active_buffer_diagnostics`: only diagnostics inside the
    // search range are returned, ERROR/WARNING severities take priority, and
    // the reported snippet row ranges and in-snippet offsets are correct.
    //
    // Marked-text legend: «…» marks diagnostic ranges, […] marks the search range.
    let diagnostic_marker: TextRangeMarker = ('«', '»').into();
    let search_range_marker: TextRangeMarker = ('[', ']').into();

    let (text, mut ranges) = marked_text_ranges_by(
        indoc! {r#"
            fn alpha() {
                let «first_value» = 1;
            }

            [fn beta() {
                let «second_value» = 2;
                let third_value = second_value + missing_symbol;
            }ˇ]

            fn gamma() {
                let «fourth_value» = missing_other_symbol;
            }
        "#},
        vec![diagnostic_marker.clone(), search_range_marker.clone()],
    );

    let diagnostic_ranges = ranges.remove(&diagnostic_marker).unwrap_or_default();
    let search_ranges = ranges.remove(&search_range_marker).unwrap_or_default();

    let buffer = cx.new(|cx| Buffer::local(&text, cx));

    buffer.update(cx, |buffer, cx| {
        let snapshot = buffer.snapshot();
        // Populate one diagnostic per marked range, with varying severities so
        // the severity filtering below is exercised.
        let diagnostics = DiagnosticSet::new(
            diagnostic_ranges
                .iter()
                .enumerate()
                .map(|(index, range)| DiagnosticEntry {
                    range: snapshot.offset_to_point_utf16(range.start)
                        ..snapshot.offset_to_point_utf16(range.end),
                    diagnostic: Diagnostic {
                        severity: match index {
                            0 => DiagnosticSeverity::WARNING,
                            1 => DiagnosticSeverity::ERROR,
                            _ => DiagnosticSeverity::HINT,
                        },
                        message: match index {
                            0 => "first warning".to_string(),
                            1 => "second error".to_string(),
                            _ => "third hint".to_string(),
                        },
                        group_id: index + 1,
                        is_primary: true,
                        source_kind: language::DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }),
            &snapshot,
        );
        buffer.update_diagnostics(LanguageServerId(0), diagnostics, cx);
    });

    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let search_range = snapshot.offset_to_point(search_ranges[0].start)
        ..snapshot.offset_to_point(search_ranges[0].end);

    let active_buffer_diagnostics = zeta::active_buffer_diagnostics(&snapshot, search_range, 100);

    // Only the error within the search range is returned; the warning and hint
    // outside the range are excluded.
    assert_eq!(
        active_buffer_diagnostics,
        vec![zeta_prompt::ActiveBufferDiagnostic {
            severity: Some(1),
            message: "second error".to_string(),
            snippet: text,
            snippet_buffer_row_range: 5..5,
            diagnostic_range_in_snippet: 61..73,
        }]
    );

    // Second scenario: a plain buffer with diagnostics on rows 0, 2, and 4.
    let buffer = cx.new(|cx| {
        Buffer::local(
            indoc! {"
                one
                two
                three
                four
                five
            "},
            cx,
        )
    });

    buffer.update(cx, |buffer, cx| {
        let snapshot = buffer.snapshot();
        let diagnostics = DiagnosticSet::new(
            vec![
                DiagnosticEntry {
                    range: text::PointUtf16::new(0, 0)..text::PointUtf16::new(0, 3),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "row zero".to_string(),
                        group_id: 1,
                        is_primary: true,
                        source_kind: language::DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: text::PointUtf16::new(2, 0)..text::PointUtf16::new(2, 5),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "row two".to_string(),
                        group_id: 2,
                        is_primary: true,
                        source_kind: language::DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: text::PointUtf16::new(4, 0)..text::PointUtf16::new(4, 4),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::INFORMATION,
                        message: "row four".to_string(),
                        group_id: 3,
                        is_primary: true,
                        source_kind: language::DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
            ],
            &snapshot,
        );
        buffer.update_diagnostics(LanguageServerId(0), diagnostics, cx);
    });

    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());

    // Search rows 2..4: picks up the warning on row 2 and the info on row 4,
    // but not the error on row 0.
    let active_buffer_diagnostics =
        zeta::active_buffer_diagnostics(&snapshot, Point::new(2, 0)..Point::new(4, 0), 100);

    assert_eq!(
        active_buffer_diagnostics
            .iter()
            .map(|diagnostic| (
                diagnostic.severity,
                diagnostic.message.clone(),
                diagnostic.snippet.clone(),
                diagnostic.snippet_buffer_row_range.clone(),
                diagnostic.diagnostic_range_in_snippet.clone(),
            ))
            .collect::<Vec<_>>(),
        vec![
            (
                Some(2),
                "row two".to_string(),
                "one\ntwo\nthree\nfour\nfive\n".to_string(),
                2..2,
                8..13,
            ),
            (
                Some(3),
                "row four".to_string(),
                "one\ntwo\nthree\nfour\nfive\n".to_string(),
                4..4,
                19..23,
            ),
        ]
    );
}
2077
2078// Generate a model response that would apply the given diff to the active file.
2079fn model_response(request: &PredictEditsV3Request, diff_to_apply: &str) -> PredictEditsV3Response {
2080 let editable_range =
2081 zeta_prompt::excerpt_range_for_format(Default::default(), &request.input.excerpt_ranges).1;
2082 let excerpt = request.input.cursor_excerpt[editable_range.clone()].to_string();
2083 let new_excerpt = apply_diff_to_string(diff_to_apply, &excerpt).unwrap();
2084
2085 PredictEditsV3Response {
2086 request_id: Uuid::new_v4().to_string(),
2087 editable_range,
2088 output: new_excerpt,
2089 model_version: None,
2090 }
2091}
2092
2093fn empty_response() -> PredictEditsV3Response {
2094 PredictEditsV3Response {
2095 request_id: Uuid::new_v4().to_string(),
2096 editable_range: 0..0,
2097 output: String::new(),
2098 model_version: None,
2099 }
2100}
2101
2102fn prompt_from_request(request: &PredictEditsV3Request) -> String {
2103 zeta_prompt::format_zeta_prompt(&request.input, zeta_prompt::ZetaFormat::default())
2104}
2105
2106fn assert_no_predict_request_ready(
2107 requests: &mut mpsc::UnboundedReceiver<(
2108 PredictEditsV3Request,
2109 oneshot::Sender<PredictEditsV3Response>,
2110 )>,
2111) {
2112 if requests.next().now_or_never().flatten().is_some() {
2113 panic!("Unexpected prediction request while throttled.");
2114 }
2115}
2116
/// Receiving ends of the fake HTTP endpoints set up by
/// `init_test_with_fake_client`. Each incoming request is paired with a
/// oneshot sender the test uses to deliver the response (dropping the sender
/// simulates a request failure).
struct RequestChannels {
    // One entry per POST to /predict_edits/v3.
    predict: mpsc::UnboundedReceiver<(
        PredictEditsV3Request,
        oneshot::Sender<PredictEditsV3Response>,
    )>,
    // One entry per POST to /predict_edits/reject.
    reject: mpsc::UnboundedReceiver<(RejectEditPredictionsBody, oneshot::Sender<()>)>,
}
2124
/// Set up an `EditPredictionStore` backed by a fake HTTP client that routes
/// predict/reject requests into channels the test can inspect and answer.
fn init_test_with_fake_client(
    cx: &mut TestAppContext,
) -> (Entity<EditPredictionStore>, RequestChannels) {
    cx.update(move |cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        zlog::init_test();

        let (predict_req_tx, predict_req_rx) = mpsc::unbounded();
        let (reject_req_tx, reject_req_rx) = mpsc::unbounded();

        let http_client = FakeHttpClient::create({
            move |req| {
                let uri = req.uri().path().to_string();
                let mut body = req.into_body();
                let predict_req_tx = predict_req_tx.clone();
                let reject_req_tx = reject_req_tx.clone();
                async move {
                    let resp = match uri.as_str() {
                        // Token endpoint: always succeed with a dummy token.
                        "/client/llm_tokens" => serde_json::to_string(&json!({
                            "token": "test"
                        }))
                        .unwrap(),
                        // Prediction endpoint: bodies are zstd-compressed JSON.
                        // Forward the decoded request to the test and await its
                        // reply via the paired oneshot channel.
                        "/predict_edits/v3" => {
                            let mut buf = Vec::new();
                            body.read_to_end(&mut buf).await.ok();
                            let decompressed = zstd::decode_all(&buf[..]).unwrap();
                            let req = serde_json::from_slice(&decompressed).unwrap();

                            let (res_tx, res_rx) = oneshot::channel();
                            predict_req_tx.unbounded_send((req, res_tx)).unwrap();
                            serde_json::to_string(&res_rx.await?).unwrap()
                        }
                        // Rejection endpoint: plain JSON body. Dropping the
                        // oneshot sender in the test makes `res_rx.await?`
                        // fail here, simulating a request failure.
                        "/predict_edits/reject" => {
                            let mut buf = Vec::new();
                            body.read_to_end(&mut buf).await.ok();
                            let req = serde_json::from_slice(&buf).unwrap();

                            let (res_tx, res_rx) = oneshot::channel();
                            reject_req_tx.unbounded_send((req, res_tx)).unwrap();
                            serde_json::to_string(&res_rx.await?).unwrap()
                        }
                        _ => {
                            panic!("Unexpected path: {}", uri)
                        }
                    };

                    Ok(Response::builder().body(resp.into()).unwrap())
                }
            }
        });

        let client = client::Client::new(Arc::new(FakeSystemClock::new()), http_client, cx);
        client.cloud_client().set_credentials(1, "test".into());

        let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
        language_model::init(user_store.clone(), client.clone(), cx);
        let ep_store = EditPredictionStore::global(&client, &user_store, cx);

        (
            ep_store,
            RequestChannels {
                predict: predict_req_rx,
                reject: reject_req_rx,
            },
        )
    })
}
2193
#[gpui::test]
async fn test_edit_prediction_basic_interpolation(cx: &mut TestAppContext) {
    // Verifies `EditPrediction::interpolate`: as the user types (or deletes)
    // text, pending predicted edits shrink to just the not-yet-applied
    // remainder, and interpolation returns `None` once the user's edits
    // conflict with the prediction.
    let buffer = cx.new(|cx| Buffer::local("Lorem ipsum dolor", cx));
    let edits: Arc<[(Range<Anchor>, Arc<str>)]> = cx.update(|cx| {
        to_completion_edits([(2..5, "REM".into()), (9..11, "".into())], &buffer, cx).into()
    });

    let edit_preview = cx
        .read(|cx| buffer.read(cx).preview_edits(edits.clone(), cx))
        .await;

    // Minimal prediction fixture; the prompt inputs are irrelevant here.
    let prediction = EditPrediction {
        edits,
        cursor_position: None,
        edit_preview,
        buffer: buffer.clone(),
        snapshot: cx.read(|cx| buffer.read(cx).snapshot()),
        id: EditPredictionId("the-id".into()),
        inputs: ZetaPromptInput {
            events: Default::default(),
            related_files: Default::default(),
            active_buffer_diagnostics: vec![],
            cursor_path: Path::new("").into(),
            cursor_excerpt: "".into(),
            cursor_offset_in_excerpt: 0,
            excerpt_start_row: None,
            excerpt_ranges: Default::default(),
            syntax_ranges: None,
            experiment: None,
            in_open_source_repo: false,
            can_collect_data: false,
            repo_url: None,
        },
        buffer_snapshotted_at: Instant::now(),
        response_received_at: Instant::now(),
        model_version: None,
    };

    cx.update(|cx| {
        // Unmodified buffer: interpolation returns the original edits.
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(2..5, "REM".into()), (9..11, "".into())]
        );

        // Deleting the to-be-replaced range turns the edit into an insertion.
        buffer.update(cx, |buffer, cx| buffer.edit([(2..5, "")], None, cx));
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(2..2, "REM".into()), (6..8, "".into())]
        );

        // Undo restores the original edits.
        buffer.update(cx, |buffer, cx| buffer.undo(cx));
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(2..5, "REM".into()), (9..11, "".into())]
        );

        // Typing a prefix of the predicted text shrinks the pending edit.
        buffer.update(cx, |buffer, cx| buffer.edit([(2..5, "R")], None, cx));
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(3..3, "EM".into()), (7..9, "".into())]
        );

        buffer.update(cx, |buffer, cx| buffer.edit([(3..3, "E")], None, cx));
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(4..4, "M".into()), (8..10, "".into())]
        );

        // First edit fully typed out: only the deletion remains.
        buffer.update(cx, |buffer, cx| buffer.edit([(4..4, "M")], None, cx));
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(9..11, "".into())]
        );

        // Deleting a typed character re-introduces the corresponding edit.
        buffer.update(cx, |buffer, cx| buffer.edit([(4..5, "")], None, cx));
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(4..4, "M".into()), (8..10, "".into())]
        );

        // Performing the predicted deletion leaves only the insertion.
        buffer.update(cx, |buffer, cx| buffer.edit([(8..10, "")], None, cx));
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(4..4, "M".into())]
        );

        // A conflicting deletion invalidates the prediction entirely.
        buffer.update(cx, |buffer, cx| buffer.edit([(4..6, "")], None, cx));
        assert_eq!(prediction.interpolate(&buffer.read(cx).snapshot()), None);
    })
}
2316
2317#[gpui::test]
2318async fn test_clean_up_diff(cx: &mut TestAppContext) {
2319 init_test(cx);
2320
2321 assert_eq!(
2322 apply_edit_prediction(
2323 indoc! {"
2324 fn main() {
2325 let word_1 = \"lorem\";
2326 let range = word.len()..word.len();
2327 }
2328 "},
2329 indoc! {"
2330 fn main() {
2331 let word_1 = \"lorem\";
2332 let range = word_1.len()..word_1.len();
2333 }
2334 "},
2335 cx,
2336 )
2337 .await,
2338 indoc! {"
2339 fn main() {
2340 let word_1 = \"lorem\";
2341 let range = word_1.len()..word_1.len();
2342 }
2343 "},
2344 );
2345
2346 assert_eq!(
2347 apply_edit_prediction(
2348 indoc! {"
2349 fn main() {
2350 let story = \"the quick\"
2351 }
2352 "},
2353 indoc! {"
2354 fn main() {
2355 let story = \"the quick brown fox jumps over the lazy dog\";
2356 }
2357 "},
2358 cx,
2359 )
2360 .await,
2361 indoc! {"
2362 fn main() {
2363 let story = \"the quick brown fox jumps over the lazy dog\";
2364 }
2365 "},
2366 );
2367}
2368
2369#[gpui::test]
2370async fn test_edit_prediction_end_of_buffer(cx: &mut TestAppContext) {
2371 init_test(cx);
2372
2373 let buffer_content = "lorem\n";
2374 let completion_response = "lorem\nipsum\n";
2375
2376 assert_eq!(
2377 apply_edit_prediction(buffer_content, completion_response, cx).await,
2378 "lorem\nipsum\n"
2379 );
2380}
2381
2382#[gpui::test]
2383async fn test_edit_prediction_no_spurious_trailing_newline(cx: &mut TestAppContext) {
2384 // Test that zeta2's newline normalization logic doesn't insert spurious newlines.
2385 // When the buffer ends without a trailing newline, but the model returns output
2386 // with a trailing newline, zeta2 should normalize both sides before diffing
2387 // so no spurious newline is inserted.
2388 let (ep_store, mut requests) = init_test_with_fake_client(cx);
2389 let fs = FakeFs::new(cx.executor());
2390
2391 // Single line buffer with no trailing newline
2392 fs.insert_tree(
2393 "/root",
2394 json!({
2395 "foo.txt": "hello"
2396 }),
2397 )
2398 .await;
2399 let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;
2400
2401 let buffer = project
2402 .update(cx, |project, cx| {
2403 let path = project
2404 .find_project_path(path!("root/foo.txt"), cx)
2405 .unwrap();
2406 project.open_buffer(path, cx)
2407 })
2408 .await
2409 .unwrap();
2410
2411 let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
2412 let position = snapshot.anchor_before(language::Point::new(0, 5));
2413
2414 ep_store.update(cx, |ep_store, cx| {
2415 ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
2416 });
2417
2418 let (request, respond_tx) = requests.predict.next().await.unwrap();
2419
2420 // Model returns output WITH a trailing newline, even though the buffer doesn't have one.
2421 // Zeta2 should normalize both sides before diffing, so no spurious newline is inserted.
2422 let excerpt_length = request.input.cursor_excerpt.len();
2423 let response = PredictEditsV3Response {
2424 request_id: Uuid::new_v4().to_string(),
2425 output: "hello world\n".to_string(),
2426 editable_range: 0..excerpt_length,
2427 model_version: None,
2428 };
2429 respond_tx.send(response).unwrap();
2430
2431 cx.run_until_parked();
2432
2433 // The prediction should insert " world" without adding a newline
2434 ep_store.update(cx, |ep_store, cx| {
2435 let prediction = ep_store
2436 .prediction_at(&buffer, None, &project, cx)
2437 .expect("should have prediction");
2438 let edits: Vec<_> = prediction
2439 .edits
2440 .iter()
2441 .map(|(range, text)| {
2442 let snapshot = buffer.read(cx).snapshot();
2443 (range.to_offset(&snapshot), text.clone())
2444 })
2445 .collect();
2446 assert_eq!(edits, vec![(5..5, " world".into())]);
2447 });
2448}
2449
2450fn init_test(cx: &mut TestAppContext) {
2451 cx.update(|cx| {
2452 let settings_store = SettingsStore::test(cx);
2453 cx.set_global(settings_store);
2454 });
2455}
2456
2457async fn apply_edit_prediction(
2458 buffer_content: &str,
2459 completion_response: &str,
2460 cx: &mut TestAppContext,
2461) -> String {
2462 let fs = project::FakeFs::new(cx.executor());
2463 let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
2464 let buffer = cx.new(|cx| Buffer::local(buffer_content, cx));
2465 let (ep_store, response) = make_test_ep_store(&project, cx).await;
2466 *response.lock() = completion_response.to_string();
2467 let edit_prediction = run_edit_prediction(&buffer, &project, &ep_store, cx).await;
2468 buffer.update(cx, |buffer, cx| {
2469 buffer.edit(edit_prediction.edits.iter().cloned(), None, cx)
2470 });
2471 buffer.read_with(cx, |buffer, _| buffer.text())
2472}
2473
2474async fn run_edit_prediction(
2475 buffer: &Entity<Buffer>,
2476 project: &Entity<Project>,
2477 ep_store: &Entity<EditPredictionStore>,
2478 cx: &mut TestAppContext,
2479) -> EditPrediction {
2480 let cursor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(1, 0)));
2481 ep_store.update(cx, |ep_store, cx| {
2482 ep_store.register_buffer(buffer, &project, cx)
2483 });
2484 cx.background_executor.run_until_parked();
2485 let prediction_task = ep_store.update(cx, |ep_store, cx| {
2486 ep_store.request_prediction(&project, buffer, cursor, Default::default(), cx)
2487 });
2488 prediction_task.await.unwrap().unwrap().prediction.unwrap()
2489}
2490
/// Builds an `EditPredictionStore` wired to a fake HTTP client.
///
/// Returns the store plus a shared string slot: the fake `/predict_edits/v3`
/// endpoint returns whatever text is in the slot, so tests overwrite it to
/// control the model output. The slot defaults to "hello world\n".
async fn make_test_ep_store(
    project: &Entity<Project>,
    cx: &mut TestAppContext,
) -> (Entity<EditPredictionStore>, Arc<Mutex<String>>) {
    let default_response = "hello world\n".to_string();
    let completion_response: Arc<Mutex<String>> = Arc::new(Mutex::new(default_response));
    let http_client = FakeHttpClient::create({
        let completion_response = completion_response.clone();
        let mut next_request_id = 0;
        move |req| {
            let completion_response = completion_response.clone();
            let method = req.method().clone();
            let uri = req.uri().path().to_string();
            let mut body = req.into_body();
            async move {
                match (method, uri.as_str()) {
                    // Token endpoint: always hand out the same fake LLM token.
                    (Method::POST, "/client/llm_tokens") => Ok(http_client::Response::builder()
                        .status(200)
                        .body(
                            serde_json::to_string(&CreateLlmTokenResponse {
                                token: LlmToken("the-llm-token".to_string()),
                            })
                            .unwrap()
                            .into(),
                        )
                        .unwrap()),
                    // Prediction endpoint: the request body is zstd-compressed
                    // JSON; decode it and echo back the configured completion.
                    (Method::POST, "/predict_edits/v3") => {
                        let mut buf = Vec::new();
                        body.read_to_end(&mut buf).await.ok();
                        let decompressed = zstd::decode_all(&buf[..]).unwrap();
                        let req: PredictEditsV3Request =
                            serde_json::from_slice(&decompressed).unwrap();

                        // Each response gets a distinct, deterministic id.
                        next_request_id += 1;
                        Ok(http_client::Response::builder()
                            .status(200)
                            .body(
                                serde_json::to_string(&PredictEditsV3Response {
                                    request_id: format!("request-{next_request_id}"),
                                    editable_range: 0..req.input.cursor_excerpt.len(),
                                    output: completion_response.lock().clone(),
                                    model_version: None,
                                })
                                .unwrap()
                                .into(),
                            )
                            .unwrap())
                    }
                    _ => Ok(http_client::Response::builder()
                        .status(404)
                        .body("Not Found".to_string().into())
                        .unwrap()),
                }
            }
        }
    });

    let client = cx.update(|cx| Client::new(Arc::new(FakeSystemClock::new()), http_client, cx));
    let user_store = cx.update(|cx| cx.new(|cx| client::UserStore::new(client.clone(), cx)));
    cx.update(|cx| {
        RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
    });
    // Stand up a fake collab server connection for the client.
    let _server = FakeServer::for_client(42, &client, cx).await;

    let ep_store = cx.new(|cx| {
        let mut ep_store = EditPredictionStore::new(client, project.read(cx).user_store(), cx);
        ep_store.set_edit_prediction_model(EditPredictionModel::Zeta);

        // Ensure every worktree has a license-detection watcher registered
        // up front, rather than lazily on first use.
        let worktrees = project.read(cx).worktrees(cx).collect::<Vec<_>>();
        for worktree in worktrees {
            let worktree_id = worktree.read(cx).id();
            ep_store
                .get_or_init_project(project, cx)
                .license_detection_watchers
                .entry(worktree_id)
                .or_insert_with(|| Rc::new(LicenseDetectionWatcher::new(&worktree, cx)));
        }

        ep_store
    });

    (ep_store, completion_response)
}
2574
2575fn to_completion_edits(
2576 iterator: impl IntoIterator<Item = (Range<usize>, Arc<str>)>,
2577 buffer: &Entity<Buffer>,
2578 cx: &App,
2579) -> Vec<(Range<Anchor>, Arc<str>)> {
2580 let buffer = buffer.read(cx);
2581 iterator
2582 .into_iter()
2583 .map(|(range, text)| {
2584 (
2585 buffer.anchor_after(range.start)..buffer.anchor_before(range.end),
2586 text,
2587 )
2588 })
2589 .collect()
2590}
2591
2592fn from_completion_edits(
2593 editor_edits: &[(Range<Anchor>, Arc<str>)],
2594 buffer: &Entity<Buffer>,
2595 cx: &App,
2596) -> Vec<(Range<usize>, Arc<str>)> {
2597 let buffer = buffer.read(cx);
2598 editor_edits
2599 .iter()
2600 .map(|(range, text)| {
2601 (
2602 range.start.to_offset(buffer)..range.end.to_offset(buffer),
2603 text.clone(),
2604 )
2605 })
2606 .collect()
2607}
2608
2609#[gpui::test]
2610async fn test_unauthenticated_without_custom_url_blocks_prediction_impl(cx: &mut TestAppContext) {
2611 init_test(cx);
2612
2613 let fs = FakeFs::new(cx.executor());
2614 fs.insert_tree(
2615 "/project",
2616 serde_json::json!({
2617 "main.rs": "fn main() {\n \n}\n"
2618 }),
2619 )
2620 .await;
2621
2622 let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
2623
2624 let http_client = FakeHttpClient::create(|_req| async move {
2625 Ok(gpui::http_client::Response::builder()
2626 .status(401)
2627 .body("Unauthorized".into())
2628 .unwrap())
2629 });
2630
2631 let client =
2632 cx.update(|cx| client::Client::new(Arc::new(FakeSystemClock::new()), http_client, cx));
2633 let user_store = cx.update(|cx| cx.new(|cx| client::UserStore::new(client.clone(), cx)));
2634 cx.update(|cx| {
2635 language_model::RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
2636 });
2637
2638 let ep_store = cx.new(|cx| EditPredictionStore::new(client, project.read(cx).user_store(), cx));
2639
2640 let buffer = project
2641 .update(cx, |project, cx| {
2642 let path = project
2643 .find_project_path(path!("/project/main.rs"), cx)
2644 .unwrap();
2645 project.open_buffer(path, cx)
2646 })
2647 .await
2648 .unwrap();
2649
2650 let cursor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(1, 4)));
2651 ep_store.update(cx, |ep_store, cx| {
2652 ep_store.register_buffer(&buffer, &project, cx)
2653 });
2654 cx.background_executor.run_until_parked();
2655
2656 let completion_task = ep_store.update(cx, |ep_store, cx| {
2657 ep_store.set_edit_prediction_model(EditPredictionModel::Zeta);
2658 ep_store.request_prediction(&project, &buffer, cursor, Default::default(), cx)
2659 });
2660
2661 let result = completion_task.await;
2662 assert!(
2663 result.is_err(),
2664 "Without authentication and without custom URL, prediction should fail"
2665 );
2666}
2667
/// Verifies that `next_diagnostic_location` avoids diagnostics that sit near
/// a collaborator's cursor — unless the local cursor is also near — both
/// within a single buffer and when jumping across files.
#[gpui::test]
async fn test_diagnostic_jump_excludes_collaborator_regions(cx: &mut TestAppContext) {
    // Simulates a remote collaborator's cursor at the start of `row` by
    // applying an `UpdateSelections` operation from a non-local replica (10).
    fn set_collaborator_cursor(buffer: &Entity<Buffer>, row: u32, cx: &mut TestAppContext) {
        let collab_replica = clock::ReplicaId::new(10);
        let anchor = buffer.read_with(cx, |buffer, _| {
            buffer.snapshot().anchor_before(Point::new(row, 0))
        });
        let selections: Arc<[Selection<Anchor>]> = Arc::new([Selection {
            id: 1,
            start: anchor,
            end: anchor,
            reversed: false,
            goal: SelectionGoal::None,
        }]);
        buffer.update(cx, |buffer, cx| {
            buffer.apply_ops(
                [Operation::UpdateSelections {
                    selections,
                    lamport_timestamp: clock::Lamport {
                        replica_id: collab_replica,
                        value: 1,
                    },
                    line_mode: false,
                    cursor_shape: CursorShape::Bar,
                }],
                cx,
            );
        });
    }

    // Pushes ERROR-severity diagnostics (columns 0..5) at each of `rows` for
    // the file at `uri_path` through the project's LSP store.
    fn publish_diagnostics(
        uri_path: &'static str,
        rows: &[u32],
        project: &Entity<Project>,
        cx: &mut TestAppContext,
    ) {
        let diagnostics: Vec<_> = rows
            .iter()
            .map(|&row| lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(row, 0), lsp::Position::new(row, 5)),
                severity: Some(lsp::DiagnosticSeverity::ERROR),
                message: format!("error at row {row}"),
                ..Default::default()
            })
            .collect();
        project.update(cx, |project, cx| {
            project.lsp_store().update(cx, |lsp_store, cx| {
                lsp_store
                    .update_diagnostics(
                        LanguageServerId(0),
                        lsp::PublishDiagnosticsParams {
                            uri: lsp::Uri::from_file_path(uri_path).expect("invalid uri"),
                            diagnostics,
                            version: None,
                        },
                        None,
                        language::DiagnosticSourceKind::Pushed,
                        &[],
                        cx,
                    )
                    .expect("failed to update diagnostics");
            });
        });
    }

    init_test(cx);

    // 60-line active buffer so diagnostics can be placed far apart.
    let mut lines = String::new();
    for i in 0..60 {
        lines.push_str(&format!("line {i}\n"));
    }

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "active.txt": lines,
            "collab_file.txt": "error here\nsecond line\n",
            "free_file.txt": "another error\nsecond line\n",
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let active_buffer = project
        .update(cx, |project, cx| {
            let path = project
                .find_project_path(path!("/root/active.txt"), cx)
                .expect("active.txt not found");
            project.set_active_path(Some(path.clone()), cx);
            project.open_buffer(path, cx)
        })
        .await
        .expect("failed to open active buffer");

    // Collaborator parked at row 5; diagnostics at rows 3, 25, and 50.
    set_collaborator_cursor(&active_buffer, 5, cx);

    publish_diagnostics(path!("/root/active.txt"), &[3, 25, 50], &project, cx);

    cx.run_until_parked();

    // Scenario 1: local cursor at row 25, far from the collaborator. The
    // diagnostic at row 3 (near the collaborator) must be excluded.
    let cursor_point = Point::new(25, 0);
    let empty_search_range: Range<Point> = Default::default();

    let snapshot = active_buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let result = EditPredictionStore::next_diagnostic_location(
        active_buffer.clone(),
        &snapshot,
        empty_search_range.clone(),
        cursor_point,
        &project,
        &mut cx.to_async(),
    )
    .await
    .expect("next_diagnostic_location failed");

    let (result_buffer, result_anchor) = result.expect("expected a diagnostic location");
    assert_eq!(result_buffer.entity_id(), active_buffer.entity_id());
    let result_row = result_buffer.read_with(cx, |buffer, _| {
        result_anchor.to_point(&buffer.snapshot()).row
    });
    assert_ne!(
        result_row, 3,
        "row 3 is near collaborator (row 5) but far from local cursor (row 25), should be excluded"
    );
    assert!(
        result_row == 25 || result_row == 50,
        "expected row 25 or 50, got {result_row}"
    );

    // Scenario 2: local cursor at row 4, also near the collaborator — row 3
    // is then fair game.
    let snapshot_near = active_buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let near_cursor_point = Point::new(4, 0);
    let result_near = EditPredictionStore::next_diagnostic_location(
        active_buffer.clone(),
        &snapshot_near,
        empty_search_range.clone(),
        near_cursor_point,
        &project,
        &mut cx.to_async(),
    )
    .await
    .expect("next_diagnostic_location failed");

    let (_, near_anchor) = result_near.expect("expected a diagnostic location when both are near");
    let near_row =
        active_buffer.read_with(cx, |buffer, _| near_anchor.to_point(&buffer.snapshot()).row);
    assert_eq!(
        near_row, 3,
        "row 3 should be included when local cursor (row 4) is also near the collaborator"
    );

    // Scenario 3: local cursor at row 50 — the nearby diagnostic at row 50 is
    // far from the collaborator and should be chosen.
    let snapshot_far = active_buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let far_cursor_point = Point::new(50, 0);
    let result_far = EditPredictionStore::next_diagnostic_location(
        active_buffer.clone(),
        &snapshot_far,
        empty_search_range.clone(),
        far_cursor_point,
        &project,
        &mut cx.to_async(),
    )
    .await
    .expect("next_diagnostic_location failed");

    let (_, far_anchor) = result_far.expect("expected a diagnostic location");
    let far_row =
        active_buffer.read_with(cx, |buffer, _| far_anchor.to_point(&buffer.snapshot()).row);
    assert_eq!(
        far_row, 50,
        "row 50 is near local cursor (row 50) and far from collaborator, should be picked"
    );

    // Scenario 4: cross-file jump. Both other files have a diagnostic at row
    // 0, but collab_file.txt has a collaborator cursor on it.
    publish_diagnostics(path!("/root/collab_file.txt"), &[0], &project, cx);
    publish_diagnostics(path!("/root/free_file.txt"), &[0], &project, cx);
    cx.run_until_parked();

    let collab_buffer = project
        .update(cx, |project, cx| {
            let path = project
                .find_project_path(path!("/root/collab_file.txt"), cx)
                .expect("collab_file.txt not found");
            project.open_buffer(path, cx)
        })
        .await
        .expect("failed to open collab buffer");

    set_collaborator_cursor(&collab_buffer, 0, cx);
    cx.run_until_parked();

    // Search range covers the whole active buffer so no same-file diagnostic
    // is left to pick, forcing a cross-file result.
    let no_same_file_search_range = Point::new(0, 0)..Point::new(59, 0);
    let snapshot_cross = active_buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let result_cross = EditPredictionStore::next_diagnostic_location(
        active_buffer.clone(),
        &snapshot_cross,
        no_same_file_search_range,
        Point::new(0, 0),
        &project,
        &mut cx.to_async(),
    )
    .await
    .expect("cross-file next_diagnostic_location failed");

    let (cross_buffer, _) = result_cross.expect("expected a cross-file diagnostic location");
    let cross_path = cross_buffer.read_with(cx, |buffer, cx| {
        buffer
            .file()
            .expect("buffer should have a file")
            .full_path(cx)
    });
    assert_eq!(
        cross_path,
        Path::new(path!("root/free_file.txt")),
        "should skip collab_file.txt (has collaborator) and pick free_file.txt"
    );
}
2883
/// Verifies the "settled prediction" machinery: a prediction enqueued for a
/// buffer region fires its settled callback only after that region has gone
/// `EDIT_PREDICTION_SETTLED_QUIESCENCE` (Q) without edits, and predictions in
/// distinct regions settle independently of each other.
#[gpui::test]
async fn test_edit_prediction_settled(cx: &mut TestAppContext) {
    let (ep_store, _requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());

    // Buffer with two clearly separated regions:
    // Region A = lines 0-9 (offsets 0..50)
    // Region B = lines 20-29 (offsets 105..155)
    // A big gap in between so edits in one region never overlap the other.
    let mut content = String::new();
    for i in 0..30 {
        content.push_str(&format!("line {i:02}\n"));
    }

    fs.insert_tree(
        "/root",
        json!({
            "foo.md": content.clone()
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    // Records (prediction id, settled text) pairs as they are emitted.
    type SettledEventRecord = (EditPredictionId, String);
    let settled_events: Arc<Mutex<Vec<SettledEventRecord>>> = Arc::new(Mutex::new(Vec::new()));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx);

        let settled_events = settled_events.clone();
        ep_store.settled_event_callback = Some(Box::new(move |id, text| {
            settled_events.lock().push((id, text));
        }));
    });

    // --- Phase 1: edit in region A and enqueue prediction A ---

    buffer.update(cx, |buffer, cx| {
        // Edit at the start of line 0.
        buffer.edit(vec![(0..0, "ADDED ")], None, cx);
    });
    cx.run_until_parked();

    let snapshot_a = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());

    // Region A: first 10 lines of the buffer.
    let editable_region_a = 0..snapshot_a.point_to_offset(Point::new(10, 0));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.enqueue_settled_prediction(
            EditPredictionId("prediction-a".into()),
            &project,
            &buffer,
            &snapshot_a,
            editable_region_a.clone(),
            None,
            cx,
        );
    });

    // --- Phase 2: repeatedly edit in region A to keep it unsettled ---

    // Let the worker process the channel message before we start advancing.
    cx.run_until_parked();

    let mut region_a_edit_offset = 5;
    for _ in 0..3 {
        // Edit inside region A (not at the boundary) so `last_edit_at` is
        // updated before the worker's next wake.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(
                vec![(region_a_edit_offset..region_a_edit_offset, "x")],
                None,
                cx,
            );
        });
        region_a_edit_offset += 1;
        cx.run_until_parked();

        // Each iteration advances Q/2, so A never accumulates a full quiet Q.
        cx.executor()
            .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE / 2);
        cx.run_until_parked();
        assert!(
            settled_events.lock().is_empty(),
            "no settled events should fire while region A is still being edited"
        );
    }

    // Still nothing settled.
    assert!(settled_events.lock().is_empty());

    // --- Phase 3: edit in distinct region B, enqueue prediction B ---
    // Advance a small amount so B's quiescence window starts later than A's,
    // but not so much that A settles (A's last edit was at the start of
    // iteration 3, and it needs a full Q to settle).
    cx.executor()
        .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE / 4);
    cx.run_until_parked();
    assert!(settled_events.lock().is_empty());

    let snapshot_b = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let line_20_offset = snapshot_b.point_to_offset(Point::new(20, 0));

    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(line_20_offset..line_20_offset, "NEW ")], None, cx);
    });
    cx.run_until_parked();

    let snapshot_b2 = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let editable_region_b = line_20_offset..snapshot_b2.point_to_offset(Point::new(25, 0));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.enqueue_settled_prediction(
            EditPredictionId("prediction-b".into()),
            &project,
            &buffer,
            &snapshot_b2,
            editable_region_b.clone(),
            None,
            cx,
        );
    });

    cx.run_until_parked();
    assert!(
        settled_events.lock().is_empty(),
        "neither prediction should have settled yet"
    );

    // --- Phase 4: let enough time pass for region A to settle ---
    // A's last edit was at T_a (during the last loop iteration). The worker is
    // sleeping until T_a + Q. We advance just enough to reach that wake time
    // (Q/4 since we already advanced Q/4 in phase 3 on top of the loop's
    // 3*Q/2). At that point A has been quiet for Q and settles, but B was
    // enqueued only Q/4 ago and stays pending.
    cx.executor()
        .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE / 4);
    cx.run_until_parked();

    {
        let events = settled_events.lock().clone();
        assert_eq!(
            events.len(),
            1,
            "prediction and capture_sample for A should have settled, got: {events:?}"
        );
        assert_eq!(events[0].0, EditPredictionId("prediction-a".into()));
    }

    // --- Phase 5: let more time pass for region B to settle ---
    // B's last edit was Q/4 before A settled. The worker rescheduled to
    // B's last_edit_at + Q, which is 3Q/4 from now.
    cx.executor()
        .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE * 3 / 4);
    cx.run_until_parked();

    {
        let events = settled_events.lock().clone();
        assert_eq!(
            events.len(),
            2,
            "both prediction and capture_sample settled events should be emitted for each request, got: {events:?}"
        );
        assert_eq!(events[1].0, EditPredictionId("prediction-b".into()));
    }
}
3058
// Runs once at binary load time (before any test), so every test in this
// file gets logging without per-test setup.
#[ctor::ctor]
fn init_logger() {
    zlog::init_test();
}