1use super::*;
2use crate::udiff::apply_diff_to_string;
3use client::{UserStore, test::FakeServer};
4use clock::FakeSystemClock;
5use clock::ReplicaId;
6use cloud_api_types::{CreateLlmTokenResponse, LlmToken};
7use cloud_llm_client::{
8 EditPredictionRejectReason, EditPredictionRejection, RejectEditPredictionsBody,
9 predict_edits_v3::{PredictEditsV3Request, PredictEditsV3Response},
10};
11use futures::{
12 AsyncReadExt, FutureExt, StreamExt,
13 channel::{mpsc, oneshot},
14};
15use gpui::App;
16use gpui::{
17 Entity, TestAppContext,
18 http_client::{FakeHttpClient, Response},
19};
20use indoc::indoc;
21use language::{
22 Anchor, Buffer, Capability, CursorShape, Diagnostic, DiagnosticEntry, DiagnosticSet,
23 DiagnosticSeverity, Operation, Point, Selection, SelectionGoal,
24};
25use language_model::RefreshLlmTokenListener;
26use lsp::LanguageServerId;
27use parking_lot::Mutex;
28use pretty_assertions::{assert_eq, assert_matches};
29use project::{FakeFs, Project};
30use serde_json::json;
31use settings::SettingsStore;
32use std::{ops::Range, path::Path, sync::Arc, time::Duration};
33use util::{
34 path,
35 test::{TextRangeMarker, marked_text_ranges_by},
36};
37use uuid::Uuid;
38use zeta_prompt::ZetaPromptInput;
39
40use crate::{
41 BufferEditPrediction, EDIT_PREDICTION_SETTLED_QUIESCENCE, EditPredictionId,
42 EditPredictionStore, REJECT_REQUEST_DEBOUNCE,
43};
44
// Verifies how `prediction_at` classifies the current prediction: a prediction
// targeting the queried buffer is reported as `Local`, while a prediction
// targeting a different file is reported as `Jump` until that file's buffer is
// queried directly.
#[gpui::test]
async fn test_current_state(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "1.txt": "Hello!\nHow\nBye\n",
            "2.txt": "Hola!\nComo\nAdios\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    // Open 1.txt and mark it as the active path so predictions anchor there.
    let buffer1 = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("/root/1.txt"), cx).unwrap();
            project.set_active_path(Some(path.clone()), cx);
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot1 = buffer1.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot1.anchor_before(language::Point::new(1, 3));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_project(&project, cx);
        ep_store.register_buffer(&buffer1, &project, cx);
    });

    // Prediction for current file

    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer1.clone(), position, cx)
    });
    let (request, respond_tx) = requests.predict.next().await.unwrap();

    // Respond with a diff that edits the file the cursor is in.
    respond_tx
        .send(model_response(
            &request,
            indoc! {r"
                --- a/root/1.txt
                +++ b/root/1.txt
                @@ ... @@
                 Hello!
                -How
                +How are you?
                 Bye
            "},
        ))
        .unwrap();

    cx.run_until_parked();

    // The prediction edits the queried buffer, so it is `Local`.
    ep_store.update(cx, |ep_store, cx| {
        let prediction = ep_store
            .prediction_at(&buffer1, None, &project, cx)
            .unwrap();
        assert_matches!(prediction, BufferEditPrediction::Local { .. });
    });

    ep_store.update(cx, |ep_store, cx| {
        ep_store.reject_current_prediction(EditPredictionRejectReason::Discarded, &project, cx);
    });

    // Prediction for diagnostic in another file

    let diagnostic = lsp::Diagnostic {
        range: lsp::Range::new(lsp::Position::new(1, 1), lsp::Position::new(1, 5)),
        severity: Some(lsp::DiagnosticSeverity::ERROR),
        message: "Sentence is incomplete".to_string(),
        ..Default::default()
    };

    project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostics(
                    LanguageServerId(0),
                    lsp::PublishDiagnosticsParams {
                        uri: lsp::Uri::from_file_path(path!("/root/2.txt")).unwrap(),
                        diagnostics: vec![diagnostic],
                        version: None,
                    },
                    None,
                    language::DiagnosticSourceKind::Pushed,
                    &[],
                    cx,
                )
                .unwrap();
        });
    });

    // A second predict request arrives after publishing the diagnostic
    // (no explicit refresh call here — presumably triggered by the
    // diagnostic update; confirm against the store implementation).
    let (request, respond_tx) = requests.predict.next().await.unwrap();
    respond_tx
        .send(model_response(
            &request,
            indoc! {r#"
                --- a/root/2.txt
                +++ b/root/2.txt
                @@ ... @@
                 Hola!
                -Como
                +Como estas?
                 Adios
            "#},
        ))
        .unwrap();
    cx.run_until_parked();

    // From buffer1's perspective the prediction targets another file, so it
    // surfaces as a `Jump` pointing at root/2.txt.
    ep_store.update(cx, |ep_store, cx| {
        let prediction = ep_store
            .prediction_at(&buffer1, None, &project, cx)
            .unwrap();
        assert_matches!(
            prediction,
            BufferEditPrediction::Jump { prediction } if prediction.snapshot.file().unwrap().full_path(cx) == Path::new(path!("root/2.txt"))
        );
    });

    let buffer2 = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/2.txt"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    // Queried from the target buffer itself, the same prediction is `Local`.
    ep_store.update(cx, |ep_store, cx| {
        let prediction = ep_store
            .prediction_at(&buffer2, None, &project, cx)
            .unwrap();
        assert_matches!(prediction, BufferEditPrediction::Local { .. });
    });
}
180
// End-to-end happy path for `request_prediction`: issue one request, answer it
// with a unified diff, and check that the resulting prediction contains the
// expected single edit at the expected position.
#[gpui::test]
async fn test_simple_request(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    // Cursor sits at the end of "How" on line 1.
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    let prediction_task = ep_store.update(cx, |ep_store, cx| {
        ep_store.request_prediction(&project, &buffer, position, Default::default(), cx)
    });

    let (request, respond_tx) = requests.predict.next().await.unwrap();

    // TODO Put back when we have a structured request again
    // assert_eq!(
    //     request.excerpt_path.as_ref(),
    //     Path::new(path!("root/foo.md"))
    // );
    // assert_eq!(
    //     request.cursor_point,
    //     Point {
    //         line: Line(1),
    //         column: 3
    //     }
    // );

    respond_tx
        .send(model_response(
            &request,
            indoc! { r"
                --- a/root/foo.md
                +++ b/root/foo.md
                @@ ... @@
                 Hello!
                -How
                +How are you?
                 Bye
            "},
        ))
        .unwrap();

    let prediction = prediction_task.await.unwrap().unwrap().prediction.unwrap();

    // The diff replaces "How" with "How are you?", which interpolates to a
    // single insertion of " are you?" at the cursor position (1, 3).
    assert_eq!(prediction.edits.len(), 1);
    assert_eq!(
        prediction.edits[0].0.to_point(&snapshot).start,
        language::Point::new(1, 3)
    );
    assert_eq!(prediction.edits[0].1.as_ref(), " are you?");
}
247
// Verifies that recent buffer edits are included in the prediction request:
// after a local edit, the outgoing prompt must contain a diff describing that
// edit, and the model's response still produces the expected interpolated edit.
#[gpui::test]
async fn test_request_events(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\n\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx);
    });

    // Type "How" on the empty second line; this edit should show up as an
    // event in the next request's prompt.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(7..7, "How")], None, cx);
    });

    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    let prediction_task = ep_store.update(cx, |ep_store, cx| {
        ep_store.request_prediction(&project, &buffer, position, Default::default(), cx)
    });

    let (request, respond_tx) = requests.predict.next().await.unwrap();

    // The prompt must carry the diff of the edit made above.
    let prompt = prompt_from_request(&request);
    assert!(
        prompt.contains(indoc! {"
            --- a/root/foo.md
            +++ b/root/foo.md
            @@ -1,3 +1,3 @@
             Hello!
            -
            +How
             Bye
        "}),
        "{prompt}"
    );

    respond_tx
        .send(model_response(
            &request,
            indoc! {r#"
                --- a/root/foo.md
                +++ b/root/foo.md
                @@ ... @@
                 Hello!
                -How
                +How are you?
                 Bye
            "#},
        ))
        .unwrap();

    let prediction = prediction_task.await.unwrap().unwrap().prediction.unwrap();

    assert_eq!(prediction.edits.len(), 1);
    assert_eq!(prediction.edits[0].1.as_ref(), " are you?");
}
320
// Verifies that a pause longer than `LAST_CHANGE_GROUPING_TIME` splits edit
// history into two separate events, even when the edits land on the same line
// (which would otherwise be coalesced into one event by line-span proximity).
#[gpui::test]
async fn test_edit_history_getter_pause_splits_last_event(cx: &mut TestAppContext) {
    let (ep_store, _requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\n\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx);
    });

    // First burst: insert "How"
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(7..7, "How")], None, cx);
    });

    // Simulate a pause longer than the grouping threshold (e.g. 500ms).
    cx.executor().advance_clock(LAST_CHANGE_GROUPING_TIME * 2);
    cx.run_until_parked();

    // Second burst: append " are you?" immediately after "How" on the same line.
    //
    // Keeping both bursts on the same line ensures the existing line-span coalescing logic
    // groups them into a single `LastEvent`, allowing the pause-split getter to return two diffs.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(10..10, " are you?")], None, cx);
    });

    // A second edit shortly after the first post-pause edit ensures the last edit timestamp is
    // advanced after the pause boundary is recorded, making pause-splitting deterministic.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(19..19, "!")], None, cx);
    });

    // With time-based splitting, there are two distinct events.
    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(events.len(), 2);

    // Event 0 covers only the pre-pause "How" insertion on line 1.
    let first_total_edit_range = buffer.read_with(cx, |buffer, _| {
        events[0].total_edit_range.to_point(&buffer.snapshot())
    });
    assert_eq!(first_total_edit_range, Point::new(1, 0)..Point::new(1, 3));

    let zeta_prompt::Event::BufferChange { diff, .. } = events[0].event.as_ref();
    assert_eq!(
        diff.as_str(),
        indoc! {"
            @@ -1,3 +1,3 @@
             Hello!
            -
            +How
             Bye
        "}
    );

    // Event 1 covers both post-pause edits, coalesced together.
    let second_total_edit_range = buffer.read_with(cx, |buffer, _| {
        events[1].total_edit_range.to_point(&buffer.snapshot())
    });
    assert_eq!(second_total_edit_range, Point::new(1, 3)..Point::new(1, 13));

    let zeta_prompt::Event::BufferChange { diff, .. } = events[1].event.as_ref();
    assert_eq!(
        diff.as_str(),
        indoc! {"
            @@ -1,3 +1,3 @@
             Hello!
            -How
            +How are you?!
             Bye
        "}
    );
}
409
// Verifies line-distance-based coalescing of edit-history events: edits within
// 8 lines of an existing event's range (above or below) merge into it, while a
// sufficiently distant edit starts a new event.
#[gpui::test]
async fn test_predicted_edits_are_separated_in_edit_history(cx: &mut TestAppContext) {
    let (ep_store, _requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());

    // Create a file with 30 lines to test line-based coalescing
    let content = (1..=30)
        .map(|i| format!("Line {}\n", i))
        .collect::<String>();
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": content
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx);
    });

    // First edit: multi-line edit spanning rows 10-12 (replacing lines 11-13)
    buffer.update(cx, |buffer, cx| {
        let start = Point::new(10, 0).to_offset(buffer);
        let end = Point::new(13, 0).to_offset(buffer);
        buffer.edit(vec![(start..end, "Middle A\nMiddle B\n")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events(&events),
        indoc! {"
            @@ -8,9 +8,8 @@
             Line 8
             Line 9
             Line 10
            -Line 11
            -Line 12
            -Line 13
            +Middle A
            +Middle B
             Line 14
             Line 15
             Line 16
        "},
        "After first edit"
    );

    // Second edit: insert ABOVE the first edit's range (row 5, within 8 lines of row 10)
    // This tests that coalescing considers the START of the existing range
    buffer.update(cx, |buffer, cx| {
        let offset = Point::new(5, 0).to_offset(buffer);
        buffer.edit(vec![(offset..offset, "Above\n")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events(&events),
        indoc! {"
            @@ -3,14 +3,14 @@
             Line 3
             Line 4
             Line 5
            +Above
             Line 6
             Line 7
             Line 8
             Line 9
             Line 10
            -Line 11
            -Line 12
            -Line 13
            +Middle A
            +Middle B
             Line 14
             Line 15
             Line 16
        "},
        "After inserting above (should coalesce)"
    );

    // Third edit: insert BELOW the first edit's range (row 14 in current buffer, within 8 lines of row 12)
    // This tests that coalescing considers the END of the existing range
    buffer.update(cx, |buffer, cx| {
        let offset = Point::new(14, 0).to_offset(buffer);
        buffer.edit(vec![(offset..offset, "Below\n")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events(&events),
        indoc! {"
            @@ -3,15 +3,16 @@
             Line 3
             Line 4
             Line 5
            +Above
             Line 6
             Line 7
             Line 8
             Line 9
             Line 10
            -Line 11
            -Line 12
            -Line 13
            +Middle A
            +Middle B
             Line 14
            +Below
             Line 15
             Line 16
             Line 17
        "},
        "After inserting below (should coalesce)"
    );

    // Fourth edit: insert FAR BELOW (row 25, beyond 8 lines from the current range end ~row 15)
    // This should NOT coalesce - creates a new event
    buffer.update(cx, |buffer, cx| {
        let offset = Point::new(25, 0).to_offset(buffer);
        buffer.edit(vec![(offset..offset, "Far below\n")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    // Two events now: the coalesced cluster around rows 3-17, and a separate
    // event for the distant insertion (render_events joins them with "---").
    assert_eq!(
        render_events(&events),
        indoc! {"
            @@ -3,15 +3,16 @@
             Line 3
             Line 4
             Line 5
            +Above
             Line 6
             Line 7
             Line 8
             Line 9
             Line 10
            -Line 11
            -Line 12
            -Line 13
            +Middle A
            +Middle B
             Line 14
            +Below
             Line 15
             Line 16
             Line 17

            ---
            @@ -23,6 +23,7 @@
             Line 22
             Line 23
             Line 24
            +Far below
             Line 25
             Line 26
             Line 27
        "},
        "After inserting far below (should NOT coalesce)"
    );
}
588
589fn render_events(events: &[StoredEvent]) -> String {
590 events
591 .iter()
592 .map(|e| {
593 let zeta_prompt::Event::BufferChange { diff, .. } = e.event.as_ref();
594 diff.as_str()
595 })
596 .collect::<Vec<_>>()
597 .join("\n---\n")
598}
599
600fn render_events_with_predicted(events: &[StoredEvent]) -> Vec<String> {
601 events
602 .iter()
603 .map(|e| {
604 let zeta_prompt::Event::BufferChange {
605 diff, predicted, ..
606 } = e.event.as_ref();
607 let prefix = if *predicted { "predicted" } else { "manual" };
608 format!("{}\n{}", prefix, diff)
609 })
610 .collect()
611}
612
613fn make_collaborator_replica(
614 buffer: &Entity<Buffer>,
615 cx: &mut TestAppContext,
616) -> (Entity<Buffer>, clock::Global) {
617 let (state, version) =
618 buffer.read_with(cx, |buffer, _cx| (buffer.to_proto(_cx), buffer.version()));
619 let collaborator = cx.new(|_cx| {
620 Buffer::from_proto(ReplicaId::new(1), Capability::ReadWrite, state, None).unwrap()
621 });
622 (collaborator, version)
623}
624
625async fn apply_collaborator_edit(
626 collaborator: &Entity<Buffer>,
627 buffer: &Entity<Buffer>,
628 since_version: &mut clock::Global,
629 edit_range: Range<usize>,
630 new_text: &str,
631 cx: &mut TestAppContext,
632) {
633 collaborator.update(cx, |collaborator, cx| {
634 collaborator.edit([(edit_range, new_text)], None, cx);
635 });
636
637 let serialize_task = collaborator.read_with(cx, |collaborator, cx| {
638 collaborator.serialize_ops(Some(since_version.clone()), cx)
639 });
640 let ops = serialize_task.await;
641 *since_version = collaborator.read_with(cx, |collaborator, _cx| collaborator.version());
642
643 buffer.update(cx, |buffer, cx| {
644 buffer.apply_ops(
645 ops.into_iter()
646 .map(|op| language::proto::deserialize_operation(op).unwrap()),
647 cx,
648 );
649 });
650}
651
// Verifies that a collaborator's edit landing near the local user's cursor and
// recent edit is retained in the edit history, merged with the local edit into
// a single manual event.
#[gpui::test]
async fn test_nearby_collaborator_edits_are_kept_in_history(cx: &mut TestAppContext) {
    let (ep_store, _requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.rs": "line 0\nline 1\nline 2\nline 3\nline 4\nline 5\nline 6\nline 7\nline 8\nline 9\nline 10\nline 11\nline 12\nline 13\nline 14\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.rs"), cx).unwrap();
            project.set_active_path(Some(path.clone()), cx);
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    // Establish the local cursor position at the start of line 1.
    let cursor = buffer.read_with(cx, |buffer, _cx| buffer.anchor_before(Point::new(1, 0)));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx);
        let _ = ep_store.prediction_at(&buffer, Some(cursor), &project, cx);
    });

    // Local edit on line 0.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(0..6, "LOCAL ZERO")], None, cx);
    });

    let (collaborator, mut collaborator_version) = make_collaborator_replica(&buffer, cx);

    let (line_one_start, line_one_len) = collaborator.read_with(cx, |buffer, _cx| {
        (Point::new(1, 0).to_offset(buffer), buffer.line_len(1))
    });

    // Remote edit on line 1 — adjacent to the local edit and cursor.
    apply_collaborator_edit(
        &collaborator,
        &buffer,
        &mut collaborator_version,
        line_one_start..line_one_start + line_one_len as usize,
        "REMOTE ONE",
        cx,
    )
    .await;

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });

    // Both edits appear in one manual event.
    assert_eq!(
        render_events_with_predicted(&events),
        vec![indoc! {"
            manual
            @@ -1,5 +1,5 @@
            -line 0
            -line 1
            +LOCAL ZERO
            +REMOTE ONE
             line 2
             line 3
             line 4
        "}]
    );
}
720
// Verifies that a collaborator's edit far away from the local user's cursor
// (line 900 of a 1000-line file, cursor at line 1) is excluded from the edit
// history, which keeps only the local edit.
#[gpui::test]
async fn test_distant_collaborator_edits_are_omitted_from_history(cx: &mut TestAppContext) {
    let (ep_store, _requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.rs": (0..1000)
                .map(|i| format!("line {i}\n"))
                .collect::<String>()
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.rs"), cx).unwrap();
            project.set_active_path(Some(path.clone()), cx);
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    let cursor = buffer.read_with(cx, |buffer, _cx| buffer.anchor_before(Point::new(1, 0)));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx);
        let _ = ep_store.prediction_at(&buffer, Some(cursor), &project, cx);
    });

    // Local edit near the cursor (line 0).
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(0..6, "LOCAL ZERO")], None, cx);
    });

    let (collaborator, mut collaborator_version) = make_collaborator_replica(&buffer, cx);

    // Remote edit on line 900 — far from the cursor.
    let far_line_start = buffer.read_with(cx, |buffer, _cx| Point::new(900, 0).to_offset(buffer));

    apply_collaborator_edit(
        &collaborator,
        &buffer,
        &mut collaborator_version,
        far_line_start..far_line_start + 7,
        "REMOTE FAR",
        cx,
    )
    .await;

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });

    // Only the local edit remains; the distant remote edit was dropped.
    assert_eq!(
        render_events_with_predicted(&events),
        vec![indoc! {"
            manual
            @@ -1,4 +1,4 @@
            -line 0
            +LOCAL ZERO
             line 1
             line 2
             line 3
        "}]
    );
}
787
// Verifies that a collaborator's edit in a different file from the one the
// local user is focused on produces no edit-history events at all.
#[gpui::test]
async fn test_irrelevant_collaborator_edits_in_different_files_are_omitted_from_history(
    cx: &mut TestAppContext,
) {
    let (ep_store, _requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.rs": "line 0\nline 1\nline 2\nline 3\n",
            "bar.rs": "line 0\nline 1\nline 2\nline 3\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    // foo.rs is the active file the local user is working in.
    let foo_buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.rs"), cx).unwrap();
            project.set_active_path(Some(path.clone()), cx);
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let bar_buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/bar.rs"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    let foo_cursor = foo_buffer.read_with(cx, |buffer, _cx| buffer.anchor_before(Point::new(1, 0)));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&foo_buffer, &project, cx);
        ep_store.register_buffer(&bar_buffer, &project, cx);
        let _ = ep_store.prediction_at(&foo_buffer, Some(foo_cursor), &project, cx);
    });

    // Remote edit arrives only in bar.rs, which the local user isn't focused on.
    let (bar_collaborator, mut bar_version) = make_collaborator_replica(&bar_buffer, cx);

    apply_collaborator_edit(
        &bar_collaborator,
        &bar_buffer,
        &mut bar_version,
        0..6,
        "REMOTE BAR",
        cx,
    )
    .await;

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });

    assert!(events.is_empty());
}
846
// Exercises how the `predicted` flag interacts with event coalescing, covering
// six cases (labeled inline): manual edits stay manual, nearby manual edits
// merge, accepted predictions are flagged predicted and merge with each other,
// manual edits don't merge into predicted events, and a location change allows
// mixed edits at the prior location to merge (losing the predicted flag).
#[gpui::test]
async fn test_predicted_flag_coalescing(cx: &mut TestAppContext) {
    let (ep_store, _requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.rs": "line 0\nline 1\nline 2\nline 3\nline 4\nline 5\nline 6\nline 7\nline 8\nline 9\nline 10\nline 11\nline 12\nline 13\nline 14\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.rs"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx);
    });

    // Case 1: Manual edits have `predicted` set to false.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(0..6, "LINE ZERO")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });

    assert_eq!(
        render_events_with_predicted(&events),
        vec![indoc! {"
            manual
            @@ -1,4 +1,4 @@
            -line 0
            +LINE ZERO
             line 1
             line 2
             line 3
        "}]
    );

    // Case 2: Multiple successive manual edits near each other are merged into one
    // event with `predicted` set to false.
    buffer.update(cx, |buffer, cx| {
        let offset = Point::new(1, 0).to_offset(buffer);
        let end = Point::new(1, 6).to_offset(buffer);
        buffer.edit(vec![(offset..end, "LINE ONE")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events_with_predicted(&events),
        vec![indoc! {"
            manual
            @@ -1,5 +1,5 @@
            -line 0
            -line 1
            +LINE ZERO
            +LINE ONE
             line 2
             line 3
             line 4
        "}]
    );

    // Case 3: Accepted predictions have `predicted` set to true.
    // Case 5: A manual edit that follows a predicted edit is not merged with the
    // predicted edit, even if it is nearby.
    // `report_changes_for_buffer(.., true, true, ..)` marks the change as a
    // predicted edit.
    ep_store.update(cx, |ep_store, cx| {
        buffer.update(cx, |buffer, cx| {
            let offset = Point::new(2, 0).to_offset(buffer);
            let end = Point::new(2, 6).to_offset(buffer);
            buffer.edit(vec![(offset..end, "LINE TWO")], None, cx);
        });
        ep_store.report_changes_for_buffer(&buffer, &project, true, true, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events_with_predicted(&events),
        vec![
            indoc! {"
                manual
                @@ -1,5 +1,5 @@
                -line 0
                -line 1
                +LINE ZERO
                +LINE ONE
                 line 2
                 line 3
                 line 4
            "},
            indoc! {"
                predicted
                @@ -1,6 +1,6 @@
                 LINE ZERO
                 LINE ONE
                -line 2
                +LINE TWO
                 line 3
                 line 4
                 line 5
            "}
        ]
    );

    // Case 4: Multiple successive accepted predictions near each other are merged
    // into one event with `predicted` set to true.
    ep_store.update(cx, |ep_store, cx| {
        buffer.update(cx, |buffer, cx| {
            let offset = Point::new(3, 0).to_offset(buffer);
            let end = Point::new(3, 6).to_offset(buffer);
            buffer.edit(vec![(offset..end, "LINE THREE")], None, cx);
        });
        ep_store.report_changes_for_buffer(&buffer, &project, true, true, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events_with_predicted(&events),
        vec![
            indoc! {"
                manual
                @@ -1,5 +1,5 @@
                -line 0
                -line 1
                +LINE ZERO
                +LINE ONE
                 line 2
                 line 3
                 line 4
            "},
            indoc! {"
                predicted
                @@ -1,7 +1,7 @@
                 LINE ZERO
                 LINE ONE
                -line 2
                -line 3
                +LINE TWO
                +LINE THREE
                 line 4
                 line 5
                 line 6
            "}
        ]
    );

    // Case 5 (continued): A manual edit that follows a predicted edit is not merged
    // with the predicted edit, even if it is nearby.
    buffer.update(cx, |buffer, cx| {
        let offset = Point::new(4, 0).to_offset(buffer);
        let end = Point::new(4, 6).to_offset(buffer);
        buffer.edit(vec![(offset..end, "LINE FOUR")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events_with_predicted(&events),
        vec![
            indoc! {"
                manual
                @@ -1,5 +1,5 @@
                -line 0
                -line 1
                +LINE ZERO
                +LINE ONE
                 line 2
                 line 3
                 line 4
            "},
            indoc! {"
                predicted
                @@ -1,7 +1,7 @@
                 LINE ZERO
                 LINE ONE
                -line 2
                -line 3
                +LINE TWO
                +LINE THREE
                 line 4
                 line 5
                 line 6
            "},
            indoc! {"
                manual
                @@ -2,7 +2,7 @@
                 LINE ONE
                 LINE TWO
                 LINE THREE
                -line 4
                +LINE FOUR
                 line 5
                 line 6
                 line 7
            "}
        ]
    );

    // Case 6: If we then perform a manual edit at a *different* location (more than
    // 8 lines away), then the edits at the prior location can be merged with each
    // other, even if some are predicted and some are not. `predicted` means all
    // constituent edits were predicted.
    buffer.update(cx, |buffer, cx| {
        let offset = Point::new(14, 0).to_offset(buffer);
        let end = Point::new(14, 7).to_offset(buffer);
        buffer.edit(vec![(offset..end, "LINE FOURTEEN")], None, cx);
    });

    let events = ep_store.update(cx, |ep_store, cx| {
        ep_store.edit_history_for_project(&project, cx)
    });
    assert_eq!(
        render_events_with_predicted(&events),
        vec![
            indoc! {"
                manual
                @@ -1,8 +1,8 @@
                -line 0
                -line 1
                -line 2
                -line 3
                -line 4
                +LINE ZERO
                +LINE ONE
                +LINE TWO
                +LINE THREE
                +LINE FOUR
                 line 5
                 line 6
                 line 7
            "},
            indoc! {"
                manual
                @@ -12,4 +12,4 @@
                 line 11
                 line 12
                 line 13
                -line 14
                +LINE FOURTEEN
            "}
        ]
    );
}
1105
// Verifies that a model response containing no edits produces no prediction
// and is automatically reported back as rejected with reason `Empty`.
#[gpui::test]
async fn test_empty_prediction(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    // Respond with an empty diff; keep the request id to match the rejection.
    let (request, respond_tx) = requests.predict.next().await.unwrap();
    let response = model_response(&request, "");
    let id = response.request_id.clone();
    respond_tx.send(response).unwrap();

    cx.run_until_parked();

    // No prediction is surfaced for an empty response.
    ep_store.update(cx, |ep_store, cx| {
        assert!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .is_none()
        );
    });

    // prediction is reported as rejected
    let (reject_request, _) = requests.reject.next().await.unwrap();

    assert_eq!(
        &reject_request.rejections,
        &[EditPredictionRejection {
            request_id: id,
            reason: EditPredictionRejectReason::Empty,
            was_shown: false,
            model_version: None,
        }]
    );
}
1161
// Verifies that when the user's own typing makes the model's edits a no-op
// before the response arrives (interpolation leaves nothing to apply), no
// prediction is surfaced and it is reported as rejected with reason
// `InterpolatedEmpty`.
#[gpui::test]
async fn test_interpolated_empty(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request, respond_tx) = requests.predict.next().await.unwrap();

    // Before the response arrives, the user types the exact text the model is
    // about to suggest, so the predicted edit interpolates to nothing.
    buffer.update(cx, |buffer, cx| {
        buffer.set_text("Hello!\nHow are you?\nBye", cx);
    });

    let response = model_response(&request, SIMPLE_DIFF);
    let id = response.request_id.clone();
    respond_tx.send(response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        assert!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .is_none()
        );
    });

    // prediction is reported as rejected
    let (reject_request, _) = requests.reject.next().await.unwrap();

    assert_eq!(
        &reject_request.rejections,
        &[EditPredictionRejection {
            request_id: id,
            reason: EditPredictionRejectReason::InterpolatedEmpty,
            was_shown: false,
            model_version: None,
        }]
    );
}
1222
/// Canonical model output used across these tests: a unified diff against
/// `root/foo.md` that rewrites the middle line `How` into `How are you?`.
const SIMPLE_DIFF: &str = indoc! { r"
    --- a/root/foo.md
    +++ b/root/foo.md
    @@ ... @@
     Hello!
    -How
    +How are you?
     Bye
"};
1232
// Two sequential prediction requests complete in order. The second prediction
// replaces the first as the current one, and the first is reported to the
// server as rejected with reason `Replaced`.
#[gpui::test]
async fn test_replace_current(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request, respond_tx) = requests.predict.next().await.unwrap();
    let first_response = model_response(&request, SIMPLE_DIFF);
    let first_id = first_response.request_id.clone();
    respond_tx.send(first_response).unwrap();

    cx.run_until_parked();

    // The first prediction is now current.
    ep_store.update(cx, |ep_store, cx| {
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            first_id
        );
    });

    // a second request is triggered
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request, respond_tx) = requests.predict.next().await.unwrap();
    let second_response = model_response(&request, SIMPLE_DIFF);
    let second_id = second_response.request_id.clone();
    respond_tx.send(second_response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // second replaces first
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            second_id
        );
    });

    // first is reported as replaced
    let (reject_request, _) = requests.reject.next().await.unwrap();

    assert_eq!(
        &reject_request.rejections,
        &[EditPredictionRejection {
            request_id: first_id,
            reason: EditPredictionRejectReason::Replaced,
            was_shown: false,
            model_version: None,
        }]
    );
}
1315
// A second prediction that is strictly worse than the current one (its edit is
// a prefix of the current prediction's edit) does not replace it. The current
// prediction stays, and the new one is rejected with `CurrentPreferred`.
#[gpui::test]
async fn test_current_preferred(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request, respond_tx) = requests.predict.next().await.unwrap();
    let first_response = model_response(&request, SIMPLE_DIFF);
    let first_id = first_response.request_id.clone();
    respond_tx.send(first_response).unwrap();

    cx.run_until_parked();

    // The first prediction is now current.
    ep_store.update(cx, |ep_store, cx| {
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            first_id
        );
    });

    // a second request is triggered
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request, respond_tx) = requests.predict.next().await.unwrap();
    // worse than current prediction
    let second_response = model_response(
        &request,
        indoc! { r"
        --- a/root/foo.md
        +++ b/root/foo.md
        @@ ... @@
         Hello!
        -How
        +How are
         Bye
        "},
    );
    let second_id = second_response.request_id.clone();
    respond_tx.send(second_response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // first is preferred over second
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            first_id
        );
    });

    // second is reported as rejected
    let (reject_request, _) = requests.reject.next().await.unwrap();

    assert_eq!(
        &reject_request.rejections,
        &[EditPredictionRejection {
            request_id: second_id,
            reason: EditPredictionRejectReason::CurrentPreferred,
            was_shown: false,
            model_version: None,
        }]
    );
}
1410
// Two prediction requests are in flight at once and the later one responds
// first, becoming the current prediction. When the earlier request's response
// finally arrives, it is ignored (the request was cancelled) and reported to
// the server with reason `Canceled`.
#[gpui::test]
async fn test_cancel_earlier_pending_requests(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    // start two refresh tasks
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request1, respond_first) = requests.predict.next().await.unwrap();

    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request, respond_second) = requests.predict.next().await.unwrap();

    // wait for throttle
    cx.run_until_parked();

    // second responds first
    let second_response = model_response(&request, SIMPLE_DIFF);
    let second_id = second_response.request_id.clone();
    respond_second.send(second_response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // current prediction is second
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            second_id
        );
    });

    // Now deliver the earlier request's response, out of order.
    let first_response = model_response(&request1, SIMPLE_DIFF);
    let first_id = first_response.request_id.clone();
    respond_first.send(first_response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // current prediction is still second, since first was cancelled
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            second_id
        );
    });

    // first is reported as rejected
    let (reject_request, _) = requests.reject.next().await.unwrap();

    cx.run_until_parked();

    assert_eq!(
        &reject_request.rejections,
        &[EditPredictionRejection {
            request_id: first_id,
            reason: EditPredictionRejectReason::Canceled,
            was_shown: false,
            model_version: None,
        }]
    );
}
1502
// With two requests already pending, starting a third cancels the second
// (index 1). The first still completes and is shown; the cancelled second's
// response is ignored; the third then replaces the first. The rejection batch
// contains `Canceled` for the second and `Replaced` for the first.
#[gpui::test]
async fn test_cancel_second_on_third_request(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    // start two refresh tasks
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request1, respond_first) = requests.predict.next().await.unwrap();

    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    let (request2, respond_second) = requests.predict.next().await.unwrap();

    // wait for throttle, so requests are sent
    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // start a third request
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);

        // 2 are pending, so 2nd is cancelled
        assert_eq!(
            ep_store
                .get_or_init_project(&project, cx)
                .cancelled_predictions
                .iter()
                .copied()
                .collect::<Vec<_>>(),
            [1]
        );
    });

    // wait for throttle
    cx.run_until_parked();

    let (request3, respond_third) = requests.predict.next().await.unwrap();

    let first_response = model_response(&request1, SIMPLE_DIFF);
    let first_id = first_response.request_id.clone();
    respond_first.send(first_response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // current prediction is first
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            first_id
        );
    });

    // Deliver the cancelled (second) request's response; it must not be used.
    let cancelled_response = model_response(&request2, SIMPLE_DIFF);
    let cancelled_id = cancelled_response.request_id.clone();
    respond_second.send(cancelled_response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // current prediction is still first, since second was cancelled
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            first_id
        );
    });

    let third_response = model_response(&request3, SIMPLE_DIFF);
    let third_response_id = third_response.request_id.clone();
    respond_third.send(third_response).unwrap();

    cx.run_until_parked();

    ep_store.update(cx, |ep_store, cx| {
        // third completes and replaces first
        assert_eq!(
            ep_store
                .prediction_at(&buffer, None, &project, cx)
                .unwrap()
                .id
                .0,
            third_response_id
        );
    });

    // second is reported as rejected
    let (reject_request, _) = requests.reject.next().await.unwrap();

    cx.run_until_parked();

    assert_eq!(
        &reject_request.rejections,
        &[
            EditPredictionRejection {
                request_id: cancelled_id,
                reason: EditPredictionRejectReason::Canceled,
                was_shown: false,
                model_version: None,
            },
            EditPredictionRejection {
                request_id: first_id,
                reason: EditPredictionRejectReason::Replaced,
                was_shown: false,
                model_version: None,
            }
        ]
    );
}
1640
// Edit-triggered and diagnostic(jump)-triggered prediction refreshes use
// separate throttle timers: a recent edit request does not throttle the first
// jump request (and vice versa), while a second request of either kind is
// throttled until `THROTTLE_TIMEOUT` elapses.
#[gpui::test]
async fn test_jump_and_edit_throttles_are_independent(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n",
            "bar.md": "Hola!\nComo\nAdios\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.set_active_path(Some(path.clone()), cx);
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    // Register explicitly so diagnostic events on the project reach the store.
    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_project(&project, cx);
        ep_store.register_buffer(&buffer, &project, cx);
    });

    // First edit request - no prior edit, so not throttled.
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });
    let (_edit_request, edit_response_tx) = requests.predict.next().await.unwrap();
    edit_response_tx.send(empty_response()).unwrap();
    cx.run_until_parked();

    let diagnostic = lsp::Diagnostic {
        range: lsp::Range::new(lsp::Position::new(1, 1), lsp::Position::new(1, 5)),
        severity: Some(lsp::DiagnosticSeverity::ERROR),
        message: "Sentence is incomplete".to_string(),
        ..Default::default()
    };

    // First jump request triggered by diagnostic event on buffer - no prior jump, so not throttled (independent from edit).
    project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostics(
                    LanguageServerId(0),
                    lsp::PublishDiagnosticsParams {
                        uri: lsp::Uri::from_file_path(path!("/root/bar.md")).unwrap(),
                        diagnostics: vec![diagnostic],
                        version: None,
                    },
                    None,
                    language::DiagnosticSourceKind::Pushed,
                    &[],
                    cx,
                )
                .unwrap();
        });
    });
    let (_jump_request, jump_response_tx) = requests.predict.next().await.unwrap();
    jump_response_tx.send(empty_response()).unwrap();
    cx.run_until_parked();

    // Second edit request - should be throttled by the first edit.
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });
    assert_no_predict_request_ready(&mut requests.predict);

    // Second jump request - should be throttled by the first jump.
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_diagnostics(
            project.clone(),
            DiagnosticSearchScope::Global,
            cx,
        );
    });
    assert_no_predict_request_ready(&mut requests.predict);

    // Wait for both throttles to expire.
    cx.background_executor
        .advance_clock(EditPredictionStore::THROTTLE_TIMEOUT);
    cx.background_executor.run_until_parked();
    cx.run_until_parked();

    // Both requests should now go through.
    let (_request_1, response_tx_1) = requests.predict.next().await.unwrap();
    response_tx_1.send(empty_response()).unwrap();
    cx.run_until_parked();

    let (_request_2, response_tx_2) = requests.predict.next().await.unwrap();
    response_tx_2.send(empty_response()).unwrap();
    cx.run_until_parked();
}
1741
// Two refresh calls issued within the same synchronous frame collapse into a
// single prediction request; the duplicate is dropped by the dedup gate.
#[gpui::test]
async fn test_same_frame_duplicate_requests_deduplicated(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "foo.md": "Hello!\nHow\nBye\n"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(1, 3));

    // Enqueue two refresh calls in the same synchronous frame (no yielding).
    // Both `cx.spawn` tasks are created before either executes, so they both
    // capture the same `proceed_count_at_enqueue`. Only the first task should
    // pass the deduplication gate; the second should be skipped.
    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    // Let both spawned tasks run to completion (including any throttle waits).
    cx.run_until_parked();

    // Exactly one prediction request should have been sent.
    let (request, respond_tx) = requests.predict.next().await.unwrap();
    respond_tx
        .send(model_response(&request, SIMPLE_DIFF))
        .unwrap();
    cx.run_until_parked();

    // No second request should be pending.
    assert_no_predict_request_ready(&mut requests.predict);
}
1787
// Exercises batching of rejection reports: debounced flushing, the immediate
// flush when the batch size limit is reached, and re-queuing of a batch whose
// request failed so it is retried together with later rejections.
#[gpui::test]
async fn test_rejections_flushing(cx: &mut TestAppContext) {
    let (ep_store, mut requests) = init_test_with_fake_client(cx);

    ep_store.update(cx, |ep_store, cx| {
        ep_store.reject_prediction(
            EditPredictionId("test-1".into()),
            EditPredictionRejectReason::Discarded,
            false,
            None,
            cx,
        );
        ep_store.reject_prediction(
            EditPredictionId("test-2".into()),
            EditPredictionRejectReason::Canceled,
            true,
            None,
            cx,
        );
    });

    cx.executor().advance_clock(REJECT_REQUEST_DEBOUNCE);
    cx.run_until_parked();

    let (reject_request, respond_tx) = requests.reject.next().await.unwrap();
    respond_tx.send(()).unwrap();

    // batched
    assert_eq!(reject_request.rejections.len(), 2);
    assert_eq!(
        reject_request.rejections[0],
        EditPredictionRejection {
            request_id: "test-1".to_string(),
            reason: EditPredictionRejectReason::Discarded,
            was_shown: false,
            model_version: None,
        }
    );
    assert_eq!(
        reject_request.rejections[1],
        EditPredictionRejection {
            request_id: "test-2".to_string(),
            reason: EditPredictionRejectReason::Canceled,
            was_shown: true,
            model_version: None,
        }
    );

    // Reaching batch size limit sends without debounce
    ep_store.update(cx, |ep_store, cx| {
        for i in 0..70 {
            ep_store.reject_prediction(
                EditPredictionId(format!("batch-{}", i).into()),
                EditPredictionRejectReason::Discarded,
                false,
                None,
                cx,
            );
        }
    });

    // First MAX/2 items are sent immediately
    cx.run_until_parked();
    let (reject_request, respond_tx) = requests.reject.next().await.unwrap();
    respond_tx.send(()).unwrap();

    assert_eq!(reject_request.rejections.len(), 50);
    assert_eq!(reject_request.rejections[0].request_id, "batch-0");
    assert_eq!(reject_request.rejections[49].request_id, "batch-49");

    // Remaining items are debounced with the next batch
    // NOTE(review): 15s here is presumably >= REJECT_REQUEST_DEBOUNCE; consider
    // using the constant for consistency — confirm.
    cx.executor().advance_clock(Duration::from_secs(15));
    cx.run_until_parked();

    let (reject_request, respond_tx) = requests.reject.next().await.unwrap();
    respond_tx.send(()).unwrap();

    assert_eq!(reject_request.rejections.len(), 20);
    assert_eq!(reject_request.rejections[0].request_id, "batch-50");
    assert_eq!(reject_request.rejections[19].request_id, "batch-69");

    // Request failure
    ep_store.update(cx, |ep_store, cx| {
        ep_store.reject_prediction(
            EditPredictionId("retry-1".into()),
            EditPredictionRejectReason::Discarded,
            false,
            None,
            cx,
        );
    });

    cx.executor().advance_clock(REJECT_REQUEST_DEBOUNCE);
    cx.run_until_parked();

    let (reject_request, _respond_tx) = requests.reject.next().await.unwrap();
    assert_eq!(reject_request.rejections.len(), 1);
    assert_eq!(reject_request.rejections[0].request_id, "retry-1");
    // Simulate failure
    drop(_respond_tx);

    // Add another rejection
    ep_store.update(cx, |ep_store, cx| {
        ep_store.reject_prediction(
            EditPredictionId("retry-2".into()),
            EditPredictionRejectReason::Discarded,
            false,
            None,
            cx,
        );
    });

    cx.executor().advance_clock(REJECT_REQUEST_DEBOUNCE);
    cx.run_until_parked();

    // Retry should include both the failed item and the new one
    let (reject_request, respond_tx) = requests.reject.next().await.unwrap();
    respond_tx.send(()).unwrap();

    assert_eq!(reject_request.rejections.len(), 2);
    assert_eq!(reject_request.rejections[0].request_id, "retry-1");
    assert_eq!(reject_request.rejections[1].request_id, "retry-2");
}
1911
// `zeta::active_buffer_diagnostics` should only return diagnostics whose range
// falls inside the given search range, reporting severity, message, the
// snippet text, and the diagnostic's row/offset ranges within that snippet.
#[gpui::test]
fn test_active_buffer_diagnostics_fetching(cx: &mut TestAppContext) {
    // «…» marks diagnostic ranges; [...] marks the search range.
    let diagnostic_marker: TextRangeMarker = ('«', '»').into();
    let search_range_marker: TextRangeMarker = ('[', ']').into();

    let (text, mut ranges) = marked_text_ranges_by(
        indoc! {r#"
            fn alpha() {
                let «first_value» = 1;
            }

            [fn beta() {
                let «second_value» = 2;
                let third_value = second_value + missing_symbol;
            }ˇ]

            fn gamma() {
                let «fourth_value» = missing_other_symbol;
            }
        "#},
        vec![diagnostic_marker.clone(), search_range_marker.clone()],
    );

    let diagnostic_ranges = ranges.remove(&diagnostic_marker).unwrap_or_default();
    let search_ranges = ranges.remove(&search_range_marker).unwrap_or_default();

    let buffer = cx.new(|cx| Buffer::local(&text, cx));

    // Attach one diagnostic per marked range, with varying severities so only
    // the error inside the search range is expected in the result.
    buffer.update(cx, |buffer, cx| {
        let snapshot = buffer.snapshot();
        let diagnostics = DiagnosticSet::new(
            diagnostic_ranges
                .iter()
                .enumerate()
                .map(|(index, range)| DiagnosticEntry {
                    range: snapshot.offset_to_point_utf16(range.start)
                        ..snapshot.offset_to_point_utf16(range.end),
                    diagnostic: Diagnostic {
                        severity: match index {
                            0 => DiagnosticSeverity::WARNING,
                            1 => DiagnosticSeverity::ERROR,
                            _ => DiagnosticSeverity::HINT,
                        },
                        message: match index {
                            0 => "first warning".to_string(),
                            1 => "second error".to_string(),
                            _ => "third hint".to_string(),
                        },
                        group_id: index + 1,
                        is_primary: true,
                        source_kind: language::DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }),
            &snapshot,
        );
        buffer.update_diagnostics(LanguageServerId(0), diagnostics, cx);
    });

    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let search_range = snapshot.offset_to_point(search_ranges[0].start)
        ..snapshot.offset_to_point(search_ranges[0].end);

    let active_buffer_diagnostics = zeta::active_buffer_diagnostics(&snapshot, search_range, 100);

    // Only the error inside the searched `fn beta` block is returned.
    assert_eq!(
        active_buffer_diagnostics,
        vec![zeta_prompt::ActiveBufferDiagnostic {
            severity: Some(1),
            message: "second error".to_string(),
            snippet: text,
            snippet_buffer_row_range: 5..5,
            diagnostic_range_in_snippet: 61..73,
        }]
    );

    // Second scenario: plain buffer with diagnostics on rows 0, 2, and 4.
    let buffer = cx.new(|cx| {
        Buffer::local(
            indoc! {"
                one
                two
                three
                four
                five
            "},
            cx,
        )
    });

    buffer.update(cx, |buffer, cx| {
        let snapshot = buffer.snapshot();
        let diagnostics = DiagnosticSet::new(
            vec![
                DiagnosticEntry {
                    range: text::PointUtf16::new(0, 0)..text::PointUtf16::new(0, 3),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "row zero".to_string(),
                        group_id: 1,
                        is_primary: true,
                        source_kind: language::DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: text::PointUtf16::new(2, 0)..text::PointUtf16::new(2, 5),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "row two".to_string(),
                        group_id: 2,
                        is_primary: true,
                        source_kind: language::DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: text::PointUtf16::new(4, 0)..text::PointUtf16::new(4, 4),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::INFORMATION,
                        message: "row four".to_string(),
                        group_id: 3,
                        is_primary: true,
                        source_kind: language::DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
            ],
            &snapshot,
        );
        buffer.update_diagnostics(LanguageServerId(0), diagnostics, cx);
    });

    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());

    // Search rows 2..4: the error on row 0 is excluded.
    let active_buffer_diagnostics =
        zeta::active_buffer_diagnostics(&snapshot, Point::new(2, 0)..Point::new(4, 0), 100);

    assert_eq!(
        active_buffer_diagnostics
            .iter()
            .map(|diagnostic| (
                diagnostic.severity,
                diagnostic.message.clone(),
                diagnostic.snippet.clone(),
                diagnostic.snippet_buffer_row_range.clone(),
                diagnostic.diagnostic_range_in_snippet.clone(),
            ))
            .collect::<Vec<_>>(),
        vec![
            (
                Some(2),
                "row two".to_string(),
                "one\ntwo\nthree\nfour\nfive\n".to_string(),
                2..2,
                8..13,
            ),
            (
                Some(3),
                "row four".to_string(),
                "one\ntwo\nthree\nfour\nfive\n".to_string(),
                4..4,
                19..23,
            ),
        ]
    );
}
2078
2079// Generate a model response that would apply the given diff to the active file.
2080fn model_response(request: &PredictEditsV3Request, diff_to_apply: &str) -> PredictEditsV3Response {
2081 let editable_range =
2082 zeta_prompt::excerpt_range_for_format(Default::default(), &request.input.excerpt_ranges).1;
2083 let excerpt = request.input.cursor_excerpt[editable_range.clone()].to_string();
2084 let new_excerpt = apply_diff_to_string(diff_to_apply, &excerpt).unwrap();
2085
2086 PredictEditsV3Response {
2087 request_id: Uuid::new_v4().to_string(),
2088 editable_range,
2089 output: new_excerpt,
2090 model_version: None,
2091 }
2092}
2093
2094fn empty_response() -> PredictEditsV3Response {
2095 PredictEditsV3Response {
2096 request_id: Uuid::new_v4().to_string(),
2097 editable_range: 0..0,
2098 output: String::new(),
2099 model_version: None,
2100 }
2101}
2102
2103fn prompt_from_request(request: &PredictEditsV3Request) -> String {
2104 zeta_prompt::format_zeta_prompt(&request.input, zeta_prompt::ZetaFormat::default())
2105}
2106
2107fn assert_no_predict_request_ready(
2108 requests: &mut mpsc::UnboundedReceiver<(
2109 PredictEditsV3Request,
2110 oneshot::Sender<PredictEditsV3Response>,
2111 )>,
2112) {
2113 if requests.next().now_or_never().flatten().is_some() {
2114 panic!("Unexpected prediction request while throttled.");
2115 }
2116}
2117
/// Receiving ends of the channels through which the fake HTTP client (built in
/// `init_test_with_fake_client`) hands intercepted requests to the test.
struct RequestChannels {
    // One entry per `/predict_edits/v3` request; the paired sender supplies
    // the response the fake server will return.
    predict: mpsc::UnboundedReceiver<(
        PredictEditsV3Request,
        oneshot::Sender<PredictEditsV3Response>,
    )>,
    // One entry per `/predict_edits/reject` request; sending `()` (or dropping
    // the sender to simulate failure) completes the fake HTTP call.
    reject: mpsc::UnboundedReceiver<(RejectEditPredictionsBody, oneshot::Sender<()>)>,
}
2125
/// Set up an `EditPredictionStore` backed by a fake HTTP client that
/// intercepts prediction, rejection, and LLM-token requests, forwarding the
/// first two to the returned `RequestChannels` so tests can inspect them and
/// supply responses.
fn init_test_with_fake_client(
    cx: &mut TestAppContext,
) -> (Entity<EditPredictionStore>, RequestChannels) {
    cx.update(move |cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        zlog::init_test();

        let (predict_req_tx, predict_req_rx) = mpsc::unbounded();
        let (reject_req_tx, reject_req_rx) = mpsc::unbounded();

        let http_client = FakeHttpClient::create({
            move |req| {
                let uri = req.uri().path().to_string();
                let mut body = req.into_body();
                let predict_req_tx = predict_req_tx.clone();
                let reject_req_tx = reject_req_tx.clone();
                async move {
                    let resp = match uri.as_str() {
                        // Token endpoint: always succeeds with a fixed token.
                        "/client/llm_tokens" => serde_json::to_string(&json!({
                            "token": "test"
                        }))
                        .unwrap(),
                        // Prediction endpoint: request bodies are zstd-compressed.
                        // Forward the decoded request to the test and block until
                        // it sends back a response.
                        "/predict_edits/v3" => {
                            let mut buf = Vec::new();
                            body.read_to_end(&mut buf).await.ok();
                            let decompressed = zstd::decode_all(&buf[..]).unwrap();
                            let req = serde_json::from_slice(&decompressed).unwrap();

                            let (res_tx, res_rx) = oneshot::channel();
                            predict_req_tx.unbounded_send((req, res_tx)).unwrap();
                            serde_json::to_string(&res_rx.await?).unwrap()
                        }
                        // Rejection endpoint: plain JSON body; dropping the test's
                        // sender makes `res_rx.await?` fail, simulating an HTTP error.
                        "/predict_edits/reject" => {
                            let mut buf = Vec::new();
                            body.read_to_end(&mut buf).await.ok();
                            let req = serde_json::from_slice(&buf).unwrap();

                            let (res_tx, res_rx) = oneshot::channel();
                            reject_req_tx.unbounded_send((req, res_tx)).unwrap();
                            serde_json::to_string(&res_rx.await?).unwrap()
                        }
                        _ => {
                            panic!("Unexpected path: {}", uri)
                        }
                    };

                    Ok(Response::builder().body(resp.into()).unwrap())
                }
            }
        });

        let client = client::Client::new(Arc::new(FakeSystemClock::new()), http_client, cx);
        client.cloud_client().set_credentials(1, "test".into());

        let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
        language_model::init(user_store.clone(), client.clone(), cx);
        let ep_store = EditPredictionStore::global(&client, &user_store, cx);

        (
            ep_store,
            RequestChannels {
                predict: predict_req_rx,
                reject: reject_req_rx,
            },
        )
    })
}
2194
// `EditPrediction::interpolate` should re-anchor the predicted edits after the
// user edits the buffer: edits the user has already typed are dropped, offsets
// shift with insertions/deletions, and interpolation returns `None` once the
// buffer diverges from the prediction.
#[gpui::test]
async fn test_edit_prediction_basic_interpolation(cx: &mut TestAppContext) {
    let buffer = cx.new(|cx| Buffer::local("Lorem ipsum dolor", cx));
    // Prediction: replace "rem" (2..5) with "REM" and delete "do" (9..11).
    let edits: Arc<[(Range<Anchor>, Arc<str>)]> = cx.update(|cx| {
        to_completion_edits([(2..5, "REM".into()), (9..11, "".into())], &buffer, cx).into()
    });

    let edit_preview = cx
        .read(|cx| buffer.read(cx).preview_edits(edits.clone(), cx))
        .await;

    let prediction = EditPrediction {
        edits,
        cursor_position: None,
        edit_preview,
        buffer: buffer.clone(),
        snapshot: cx.read(|cx| buffer.read(cx).snapshot()),
        id: EditPredictionId("the-id".into()),
        inputs: ZetaPromptInput {
            events: Default::default(),
            related_files: Default::default(),
            active_buffer_diagnostics: vec![],
            cursor_path: Path::new("").into(),
            cursor_excerpt: "".into(),
            cursor_offset_in_excerpt: 0,
            excerpt_start_row: None,
            excerpt_ranges: Default::default(),
            syntax_ranges: None,
            experiment: None,
            in_open_source_repo: false,
            can_collect_data: false,
            repo_url: None,
        },
        buffer_snapshotted_at: Instant::now(),
        response_received_at: Instant::now(),
        model_version: None,
    };

    cx.update(|cx| {
        // Unchanged buffer: edits interpolate to themselves.
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(2..5, "REM".into()), (9..11, "".into())]
        );

        // Deleting the replaced span turns the edit into a pure insertion.
        buffer.update(cx, |buffer, cx| buffer.edit([(2..5, "")], None, cx));
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(2..2, "REM".into()), (6..8, "".into())]
        );

        // Undo restores the original interpolation.
        buffer.update(cx, |buffer, cx| buffer.undo(cx));
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(2..5, "REM".into()), (9..11, "".into())]
        );

        // Typing a prefix of the replacement shrinks the remaining edit.
        buffer.update(cx, |buffer, cx| buffer.edit([(2..5, "R")], None, cx));
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(3..3, "EM".into()), (7..9, "".into())]
        );

        buffer.update(cx, |buffer, cx| buffer.edit([(3..3, "E")], None, cx));
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(4..4, "M".into()), (8..10, "".into())]
        );

        // Completing the replacement leaves only the deletion.
        buffer.update(cx, |buffer, cx| buffer.edit([(4..4, "M")], None, cx));
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(9..11, "".into())]
        );

        // Deleting a typed character brings the edit back.
        buffer.update(cx, |buffer, cx| buffer.edit([(4..5, "")], None, cx));
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(4..4, "M".into()), (8..10, "".into())]
        );

        // Performing the predicted deletion drops that edit too.
        buffer.update(cx, |buffer, cx| buffer.edit([(8..10, "")], None, cx));
        assert_eq!(
            from_completion_edits(
                &prediction.interpolate(&buffer.read(cx).snapshot()).unwrap(),
                &buffer,
                cx
            ),
            vec![(4..4, "M".into())]
        );

        // An edit that conflicts with the prediction invalidates it entirely.
        buffer.update(cx, |buffer, cx| buffer.edit([(4..6, "")], None, cx));
        assert_eq!(prediction.interpolate(&buffer.read(cx).snapshot()), None);
    })
}
2317
#[gpui::test]
async fn test_clean_up_diff(cx: &mut TestAppContext) {
    init_test(cx);

    // The model renamed `word` to `word_1` at two call sites on one line;
    // applying the cleaned-up prediction should yield the model's output.
    assert_eq!(
        apply_edit_prediction(
            indoc! {"
                fn main() {
                    let word_1 = \"lorem\";
                    let range = word.len()..word.len();
                }
            "},
            indoc! {"
                fn main() {
                    let word_1 = \"lorem\";
                    let range = word_1.len()..word_1.len();
                }
            "},
            cx,
        )
        .await,
        indoc! {"
            fn main() {
                let word_1 = \"lorem\";
                let range = word_1.len()..word_1.len();
            }
        "},
    );

    // The model extended a string literal and added the missing semicolon;
    // the applied prediction should again match the model output exactly.
    assert_eq!(
        apply_edit_prediction(
            indoc! {"
                fn main() {
                    let story = \"the quick\"
                }
            "},
            indoc! {"
                fn main() {
                    let story = \"the quick brown fox jumps over the lazy dog\";
                }
            "},
            cx,
        )
        .await,
        indoc! {"
            fn main() {
                let story = \"the quick brown fox jumps over the lazy dog\";
            }
        "},
    );
}
2369
2370#[gpui::test]
2371async fn test_edit_prediction_end_of_buffer(cx: &mut TestAppContext) {
2372 init_test(cx);
2373
2374 let buffer_content = "lorem\n";
2375 let completion_response = "lorem\nipsum\n";
2376
2377 assert_eq!(
2378 apply_edit_prediction(buffer_content, completion_response, cx).await,
2379 "lorem\nipsum\n"
2380 );
2381}
2382
#[gpui::test]
async fn test_edit_prediction_no_spurious_trailing_newline(cx: &mut TestAppContext) {
    // Test that zeta2's newline normalization logic doesn't insert spurious newlines.
    // When the buffer ends without a trailing newline, but the model returns output
    // with a trailing newline, zeta2 should normalize both sides before diffing
    // so no spurious newline is inserted.
    let (ep_store, mut requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());

    // Single line buffer with no trailing newline
    fs.insert_tree(
        "/root",
        json!({
            "foo.txt": "hello"
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project
                .find_project_path(path!("root/foo.txt"), cx)
                .unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    // Place the cursor at the end of "hello" (row 0, column 5).
    let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let position = snapshot.anchor_before(language::Point::new(0, 5));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
    });

    // Intercept the outgoing prediction request so the reply can be
    // hand-crafted below.
    let (request, respond_tx) = requests.predict.next().await.unwrap();

    // Model returns output WITH a trailing newline, even though the buffer doesn't have one.
    // Zeta2 should normalize both sides before diffing, so no spurious newline is inserted.
    let excerpt_length = request.input.cursor_excerpt.len();
    let response = PredictEditsV3Response {
        request_id: Uuid::new_v4().to_string(),
        output: "hello world\n".to_string(),
        editable_range: 0..excerpt_length,
        model_version: None,
    };
    respond_tx.send(response).unwrap();

    cx.run_until_parked();

    // The prediction should insert " world" without adding a newline
    ep_store.update(cx, |ep_store, cx| {
        let prediction = ep_store
            .prediction_at(&buffer, None, &project, cx)
            .expect("should have prediction");
        // Resolve the anchor-based edits to concrete offsets for comparison.
        let edits: Vec<_> = prediction
            .edits
            .iter()
            .map(|(range, text)| {
                let snapshot = buffer.read(cx).snapshot();
                (range.to_offset(&snapshot), text.clone())
            })
            .collect();
        assert_eq!(edits, vec![(5..5, " world".into())]);
    });
}
2450
2451fn init_test(cx: &mut TestAppContext) {
2452 cx.update(|cx| {
2453 let settings_store = SettingsStore::test(cx);
2454 cx.set_global(settings_store);
2455 });
2456}
2457
2458async fn apply_edit_prediction(
2459 buffer_content: &str,
2460 completion_response: &str,
2461 cx: &mut TestAppContext,
2462) -> String {
2463 let fs = project::FakeFs::new(cx.executor());
2464 let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
2465 let buffer = cx.new(|cx| Buffer::local(buffer_content, cx));
2466 let (ep_store, response) = make_test_ep_store(&project, cx).await;
2467 *response.lock() = completion_response.to_string();
2468 let edit_prediction = run_edit_prediction(&buffer, &project, &ep_store, cx).await;
2469 buffer.update(cx, |buffer, cx| {
2470 buffer.edit(edit_prediction.edits.iter().cloned(), None, cx)
2471 });
2472 buffer.read_with(cx, |buffer, _| buffer.text())
2473}
2474
2475async fn run_edit_prediction(
2476 buffer: &Entity<Buffer>,
2477 project: &Entity<Project>,
2478 ep_store: &Entity<EditPredictionStore>,
2479 cx: &mut TestAppContext,
2480) -> EditPrediction {
2481 let cursor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(1, 0)));
2482 ep_store.update(cx, |ep_store, cx| {
2483 ep_store.register_buffer(buffer, &project, cx)
2484 });
2485 cx.background_executor.run_until_parked();
2486 let prediction_task = ep_store.update(cx, |ep_store, cx| {
2487 ep_store.request_prediction(&project, buffer, cursor, Default::default(), cx)
2488 });
2489 prediction_task.await.unwrap().unwrap().prediction.unwrap()
2490}
2491
/// Builds an `EditPredictionStore` wired to a fake HTTP client.
///
/// The fake server answers `/client/llm_tokens` with a static token and
/// `/predict_edits/v3` with the contents of the returned
/// `Arc<Mutex<String>>`, so tests can swap the model output at any time
/// by replacing the string.
async fn make_test_ep_store(
    project: &Entity<Project>,
    cx: &mut TestAppContext,
) -> (Entity<EditPredictionStore>, Arc<Mutex<String>>) {
    let default_response = "hello world\n".to_string();
    let completion_response: Arc<Mutex<String>> = Arc::new(Mutex::new(default_response));
    let http_client = FakeHttpClient::create({
        let completion_response = completion_response.clone();
        let mut next_request_id = 0;
        move |req| {
            let completion_response = completion_response.clone();
            let method = req.method().clone();
            let uri = req.uri().path().to_string();
            let mut body = req.into_body();
            async move {
                match (method, uri.as_str()) {
                    // Token refresh endpoint: always succeeds with a fixed token.
                    (Method::POST, "/client/llm_tokens") => Ok(http_client::Response::builder()
                        .status(200)
                        .body(
                            serde_json::to_string(&CreateLlmTokenResponse {
                                token: LlmToken("the-llm-token".to_string()),
                            })
                            .unwrap()
                            .into(),
                        )
                        .unwrap()),
                    // Prediction endpoint: the request body is zstd-compressed JSON.
                    (Method::POST, "/predict_edits/v3") => {
                        let mut buf = Vec::new();
                        body.read_to_end(&mut buf).await.ok();
                        let decompressed = zstd::decode_all(&buf[..]).unwrap();
                        let req: PredictEditsV3Request =
                            serde_json::from_slice(&decompressed).unwrap();

                        // Unique id per request so responses are distinguishable.
                        next_request_id += 1;
                        Ok(http_client::Response::builder()
                            .status(200)
                            .body(
                                serde_json::to_string(&PredictEditsV3Response {
                                    request_id: format!("request-{next_request_id}"),
                                    editable_range: 0..req.input.cursor_excerpt.len(),
                                    output: completion_response.lock().clone(),
                                    model_version: None,
                                })
                                .unwrap()
                                .into(),
                            )
                            .unwrap())
                    }
                    _ => Ok(http_client::Response::builder()
                        .status(404)
                        .body("Not Found".to_string().into())
                        .unwrap()),
                }
            }
        }
    });

    let client = cx.update(|cx| Client::new(Arc::new(FakeSystemClock::new()), http_client, cx));
    let user_store = cx.update(|cx| cx.new(|cx| client::UserStore::new(client.clone(), cx)));
    cx.update(|cx| {
        RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
    });
    let _server = FakeServer::for_client(42, &client, cx).await;

    let ep_store = cx.new(|cx| {
        let mut ep_store = EditPredictionStore::new(client, project.read(cx).user_store(), cx);
        ep_store.set_edit_prediction_model(EditPredictionModel::Zeta);

        // Seed a license-detection watcher per worktree so eligibility
        // checks that consult them can run in tests.
        let worktrees = project.read(cx).worktrees(cx).collect::<Vec<_>>();
        for worktree in worktrees {
            let worktree_id = worktree.read(cx).id();
            ep_store
                .get_or_init_project(project, cx)
                .license_detection_watchers
                .entry(worktree_id)
                .or_insert_with(|| Rc::new(LicenseDetectionWatcher::new(&worktree, cx)));
        }

        ep_store
    });

    (ep_store, completion_response)
}
2575
2576fn to_completion_edits(
2577 iterator: impl IntoIterator<Item = (Range<usize>, Arc<str>)>,
2578 buffer: &Entity<Buffer>,
2579 cx: &App,
2580) -> Vec<(Range<Anchor>, Arc<str>)> {
2581 let buffer = buffer.read(cx);
2582 iterator
2583 .into_iter()
2584 .map(|(range, text)| {
2585 (
2586 buffer.anchor_after(range.start)..buffer.anchor_before(range.end),
2587 text,
2588 )
2589 })
2590 .collect()
2591}
2592
2593fn from_completion_edits(
2594 editor_edits: &[(Range<Anchor>, Arc<str>)],
2595 buffer: &Entity<Buffer>,
2596 cx: &App,
2597) -> Vec<(Range<usize>, Arc<str>)> {
2598 let buffer = buffer.read(cx);
2599 editor_edits
2600 .iter()
2601 .map(|(range, text)| {
2602 (
2603 range.start.to_offset(buffer)..range.end.to_offset(buffer),
2604 text.clone(),
2605 )
2606 })
2607 .collect()
2608}
2609
/// When every HTTP request fails with 401 (unauthenticated client) and no
/// custom prediction URL is in play, requesting a prediction should
/// surface an error rather than a prediction.
#[gpui::test]
async fn test_unauthenticated_without_custom_url_blocks_prediction_impl(cx: &mut TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/project",
        serde_json::json!({
            "main.rs": "fn main() {\n \n}\n"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;

    // Every request — including the LLM-token fetch — is rejected with 401.
    let http_client = FakeHttpClient::create(|_req| async move {
        Ok(gpui::http_client::Response::builder()
            .status(401)
            .body("Unauthorized".into())
            .unwrap())
    });

    let client =
        cx.update(|cx| client::Client::new(Arc::new(FakeSystemClock::new()), http_client, cx));
    let user_store = cx.update(|cx| cx.new(|cx| client::UserStore::new(client.clone(), cx)));
    cx.update(|cx| {
        language_model::RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
    });

    let ep_store = cx.new(|cx| EditPredictionStore::new(client, project.read(cx).user_store(), cx));

    let buffer = project
        .update(cx, |project, cx| {
            let path = project
                .find_project_path(path!("/project/main.rs"), cx)
                .unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    // Cursor inside the function body on line 1.
    let cursor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(1, 4)));
    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx)
    });
    cx.background_executor.run_until_parked();

    let completion_task = ep_store.update(cx, |ep_store, cx| {
        ep_store.set_edit_prediction_model(EditPredictionModel::Zeta);
        ep_store.request_prediction(&project, &buffer, cursor, Default::default(), cx)
    });

    let result = completion_task.await;
    assert!(
        result.is_err(),
        "Without authentication and without custom URL, prediction should fail"
    );
}
2668
/// Diagnostic "jump" targets must skip regions near a collaborator's
/// cursor — unless the local cursor is also near it — and, when searching
/// across files, must skip files a collaborator is editing.
#[gpui::test]
async fn test_diagnostic_jump_excludes_collaborator_regions(cx: &mut TestAppContext) {
    // Simulate a remote collaborator (replica 10) placing their cursor at
    // the start of `row` by applying an `UpdateSelections` operation.
    fn set_collaborator_cursor(buffer: &Entity<Buffer>, row: u32, cx: &mut TestAppContext) {
        let collab_replica = clock::ReplicaId::new(10);
        let anchor = buffer.read_with(cx, |buffer, _| {
            buffer.snapshot().anchor_before(Point::new(row, 0))
        });
        let selections: Arc<[Selection<Anchor>]> = Arc::new([Selection {
            id: 1,
            start: anchor,
            end: anchor,
            reversed: false,
            goal: SelectionGoal::None,
        }]);
        buffer.update(cx, |buffer, cx| {
            buffer.apply_ops(
                [Operation::UpdateSelections {
                    selections,
                    lamport_timestamp: clock::Lamport {
                        replica_id: collab_replica,
                        value: 1,
                    },
                    line_mode: false,
                    cursor_shape: CursorShape::Bar,
                }],
                cx,
            );
        });
    }

    // Publish an ERROR diagnostic spanning columns 0..5 of each row in
    // `rows` for the file at `uri_path`, as if pushed by a language server.
    fn publish_diagnostics(
        uri_path: &'static str,
        rows: &[u32],
        project: &Entity<Project>,
        cx: &mut TestAppContext,
    ) {
        let diagnostics: Vec<_> = rows
            .iter()
            .map(|&row| lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(row, 0), lsp::Position::new(row, 5)),
                severity: Some(lsp::DiagnosticSeverity::ERROR),
                message: format!("error at row {row}"),
                ..Default::default()
            })
            .collect();
        project.update(cx, |project, cx| {
            project.lsp_store().update(cx, |lsp_store, cx| {
                lsp_store
                    .update_diagnostics(
                        LanguageServerId(0),
                        lsp::PublishDiagnosticsParams {
                            uri: lsp::Uri::from_file_path(uri_path).expect("invalid uri"),
                            diagnostics,
                            version: None,
                        },
                        None,
                        language::DiagnosticSourceKind::Pushed,
                        &[],
                        cx,
                    )
                    .expect("failed to update diagnostics");
            });
        });
    }

    init_test(cx);

    // 60 lines so the diagnostics at rows 3, 25, and 50 are far apart.
    let mut lines = String::new();
    for i in 0..60 {
        lines.push_str(&format!("line {i}\n"));
    }

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/root",
        json!({
            "active.txt": lines,
            "collab_file.txt": "error here\nsecond line\n",
            "free_file.txt": "another error\nsecond line\n",
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let active_buffer = project
        .update(cx, |project, cx| {
            let path = project
                .find_project_path(path!("/root/active.txt"), cx)
                .expect("active.txt not found");
            project.set_active_path(Some(path.clone()), cx);
            project.open_buffer(path, cx)
        })
        .await
        .expect("failed to open active buffer");

    // Collaborator sits at row 5; diagnostics exist at rows 3, 25, and 50.
    set_collaborator_cursor(&active_buffer, 5, cx);

    publish_diagnostics(path!("/root/active.txt"), &[3, 25, 50], &project, cx);

    cx.run_until_parked();

    // Case 1: local cursor at row 25. Row 3 is near the collaborator but
    // far from us, so it must be excluded; rows 25/50 are acceptable.
    let cursor_point = Point::new(25, 0);
    let empty_search_range: Range<Point> = Default::default();

    let snapshot = active_buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let result = EditPredictionStore::next_diagnostic_location(
        active_buffer.clone(),
        &snapshot,
        empty_search_range.clone(),
        cursor_point,
        &project,
        &mut cx.to_async(),
    )
    .await
    .expect("next_diagnostic_location failed");

    let (result_buffer, result_anchor) = result.expect("expected a diagnostic location");
    assert_eq!(result_buffer.entity_id(), active_buffer.entity_id());
    let result_row = result_buffer.read_with(cx, |buffer, _| {
        result_anchor.to_point(&buffer.snapshot()).row
    });
    assert_ne!(
        result_row, 3,
        "row 3 is near collaborator (row 5) but far from local cursor (row 25), should be excluded"
    );
    assert!(
        result_row == 25 || result_row == 50,
        "expected row 25 or 50, got {result_row}"
    );

    // Case 2: local cursor at row 4, right next to the collaborator —
    // proximity to our own cursor overrides the collaborator exclusion.
    let snapshot_near = active_buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let near_cursor_point = Point::new(4, 0);
    let result_near = EditPredictionStore::next_diagnostic_location(
        active_buffer.clone(),
        &snapshot_near,
        empty_search_range.clone(),
        near_cursor_point,
        &project,
        &mut cx.to_async(),
    )
    .await
    .expect("next_diagnostic_location failed");

    let (_, near_anchor) = result_near.expect("expected a diagnostic location when both are near");
    let near_row =
        active_buffer.read_with(cx, |buffer, _| near_anchor.to_point(&buffer.snapshot()).row);
    assert_eq!(
        near_row, 3,
        "row 3 should be included when local cursor (row 4) is also near the collaborator"
    );

    // Case 3: local cursor at row 50 — far from the collaborator, so the
    // co-located diagnostic at row 50 is picked.
    let snapshot_far = active_buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let far_cursor_point = Point::new(50, 0);
    let result_far = EditPredictionStore::next_diagnostic_location(
        active_buffer.clone(),
        &snapshot_far,
        empty_search_range.clone(),
        far_cursor_point,
        &project,
        &mut cx.to_async(),
    )
    .await
    .expect("next_diagnostic_location failed");

    let (_, far_anchor) = result_far.expect("expected a diagnostic location");
    let far_row =
        active_buffer.read_with(cx, |buffer, _| far_anchor.to_point(&buffer.snapshot()).row);
    assert_eq!(
        far_row, 50,
        "row 50 is near local cursor (row 50) and far from collaborator, should be picked"
    );

    // Case 4: cross-file search. Both other files have a diagnostic at
    // row 0, but a collaborator is editing collab_file.txt.
    publish_diagnostics(path!("/root/collab_file.txt"), &[0], &project, cx);
    publish_diagnostics(path!("/root/free_file.txt"), &[0], &project, cx);
    cx.run_until_parked();

    let collab_buffer = project
        .update(cx, |project, cx| {
            let path = project
                .find_project_path(path!("/root/collab_file.txt"), cx)
                .expect("collab_file.txt not found");
            project.open_buffer(path, cx)
        })
        .await
        .expect("failed to open collab buffer");

    set_collaborator_cursor(&collab_buffer, 0, cx);
    cx.run_until_parked();

    // Search range spanning the whole active buffer — named to rule out
    // same-file hits so the search must move to another file (presumably
    // diagnostics inside this range are skipped; confirm against impl).
    let no_same_file_search_range = Point::new(0, 0)..Point::new(59, 0);
    let snapshot_cross = active_buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let result_cross = EditPredictionStore::next_diagnostic_location(
        active_buffer.clone(),
        &snapshot_cross,
        no_same_file_search_range,
        Point::new(0, 0),
        &project,
        &mut cx.to_async(),
    )
    .await
    .expect("cross-file next_diagnostic_location failed");

    let (cross_buffer, _) = result_cross.expect("expected a cross-file diagnostic location");
    let cross_path = cross_buffer.read_with(cx, |buffer, cx| {
        buffer
            .file()
            .expect("buffer should have a file")
            .full_path(cx)
    });
    assert_eq!(
        cross_path,
        Path::new(path!("root/free_file.txt")),
        "should skip collab_file.txt (has collaborator) and pick free_file.txt"
    );
}
2884
/// Exercises the "settled prediction" worker: a prediction's settled
/// callback should fire only after its editable region has been free of
/// edits for `EDIT_PREDICTION_SETTLED_QUIESCENCE`, and edits in one region
/// must not delay settling of a prediction in a disjoint region.
#[gpui::test]
async fn test_edit_prediction_settled(cx: &mut TestAppContext) {
    let (ep_store, _requests) = init_test_with_fake_client(cx);
    let fs = FakeFs::new(cx.executor());

    // Buffer with two clearly separated regions:
    // Region A = lines 0-9 (offsets 0..50)
    // Region B = lines 20-29 (offsets 105..155)
    // A big gap in between so edits in one region never overlap the other.
    let mut content = String::new();
    for i in 0..30 {
        content.push_str(&format!("line {i:02}\n"));
    }

    fs.insert_tree(
        "/root",
        json!({
            "foo.md": content.clone()
        }),
    )
    .await;
    let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
            project.open_buffer(path, cx)
        })
        .await
        .unwrap();

    // Record (prediction id, settled text) pairs as the store reports them.
    type SettledEventRecord = (EditPredictionId, String);
    let settled_events: Arc<Mutex<Vec<SettledEventRecord>>> = Arc::new(Mutex::new(Vec::new()));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.register_buffer(&buffer, &project, cx);

        let settled_events = settled_events.clone();
        ep_store.settled_event_callback = Some(Box::new(move |id, text| {
            settled_events.lock().push((id, text));
        }));
    });

    // --- Phase 1: edit in region A and enqueue prediction A ---

    buffer.update(cx, |buffer, cx| {
        // Edit at the start of line 0.
        buffer.edit(vec![(0..0, "ADDED ")], None, cx);
    });
    cx.run_until_parked();

    let snapshot_a = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());

    // Region A: first 10 lines of the buffer.
    let editable_region_a = 0..snapshot_a.point_to_offset(Point::new(10, 0));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.enqueue_settled_prediction(
            EditPredictionId("prediction-a".into()),
            &project,
            &buffer,
            &snapshot_a,
            editable_region_a.clone(),
            None,
            cx,
        );
    });

    // --- Phase 2: repeatedly edit in region A to keep it unsettled ---

    // Let the worker process the channel message before we start advancing.
    cx.run_until_parked();

    let mut region_a_edit_offset = 5;
    for _ in 0..3 {
        // Edit inside region A (not at the boundary) so `last_edit_at` is
        // updated before the worker's next wake.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(
                vec![(region_a_edit_offset..region_a_edit_offset, "x")],
                None,
                cx,
            );
        });
        region_a_edit_offset += 1;
        cx.run_until_parked();

        cx.executor()
            .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE / 2);
        cx.run_until_parked();
        assert!(
            settled_events.lock().is_empty(),
            "no settled events should fire while region A is still being edited"
        );
    }

    // Still nothing settled.
    assert!(settled_events.lock().is_empty());

    // --- Phase 3: edit in distinct region B, enqueue prediction B ---
    // Advance a small amount so B's quiescence window starts later than A's,
    // but not so much that A settles (A's last edit was at the start of
    // iteration 3, and it needs a full Q to settle).
    cx.executor()
        .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE / 4);
    cx.run_until_parked();
    assert!(settled_events.lock().is_empty());

    let snapshot_b = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let line_20_offset = snapshot_b.point_to_offset(Point::new(20, 0));

    buffer.update(cx, |buffer, cx| {
        buffer.edit(vec![(line_20_offset..line_20_offset, "NEW ")], None, cx);
    });
    cx.run_until_parked();

    let snapshot_b2 = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    let editable_region_b = line_20_offset..snapshot_b2.point_to_offset(Point::new(25, 0));

    ep_store.update(cx, |ep_store, cx| {
        ep_store.enqueue_settled_prediction(
            EditPredictionId("prediction-b".into()),
            &project,
            &buffer,
            &snapshot_b2,
            editable_region_b.clone(),
            None,
            cx,
        );
    });

    cx.run_until_parked();
    assert!(
        settled_events.lock().is_empty(),
        "neither prediction should have settled yet"
    );

    // --- Phase 4: let enough time pass for region A to settle ---
    // A's last edit was at T_a (during the last loop iteration). The worker is
    // sleeping until T_a + Q. We advance just enough to reach that wake time
    // (Q/4 since we already advanced Q/4 in phase 3 on top of the loop's
    // 3*Q/2). At that point A has been quiet for Q and settles, but B was
    // enqueued only Q/4 ago and stays pending.
    cx.executor()
        .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE / 4);
    cx.run_until_parked();

    {
        let events = settled_events.lock().clone();
        assert_eq!(
            events.len(),
            1,
            "prediction and capture_sample for A should have settled, got: {events:?}"
        );
        assert_eq!(events[0].0, EditPredictionId("prediction-a".into()));
    }

    // --- Phase 5: let more time pass for region B to settle ---
    // B's last edit was Q/4 before A settled. The worker rescheduled to
    // B's last_edit_at + Q, which is 3Q/4 from now.
    cx.executor()
        .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE * 3 / 4);
    cx.run_until_parked();

    {
        let events = settled_events.lock().clone();
        assert_eq!(
            events.len(),
            2,
            "both prediction and capture_sample settled events should be emitted for each request, got: {events:?}"
        );
        assert_eq!(events[1].0, EditPredictionId("prediction-b".into()));
    }
}
3059
// Runs once at test-binary startup (via `ctor`) to initialize logging
// for every test in this file.
#[ctor::ctor]
fn init_logger() {
    zlog::init_test();
}