// zeta.rs

  1use crate::{
  2    CurrentEditPrediction, DebugEvent, EditPredictionFinishedDebugEvent, EditPredictionId,
  3    EditPredictionModelInput, EditPredictionStartedDebugEvent, EditPredictionStore, StoredEvent,
  4    ZedUpdateRequiredError,
  5    cursor_excerpt::{self, compute_cursor_excerpt, compute_syntax_ranges},
  6    prediction::EditPredictionResult,
  7};
  8use anyhow::Result;
  9use cloud_llm_client::{
 10    AcceptEditPredictionBody, EditPredictionRejectReason, predict_edits_v3::RawCompletionRequest,
 11};
 12use edit_prediction_types::PredictedCursorPosition;
 13use gpui::{App, AppContext as _, Entity, Task, WeakEntity, prelude::*};
 14use language::{
 15    Buffer, BufferSnapshot, DiagnosticSeverity, OffsetRangeExt as _, ToOffset as _,
 16    language_settings::all_language_settings, text_diff,
 17};
 18use release_channel::AppVersion;
 19use settings::EditPredictionPromptFormat;
 20use text::{Anchor, Bias, Point};
 21use ui::SharedString;
 22use workspace::notifications::{ErrorMessagePrompt, NotificationId, show_app_notification};
 23use zeta_prompt::{ParsedOutput, ZetaPromptInput};
 24
 25use std::{env, ops::Range, path::Path, sync::Arc};
 26use zeta_prompt::{
 27    ZetaFormat, format_zeta_prompt, get_prefill, parse_zeta2_model_output,
 28    parsed_output_from_editable_region, prompt_input_contains_special_tokens,
 29    stop_tokens_for_format,
 30    zeta1::{self, EDITABLE_REGION_END_MARKER},
 31};
 32
 33use crate::open_ai_compatible::{
 34    load_open_ai_compatible_api_key_if_needed, send_custom_server_request,
 35};
 36
/// Requests an edit prediction for `position` in `buffer` using one of the
/// Zeta backends, returning a task that resolves to `Ok(None)` when no
/// prediction is produced.
///
/// Backend selection, in priority order:
/// 1. A custom server (Ollama or an OpenAI-compatible API) when the provider
///    settings configure one, using the Zeta1 or Zeta2 prompt format.
/// 2. The raw completion endpoint when the store carries a `zeta2_raw_config`.
/// 3. Otherwise the V3 endpoint, which selects model/version server-side.
///
/// When `can_collect_data` is set, a successful prediction is additionally
/// enqueued as a settled prediction (optionally capturing an example from
/// `capture_data`) once resolved.
pub fn request_prediction_with_zeta(
    store: &mut EditPredictionStore,
    EditPredictionModelInput {
        buffer,
        snapshot,
        position,
        related_files,
        events,
        debug_tx,
        mode,
        trigger,
        project,
        diagnostic_search_range,
        can_collect_data,
        is_open_source,
        ..
    }: EditPredictionModelInput,
    capture_data: Option<Vec<StoredEvent>>,
    cx: &mut Context<EditPredictionStore>,
) -> Task<Result<Option<EditPredictionResult>>> {
    // Only the Ollama and OpenAI-compatible providers carry custom-server
    // settings; every other provider falls through to the Zed endpoints.
    let settings = &all_language_settings(None, cx).edit_predictions;
    let provider = settings.provider;
    let custom_server_settings = match provider {
        settings::EditPredictionProvider::Ollama => settings.ollama.clone(),
        settings::EditPredictionProvider::OpenAiCompatibleApi => {
            settings.open_ai_compatible_api.clone()
        }
        _ => None,
    };

    // Capture everything the background request needs up front, while we still
    // have `&mut Context` access on the foreground.
    let http_client = cx.http_client();
    let request_start = cx.background_executor().now();
    let raw_config = store.zeta2_raw_config().cloned();
    let preferred_experiment = store.preferred_experiment().map(|s| s.to_owned());
    let open_ai_compatible_api_key = load_open_ai_compatible_api_key_if_needed(provider, cx);

    // Untitled buffers get a placeholder path in the prompt.
    let excerpt_path: Arc<Path> = snapshot
        .file()
        .map(|file| -> Arc<Path> { file.full_path(cx).into() })
        .unwrap_or_else(|| Arc::from(Path::new("untitled")));

    // Only resolve the repository URL when data collection is permitted.
    let repo_url = if can_collect_data {
        let buffer_id = buffer.read(cx).remote_id();
        project
            .read(cx)
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer_id, cx)
            .and_then(|(repo, _)| repo.read(cx).default_remote_url())
    } else {
        None
    };
    let client = store.client.clone();
    let llm_token = store.llm_token.clone();
    let organization_id = store
        .user_store
        .read(cx)
        .current_organization()
        .map(|organization| organization.id.clone());
    let app_version = AppVersion::global(cx);

    // Intermediate result produced on the background thread; anchors are
    // turned into an `EditPredictionResult` back on the foreground.
    struct Prediction {
        prompt_input: ZetaPromptInput,
        buffer: Entity<Buffer>,
        snapshot: BufferSnapshot,
        edits: Vec<(Range<Anchor>, Arc<str>)>,
        cursor_position: Option<PredictedCursorPosition>,
        editable_range_in_buffer: Range<usize>,
        model_version: Option<String>,
    }

    let request_task = cx.background_spawn({
        async move {
            // The raw config's format wins when present; otherwise use the
            // default Zeta format.
            let zeta_version = raw_config
                .as_ref()
                .map(|config| config.format)
                .unwrap_or(ZetaFormat::default());

            let cursor_offset = position.to_offset(&snapshot);
            let (full_context_offset_range, prompt_input) = zeta2_prompt_input(
                &snapshot,
                related_files,
                events,
                diagnostic_search_range,
                excerpt_path,
                cursor_offset,
                preferred_experiment,
                is_open_source,
                can_collect_data,
                repo_url,
            );

            // Refuse to send buffer content that embeds the model's own
            // control tokens, which would corrupt the prompt.
            if prompt_input_contains_special_tokens(&prompt_input, zeta_version) {
                return Err(anyhow::anyhow!("prompt contains special tokens"));
            }

            let formatted_prompt = format_zeta_prompt(&prompt_input, zeta_version);

            if let Some(debug_tx) = &debug_tx {
                debug_tx
                    .unbounded_send(DebugEvent::EditPredictionStarted(
                        EditPredictionStartedDebugEvent {
                            buffer: buffer.downgrade(),
                            prompt: formatted_prompt.clone(),
                            position,
                        },
                    ))
                    .ok();
            }

            log::trace!("Sending edit prediction request");

            // Each backend branch yields (request id, parsed model output,
            // model version, usage). `Ok((None, None))` early returns below
            // mean "no prediction, no usage".
            let Some((request_id, output, model_version, usage)) =
                (if let Some(custom_settings) = &custom_server_settings {
                    // NOTE(review): output budget is 4x the configured max
                    // output tokens — confirm this factor is intentional.
                    let max_tokens = custom_settings.max_output_tokens * 4;

                    Some(match custom_settings.prompt_format {
                        EditPredictionPromptFormat::Zeta => {
                            // Zeta1-style prompt: explicit editable region with
                            // an end marker used as the stop sequence (with
                            // trailing-newline variants).
                            let ranges = &prompt_input.excerpt_ranges;
                            let editable_range_in_excerpt = ranges.editable_350.clone();
                            let prompt = zeta1::format_zeta1_from_input(
                                &prompt_input,
                                editable_range_in_excerpt.clone(),
                                ranges.editable_350_context_150.clone(),
                            );
                            let stop_tokens = vec![
                                EDITABLE_REGION_END_MARKER.to_string(),
                                format!("{EDITABLE_REGION_END_MARKER}\n"),
                                format!("{EDITABLE_REGION_END_MARKER}\n\n"),
                                format!("{EDITABLE_REGION_END_MARKER}\n\n\n"),
                            ];

                            let (response_text, request_id) = send_custom_server_request(
                                provider,
                                custom_settings,
                                prompt,
                                max_tokens,
                                stop_tokens,
                                open_ai_compatible_api_key.clone(),
                                &http_client,
                            )
                            .await?;

                            let request_id = EditPredictionId(request_id.into());
                            let output_text = zeta1::clean_zeta1_model_output(&response_text);
                            let parsed_output = output_text.map(|text| ParsedOutput {
                                new_editable_region: text,
                                range_in_excerpt: editable_range_in_excerpt,
                                cursor_offset_in_new_editable_region: None,
                            });

                            (request_id, parsed_output, None, None)
                        }
                        EditPredictionPromptFormat::Zeta2 => {
                            let Some(prompt) = formatted_prompt.clone() else {
                                return Ok((None, None));
                            };
                            // The prefill is appended to the prompt and later
                            // prepended to the response before parsing.
                            let prefill = get_prefill(&prompt_input, zeta_version);
                            let prompt = format!("{prompt}{prefill}");

                            let (response_text, request_id) = send_custom_server_request(
                                provider,
                                custom_settings,
                                prompt,
                                max_tokens,
                                stop_tokens_for_format(zeta_version)
                                    .iter()
                                    .map(|token| token.to_string())
                                    .collect(),
                                open_ai_compatible_api_key.clone(),
                                &http_client,
                            )
                            .await?;

                            let request_id = EditPredictionId(request_id.into());
                            let output_text = if response_text.is_empty() {
                                None
                            } else {
                                let output = format!("{prefill}{response_text}");
                                Some(parse_zeta2_model_output(
                                    &output,
                                    zeta_version,
                                    &prompt_input,
                                )?)
                            };

                            (request_id, output_text, None, None)
                        }
                        _ => anyhow::bail!("unsupported prompt format"),
                    })
                } else if let Some(config) = &raw_config {
                    // Raw completion endpoint: caller-specified model/format.
                    let Some(prompt) = format_zeta_prompt(&prompt_input, config.format) else {
                        return Ok((None, None));
                    };
                    let prefill = get_prefill(&prompt_input, config.format);
                    let prompt = format!("{prompt}{prefill}");
                    // Fall back to the lowercased format name as environment.
                    let environment = config
                        .environment
                        .clone()
                        .or_else(|| Some(config.format.to_string().to_lowercase()));
                    let request = RawCompletionRequest {
                        model: config.model_id.clone().unwrap_or_default(),
                        prompt,
                        temperature: None,
                        stop: stop_tokens_for_format(config.format)
                            .iter()
                            .map(|token| std::borrow::Cow::Borrowed(*token))
                            .collect(),
                        max_tokens: Some(2048),
                        environment,
                    };

                    let (mut response, usage) = EditPredictionStore::send_raw_llm_request(
                        request,
                        client,
                        None,
                        llm_token,
                        organization_id,
                        app_version,
                    )
                    .await?;

                    let request_id = EditPredictionId(response.id.clone().into());
                    // Only the last choice is consumed; no choices means no
                    // prediction for this request id.
                    let output = if let Some(choice) = response.choices.pop() {
                        let response = &choice.text;
                        let output = format!("{prefill}{response}");
                        Some(parse_zeta2_model_output(
                            &output,
                            config.format,
                            &prompt_input,
                        )?)
                    } else {
                        None
                    };

                    Some((request_id, output, None, usage))
                } else {
                    // Use V3 endpoint - server handles model/version selection and suffix stripping
                    let (response, usage) = EditPredictionStore::send_v3_request(
                        prompt_input.clone(),
                        client,
                        llm_token,
                        organization_id,
                        app_version,
                        trigger,
                        mode,
                    )
                    .await?;

                    let request_id = EditPredictionId(response.request_id.into());
                    let output_text = Some(response.output).filter(|s| !s.is_empty());
                    let model_version = response.model_version;
                    let parsed_output = parsed_output_from_editable_region(
                        response.editable_range,
                        output_text.unwrap_or_default(),
                    );

                    Some((request_id, Some(parsed_output), model_version, usage))
                })
            else {
                return Ok((None, None));
            };

            log::trace!("Got edit prediction response");

            // A request id without parsed output is reported as an empty
            // prediction (the foreground maps it to `Empty` rejection).
            let Some(ParsedOutput {
                new_editable_region: mut output_text,
                range_in_excerpt: editable_range_in_excerpt,
                cursor_offset_in_new_editable_region: cursor_offset_in_output,
            }) = output
            else {
                return Ok((Some((request_id, None)), None));
            };

            // Translate the editable range from excerpt-relative to
            // buffer-relative offsets.
            let editable_range_in_buffer = editable_range_in_excerpt.start
                + full_context_offset_range.start
                ..editable_range_in_excerpt.end + full_context_offset_range.start;

            let mut old_text = snapshot
                .text_for_range(editable_range_in_buffer.clone())
                .collect::<String>();

            if let Some(debug_tx) = &debug_tx {
                debug_tx
                    .unbounded_send(DebugEvent::EditPredictionFinished(
                        EditPredictionFinishedDebugEvent {
                            buffer: buffer.downgrade(),
                            position,
                            model_output: Some(output_text.clone()),
                        },
                    ))
                    .ok();
            }

            // Normalize both sides to end with a newline so the diff doesn't
            // produce a spurious trailing edit.
            if !output_text.is_empty() && !output_text.ends_with('\n') {
                output_text.push('\n');
            }
            if !old_text.is_empty() && !old_text.ends_with('\n') {
                old_text.push('\n');
            }

            let (edits, cursor_position) = compute_edits_and_cursor_position(
                old_text,
                &output_text,
                editable_range_in_buffer.start,
                cursor_offset_in_output,
                &snapshot,
            );

            anyhow::Ok((
                Some((
                    request_id,
                    Some(Prediction {
                        prompt_input,
                        buffer,
                        snapshot: snapshot.clone(),
                        edits,
                        cursor_position,
                        editable_range_in_buffer,
                        model_version,
                    }),
                )),
                usage,
            ))
        }
    });

    // Foreground continuation: record usage, surface update-required errors,
    // and build the final `EditPredictionResult`.
    cx.spawn(async move |this, cx| {
        let Some((id, prediction)) = handle_api_response(&this, request_task.await, cx)? else {
            return Ok(None);
        };
        let request_duration = cx.background_executor().now() - request_start;

        let Some(Prediction {
            prompt_input: inputs,
            buffer: edited_buffer,
            snapshot: edited_buffer_snapshot,
            edits,
            cursor_position,
            editable_range_in_buffer,
            model_version,
        }) = prediction
        else {
            // The backend answered but produced no edits.
            return Ok(Some(EditPredictionResult {
                id,
                e2e_latency: request_duration,
                prediction: Err(EditPredictionRejectReason::Empty),
            }));
        };

        let result = EditPredictionResult::new(
            id,
            &edited_buffer,
            &edited_buffer_snapshot,
            edits.into(),
            cursor_position,
            inputs,
            model_version,
            request_duration,
            cx,
        )
        .await;

        // With consent, capture an example (when `capture_data` is provided)
        // and enqueue the settled prediction in a detached task.
        if can_collect_data && let Ok(prediction) = &result.prediction {
            let weak_this = this.clone();
            let request_id = prediction.id.clone();
            let edited_buffer = edited_buffer.clone();
            let edited_buffer_snapshot = edited_buffer_snapshot.clone();
            let editable_range_in_buffer = editable_range_in_buffer.clone();
            let edit_preview = prediction.edit_preview.clone();
            let example_task = capture_data.and_then(|stored_events| {
                cx.update(|cx| {
                    crate::capture_example(
                        project.clone(),
                        edited_buffer.clone(),
                        position,
                        stored_events,
                        false,
                        cx,
                    )
                })
            });
            cx.spawn(async move |cx| {
                let example_spec = if let Some(task) = example_task {
                    task.await.ok()
                } else {
                    None
                };

                weak_this
                    .update(cx, |this, cx| {
                        this.enqueue_settled_prediction(
                            request_id.clone(),
                            &project,
                            &edited_buffer,
                            &edited_buffer_snapshot,
                            editable_range_in_buffer,
                            &edit_preview,
                            example_spec,
                            request_duration,
                            cx,
                        );
                    })
                    .ok();
            })
            .detach();
        }

        Ok(Some(result))
    })
}
448
449fn handle_api_response<T>(
450    this: &WeakEntity<EditPredictionStore>,
451    response: Result<(T, Option<client::EditPredictionUsage>)>,
452    cx: &mut gpui::AsyncApp,
453) -> Result<T> {
454    match response {
455        Ok((data, usage)) => {
456            if let Some(usage) = usage {
457                this.update(cx, |this, cx| {
458                    this.user_store.update(cx, |user_store, cx| {
459                        user_store.update_edit_prediction_usage(usage, cx);
460                    });
461                })
462                .ok();
463            }
464            Ok(data)
465        }
466        Err(err) => {
467            if err.is::<ZedUpdateRequiredError>() {
468                cx.update(|cx| {
469                    this.update(cx, |this, _cx| {
470                        this.update_required = true;
471                    })
472                    .ok();
473
474                    let error_message: SharedString = err.to_string().into();
475                    show_app_notification(
476                        NotificationId::unique::<ZedUpdateRequiredError>(),
477                        cx,
478                        move |cx| {
479                            cx.new(|cx| {
480                                ErrorMessagePrompt::new(error_message.clone(), cx)
481                                    .with_link_button("Update Zed", "https://zed.dev/releases")
482                            })
483                        },
484                    );
485                });
486            }
487            Err(err)
488        }
489    }
490}
491
492pub(crate) fn active_buffer_diagnostics(
493    snapshot: &language::BufferSnapshot,
494    diagnostic_search_range: Range<Point>,
495    additional_context_token_count: usize,
496) -> Vec<zeta_prompt::ActiveBufferDiagnostic> {
497    snapshot
498        .diagnostics_in_range::<Point, Point>(diagnostic_search_range, false)
499        .map(|entry| {
500            let severity = match entry.diagnostic.severity {
501                DiagnosticSeverity::ERROR => Some(1),
502                DiagnosticSeverity::WARNING => Some(2),
503                DiagnosticSeverity::INFORMATION => Some(3),
504                DiagnosticSeverity::HINT => Some(4),
505                _ => None,
506            };
507            let diagnostic_point_range = entry.range.clone();
508            let snippet_point_range = cursor_excerpt::expand_context_syntactically_then_linewise(
509                snapshot,
510                diagnostic_point_range.clone(),
511                additional_context_token_count,
512            );
513            let snippet = snapshot
514                .text_for_range(snippet_point_range.clone())
515                .collect::<String>();
516            let snippet_start_offset = snippet_point_range.start.to_offset(snapshot);
517            let diagnostic_offset_range = diagnostic_point_range.to_offset(snapshot);
518            zeta_prompt::ActiveBufferDiagnostic {
519                severity,
520                message: entry.diagnostic.message.clone(),
521                snippet,
522                snippet_buffer_row_range: diagnostic_point_range.start.row
523                    ..diagnostic_point_range.end.row,
524                diagnostic_range_in_snippet: diagnostic_offset_range.start - snippet_start_offset
525                    ..diagnostic_offset_range.end - snippet_start_offset,
526            }
527        })
528        .collect()
529}
530
531pub fn zeta2_prompt_input(
532    snapshot: &language::BufferSnapshot,
533    related_files: Vec<zeta_prompt::RelatedFile>,
534    events: Vec<Arc<zeta_prompt::Event>>,
535    diagnostic_search_range: Range<Point>,
536    excerpt_path: Arc<Path>,
537    cursor_offset: usize,
538    preferred_experiment: Option<String>,
539    is_open_source: bool,
540    can_collect_data: bool,
541    repo_url: Option<String>,
542) -> (Range<usize>, zeta_prompt::ZetaPromptInput) {
543    let (excerpt_point_range, excerpt_offset_range, cursor_offset_in_excerpt) =
544        compute_cursor_excerpt(snapshot, cursor_offset);
545
546    let cursor_excerpt: Arc<str> = snapshot
547        .text_for_range(excerpt_point_range.clone())
548        .collect::<String>()
549        .into();
550    let syntax_ranges = compute_syntax_ranges(snapshot, cursor_offset, &excerpt_offset_range);
551    let excerpt_ranges = zeta_prompt::compute_legacy_excerpt_ranges(
552        &cursor_excerpt,
553        cursor_offset_in_excerpt,
554        &syntax_ranges,
555    );
556
557    let active_buffer_diagnostics =
558        active_buffer_diagnostics(snapshot, diagnostic_search_range, 100);
559
560    let prompt_input = zeta_prompt::ZetaPromptInput {
561        cursor_path: excerpt_path,
562        cursor_excerpt,
563        cursor_offset_in_excerpt,
564        excerpt_start_row: Some(excerpt_point_range.start.row),
565        events,
566        related_files: Some(related_files),
567        active_buffer_diagnostics,
568        excerpt_ranges,
569        syntax_ranges: Some(syntax_ranges),
570        experiment: preferred_experiment,
571        in_open_source_repo: is_open_source,
572        can_collect_data,
573        repo_url,
574    };
575    (excerpt_offset_range, prompt_input)
576}
577
578pub(crate) fn edit_prediction_accepted(
579    store: &EditPredictionStore,
580    current_prediction: CurrentEditPrediction,
581    cx: &App,
582) {
583    let custom_accept_url = env::var("ZED_ACCEPT_PREDICTION_URL").ok();
584    if store.zeta2_raw_config().is_some() && custom_accept_url.is_none() {
585        return;
586    }
587
588    let request_id = current_prediction.prediction.id.to_string();
589    let model_version = current_prediction.prediction.model_version;
590    let e2e_latency = current_prediction.e2e_latency;
591    let require_auth = custom_accept_url.is_none();
592    let client = store.client.clone();
593    let llm_token = store.llm_token.clone();
594    let organization_id = store
595        .user_store
596        .read(cx)
597        .current_organization()
598        .map(|organization| organization.id.clone());
599    let app_version = AppVersion::global(cx);
600
601    cx.background_spawn(async move {
602        let url = if let Some(accept_edits_url) = custom_accept_url {
603            gpui::http_client::Url::parse(&accept_edits_url)?
604        } else {
605            client
606                .http_client()
607                .build_zed_llm_url("/predict_edits/accept", &[])?
608        };
609
610        let response = EditPredictionStore::send_api_request::<()>(
611            move |builder| {
612                let req = builder.uri(url.as_ref()).body(
613                    serde_json::to_string(&AcceptEditPredictionBody {
614                        request_id: request_id.clone(),
615                        model_version: model_version.clone(),
616                        e2e_latency_ms: Some(e2e_latency.as_millis()),
617                    })?
618                    .into(),
619                );
620                Ok(req?)
621            },
622            client,
623            llm_token,
624            organization_id,
625            app_version,
626            require_auth,
627        )
628        .await;
629
630        response?;
631        anyhow::Ok(())
632    })
633    .detach_and_log_err(cx);
634}
635
636pub fn compute_edits(
637    old_text: String,
638    new_text: &str,
639    offset: usize,
640    snapshot: &BufferSnapshot,
641) -> Vec<(Range<Anchor>, Arc<str>)> {
642    compute_edits_and_cursor_position(old_text, new_text, offset, None, snapshot).0
643}
644
/// Diffs `old_text` against `new_text` and converts the hunks into anchored
/// buffer edits, optionally translating a cursor offset expressed in
/// `new_text` coordinates into a position in the current buffer.
///
/// `offset` is the byte offset of `old_text` within `snapshot`. Returns the
/// anchored edits plus the predicted cursor position (always `Some` when
/// `cursor_offset_in_new_text` is `Some`).
pub fn compute_edits_and_cursor_position(
    old_text: String,
    new_text: &str,
    offset: usize,
    cursor_offset_in_new_text: Option<usize>,
    snapshot: &BufferSnapshot,
) -> (
    Vec<(Range<Anchor>, Arc<str>)>,
    Option<PredictedCursorPosition>,
) {
    let diffs = text_diff(&old_text, new_text);

    // Delta represents the cumulative change in byte count from all preceding edits.
    // new_offset = old_offset + delta, so old_offset = new_offset - delta
    let mut delta: isize = 0;
    let mut cursor_position: Option<PredictedCursorPosition> = None;
    let buffer_len = snapshot.len();

    let edits = diffs
        .iter()
        .map(|(raw_old_range, new_text)| {
            // Compute cursor position if it falls within or before this edit.
            // Once the cursor has been placed (or was never requested), this
            // whole block — including the delta bookkeeping only the cursor
            // logic uses — is skipped.
            if let (Some(cursor_offset), None) = (cursor_offset_in_new_text, cursor_position) {
                let edit_start_in_new = (raw_old_range.start as isize + delta) as usize;
                let edit_end_in_new = edit_start_in_new + new_text.len();

                if cursor_offset < edit_start_in_new {
                    // Cursor lies in unchanged text before this hunk: map it
                    // back into old-text coordinates, then into the buffer.
                    let cursor_in_old = (cursor_offset as isize - delta) as usize;
                    let buffer_offset = (offset + cursor_in_old).min(buffer_len);
                    cursor_position = Some(PredictedCursorPosition::at_anchor(
                        snapshot.anchor_after(buffer_offset),
                    ));
                } else if cursor_offset < edit_end_in_new {
                    // Cursor lies inside this hunk's inserted text: anchor at
                    // the hunk start and record the offset into the insertion.
                    let buffer_offset = (offset + raw_old_range.start).min(buffer_len);
                    let offset_within_insertion = cursor_offset - edit_start_in_new;
                    cursor_position = Some(PredictedCursorPosition::new(
                        snapshot.anchor_before(buffer_offset),
                        offset_within_insertion,
                    ));
                }

                delta += new_text.len() as isize - raw_old_range.len() as isize;
            }

            // Compute the edit with prefix/suffix trimming.
            let mut old_range = raw_old_range.clone();
            let old_slice = &old_text[old_range.clone()];

            // Shrink the hunk to the minimal differing span: drop the common
            // prefix, then (on the remainders) the common suffix.
            let prefix_len = common_prefix(old_slice.chars(), new_text.chars());
            let suffix_len = common_prefix(
                old_slice[prefix_len..].chars().rev(),
                new_text[prefix_len..].chars().rev(),
            );

            // Translate from old-text-relative to buffer offsets, apply the
            // trimming, and clamp to the buffer's length.
            old_range.start += offset;
            old_range.end += offset;
            old_range.start += prefix_len;
            old_range.end -= suffix_len;

            old_range.start = old_range.start.min(buffer_len);
            old_range.end = old_range.end.min(buffer_len);

            let new_text = new_text[prefix_len..new_text.len() - suffix_len].into();
            let range = if old_range.is_empty() {
                // Pure insertion: a single shared anchor marks the spot.
                let anchor = snapshot.anchor_after(old_range.start);
                anchor..anchor
            } else {
                snapshot.anchor_after(old_range.start)..snapshot.anchor_before(old_range.end)
            };
            (range, new_text)
        })
        .collect();

    // Cursor fell after every hunk: map it through the accumulated delta and
    // clip to a valid buffer offset.
    if let (Some(cursor_offset), None) = (cursor_offset_in_new_text, cursor_position) {
        let cursor_in_old = (cursor_offset as isize - delta) as usize;
        let buffer_offset = snapshot.clip_offset(offset + cursor_in_old, Bias::Right);
        cursor_position = Some(PredictedCursorPosition::at_anchor(
            snapshot.anchor_after(buffer_offset),
        ));
    }

    (edits, cursor_position)
}
728
/// Returns the length in BYTES of the longest common prefix of two character
/// streams (each matching `char` contributes its UTF-8 width).
fn common_prefix<T1: Iterator<Item = char>, T2: Iterator<Item = char>>(a: T1, b: T2) -> usize {
    let mut byte_len = 0;
    for (x, y) in a.zip(b) {
        if x != y {
            break;
        }
        byte_len += x.len_utf8();
    }
    byte_len
}