use std::{collections::hash_map::Entry, path::PathBuf, str::FromStr, sync::Arc, time::Duration};

use chrono::TimeDelta;
use client::{Client, UserStore};
use cloud_llm_client::predict_edits_v3::PromptFormat;
use collections::HashMap;
use editor::{Editor, EditorEvent, EditorMode, ExcerptRange, MultiBuffer};
use futures::{StreamExt as _, channel::oneshot};
use gpui::{
    Entity, EventEmitter, FocusHandle, Focusable, Subscription, Task, WeakEntity, actions,
    prelude::*,
};
use language::{Buffer, DiskState};
use project::{Project, WorktreeId};
use ui::{ContextMenu, ContextMenuEntry, DropdownMenu, prelude::*};
use ui_input::SingleLineInput;
use util::{ResultExt, paths::PathStyle, rel_path::RelPath};
use workspace::{Item, SplitDirection, Workspace};
use zeta2::{DEFAULT_CONTEXT_OPTIONS, PredictionDebugInfo, Zeta, ZetaOptions};

use edit_prediction_context::{DeclarationStyle, EditPredictionExcerptOptions};

actions!(
    dev,
    [
        /// Opens the Zeta2 edit prediction inspector.
        OpenZeta2Inspector
    ]
);

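/// Registers the `OpenZeta2Inspector` action on new workspaces, opening the
/// inspector in a split to the right of the active pane.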
pub fn init(cx: &mut App) {
    cx.observe_new(move |workspace: &mut Workspace, _, _cx| {
        workspace.register_action(move |workspace, _: &OpenZeta2Inspector, window, cx| {
            let project = workspace.project();
            workspace.split_item(
                SplitDirection::Right,
                Box::new(cx.new(|cx| {
                    Zeta2Inspector::new(
                        &project,
                        workspace.client(),
                        workspace.user_store(),
                        window,
                        cx,
                    )
                })),
                window,
                cx,
            );
        });
    })
    .detach();
}

// TODO: Show included diagnostics and events

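/// Debug view for Zeta2 edit predictions: shows the retrieved context, the rendered
/// prompt, the model response, and timing stats, and allows tweaking `ZetaOptions` live.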
pub struct Zeta2Inspector {
    focus_handle: FocusHandle,
    project: Entity<Project>,
    last_prediction: Option<LastPrediction>,
    max_excerpt_bytes_input: Entity<SingleLineInput>,
    min_excerpt_bytes_input: Entity<SingleLineInput>,
    cursor_context_ratio_input: Entity<SingleLineInput>,
    max_prompt_bytes_input: Entity<SingleLineInput>,
    active_view: ActiveView,
    zeta: Entity<Zeta>,
    _active_editor_subscription: Option<Subscription>,
    _update_state_task: Task<()>,
    _receive_task: Task<()>,
}

#[derive(PartialEq)]
enum ActiveView {
    Context,
    Inference,
}

struct LastPrediction {
    context_editor: Entity<Editor>,
    retrieval_time: TimeDelta,
    buffer: WeakEntity<Buffer>,
    position: language::Anchor,
    state: LastPredictionState,
    _task: Option<Task<()>>,
}

enum LastPredictionState {
    Requested,
    Success {
        inference_time: TimeDelta,
        parsing_time: TimeDelta,
        prompt_planning_time: TimeDelta,
        prompt_editor: Entity<Editor>,
        model_response_editor: Entity<Editor>,
    },
    Failed {
        message: String,
    },
}

impl Zeta2Inspector {
    pub fn new(
        project: &Entity<Project>,
        client: &Arc<Client>,
        user_store: &Entity<UserStore>,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) -> Self {
        let zeta = Zeta::global(client, user_store, cx);
        let mut request_rx = zeta.update(cx, |zeta, _cx| zeta.debug_info());

        let receive_task = cx.spawn_in(window, async move |this, cx| {
            while let Some(prediction) = request_rx.next().await {
                this.update_in(cx, |this, window, cx| {
                    this.update_last_prediction(prediction, window, cx)
                })
                .ok();
            }
        });

        let mut this = Self {
            focus_handle: cx.focus_handle(),
            project: project.clone(),
            last_prediction: None,
            active_view: ActiveView::Context,
            max_excerpt_bytes_input: Self::number_input("Max Excerpt Bytes", window, cx),
            min_excerpt_bytes_input: Self::number_input("Min Excerpt Bytes", window, cx),
            cursor_context_ratio_input: Self::number_input("Cursor Context Ratio", window, cx),
            max_prompt_bytes_input: Self::number_input("Max Prompt Bytes", window, cx),
            zeta: zeta.clone(),
            _active_editor_subscription: None,
            _update_state_task: Task::ready(()),
            _receive_task: receive_task,
        };
        this.set_input_options(&zeta.read(cx).options().clone(), window, cx);
        this
    }

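    /// Writes the given options back into the numeric input fields (used on startup and reset).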
    fn set_input_options(
        &mut self,
        options: &ZetaOptions,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.max_excerpt_bytes_input.update(cx, |input, cx| {
            input.set_text(options.context.excerpt.max_bytes.to_string(), window, cx);
        });
        self.min_excerpt_bytes_input.update(cx, |input, cx| {
            input.set_text(options.context.excerpt.min_bytes.to_string(), window, cx);
        });
        self.cursor_context_ratio_input.update(cx, |input, cx| {
            input.set_text(
                format!(
                    "{:.2}",
                    options
                        .context
                        .excerpt
                        .target_before_cursor_over_total_bytes
                ),
                window,
                cx,
            );
        });
        self.max_prompt_bytes_input.update(cx, |input, cx| {
            input.set_text(options.max_prompt_bytes.to_string(), window, cx);
        });
        cx.notify();
    }

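    /// Applies new options to the global `Zeta` entity and, after a short throttle,
    /// re-requests the last prediction so the inspector reflects the updated settings.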
    fn set_options(&mut self, options: ZetaOptions, cx: &mut Context<Self>) {
        self.zeta.update(cx, |this, _cx| this.set_options(options));

        const THROTTLE_TIME: Duration = Duration::from_millis(100);

        if let Some(prediction) = self.last_prediction.as_mut() {
            if let Some(buffer) = prediction.buffer.upgrade() {
                let position = prediction.position;
                let zeta = self.zeta.clone();
                let project = self.project.clone();
                prediction._task = Some(cx.spawn(async move |_this, cx| {
                    cx.background_executor().timer(THROTTLE_TIME).await;
                    if let Some(task) = zeta
                        .update(cx, |zeta, cx| {
                            zeta.refresh_prediction(&project, &buffer, position, cx)
                        })
                        .ok()
                    {
                        task.await.log_err();
                    }
                }));
                prediction.state = LastPredictionState::Requested;
            } else {
                self.last_prediction.take();
            }
        }

        cx.notify();
    }

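    /// Builds a labeled numeric input whose edits are parsed and pushed into `ZetaOptions`.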
    fn number_input(
        label: &'static str,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) -> Entity<SingleLineInput> {
        let input = cx.new(|cx| {
            SingleLineInput::new(window, cx, "")
                .label(label)
                .label_min_width(px(64.))
        });

        cx.subscribe_in(
            &input.read(cx).editor().clone(),
            window,
            |this, _, event, _window, cx| {
                let EditorEvent::BufferEdited = event else {
                    return;
                };

                fn number_input_value<T: FromStr + Default>(
                    input: &Entity<SingleLineInput>,
                    cx: &App,
                ) -> T {
                    input
                        .read(cx)
                        .editor()
                        .read(cx)
                        .text(cx)
                        .parse::<T>()
                        .unwrap_or_default()
                }

                let mut context_options = DEFAULT_CONTEXT_OPTIONS.clone();
                context_options.excerpt = EditPredictionExcerptOptions {
                    max_bytes: number_input_value(&this.max_excerpt_bytes_input, cx),
                    min_bytes: number_input_value(&this.min_excerpt_bytes_input, cx),
                    target_before_cursor_over_total_bytes: number_input_value(
                        &this.cursor_context_ratio_input,
                        cx,
                    ),
                };

                let zeta_options = this.zeta.read(cx).options();
                this.set_options(
                    ZetaOptions {
                        context: context_options,
                        max_prompt_bytes: number_input_value(&this.max_prompt_bytes_input, cx),
                        max_diagnostic_bytes: zeta_options.max_diagnostic_bytes,
                        prompt_format: zeta_options.prompt_format,
                        file_indexing_parallelism: zeta_options.file_indexing_parallelism,
                    },
                    cx,
                );
            },
        )
        .detach();
        input
    }

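    /// Rebuilds the inspector state for a newly reported prediction: assembles a read-only
    /// multibuffer of the cursor excerpt and retrieved declarations, then waits for the
    /// model response to populate the prompt and response editors.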
    fn update_last_prediction(
        &mut self,
        prediction: zeta2::PredictionDebugInfo,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        let project = self.project.read(cx);
        let path_style = project.path_style(cx);
        let Some(worktree_id) = project
            .worktrees(cx)
            .next()
            .map(|worktree| worktree.read(cx).id())
        else {
            log::error!("Open a worktree to use edit prediction debug view");
            self.last_prediction.take();
            return;
        };

        self._update_state_task = cx.spawn_in(window, {
            let language_registry = self.project.read(cx).languages().clone();
            async move |this, cx| {
                let mut languages = HashMap::default();
                for lang_id in prediction
                    .context
                    .declarations
                    .iter()
                    .map(|snippet| snippet.declaration.identifier().language_id)
                    .chain(prediction.context.excerpt_text.language_id)
                {
                    if let Entry::Vacant(entry) = languages.entry(lang_id) {
                        // Most snippets are gonna be the same language,
                        // so we think it's fine to do this sequentially for now
                        entry.insert(language_registry.language_for_id(lang_id).await.ok());
                    }
                }

                let markdown_language = language_registry
                    .language_for_name("Markdown")
                    .await
                    .log_err();

                this.update_in(cx, |this, window, cx| {
                    let context_editor = cx.new(|cx| {
                        let multibuffer = cx.new(|cx| {
                            let mut multibuffer = MultiBuffer::new(language::Capability::ReadOnly);
                            let excerpt_file = Arc::new(ExcerptMetadataFile {
                                title: RelPath::unix("Cursor Excerpt").unwrap().into(),
                                path_style,
                                worktree_id,
                            });

                            let excerpt_buffer = cx.new(|cx| {
                                let mut buffer =
                                    Buffer::local(prediction.context.excerpt_text.body, cx);
                                if let Some(language) = prediction
                                    .context
                                    .excerpt_text
                                    .language_id
                                    .as_ref()
                                    .and_then(|id| languages.get(id))
                                {
                                    buffer.set_language(language.clone(), cx);
                                }
                                buffer.file_updated(excerpt_file, cx);
                                buffer
                            });

                            multibuffer.push_excerpts(
                                excerpt_buffer,
                                [ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)],
                                cx,
                            );

                            for snippet in &prediction.context.declarations {
                                let path = this
                                    .project
                                    .read(cx)
                                    .path_for_entry(snippet.declaration.project_entry_id(), cx);

                                let snippet_file = Arc::new(ExcerptMetadataFile {
                                    title: RelPath::unix(&format!(
                                        "{} (Score density: {})",
                                        path.map(|p| p.path.display(path_style).to_string())
                                            .unwrap_or_else(|| "".to_string()),
                                        snippet.score_density(DeclarationStyle::Declaration)
                                    ))
                                    .unwrap()
                                    .into(),
                                    path_style,
                                    worktree_id,
                                });

                                let excerpt_buffer = cx.new(|cx| {
                                    let mut buffer =
                                        Buffer::local(snippet.declaration.item_text().0, cx);
                                    buffer.file_updated(snippet_file, cx);
                                    if let Some(language) =
                                        languages.get(&snippet.declaration.identifier().language_id)
                                    {
                                        buffer.set_language(language.clone(), cx);
                                    }
                                    buffer
                                });

                                multibuffer.push_excerpts(
                                    excerpt_buffer,
                                    [ExcerptRange::new(text::Anchor::MIN..text::Anchor::MAX)],
                                    cx,
                                );
                            }

                            multibuffer
                        });

                        Editor::new(EditorMode::full(), multibuffer, None, window, cx)
                    });

                    let PredictionDebugInfo {
                        response_rx,
                        position,
                        buffer,
                        retrieval_time,
                        ..
                    } = prediction;

                    let task = cx.spawn_in(window, async move |this, cx| {
                        let response = response_rx.await;

                        this.update_in(cx, |this, window, cx| {
                            if let Some(prediction) = this.last_prediction.as_mut() {
                                prediction.state = match response {
                                    Ok(Ok(response)) => LastPredictionState::Success {
                                        prompt_planning_time: response.prompt_planning_time,
                                        inference_time: response.inference_time,
                                        parsing_time: response.parsing_time,
                                        prompt_editor: cx.new(|cx| {
                                            let buffer = cx.new(|cx| {
                                                let mut buffer = Buffer::local(response.prompt, cx);
                                                buffer.set_language(markdown_language.clone(), cx);
                                                buffer
                                            });
                                            let buffer =
                                                cx.new(|cx| MultiBuffer::singleton(buffer, cx));
                                            let mut editor = Editor::new(
                                                EditorMode::full(),
                                                buffer,
                                                None,
                                                window,
                                                cx,
                                            );
                                            editor.set_read_only(true);
                                            editor.set_show_line_numbers(false, cx);
                                            editor.set_show_gutter(false, cx);
                                            editor.set_show_scrollbars(false, cx);
                                            editor
                                        }),
                                        model_response_editor: cx.new(|cx| {
                                            let buffer = cx.new(|cx| {
                                                let mut buffer =
                                                    Buffer::local(response.model_response, cx);
                                                buffer.set_language(markdown_language, cx);
                                                buffer
                                            });
                                            let buffer =
                                                cx.new(|cx| MultiBuffer::singleton(buffer, cx));
                                            let mut editor = Editor::new(
                                                EditorMode::full(),
                                                buffer,
                                                None,
                                                window,
                                                cx,
                                            );
                                            editor.set_read_only(true);
                                            editor.set_show_line_numbers(false, cx);
                                            editor.set_show_gutter(false, cx);
                                            editor.set_show_scrollbars(false, cx);
                                            editor
                                        }),
                                    },
                                    Ok(Err(err)) => LastPredictionState::Failed { message: err },
                                    Err(oneshot::Canceled) => LastPredictionState::Failed {
                                        message: "Canceled".to_string(),
                                    },
                                };
                            }
                        })
                        .ok();
                    });

                    this.last_prediction = Some(LastPrediction {
                        context_editor,
                        retrieval_time,
                        buffer,
                        position,
                        state: LastPredictionState::Requested,
                        _task: Some(task),
                    });
                    cx.notify();
                })
                .ok();
            }
        });
    }

    fn render_options(&self, window: &mut Window, cx: &mut Context<Self>) -> Div {
        v_flex()
            .gap_2()
            .child(
                h_flex()
                    .child(Headline::new("Options").size(HeadlineSize::Small))
                    .justify_between()
                    .child(
                        ui::Button::new("reset-options", "Reset")
                            .disabled(self.zeta.read(cx).options() == &zeta2::DEFAULT_OPTIONS)
                            .style(ButtonStyle::Outlined)
                            .size(ButtonSize::Large)
                            .on_click(cx.listener(|this, _, window, cx| {
                                this.set_input_options(&zeta2::DEFAULT_OPTIONS, window, cx);
                            })),
                    ),
            )
            .child(
                v_flex()
                    .gap_2()
                    .child(
                        h_flex()
                            .gap_2()
                            .items_end()
                            .child(self.max_excerpt_bytes_input.clone())
                            .child(self.min_excerpt_bytes_input.clone())
                            .child(self.cursor_context_ratio_input.clone()),
                    )
                    .child(
                        h_flex()
                            .gap_2()
                            .items_end()
                            .child(self.max_prompt_bytes_input.clone())
                            .child(self.render_prompt_format_dropdown(window, cx)),
                    ),
            )
    }

    fn render_prompt_format_dropdown(&self, window: &mut Window, cx: &mut Context<Self>) -> Div {
        let active_format = self.zeta.read(cx).options().prompt_format;
        let this = cx.weak_entity();

        v_flex()
            .gap_1p5()
            .child(
                Label::new("Prompt Format")
                    .size(LabelSize::Small)
                    .color(Color::Muted),
            )
            .child(
                DropdownMenu::new(
                    "ep-prompt-format",
                    active_format.to_string(),
                    ContextMenu::build(window, cx, move |mut menu, _window, _cx| {
                        for prompt_format in PromptFormat::iter() {
                            menu = menu.item(
                                ContextMenuEntry::new(prompt_format.to_string())
                                    .toggleable(IconPosition::End, active_format == prompt_format)
                                    .handler({
                                        let this = this.clone();
                                        move |_window, cx| {
                                            this.update(cx, |this, cx| {
                                                let current_options =
                                                    this.zeta.read(cx).options().clone();
                                                let options = ZetaOptions {
                                                    prompt_format,
                                                    ..current_options
                                                };
                                                this.set_options(options, cx);
                                            })
                                            .ok();
                                        }
                                    }),
                            )
                        }
                        menu
                    }),
                )
                .style(ui::DropdownStyle::Outlined),
            )
    }

    fn render_tabs(&self, cx: &mut Context<Self>) -> Option<AnyElement> {
        if self.last_prediction.is_none() {
            return None;
        };

        Some(
            ui::ToggleButtonGroup::single_row(
                "prediction",
                [
                    ui::ToggleButtonSimple::new(
                        "Context",
                        cx.listener(|this, _, _, cx| {
                            this.active_view = ActiveView::Context;
                            cx.notify();
                        }),
                    ),
                    ui::ToggleButtonSimple::new(
                        "Inference",
                        cx.listener(|this, _, _, cx| {
                            this.active_view = ActiveView::Inference;
                            cx.notify();
                        }),
                    ),
                ],
            )
            .style(ui::ToggleButtonGroupStyle::Outlined)
            .selected_index(if self.active_view == ActiveView::Context {
                0
            } else {
                1
            })
            .into_any_element(),
        )
    }

    fn render_stats(&self) -> Option<Div> {
        let Some(prediction) = self.last_prediction.as_ref() else {
            return None;
        };

        let (prompt_planning_time, inference_time, parsing_time) = match &prediction.state {
            LastPredictionState::Success {
                inference_time,
                parsing_time,
                prompt_planning_time,
                ..
            } => (
                Some(*prompt_planning_time),
                Some(*inference_time),
                Some(*parsing_time),
            ),
            LastPredictionState::Requested | LastPredictionState::Failed { .. } => {
                (None, None, None)
            }
        };

        Some(
            v_flex()
                .p_4()
                .gap_2()
                .min_w(px(160.))
                .child(Headline::new("Stats").size(HeadlineSize::Small))
                .child(Self::render_duration(
                    "Context retrieval",
                    Some(prediction.retrieval_time),
                ))
                .child(Self::render_duration(
                    "Prompt planning",
                    prompt_planning_time,
                ))
                .child(Self::render_duration("Inference", inference_time))
                .child(Self::render_duration("Parsing", parsing_time)),
        )
    }

    fn render_duration(name: &'static str, time: Option<chrono::TimeDelta>) -> Div {
        h_flex()
            .gap_1()
            .child(Label::new(name).color(Color::Muted).size(LabelSize::Small))
            .child(match time {
                Some(time) => Label::new(if time.num_microseconds().unwrap_or(0) >= 1000 {
                    format!("{} ms", time.num_milliseconds())
                } else {
                    format!("{} µs", time.num_microseconds().unwrap_or(0))
                })
                .size(LabelSize::Small),
                None => Label::new("...").size(LabelSize::Small),
            })
    }

    fn render_content(&self, cx: &mut Context<Self>) -> AnyElement {
        match self.last_prediction.as_ref() {
            None => v_flex()
                .size_full()
                .justify_center()
                .items_center()
                .child(Label::new("No prediction").size(LabelSize::Large))
                .into_any(),
            Some(prediction) => self.render_last_prediction(prediction, cx).into_any(),
        }
    }

    fn render_last_prediction(&self, prediction: &LastPrediction, cx: &mut Context<Self>) -> Div {
        match &self.active_view {
            ActiveView::Context => div().size_full().child(prediction.context_editor.clone()),
            ActiveView::Inference => match &prediction.state {
                LastPredictionState::Success {
                    prompt_editor,
                    model_response_editor,
                    ..
                } => h_flex()
                    .items_start()
                    .w_full()
                    .flex_1()
                    .border_t_1()
                    .border_color(cx.theme().colors().border)
                    .bg(cx.theme().colors().editor_background)
                    .child(
                        v_flex()
                            .flex_1()
                            .gap_2()
                            .p_4()
                            .h_full()
                            .child(ui::Headline::new("Prompt").size(ui::HeadlineSize::XSmall))
                            .child(prompt_editor.clone()),
                    )
                    .child(ui::vertical_divider())
                    .child(
                        v_flex()
                            .flex_1()
                            .gap_2()
                            .h_full()
                            .p_4()
                            .child(
                                ui::Headline::new("Model Response").size(ui::HeadlineSize::XSmall),
                            )
                            .child(model_response_editor.clone()),
                    ),
                LastPredictionState::Requested => v_flex()
                    .p_4()
                    .gap_2()
                    .child(Label::new("Loading...").buffer_font(cx)),
                LastPredictionState::Failed { message } => v_flex()
                    .p_4()
                    .gap_2()
                    .child(Label::new(message.clone()).buffer_font(cx)),
            },
        }
    }
}

impl Focusable for Zeta2Inspector {
    fn focus_handle(&self, _cx: &App) -> FocusHandle {
        self.focus_handle.clone()
    }
}

impl Item for Zeta2Inspector {
    type Event = ();

    fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString {
        "Zeta2 Inspector".into()
    }
}

impl EventEmitter<()> for Zeta2Inspector {}

impl Render for Zeta2Inspector {
    fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        v_flex()
            .size_full()
            .bg(cx.theme().colors().editor_background)
            .child(
                h_flex()
                    .w_full()
                    .child(
                        v_flex()
                            .flex_1()
                            .p_4()
                            .h_full()
                            .justify_between()
                            .child(self.render_options(window, cx))
                            .gap_4()
                            .children(self.render_tabs(cx)),
                    )
                    .child(ui::vertical_divider())
                    .children(self.render_stats()),
            )
            .child(self.render_content(cx))
    }
}

// Using same approach as commit view

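/// Minimal `language::File` implementation used to attach a display title (the source
/// path and score density, or "Cursor Excerpt") to the read-only buffers in the context
/// multibuffer; it is never backed by a file on disk.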
struct ExcerptMetadataFile {
    title: Arc<RelPath>,
    worktree_id: WorktreeId,
    path_style: PathStyle,
}

impl language::File for ExcerptMetadataFile {
    fn as_local(&self) -> Option<&dyn language::LocalFile> {
        None
    }

    fn disk_state(&self) -> DiskState {
        DiskState::New
    }

    fn path(&self) -> &Arc<RelPath> {
        &self.title
    }

    fn full_path(&self, _: &App) -> PathBuf {
        self.title.as_std_path().to_path_buf()
    }

    fn file_name<'a>(&'a self, _: &'a App) -> &'a str {
        self.title.file_name().unwrap()
    }

    fn path_style(&self, _: &App) -> PathStyle {
        self.path_style
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        self.worktree_id
    }

    fn to_proto(&self, _: &App) -> language::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }
}