1use crate::{
2 assistant_settings::{AssistantDockPosition, AssistantSettings},
3 OpenAIRequest, OpenAIResponseStreamEvent, RequestMessage, Role,
4};
5use anyhow::{anyhow, Result};
6use chrono::{DateTime, Local};
7use collections::{HashMap, HashSet};
8use editor::{
9 display_map::{BlockDisposition, BlockId, BlockProperties, BlockStyle, ToDisplayPoint},
10 scroll::{
11 autoscroll::{Autoscroll, AutoscrollStrategy},
12 ScrollAnchor,
13 },
14 Anchor, DisplayPoint, Editor, ToOffset as _,
15};
16use fs::Fs;
17use futures::{io::BufReader, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt};
18use gpui::{
19 actions,
20 elements::*,
21 executor::Background,
22 geometry::vector::vec2f,
23 platform::{CursorStyle, MouseButton},
24 Action, AppContext, AsyncAppContext, ClipboardItem, Entity, ModelContext, ModelHandle,
25 Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle, WindowContext,
26};
27use isahc::{http::StatusCode, Request, RequestExt};
28use language::{language_settings::SoftWrap, Buffer, LanguageRegistry, ToOffset as _};
29use serde::Deserialize;
30use settings::SettingsStore;
31use std::{
32 borrow::Cow, cell::RefCell, cmp, fmt::Write, io, iter, ops::Range, rc::Rc, sync::Arc,
33 time::Duration,
34};
35use util::{post_inc, truncate_and_trailoff, ResultExt, TryFutureExt};
36use workspace::{
37 dock::{DockPosition, Panel},
38 item::Item,
39 pane, Pane, Workspace,
40};
41
/// Base URL of the OpenAI REST API; also used as the key under which the
/// user's API key is stored in the system credential store.
// `&str` suffices here: `'static` is implied for string literals in consts
// (clippy: redundant_static_lifetimes).
const OPENAI_API_URL: &str = "https://api.openai.com/v1";
43
// Actions exposed by the assistant: open a new conversation, submit the
// current prompt, quote the active editor's selection, toggle panel focus,
// and clear the stored OpenAI API key.
actions!(
    assistant,
    [NewContext, Assist, QuoteSelection, ToggleFocus, ResetKey]
);
48
/// Registers the assistant's settings and wires its actions into the app.
///
/// `capture_action` is used for `cancel_last_assist` and `copy` so the
/// assistant gets a chance to intercept those before the focused editor
/// handles them; they fall through via `propagate_action` when not
/// applicable.
pub fn init(cx: &mut AppContext) {
    settings::register::<AssistantSettings>(cx);
    cx.add_action(
        |workspace: &mut Workspace, _: &NewContext, cx: &mut ViewContext<Workspace>| {
            // Add a fresh conversation tab (if the panel exists), then focus
            // the panel itself.
            if let Some(this) = workspace.panel::<AssistantPanel>(cx) {
                this.update(cx, |this, cx| this.add_context(cx))
            }

            workspace.focus_panel::<AssistantPanel>(cx);
        },
    );
    cx.add_action(AssistantEditor::assist);
    cx.capture_action(AssistantEditor::cancel_last_assist);
    cx.add_action(AssistantEditor::quote_selection);
    cx.capture_action(AssistantEditor::copy);
    cx.add_action(AssistantPanel::save_api_key);
    cx.add_action(AssistantPanel::reset_api_key);
}
67
/// Events emitted by [`AssistantPanel`], consumed by the workspace dock via
/// the [`Panel`] trait's `should_*_on_event` hooks.
pub enum AssistantPanelEvent {
    ZoomIn,
    ZoomOut,
    Focus,
    Close,
    DockPositionChanged,
}
75
/// Dockable panel hosting assistant conversations as tabs in a nested
/// [`Pane`].
pub struct AssistantPanel {
    // Size used when docked left/right; `None` falls back to the settings
    // default.
    width: Option<f32>,
    // Size used when docked at the bottom; `None` falls back to the settings
    // default.
    height: Option<f32>,
    pane: ViewHandle<Pane>,
    // Shared with every `Assistant` model so a key change propagates to all
    // conversations.
    api_key: Rc<RefCell<Option<String>>>,
    // `Some` while the user is being prompted for an API key; `None` once a
    // key is stored.
    api_key_editor: Option<ViewHandle<Editor>>,
    // Ensures the system credential store is consulted at most once.
    has_read_credentials: bool,
    languages: Arc<LanguageRegistry>,
    fs: Arc<dyn Fs>,
    subscriptions: Vec<Subscription>,
}
87
88impl AssistantPanel {
    /// Constructs the panel within `workspace`'s window.
    ///
    /// Async by convention with other panels; no persisted state is restored
    /// yet (see TODO below).
    pub fn load(
        workspace: WeakViewHandle<Workspace>,
        cx: AsyncAppContext,
    ) -> Task<Result<ViewHandle<Self>>> {
        cx.spawn(|mut cx| async move {
            // TODO: deserialize state.
            workspace.update(&mut cx, |workspace, cx| {
                cx.add_view::<Self, _>(|cx| {
                    let weak_self = cx.weak_handle();
                    // The panel embeds its own pane so each conversation gets
                    // its own tab.
                    let pane = cx.add_view(|cx| {
                        let mut pane = Pane::new(
                            workspace.weak_handle(),
                            workspace.project().clone(),
                            workspace.app_state().background_actions,
                            Default::default(),
                            cx,
                        );
                        // Conversations can't be split or dragged in/out of
                        // this pane, and it doesn't take part in navigation.
                        pane.set_can_split(false, cx);
                        pane.set_can_navigate(false, cx);
                        pane.on_can_drop(move |_, _| false);
                        pane.set_render_tab_bar_buttons(cx, move |pane, cx| {
                            let weak_self = weak_self.clone();
                            Flex::row()
                                // "+" button: opens a new conversation tab.
                                .with_child(Pane::render_tab_bar_button(
                                    0,
                                    "icons/plus_12.svg",
                                    false,
                                    Some(("New Context".into(), Some(Box::new(NewContext)))),
                                    cx,
                                    move |_, cx| {
                                        let weak_self = weak_self.clone();
                                        // Defer so we don't re-enter the panel
                                        // while the tab bar is being rendered.
                                        cx.window_context().defer(move |cx| {
                                            if let Some(this) = weak_self.upgrade(cx) {
                                                this.update(cx, |this, cx| this.add_context(cx));
                                            }
                                        })
                                    },
                                    None,
                                ))
                                // Zoom toggle: icon reflects current zoom state.
                                .with_child(Pane::render_tab_bar_button(
                                    1,
                                    if pane.is_zoomed() {
                                        "icons/minimize_8.svg"
                                    } else {
                                        "icons/maximize_8.svg"
                                    },
                                    pane.is_zoomed(),
                                    Some((
                                        "Toggle Zoom".into(),
                                        Some(Box::new(workspace::ToggleZoom)),
                                    )),
                                    cx,
                                    move |pane, cx| pane.toggle_zoom(&Default::default(), cx),
                                    None,
                                ))
                                .into_any()
                        });
                        let buffer_search_bar = cx.add_view(search::BufferSearchBar::new);
                        pane.toolbar()
                            .update(cx, |toolbar, cx| toolbar.add_item(buffer_search_bar, cx));
                        pane
                    });

                    let mut this = Self {
                        pane,
                        api_key: Rc::new(RefCell::new(None)),
                        api_key_editor: None,
                        has_read_credentials: false,
                        languages: workspace.app_state().languages.clone(),
                        fs: workspace.app_state().fs.clone(),
                        width: None,
                        height: None,
                        subscriptions: Default::default(),
                    };

                    // Re-emit a dock-position event whenever the setting
                    // actually changes value.
                    let mut old_dock_position = this.position(cx);
                    this.subscriptions = vec![
                        cx.observe(&this.pane, |_, _, cx| cx.notify()),
                        cx.subscribe(&this.pane, Self::handle_pane_event),
                        cx.observe_global::<SettingsStore, _>(move |this, cx| {
                            let new_dock_position = this.position(cx);
                            if new_dock_position != old_dock_position {
                                old_dock_position = new_dock_position;
                                cx.emit(AssistantPanelEvent::DockPositionChanged);
                            }
                        }),
                    ];

                    this
                })
            })
        })
    }
182
183 fn handle_pane_event(
184 &mut self,
185 _pane: ViewHandle<Pane>,
186 event: &pane::Event,
187 cx: &mut ViewContext<Self>,
188 ) {
189 match event {
190 pane::Event::ZoomIn => cx.emit(AssistantPanelEvent::ZoomIn),
191 pane::Event::ZoomOut => cx.emit(AssistantPanelEvent::ZoomOut),
192 pane::Event::Focus => cx.emit(AssistantPanelEvent::Focus),
193 pane::Event::Remove => cx.emit(AssistantPanelEvent::Close),
194 _ => {}
195 }
196 }
197
    /// Opens a new conversation tab in the panel's pane.
    fn add_context(&mut self, cx: &mut ViewContext<Self>) {
        // Only focus the new tab when the panel already had focus.
        let focus = self.has_focus(cx);
        let editor = cx
            .add_view(|cx| AssistantEditor::new(self.api_key.clone(), self.languages.clone(), cx));
        self.subscriptions
            .push(cx.subscribe(&editor, Self::handle_assistant_editor_event));
        self.pane.update(cx, |pane, cx| {
            pane.add_item(Box::new(editor), true, focus, None, cx)
        });
    }
208
    /// Reacts to events from a conversation editor.
    fn handle_assistant_editor_event(
        &mut self,
        _: ViewHandle<AssistantEditor>,
        event: &AssistantEditorEvent,
        cx: &mut ViewContext<Self>,
    ) {
        match event {
            // The conversation's tab content (its summary) changed; redraw
            // the pane so the tab title updates.
            AssistantEditorEvent::TabContentChanged => self.pane.update(cx, |_, cx| cx.notify()),
        }
    }
219
    /// Stores the API key typed into the key prompt (bound to `menu::Confirm`).
    ///
    /// Persists the key in the system credential store, caches it in memory,
    /// and dismisses the prompt. Empty input is ignored. When no key prompt
    /// is showing, the Confirm action is propagated to other handlers.
    fn save_api_key(&mut self, _: &menu::Confirm, cx: &mut ViewContext<Self>) {
        if let Some(api_key) = self
            .api_key_editor
            .as_ref()
            .map(|editor| editor.read(cx).text(cx))
        {
            if !api_key.is_empty() {
                // Best-effort credential-store write; failure is logged, not
                // fatal — the in-memory key below still takes effect.
                cx.platform()
                    .write_credentials(OPENAI_API_URL, "Bearer", api_key.as_bytes())
                    .log_err();
                *self.api_key.borrow_mut() = Some(api_key);
                self.api_key_editor.take();
                cx.focus_self();
                cx.notify();
            }
        } else {
            cx.propagate_action();
        }
    }
239
    /// Forgets the stored API key and shows the key prompt again.
    fn reset_api_key(&mut self, _: &ResetKey, cx: &mut ViewContext<Self>) {
        // Best-effort delete from the system credential store.
        cx.platform().delete_credentials(OPENAI_API_URL).log_err();
        self.api_key.take();
        self.api_key_editor = Some(build_api_key_editor(cx));
        cx.focus_self();
        cx.notify();
    }
247}
248
/// Creates the single-line editor used to prompt for an OpenAI API key.
fn build_api_key_editor(cx: &mut ViewContext<AssistantPanel>) -> ViewHandle<Editor> {
    cx.add_view(|cx| {
        let mut editor = Editor::single_line(
            Some(Arc::new(|theme| theme.assistant.api_key_editor.clone())),
            cx,
        );
        // Placeholder mirrors the shape of a real OpenAI key ("sk-…").
        editor.set_placeholder_text("sk-000000000000000000000000000000000000000000000000", cx);
        editor
    })
}
259
// The panel participates in gpui's entity system, emitting panel events.
impl Entity for AssistantPanel {
    type Event = AssistantPanelEvent;
}
263
impl View for AssistantPanel {
    fn ui_name() -> &'static str {
        "AssistantPanel"
    }

    fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> {
        let style = &theme::current(cx).assistant;
        // While an API key is being requested, render the key prompt instead
        // of the conversation pane.
        if let Some(api_key_editor) = self.api_key_editor.as_ref() {
            Flex::column()
                .with_child(
                    Text::new(
                        "Paste your OpenAI API key and press Enter to use the assistant",
                        style.api_key_prompt.text.clone(),
                    )
                    .aligned(),
                )
                .with_child(
                    ChildView::new(api_key_editor, cx)
                        .contained()
                        .with_style(style.api_key_editor.container)
                        .aligned(),
                )
                .contained()
                .with_style(style.api_key_prompt.container)
                .aligned()
                .into_any()
        } else {
            ChildView::new(&self.pane, cx).into_any()
        }
    }

    fn focus_in(&mut self, _: gpui::AnyViewHandle, cx: &mut ViewContext<Self>) {
        // Forward focus to whichever child is currently shown.
        if cx.is_self_focused() {
            if let Some(api_key_editor) = self.api_key_editor.as_ref() {
                cx.focus(api_key_editor);
            } else {
                cx.focus(&self.pane);
            }
        }
    }
}
305
// Integration with the workspace's dock system.
impl Panel for AssistantPanel {
    /// Dock position comes straight from the user's settings.
    fn position(&self, cx: &WindowContext) -> DockPosition {
        match settings::get::<AssistantSettings>(cx).dock {
            AssistantDockPosition::Left => DockPosition::Left,
            AssistantDockPosition::Bottom => DockPosition::Bottom,
            AssistantDockPosition::Right => DockPosition::Right,
        }
    }

    /// The panel may be docked on any side.
    fn position_is_valid(&self, _: DockPosition) -> bool {
        true
    }

    /// Persists a new dock position back to the settings file.
    fn set_position(&mut self, position: DockPosition, cx: &mut ViewContext<Self>) {
        settings::update_settings_file::<AssistantSettings>(self.fs.clone(), cx, move |settings| {
            let dock = match position {
                DockPosition::Left => AssistantDockPosition::Left,
                DockPosition::Bottom => AssistantDockPosition::Bottom,
                DockPosition::Right => AssistantDockPosition::Right,
            };
            settings.dock = Some(dock);
        });
    }

    /// Width when docked on a side, height when docked at the bottom; falls
    /// back to the settings defaults until the user resizes.
    fn size(&self, cx: &WindowContext) -> f32 {
        let settings = settings::get::<AssistantSettings>(cx);
        match self.position(cx) {
            DockPosition::Left | DockPosition::Right => {
                self.width.unwrap_or_else(|| settings.default_width)
            }
            DockPosition::Bottom => self.height.unwrap_or_else(|| settings.default_height),
        }
    }

    fn set_size(&mut self, size: f32, cx: &mut ViewContext<Self>) {
        match self.position(cx) {
            DockPosition::Left | DockPosition::Right => self.width = Some(size),
            DockPosition::Bottom => self.height = Some(size),
        }
        cx.notify();
    }

    fn should_zoom_in_on_event(event: &AssistantPanelEvent) -> bool {
        matches!(event, AssistantPanelEvent::ZoomIn)
    }

    fn should_zoom_out_on_event(event: &AssistantPanelEvent) -> bool {
        matches!(event, AssistantPanelEvent::ZoomOut)
    }

    fn is_zoomed(&self, cx: &WindowContext) -> bool {
        self.pane.read(cx).is_zoomed()
    }

    fn set_zoomed(&mut self, zoomed: bool, cx: &mut ViewContext<Self>) {
        self.pane.update(cx, |pane, cx| pane.set_zoomed(zoomed, cx));
    }

    /// On activation: reads the API key from the system credential store
    /// (at most once), shows the key prompt if none was found, and opens an
    /// initial conversation when the pane is empty.
    fn set_active(&mut self, active: bool, cx: &mut ViewContext<Self>) {
        if active {
            if self.api_key.borrow().is_none() && !self.has_read_credentials {
                self.has_read_credentials = true;
                let api_key = if let Some((_, api_key)) = cx
                    .platform()
                    .read_credentials(OPENAI_API_URL)
                    .log_err()
                    .flatten()
                {
                    // Stored bytes must be valid UTF-8; otherwise treat as
                    // missing (logged).
                    String::from_utf8(api_key).log_err()
                } else {
                    None
                };
                if let Some(api_key) = api_key {
                    *self.api_key.borrow_mut() = Some(api_key);
                } else if self.api_key_editor.is_none() {
                    self.api_key_editor = Some(build_api_key_editor(cx));
                    cx.notify();
                }
            }

            if self.pane.read(cx).items_len() == 0 {
                self.add_context(cx);
            }
        }
    }

    fn icon_path(&self) -> &'static str {
        "icons/speech_bubble_12.svg"
    }

    fn icon_tooltip(&self) -> (String, Option<Box<dyn Action>>) {
        ("Assistant Panel".into(), Some(Box::new(ToggleFocus)))
    }

    fn should_change_position_on_event(event: &Self::Event) -> bool {
        matches!(event, AssistantPanelEvent::DockPositionChanged)
    }

    fn should_activate_on_event(_: &Self::Event) -> bool {
        false
    }

    fn should_close_on_event(event: &AssistantPanelEvent) -> bool {
        matches!(event, AssistantPanelEvent::Close)
    }

    /// Focused when either the embedded pane or the key prompt has focus.
    fn has_focus(&self, cx: &WindowContext) -> bool {
        self.pane.read(cx).has_focus()
            || self
                .api_key_editor
                .as_ref()
                .map_or(false, |editor| editor.is_focused(cx))
    }

    fn is_focus_event(event: &Self::Event) -> bool {
        matches!(event, AssistantPanelEvent::Focus)
    }
}
424
/// Events emitted by the [`Assistant`] model to its observing editor view.
enum AssistantEvent {
    // The conversation buffer's contents changed.
    MessagesEdited,
    // The auto-generated conversation title changed.
    SummaryChanged,
    // Streamed completion text arrived; the view responds by re-pinning its
    // scroll position to the bottom.
    StreamedCompletion,
}
430
/// Model backing one conversation: a single text buffer plus per-message
/// metadata. Each message records only where it starts in the buffer; its
/// content is the slice up to the next message's start.
struct Assistant {
    buffer: ModelHandle<Buffer>,
    // Messages in buffer order.
    messages: Vec<Message>,
    messages_metadata: HashMap<MessageId, MessageMetadata>,
    next_message_id: MessageId,
    // Auto-generated title; populated lazily by `summarize`.
    summary: Option<String>,
    pending_summary: Task<Option<()>>,
    // Monotonic id source for `PendingCompletion`s.
    completion_count: usize,
    pending_completions: Vec<PendingCompletion>,
    languages: Arc<LanguageRegistry>,
    // OpenAI model name, e.g. "gpt-3.5-turbo".
    model: String,
    // Estimated tokens used; `None` until the first count completes.
    token_count: Option<usize>,
    max_token_count: usize,
    // Debounced background token-count task.
    pending_token_count: Task<Option<()>>,
    // Shared with the panel; read each time a request is made.
    api_key: Rc<RefCell<Option<String>>>,
    _subscriptions: Vec<Subscription>,
}
448
// Conversations participate in gpui's entity system via `AssistantEvent`s.
impl Entity for Assistant {
    type Event = AssistantEvent;
}
452
453impl Assistant {
    /// Creates an empty conversation backed by a fresh buffer, seeded with a
    /// single empty `User` message, and kicks off the first token count.
    fn new(
        api_key: Rc<RefCell<Option<String>>>,
        language_registry: Arc<LanguageRegistry>,
        cx: &mut ModelContext<Self>,
    ) -> Self {
        let model = "gpt-3.5-turbo";
        let buffer = cx.add_model(|cx| Buffer::new(0, "", cx));
        let mut this = Self {
            messages: Default::default(),
            messages_metadata: Default::default(),
            next_message_id: Default::default(),
            summary: None,
            pending_summary: Task::ready(None),
            completion_count: Default::default(),
            pending_completions: Default::default(),
            languages: language_registry,
            token_count: None,
            max_token_count: tiktoken_rs::model::get_context_size(model),
            pending_token_count: Task::ready(None),
            model: model.into(),
            _subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)],
            api_key,
            buffer,
        };
        // Every conversation starts with one empty user message anchored at
        // the start of the buffer.
        let message = Message {
            id: MessageId(post_inc(&mut this.next_message_id.0)),
            start: language::Anchor::MIN,
        };
        this.messages.push(message.clone());
        this.messages_metadata.insert(
            message.id,
            MessageMetadata {
                role: Role::User,
                sent_at: Local::now(),
                error: None,
            },
        );

        this.count_remaining_tokens(cx);
        this
    }
495
496 fn handle_buffer_event(
497 &mut self,
498 _: ModelHandle<Buffer>,
499 event: &language::Event,
500 cx: &mut ModelContext<Self>,
501 ) {
502 match event {
503 language::Event::Edited => {
504 self.count_remaining_tokens(cx);
505 cx.emit(AssistantEvent::MessagesEdited);
506 }
507 _ => {}
508 }
509 }
510
511 fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
512 let messages = self
513 .open_ai_request_messages(cx)
514 .into_iter()
515 .filter_map(|message| {
516 Some(tiktoken_rs::ChatCompletionRequestMessage {
517 role: match message.role {
518 Role::User => "user".into(),
519 Role::Assistant => "assistant".into(),
520 Role::System => "system".into(),
521 },
522 content: message.content,
523 name: None,
524 })
525 })
526 .collect::<Vec<_>>();
527 let model = self.model.clone();
528 self.pending_token_count = cx.spawn_weak(|this, mut cx| {
529 async move {
530 cx.background().timer(Duration::from_millis(200)).await;
531 let token_count = cx
532 .background()
533 .spawn(async move { tiktoken_rs::num_tokens_from_messages(&model, &messages) })
534 .await?;
535
536 this.upgrade(&cx)
537 .ok_or_else(|| anyhow!("assistant was dropped"))?
538 .update(&mut cx, |this, cx| {
539 this.max_token_count = tiktoken_rs::model::get_context_size(&this.model);
540 this.token_count = Some(token_count);
541 cx.notify()
542 });
543 anyhow::Ok(())
544 }
545 .log_err()
546 });
547 }
548
549 fn remaining_tokens(&self) -> Option<isize> {
550 Some(self.max_token_count as isize - self.token_count? as isize)
551 }
552
    /// Switches the OpenAI model and refreshes the token estimate.
    fn set_model(&mut self, model: String, cx: &mut ModelContext<Self>) {
        self.model = model;
        self.count_remaining_tokens(cx);
        cx.notify();
    }
558
    /// Kicks off a streamed completion for the current conversation.
    ///
    /// Appends an empty `Assistant` message (the streamed text lands there)
    /// followed by an empty `User` message for the next prompt, and spawns a
    /// task that writes completion deltas into the buffer as they arrive.
    /// Returns `None` when no API key is set or the conversation is empty.
    fn assist(&mut self, cx: &mut ModelContext<Self>) -> Option<(Message, Message)> {
        let request = OpenAIRequest {
            model: self.model.clone(),
            messages: self.open_ai_request_messages(cx),
            stream: true,
        };

        let api_key = self.api_key.borrow().clone()?;
        let stream = stream_completion(api_key, cx.background().clone(), request);
        let assistant_message =
            self.insert_message_after(self.messages.last()?.id, Role::Assistant, cx)?;
        let user_message = self.insert_message_after(assistant_message.id, Role::User, cx)?;
        let task = cx.spawn_weak({
            |this, mut cx| async move {
                let assistant_message_id = assistant_message.id;
                let stream_completion = async {
                    let mut messages = stream.await?;

                    while let Some(message) = messages.next().await {
                        let mut message = message?;
                        if let Some(choice) = message.choices.pop() {
                            this.upgrade(&cx)
                                .ok_or_else(|| anyhow!("assistant was dropped"))?
                                .update(&mut cx, |this, cx| {
                                    let text: Arc<str> = choice.delta.content?.into();
                                    let message_ix = this
                                        .messages
                                        .iter()
                                        .position(|message| message.id == assistant_message_id)?;
                                    this.buffer.update(cx, |buffer, cx| {
                                        // Append at the end of the assistant
                                        // message: either the buffer end, or just
                                        // before the newline separating it from
                                        // the next message.
                                        let offset = if message_ix + 1 == this.messages.len() {
                                            buffer.len()
                                        } else {
                                            this.messages[message_ix + 1]
                                                .start
                                                .to_offset(buffer)
                                                .saturating_sub(1)
                                        };
                                        buffer.edit([(offset..offset, text)], None, cx);
                                    });

                                    Some(())
                                });
                        }
                    }

                    this.upgrade(&cx)
                        .ok_or_else(|| anyhow!("assistant was dropped"))?
                        .update(&mut cx, |this, cx| {
                            // NOTE(review): this keeps completions whose id differs
                            // from the *current* `completion_count`, which is one
                            // past the id of this task — confirm the intent is to
                            // drop the just-finished completion.
                            this.pending_completions
                                .retain(|completion| completion.id != this.completion_count);
                            this.summarize(cx);
                        });

                    anyhow::Ok(())
                };

                let result = stream_completion.await;
                if let Some(this) = this.upgrade(&cx) {
                    this.update(&mut cx, |this, cx| {
                        // Surface stream errors on the assistant message's header.
                        if let Err(error) = result {
                            if let Some(metadata) =
                                this.messages_metadata.get_mut(&assistant_message.id)
                            {
                                metadata.error = Some(error.to_string().trim().into());
                                cx.notify();
                            }
                        }
                    });
                }
            }
        });

        self.pending_completions.push(PendingCompletion {
            id: post_inc(&mut self.completion_count),
            _task: task,
        });
        Some((assistant_message, user_message))
    }
638
639 fn cancel_last_assist(&mut self) -> bool {
640 self.pending_completions.pop().is_some()
641 }
642
    /// Stub: intended to remove messages with empty content, except those at
    /// `protected_offsets`.
    ///
    /// The body is currently disabled. The sketch below predates the
    /// single-buffer design (it references per-message excerpts that no
    /// longer exist) and needs rewriting against `Message::start` anchors.
    fn remove_empty_messages<'a>(
        &mut self,
        messages: HashSet<MessageId>,
        protected_offsets: HashSet<usize>,
        cx: &mut ModelContext<Self>,
    ) {
        // let mut offset = 0;
        // let mut excerpts_to_remove = Vec::new();
        // self.messages.retain(|message| {
        //     let range = offset..offset + message.content.read(cx).len();
        //     offset = range.end + 1;
        //     if range.is_empty()
        //         && !protected_offsets.contains(&range.start)
        //         && messages.contains(&message.id)
        //     {
        //         excerpts_to_remove.push(message.excerpt_id);
        //         self.messages_metadata.remove(&message.excerpt_id);
        //         false
        //     } else {
        //         true
        //     }
        // });

        // if !excerpts_to_remove.is_empty() {
        //     self.buffer.update(cx, |buffer, cx| {
        //         buffer.remove_excerpts(excerpts_to_remove, cx)
        //     });
        //     cx.notify();
        // }
    }
673
674 fn cycle_message_role(&mut self, id: MessageId, cx: &mut ModelContext<Self>) {
675 if let Some(metadata) = self.messages_metadata.get_mut(&id) {
676 metadata.role.cycle();
677 cx.notify();
678 }
679 }
680
    /// Inserts a new empty message of `role` immediately after `message_id`.
    ///
    /// A newline separator is edited into the buffer just before the next
    /// message's start (or at the buffer's end), and the new message is
    /// anchored right after that newline. Returns `None` when `message_id`
    /// doesn't exist.
    fn insert_message_after(
        &mut self,
        message_id: MessageId,
        role: Role,
        cx: &mut ModelContext<Self>,
    ) -> Option<Message> {
        if let Some(prev_message_ix) = self
            .messages
            .iter()
            .position(|message| message.id == message_id)
        {
            let start = self.buffer.update(cx, |buffer, cx| {
                // `- 1` steps back over the newline that separates the next
                // message from its predecessor.
                let offset = self
                    .messages
                    .get(prev_message_ix + 1)
                    .map_or(buffer.len(), |message| message.start.to_offset(buffer) - 1);
                buffer.edit([(offset..offset, "\n")], None, cx);
                buffer.anchor_before(offset + 1)
            });
            let message = Message {
                id: MessageId(post_inc(&mut self.next_message_id.0)),
                start,
            };
            self.messages.insert(prev_message_ix + 1, message.clone());
            self.messages_metadata.insert(
                message.id,
                MessageMetadata {
                    role,
                    sent_at: Local::now(),
                    error: None,
                },
            );
            Some(message)
        } else {
            None
        }
    }
718
    /// Lazily generates a short title for the conversation.
    ///
    /// Runs once the conversation has at least two messages and no summary
    /// yet: the first two messages plus a summarization prompt are streamed
    /// through the completion endpoint, and `SummaryChanged` is emitted as
    /// chunks arrive. No-op without an API key.
    fn summarize(&mut self, cx: &mut ModelContext<Self>) {
        if self.messages.len() >= 2 && self.summary.is_none() {
            let api_key = self.api_key.borrow().clone();
            if let Some(api_key) = api_key {
                let mut messages = self.open_ai_request_messages(cx);
                messages.truncate(2);
                messages.push(RequestMessage {
                    role: Role::User,
                    content: "Summarize the conversation into a short title without punctuation"
                        .into(),
                });
                let request = OpenAIRequest {
                    model: self.model.clone(),
                    messages,
                    stream: true,
                };

                let stream = stream_completion(api_key, cx.background().clone(), request);
                self.pending_summary = cx.spawn(|this, mut cx| {
                    async move {
                        let mut messages = stream.await?;

                        while let Some(message) = messages.next().await {
                            let mut message = message?;
                            if let Some(choice) = message.choices.pop() {
                                let text = choice.delta.content.unwrap_or_default();
                                this.update(&mut cx, |this, cx| {
                                    // Append each streamed chunk to the summary.
                                    this.summary.get_or_insert(String::new()).push_str(&text);
                                    cx.emit(AssistantEvent::SummaryChanged);
                                });
                            }
                        }

                        anyhow::Ok(())
                    }
                    .log_err()
                });
            }
        }
    }
759
760 fn open_ai_request_messages(&self, cx: &AppContext) -> Vec<RequestMessage> {
761 let buffer = self.buffer.read(cx);
762 self.messages(cx)
763 .map(|(message, metadata, range)| RequestMessage {
764 role: metadata.role,
765 content: buffer.text_for_range(range).collect(),
766 })
767 .collect()
768 }
769
770 fn message_id_for_offset(&self, offset: usize, cx: &AppContext) -> Option<MessageId> {
771 Some(
772 self.messages(cx)
773 .find(|(_, _, range)| range.contains(&offset))
774 .map(|(message, _, _)| message)
775 .or(self.messages.last())?
776 .id,
777 )
778 }
779
    /// Iterates messages in buffer order, yielding each with its metadata
    /// and the byte range of its content in the conversation buffer.
    ///
    /// Messages whose start anchor is no longer valid (their position was
    /// deleted) are skipped, so their text is absorbed into the preceding
    /// message's range. A message with no metadata ends iteration.
    fn messages<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = (&Message, &MessageMetadata, Range<usize>)> {
        let buffer = self.buffer.read(cx);
        let mut messages = self.messages.iter().peekable();
        iter::from_fn(move || {
            while let Some(message) = messages.next() {
                let metadata = self.messages_metadata.get(&message.id)?;
                let message_start = message.start.to_offset(buffer);
                let mut message_end = None;
                // The message ends where the next *valid* message begins;
                // invalid successors are consumed here.
                while let Some(next_message) = messages.peek() {
                    if next_message.start.is_valid(buffer) {
                        message_end = Some(next_message.start);
                        break;
                    } else {
                        messages.next();
                    }
                }
                // No valid successor: the message runs to the end of the buffer.
                let message_end = message_end
                    .unwrap_or(language::Anchor::MAX)
                    .to_offset(buffer);
                return Some((message, metadata, message_start..message_end));
            }
            None
        })
    }
807}
808
/// Handle to an in-flight completion; dropping `_task` cancels the stream.
struct PendingCompletion {
    id: usize,
    _task: Task<()>,
}
813
/// Events from a conversation editor to the panel.
enum AssistantEditorEvent {
    // The tab's content (conversation summary/title) needs re-rendering.
    TabContentChanged,
}
817
/// View over an [`Assistant`] conversation: an editor on the shared buffer
/// plus rendered header blocks above each message.
struct AssistantEditor {
    assistant: ModelHandle<Assistant>,
    editor: ViewHandle<Editor>,
    // Ids of the message-header blocks currently installed in the editor.
    blocks: HashSet<BlockId>,
    // Buffer position currently at the bottom of the viewport, used to keep
    // the view pinned while completions stream in.
    scroll_bottom: ScrollAnchor,
    _subscriptions: Vec<Subscription>,
}
825
826impl AssistantEditor {
    /// Builds a view over a fresh [`Assistant`] conversation.
    fn new(
        api_key: Rc<RefCell<Option<String>>>,
        language_registry: Arc<LanguageRegistry>,
        cx: &mut ViewContext<Self>,
    ) -> Self {
        let assistant = cx.add_model(|cx| Assistant::new(api_key, language_registry, cx));
        let editor = cx.add_view(|cx| {
            // The editor edits the assistant's buffer directly; no gutter,
            // soft-wrapped to the editor width.
            let mut editor = Editor::for_buffer(assistant.read(cx).buffer.clone(), None, cx);
            editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx);
            editor.set_show_gutter(false, cx);
            editor
        });

        let _subscriptions = vec![
            cx.observe(&assistant, |_, _, cx| cx.notify()),
            cx.subscribe(&assistant, Self::handle_assistant_event),
            cx.subscribe(&editor, Self::handle_editor_event),
        ];

        Self {
            assistant,
            editor,
            blocks: Default::default(),
            // Start pinned to the end of the (empty) conversation.
            scroll_bottom: ScrollAnchor {
                offset: Default::default(),
                anchor: Anchor::max(),
            },
            _subscriptions,
        }
    }
857
    /// `Assist` action handler: submits the conversation for completion.
    ///
    /// If the cursor sits in a user message the request is made; otherwise
    /// only a new user message is inserted after the cursor's message.
    /// Either way the cursor then moves to the new user message.
    fn assist(&mut self, _: &Assist, cx: &mut ViewContext<Self>) {
        let user_message = self.assistant.update(cx, |assistant, cx| {
            // Resolve which message the newest cursor lives in.
            let editor = self.editor.read(cx);
            let newest_selection = editor
                .selections
                .newest_anchor()
                .head()
                .to_offset(&editor.buffer().read(cx).snapshot(cx));
            let message_id = assistant.message_id_for_offset(newest_selection, cx)?;
            let metadata = assistant.messages_metadata.get(&message_id)?;
            let user_message = if metadata.role == Role::User {
                let (_, user_message) = assistant.assist(cx)?;
                user_message
            } else {
                let user_message = assistant.insert_message_after(message_id, Role::User, cx)?;
                user_message
            };
            Some(user_message)
        });

        if let Some(user_message) = user_message {
            // Place the cursor at the start of the fresh user message.
            let cursor = user_message
                .start
                .to_offset(&self.assistant.read(cx).buffer.read(cx));
            self.editor.update(cx, |editor, cx| {
                editor.change_selections(
                    Some(Autoscroll::Strategy(AutoscrollStrategy::Fit)),
                    cx,
                    |selections| selections.select_ranges([cursor..cursor]),
                );
            });
            self.update_scroll_bottom(cx);
        }
    }
892
893 fn cancel_last_assist(&mut self, _: &editor::Cancel, cx: &mut ViewContext<Self>) {
894 if !self
895 .assistant
896 .update(cx, |assistant, _| assistant.cancel_last_assist())
897 {
898 cx.propagate_action();
899 }
900 }
901
    /// Reacts to model events: rebuilds message-header blocks on edits,
    /// forwards summary changes to the panel, and keeps the viewport pinned
    /// to the bottom while completions stream in.
    fn handle_assistant_event(
        &mut self,
        _: ModelHandle<Assistant>,
        event: &AssistantEvent,
        cx: &mut ViewContext<Self>,
    ) {
        match event {
            AssistantEvent::MessagesEdited => {
                // Replace all header blocks from scratch: message boundaries
                // may have moved or messages may have disappeared.
                self.editor.update(cx, |editor, cx| {
                    let buffer = editor.buffer().read(cx).snapshot(cx);
                    // The conversation is always a singleton buffer.
                    let excerpt_id = *buffer.as_singleton().unwrap().0;
                    let old_blocks = std::mem::take(&mut self.blocks);
                    let new_blocks =
                        self.assistant
                            .read(cx)
                            .messages(cx)
                            .map(|(message, metadata, _)| BlockProperties {
                                position: buffer.anchor_in_excerpt(excerpt_id, message.start),
                                height: 2,
                                style: BlockStyle::Sticky,
                                render: Arc::new({
                                    let assistant = self.assistant.clone();
                                    let metadata = metadata.clone();
                                    let message = message.clone();
                                    move |cx| {
                                        enum Sender {}
                                        enum ErrorTooltip {}

                                        let theme = theme::current(cx);
                                        let style = &theme.assistant;
                                        let message_id = message.id;
                                        // Clickable sender label; clicking cycles
                                        // the message's role.
                                        let sender = MouseEventHandler::<Sender, _>::new(
                                            message_id.0,
                                            cx,
                                            |state, _| match metadata.role {
                                                Role::User => {
                                                    let style =
                                                        style.user_sender.style_for(state, false);
                                                    Label::new("You", style.text.clone())
                                                        .contained()
                                                        .with_style(style.container)
                                                }
                                                Role::Assistant => {
                                                    let style = style
                                                        .assistant_sender
                                                        .style_for(state, false);
                                                    Label::new("Assistant", style.text.clone())
                                                        .contained()
                                                        .with_style(style.container)
                                                }
                                                Role::System => {
                                                    let style =
                                                        style.system_sender.style_for(state, false);
                                                    Label::new("System", style.text.clone())
                                                        .contained()
                                                        .with_style(style.container)
                                                }
                                            },
                                        )
                                        .with_cursor_style(CursorStyle::PointingHand)
                                        .on_down(MouseButton::Left, {
                                            let assistant = assistant.clone();
                                            move |_, _, cx| {
                                                assistant.update(cx, |assistant, cx| {
                                                    assistant.cycle_message_role(message_id, cx)
                                                })
                                            }
                                        });

                                        // Header row: sender, timestamp, and an
                                        // error icon (with tooltip) when the
                                        // completion for this message failed.
                                        Flex::row()
                                            .with_child(sender.aligned())
                                            .with_child(
                                                Label::new(
                                                    metadata.sent_at.format("%I:%M%P").to_string(),
                                                    style.sent_at.text.clone(),
                                                )
                                                .contained()
                                                .with_style(style.sent_at.container)
                                                .aligned(),
                                            )
                                            .with_children(metadata.error.clone().map(|error| {
                                                Svg::new("icons/circle_x_mark_12.svg")
                                                    .with_color(style.error_icon.color)
                                                    .constrained()
                                                    .with_width(style.error_icon.width)
                                                    .contained()
                                                    .with_style(style.error_icon.container)
                                                    .with_tooltip::<ErrorTooltip>(
                                                        message_id.0,
                                                        error,
                                                        None,
                                                        theme.tooltip.clone(),
                                                        cx,
                                                    )
                                                    .aligned()
                                            }))
                                            .aligned()
                                            .left()
                                            .contained()
                                            .with_style(style.header)
                                            .into_any()
                                    }
                                }),
                                disposition: BlockDisposition::Above,
                            })
                            .collect::<Vec<_>>();

                    editor.remove_blocks(old_blocks, cx);
                    let ids = editor.insert_blocks(new_blocks, cx);
                    self.blocks = HashSet::from_iter(ids);
                });
            }

            AssistantEvent::SummaryChanged => {
                cx.emit(AssistantEditorEvent::TabContentChanged);
            }

            AssistantEvent::StreamedCompletion => {
                // Keep the previously recorded bottom of the viewport pinned
                // as streamed text grows the buffer.
                self.editor.update(cx, |editor, cx| {
                    let snapshot = editor.snapshot(cx);
                    let scroll_bottom_row = self
                        .scroll_bottom
                        .anchor
                        .to_display_point(&snapshot.display_snapshot)
                        .row();

                    let scroll_bottom = scroll_bottom_row as f32 + self.scroll_bottom.offset.y();
                    let visible_line_count = editor.visible_line_count().unwrap_or(0.);
                    let scroll_top = scroll_bottom - visible_line_count;
                    editor
                        .set_scroll_position(vec2f(self.scroll_bottom.offset.x(), scroll_top), cx);
                });
            }
        }
    }
1037
1038 fn handle_editor_event(
1039 &mut self,
1040 _: ViewHandle<Editor>,
1041 event: &editor::Event,
1042 cx: &mut ViewContext<Self>,
1043 ) {
1044 match event {
1045 editor::Event::ScrollPositionChanged { .. } => self.update_scroll_bottom(cx),
1046 _ => {}
1047 }
1048 }
1049
    /// Records the buffer position currently at the bottom of the viewport
    /// (as an anchor plus fractional-row offset) so streamed output can keep
    /// the view pinned to it.
    fn update_scroll_bottom(&mut self, cx: &mut ViewContext<Self>) {
        self.editor.update(cx, |editor, cx| {
            let snapshot = editor.snapshot(cx);
            let scroll_position = editor
                .scroll_manager
                .anchor()
                .scroll_position(&snapshot.display_snapshot);
            // Bottom row (fractional) = top row + viewport height in lines.
            let scroll_bottom = scroll_position.y() + editor.visible_line_count().unwrap_or(0.);
            // Clamp to the last display row in case the viewport extends
            // past the end of the buffer.
            let scroll_bottom_point = cmp::min(
                DisplayPoint::new(scroll_bottom.floor() as u32, 0),
                snapshot.display_snapshot.max_point(),
            );
            let scroll_bottom_anchor = snapshot
                .buffer_snapshot
                .anchor_after(scroll_bottom_point.to_point(&snapshot.display_snapshot));
            // x: horizontal scroll; y: fractional rows beyond the anchor row.
            let scroll_bottom_offset = vec2f(
                scroll_position.x(),
                scroll_bottom - scroll_bottom_point.row() as f32,
            );
            self.scroll_bottom = ScrollAnchor {
                anchor: scroll_bottom_anchor,
                offset: scroll_bottom_offset,
            };
        });
    }
1075
    /// Workspace action: copies the active editor's selection into the
    /// current conversation as a fenced code block (or a Markdown
    /// blockquote), focusing the panel.
    fn quote_selection(
        workspace: &mut Workspace,
        _: &QuoteSelection,
        cx: &mut ViewContext<Workspace>,
    ) {
        let Some(panel) = workspace.panel::<AssistantPanel>(cx) else {
            return;
        };
        let Some(editor) = workspace.active_item(cx).and_then(|item| item.downcast::<Editor>()) else {
            return;
        };

        let text = editor.read_with(cx, |editor, cx| {
            let range = editor.selections.newest::<usize>(cx).range();
            let buffer = editor.buffer().read(cx).snapshot(cx);
            // Only label the fence when the whole selection sits in a single
            // language.
            let start_language = buffer.language_at(range.start);
            let end_language = buffer.language_at(range.end);
            let language_name = if start_language == end_language {
                start_language.map(|language| language.name())
            } else {
                None
            };
            let language_name = language_name.as_deref().unwrap_or("").to_lowercase();

            let selected_text = buffer.text_for_range(range).collect::<String>();
            if selected_text.is_empty() {
                None
            } else {
                // Markdown is blockquoted instead of fenced.
                Some(if language_name == "markdown" {
                    selected_text
                        .lines()
                        .map(|line| format!("> {}", line))
                        .collect::<Vec<_>>()
                        .join("\n")
                } else {
                    format!("```{language_name}\n{selected_text}\n```")
                })
            }
        });

        // Activate the panel
        if !panel.read(cx).has_focus(cx) {
            workspace.toggle_panel_focus::<AssistantPanel>(cx);
        }

        if let Some(text) = text {
            panel.update(cx, |panel, cx| {
                // Insert into the active conversation; logs if there is none.
                if let Some(assistant) = panel
                    .pane
                    .read(cx)
                    .active_item()
                    .and_then(|item| item.downcast::<AssistantEditor>())
                    .ok_or_else(|| anyhow!("no active context"))
                    .log_err()
                {
                    assistant.update(cx, |assistant, cx| {
                        assistant
                            .editor
                            .update(cx, |editor, cx| editor.insert(&text, cx))
                    });
                }
            });
        }
    }
1140
1141 fn copy(&mut self, _: &editor::Copy, cx: &mut ViewContext<Self>) {
1142 let editor = self.editor.read(cx);
1143 let assistant = self.assistant.read(cx);
1144 if editor.selections.count() == 1 {
1145 let selection = editor.selections.newest::<usize>(cx);
1146 let mut offset = 0;
1147 let mut copied_text = String::new();
1148 let mut spanned_messages = 0;
1149 for message in &assistant.messages {
1150 todo!();
1151 // let message_range = offset..offset + message.content.read(cx).len() + 1;
1152 let message_range = offset..offset + 1;
1153
1154 if message_range.start >= selection.range().end {
1155 break;
1156 } else if message_range.end >= selection.range().start {
1157 let range = cmp::max(message_range.start, selection.range().start)
1158 ..cmp::min(message_range.end, selection.range().end);
1159 if !range.is_empty() {
1160 if let Some(metadata) = assistant.messages_metadata.get(&message.id) {
1161 spanned_messages += 1;
1162 write!(&mut copied_text, "## {}\n\n", metadata.role).unwrap();
1163 for chunk in assistant.buffer.read(cx).text_for_range(range) {
1164 copied_text.push_str(&chunk);
1165 }
1166 copied_text.push('\n');
1167 }
1168 }
1169 }
1170
1171 offset = message_range.end;
1172 }
1173
1174 if spanned_messages > 1 {
1175 cx.platform()
1176 .write_to_clipboard(ClipboardItem::new(copied_text));
1177 return;
1178 }
1179 }
1180
1181 cx.propagate_action();
1182 }
1183
1184 fn cycle_model(&mut self, cx: &mut ViewContext<Self>) {
1185 self.assistant.update(cx, |assistant, cx| {
1186 let new_model = match assistant.model.as_str() {
1187 "gpt-4" => "gpt-3.5-turbo",
1188 _ => "gpt-4",
1189 };
1190 assistant.set_model(new_model.into(), cx);
1191 });
1192 }
1193
1194 fn title(&self, cx: &AppContext) -> String {
1195 self.assistant
1196 .read(cx)
1197 .summary
1198 .clone()
1199 .unwrap_or_else(|| "New Context".into())
1200 }
1201}
1202
impl Entity for AssistantEditor {
    // gpui entity registration: `AssistantEditorEvent` (declared elsewhere in
    // this file) is the event type this view emits to subscribers.
    type Event = AssistantEditorEvent;
}
1206
1207impl View for AssistantEditor {
1208 fn ui_name() -> &'static str {
1209 "AssistantEditor"
1210 }
1211
1212 fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> {
1213 enum Model {}
1214 let theme = &theme::current(cx).assistant;
1215 let assistant = &self.assistant.read(cx);
1216 let model = assistant.model.clone();
1217 let remaining_tokens = assistant.remaining_tokens().map(|remaining_tokens| {
1218 let remaining_tokens_style = if remaining_tokens <= 0 {
1219 &theme.no_remaining_tokens
1220 } else {
1221 &theme.remaining_tokens
1222 };
1223 Label::new(
1224 remaining_tokens.to_string(),
1225 remaining_tokens_style.text.clone(),
1226 )
1227 .contained()
1228 .with_style(remaining_tokens_style.container)
1229 });
1230
1231 Stack::new()
1232 .with_child(
1233 ChildView::new(&self.editor, cx)
1234 .contained()
1235 .with_style(theme.container),
1236 )
1237 .with_child(
1238 Flex::row()
1239 .with_child(
1240 MouseEventHandler::<Model, _>::new(0, cx, |state, _| {
1241 let style = theme.model.style_for(state, false);
1242 Label::new(model, style.text.clone())
1243 .contained()
1244 .with_style(style.container)
1245 })
1246 .with_cursor_style(CursorStyle::PointingHand)
1247 .on_click(MouseButton::Left, |_, this, cx| this.cycle_model(cx)),
1248 )
1249 .with_children(remaining_tokens)
1250 .contained()
1251 .with_style(theme.model_info_container)
1252 .aligned()
1253 .top()
1254 .right(),
1255 )
1256 .into_any()
1257 }
1258
1259 fn focus_in(&mut self, _: gpui::AnyViewHandle, cx: &mut ViewContext<Self>) {
1260 if cx.is_self_focused() {
1261 cx.focus(&self.editor);
1262 }
1263 }
1264}
1265
1266impl Item for AssistantEditor {
1267 fn tab_content<V: View>(
1268 &self,
1269 _: Option<usize>,
1270 style: &theme::Tab,
1271 cx: &gpui::AppContext,
1272 ) -> AnyElement<V> {
1273 let title = truncate_and_trailoff(&self.title(cx), editor::MAX_TAB_TITLE_LEN);
1274 Label::new(title, style.label.clone()).into_any()
1275 }
1276
1277 fn tab_tooltip_text(&self, cx: &AppContext) -> Option<Cow<str>> {
1278 Some(self.title(cx).into())
1279 }
1280
1281 fn as_searchable(
1282 &self,
1283 _: &ViewHandle<Self>,
1284 ) -> Option<Box<dyn workspace::searchable::SearchableItemHandle>> {
1285 Some(Box::new(self.editor.clone()))
1286 }
1287}
1288
/// Opaque identifier for a message within an assistant conversation.
#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, Hash)]
struct MessageId(usize);
1291
/// A message in the conversation buffer.
#[derive(Clone, Debug)]
struct Message {
    id: MessageId,
    /// Anchor at the message's first character in the conversation buffer.
    /// Only the start is stored; a message appears to extend to the next
    /// message's start (the ranges in the tests below are contiguous) —
    /// TODO confirm against `Assistant::messages`.
    start: language::Anchor,
}
1297
/// Per-message state kept outside the buffer text.
#[derive(Clone, Debug)]
struct MessageMetadata {
    /// Who authored the message (e.g. user vs. assistant); rendered as the
    /// `## <role>` heading when copying multi-message selections.
    role: Role,
    /// Local timestamp recorded for the message.
    sent_at: DateTime<Local>,
    /// Error text associated with this message, if any — presumably from a
    /// failed completion request; set elsewhere in this file (TODO confirm).
    error: Option<String>,
}
1304
1305async fn stream_completion(
1306 api_key: String,
1307 executor: Arc<Background>,
1308 mut request: OpenAIRequest,
1309) -> Result<impl Stream<Item = Result<OpenAIResponseStreamEvent>>> {
1310 request.stream = true;
1311
1312 let (tx, rx) = futures::channel::mpsc::unbounded::<Result<OpenAIResponseStreamEvent>>();
1313
1314 let json_data = serde_json::to_string(&request)?;
1315 let mut response = Request::post(format!("{OPENAI_API_URL}/chat/completions"))
1316 .header("Content-Type", "application/json")
1317 .header("Authorization", format!("Bearer {}", api_key))
1318 .body(json_data)?
1319 .send_async()
1320 .await?;
1321
1322 let status = response.status();
1323 if status == StatusCode::OK {
1324 executor
1325 .spawn(async move {
1326 let mut lines = BufReader::new(response.body_mut()).lines();
1327
1328 fn parse_line(
1329 line: Result<String, io::Error>,
1330 ) -> Result<Option<OpenAIResponseStreamEvent>> {
1331 if let Some(data) = line?.strip_prefix("data: ") {
1332 let event = serde_json::from_str(&data)?;
1333 Ok(Some(event))
1334 } else {
1335 Ok(None)
1336 }
1337 }
1338
1339 while let Some(line) = lines.next().await {
1340 if let Some(event) = parse_line(line).transpose() {
1341 let done = event.as_ref().map_or(false, |event| {
1342 event
1343 .choices
1344 .last()
1345 .map_or(false, |choice| choice.finish_reason.is_some())
1346 });
1347 if tx.unbounded_send(event).is_err() {
1348 break;
1349 }
1350
1351 if done {
1352 break;
1353 }
1354 }
1355 }
1356
1357 anyhow::Ok(())
1358 })
1359 .detach();
1360
1361 Ok(rx)
1362 } else {
1363 let mut body = String::new();
1364 response.body_mut().read_to_string(&mut body).await?;
1365
1366 #[derive(Deserialize)]
1367 struct OpenAIResponse {
1368 error: OpenAIError,
1369 }
1370
1371 #[derive(Deserialize)]
1372 struct OpenAIError {
1373 message: String,
1374 }
1375
1376 match serde_json::from_str::<OpenAIResponse>(&body) {
1377 Ok(response) if !response.error.message.is_empty() => Err(anyhow!(
1378 "Failed to connect to OpenAI API: {}",
1379 response.error.message,
1380 )),
1381
1382 _ => Err(anyhow!(
1383 "Failed to connect to OpenAI API: {} {}",
1384 response.status(),
1385 body,
1386 )),
1387 }
1388 }
1389}
1390
#[cfg(test)]
mod tests {
    use super::*;
    use gpui::AppContext;

    // Exercises message bookkeeping: messages track buffer edits via anchors,
    // insertion splits ownership of the text at message boundaries, deleting
    // across a boundary merges messages, and undo restores them.
    #[gpui::test]
    fn test_inserting_and_removing_messages(cx: &mut AppContext) {
        let registry = Arc::new(LanguageRegistry::test());
        let assistant = cx.add_model(|cx| Assistant::new(Default::default(), registry, cx));
        let buffer = assistant.read(cx).buffer.clone();

        // A fresh assistant starts with a single empty user message.
        let message_1 = assistant.read(cx).messages[0].clone();
        assert_eq!(
            messages(&assistant, cx),
            vec![(message_1.id, Role::User, 0..0)]
        );

        // Inserting after message 1 gives message 1 the separator character
        // (0..1) and starts message 2 empty at the end.
        let message_2 = assistant.update(cx, |assistant, cx| {
            assistant
                .insert_message_after(message_1.id, Role::Assistant, cx)
                .unwrap()
        });
        assert_eq!(
            messages(&assistant, cx),
            vec![
                (message_1.id, Role::User, 0..1),
                (message_2.id, Role::Assistant, 1..1)
            ]
        );

        // Text typed inside each message grows that message's range.
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(0..0, "1"), (1..1, "2")], None, cx)
        });
        assert_eq!(
            messages(&assistant, cx),
            vec![
                (message_1.id, Role::User, 0..2),
                (message_2.id, Role::Assistant, 2..3)
            ]
        );

        // Inserting after the last message appends a new empty message.
        let message_3 = assistant.update(cx, |assistant, cx| {
            assistant
                .insert_message_after(message_2.id, Role::User, cx)
                .unwrap()
        });
        assert_eq!(
            messages(&assistant, cx),
            vec![
                (message_1.id, Role::User, 0..2),
                (message_2.id, Role::Assistant, 2..4),
                (message_3.id, Role::User, 4..4)
            ]
        );

        // Inserting after message 2 again lands the new message (4) between
        // messages 2 and 3.
        let message_4 = assistant.update(cx, |assistant, cx| {
            assistant
                .insert_message_after(message_2.id, Role::User, cx)
                .unwrap()
        });
        assert_eq!(
            messages(&assistant, cx),
            vec![
                (message_1.id, Role::User, 0..2),
                (message_2.id, Role::Assistant, 2..4),
                (message_4.id, Role::User, 4..5),
                (message_3.id, Role::User, 5..5),
            ]
        );

        // Edits at message starts are attributed to the message that begins
        // there, shifting the later messages' ranges.
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(4..4, "C"), (5..5, "D")], None, cx)
        });
        assert_eq!(
            messages(&assistant, cx),
            vec![
                (message_1.id, Role::User, 0..2),
                (message_2.id, Role::Assistant, 2..4),
                (message_4.id, Role::User, 4..6),
                (message_3.id, Role::User, 6..7),
            ]
        );

        // Deleting across message boundaries merges the messages.
        buffer.update(cx, |buffer, cx| buffer.edit([(1..4, "")], None, cx));
        assert_eq!(
            messages(&assistant, cx),
            vec![
                (message_1.id, Role::User, 0..3),
                (message_3.id, Role::User, 3..4),
            ]
        );

        // Undoing the deletion restores the merged messages with their
        // original ids and ranges.
        buffer.update(cx, |buffer, cx| buffer.undo(cx));
        assert_eq!(
            messages(&assistant, cx),
            vec![
                (message_1.id, Role::User, 0..2),
                (message_2.id, Role::Assistant, 2..4),
                (message_4.id, Role::User, 4..6),
                (message_3.id, Role::User, 6..7),
            ]
        );
    }

    // Flattens the assistant's messages into (id, role, buffer offset range)
    // tuples for compact assertions.
    fn messages(
        assistant: &ModelHandle<Assistant>,
        cx: &AppContext,
    ) -> Vec<(MessageId, Role, Range<usize>)> {
        assistant
            .read(cx)
            .messages(cx)
            .map(|(message, metadata, range)| (message.id, metadata.role, range))
            .collect()
    }
}