1use crate::{
2 assistant_settings::{AssistantDockPosition, AssistantSettings},
3 OpenAIRequest, OpenAIResponseStreamEvent, RequestMessage, Role,
4};
5use anyhow::{anyhow, Result};
6use chrono::{DateTime, Local};
7use collections::{HashMap, HashSet};
8use editor::{Anchor, Editor, ExcerptId, ExcerptRange, MultiBuffer};
9use fs::Fs;
10use futures::{io::BufReader, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt};
11use gpui::{
12 actions,
13 elements::*,
14 executor::Background,
15 platform::{CursorStyle, MouseButton},
16 Action, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Subscription, Task,
17 View, ViewContext, ViewHandle, WeakViewHandle, WindowContext,
18};
19use isahc::{http::StatusCode, Request, RequestExt};
20use language::{language_settings::SoftWrap, Buffer, LanguageRegistry};
21use serde::Deserialize;
22use settings::SettingsStore;
23use std::{borrow::Cow, cell::RefCell, io, rc::Rc, sync::Arc, time::Duration};
24use util::{post_inc, truncate_and_trailoff, ResultExt, TryFutureExt};
25use workspace::{
26 dock::{DockPosition, Panel},
27 item::Item,
28 pane, Pane, Workspace,
29};
30
/// Base URL of the OpenAI REST API. Also doubles as the entry name used
/// when reading/writing the API key in the platform credential store.
// `'static` is implied for consts; spelling it out trips clippy's
// `redundant_static_lifetimes` lint.
const OPENAI_API_URL: &str = "https://api.openai.com/v1";
32
// Actions in the `assistant` namespace; handlers are registered in `init`.
actions!(
    assistant,
    [NewContext, Assist, QuoteSelection, ToggleFocus, ResetKey]
);
37
38pub fn init(cx: &mut AppContext) {
39 settings::register::<AssistantSettings>(cx);
40 cx.add_action(
41 |workspace: &mut Workspace, _: &NewContext, cx: &mut ViewContext<Workspace>| {
42 if let Some(this) = workspace.panel::<AssistantPanel>(cx) {
43 this.update(cx, |this, cx| this.add_context(cx))
44 }
45
46 workspace.focus_panel::<AssistantPanel>(cx);
47 },
48 );
49 cx.add_action(AssistantEditor::assist);
50 cx.capture_action(AssistantEditor::cancel_last_assist);
51 cx.add_action(AssistantEditor::quote_selection);
52 cx.add_action(AssistantPanel::save_api_key);
53 cx.add_action(AssistantPanel::reset_api_key);
54}
55
/// Events emitted by [`AssistantPanel`]; the dock reacts to them through
/// the `Panel` trait's `should_*_on_event` hooks implemented below.
pub enum AssistantPanelEvent {
    ZoomIn,
    ZoomOut,
    Focus,
    Close,
    DockPositionChanged,
}
63
/// Dockable panel hosting a pane of [`AssistantEditor`] conversation tabs.
pub struct AssistantPanel {
    // User-resized size for side docks; `None` falls back to settings.
    width: Option<f32>,
    // User-resized size for the bottom dock; `None` falls back to settings.
    height: Option<f32>,
    pane: ViewHandle<Pane>,
    // Shared with every `Assistant` so a key entered once is visible to all.
    api_key: Rc<RefCell<Option<String>>>,
    // `Some` while the user is being prompted to paste an API key.
    api_key_editor: Option<ViewHandle<Editor>>,
    // Guards against hitting the platform credential store more than once.
    has_read_credentials: bool,
    languages: Arc<LanguageRegistry>,
    fs: Arc<dyn Fs>,
    subscriptions: Vec<Subscription>,
}
75
impl AssistantPanel {
    /// Builds the panel inside `workspace`: a non-splittable pane with a
    /// custom tab bar ("new context" + zoom toggle buttons) and a buffer
    /// search bar in its toolbar. Async to fit the workspace's
    /// panel-loading API; currently resolves immediately.
    pub fn load(
        workspace: WeakViewHandle<Workspace>,
        cx: AsyncAppContext,
    ) -> Task<Result<ViewHandle<Self>>> {
        cx.spawn(|mut cx| async move {
            // TODO: deserialize state.
            workspace.update(&mut cx, |workspace, cx| {
                cx.add_view::<Self, _>(|cx| {
                    let weak_self = cx.weak_handle();
                    let pane = cx.add_view(|cx| {
                        let mut pane = Pane::new(
                            workspace.weak_handle(),
                            workspace.project().clone(),
                            workspace.app_state().background_actions,
                            Default::default(),
                            cx,
                        );
                        // Conversations can't be split or navigated like
                        // ordinary editor panes, and nothing may be dropped in.
                        pane.set_can_split(false, cx);
                        pane.set_can_navigate(false, cx);
                        pane.on_can_drop(move |_, _| false);
                        pane.set_render_tab_bar_buttons(cx, move |pane, cx| {
                            let weak_self = weak_self.clone();
                            Flex::row()
                                // "+" button: opens a new context; deferred so the
                                // update runs outside the render pass.
                                .with_child(Pane::render_tab_bar_button(
                                    0,
                                    "icons/plus_12.svg",
                                    false,
                                    Some(("New Context".into(), Some(Box::new(NewContext)))),
                                    cx,
                                    move |_, cx| {
                                        let weak_self = weak_self.clone();
                                        cx.window_context().defer(move |cx| {
                                            if let Some(this) = weak_self.upgrade(cx) {
                                                this.update(cx, |this, cx| this.add_context(cx));
                                            }
                                        })
                                    },
                                    None,
                                ))
                                // Zoom toggle: icon reflects current zoom state.
                                .with_child(Pane::render_tab_bar_button(
                                    1,
                                    if pane.is_zoomed() {
                                        "icons/minimize_8.svg"
                                    } else {
                                        "icons/maximize_8.svg"
                                    },
                                    pane.is_zoomed(),
                                    Some((
                                        "Toggle Zoom".into(),
                                        Some(Box::new(workspace::ToggleZoom)),
                                    )),
                                    cx,
                                    move |pane, cx| pane.toggle_zoom(&Default::default(), cx),
                                    None,
                                ))
                                .into_any()
                        });
                        let buffer_search_bar = cx.add_view(search::BufferSearchBar::new);
                        pane.toolbar()
                            .update(cx, |toolbar, cx| toolbar.add_item(buffer_search_bar, cx));
                        pane
                    });

                    let mut this = Self {
                        pane,
                        api_key: Rc::new(RefCell::new(None)),
                        api_key_editor: None,
                        has_read_credentials: false,
                        languages: workspace.app_state().languages.clone(),
                        fs: workspace.app_state().fs.clone(),
                        width: None,
                        height: None,
                        subscriptions: Default::default(),
                    };

                    // Re-emit a dock-position event whenever a settings change
                    // actually moves the panel.
                    let mut old_dock_position = this.position(cx);
                    this.subscriptions = vec![
                        cx.observe(&this.pane, |_, _, cx| cx.notify()),
                        cx.subscribe(&this.pane, Self::handle_pane_event),
                        cx.observe_global::<SettingsStore, _>(move |this, cx| {
                            let new_dock_position = this.position(cx);
                            if new_dock_position != old_dock_position {
                                old_dock_position = new_dock_position;
                                cx.emit(AssistantPanelEvent::DockPositionChanged);
                            }
                        }),
                    ];

                    this
                })
            })
        })
    }

    /// Forwards pane events to the dock as panel events.
    fn handle_pane_event(
        &mut self,
        _pane: ViewHandle<Pane>,
        event: &pane::Event,
        cx: &mut ViewContext<Self>,
    ) {
        match event {
            pane::Event::ZoomIn => cx.emit(AssistantPanelEvent::ZoomIn),
            pane::Event::ZoomOut => cx.emit(AssistantPanelEvent::ZoomOut),
            pane::Event::Focus => cx.emit(AssistantPanelEvent::Focus),
            pane::Event::Remove => cx.emit(AssistantPanelEvent::Close),
            _ => {}
        }
    }

    /// Opens a new conversation tab, focusing it only if the panel
    /// already had focus.
    fn add_context(&mut self, cx: &mut ViewContext<Self>) {
        let focus = self.has_focus(cx);
        let editor = cx
            .add_view(|cx| AssistantEditor::new(self.api_key.clone(), self.languages.clone(), cx));
        self.subscriptions
            .push(cx.subscribe(&editor, Self::handle_assistant_editor_event));
        self.pane.update(cx, |pane, cx| {
            pane.add_item(Box::new(editor), true, focus, None, cx)
        });
    }

    /// Re-renders the pane when a conversation's tab title changes.
    fn handle_assistant_editor_event(
        &mut self,
        _: ViewHandle<AssistantEditor>,
        event: &AssistantEditorEvent,
        cx: &mut ViewContext<Self>,
    ) {
        match event {
            AssistantEditorEvent::TabContentChanged => self.pane.update(cx, |_, cx| cx.notify()),
        }
    }

    /// Confirm handler for the key prompt: persists a non-empty key to
    /// the credential store, shares it via `self.api_key`, and dismisses
    /// the prompt.
    fn save_api_key(&mut self, _: &menu::Confirm, cx: &mut ViewContext<Self>) {
        if let Some(api_key) = self
            .api_key_editor
            .as_ref()
            .map(|editor| editor.read(cx).text(cx))
        {
            if !api_key.is_empty() {
                cx.platform()
                    .write_credentials(OPENAI_API_URL, "Bearer", api_key.as_bytes())
                    .log_err();
                *self.api_key.borrow_mut() = Some(api_key);
                self.api_key_editor.take();
                cx.focus_self();
                cx.notify();
            }
        }
    }

    /// Deletes the stored key and re-opens the key prompt.
    fn reset_api_key(&mut self, _: &ResetKey, cx: &mut ViewContext<Self>) {
        cx.platform().delete_credentials(OPENAI_API_URL).log_err();
        self.api_key.take();
        self.api_key_editor = Some(build_api_key_editor(cx));
        cx.focus_self();
        cx.notify();
    }
}
234
235fn build_api_key_editor(cx: &mut ViewContext<AssistantPanel>) -> ViewHandle<Editor> {
236 cx.add_view(|cx| {
237 let mut editor = Editor::single_line(
238 Some(Arc::new(|theme| theme.assistant.api_key_editor.clone())),
239 cx,
240 );
241 editor.set_placeholder_text("sk-000000000000000000000000000000000000000000000000", cx);
242 editor
243 })
244}
245
// Declares which event type this panel emits to the app.
impl Entity for AssistantPanel {
    type Event = AssistantPanelEvent;
}
249
impl View for AssistantPanel {
    fn ui_name() -> &'static str {
        "AssistantPanel"
    }

    /// Renders either the API-key prompt (while no key is known) or the
    /// conversation pane.
    fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> {
        let style = &theme::current(cx).assistant;
        if let Some(api_key_editor) = self.api_key_editor.as_ref() {
            Flex::column()
                .with_child(
                    Text::new(
                        "Paste your OpenAI API key and press Enter to use the assistant",
                        style.api_key_prompt.text.clone(),
                    )
                    .aligned(),
                )
                .with_child(
                    ChildView::new(api_key_editor, cx)
                        .contained()
                        .with_style(style.api_key_editor.container)
                        .aligned(),
                )
                .contained()
                .with_style(style.api_key_prompt.container)
                .aligned()
                .into_any()
        } else {
            ChildView::new(&self.pane, cx).into_any()
        }
    }

    /// Forwards focus to whichever child is currently shown.
    fn focus_in(&mut self, _: gpui::AnyViewHandle, cx: &mut ViewContext<Self>) {
        if cx.is_self_focused() {
            if let Some(api_key_editor) = self.api_key_editor.as_ref() {
                cx.focus(api_key_editor);
            } else {
                cx.focus(&self.pane);
            }
        }
    }
}
291
292impl Panel for AssistantPanel {
293 fn position(&self, cx: &WindowContext) -> DockPosition {
294 match settings::get::<AssistantSettings>(cx).dock {
295 AssistantDockPosition::Left => DockPosition::Left,
296 AssistantDockPosition::Bottom => DockPosition::Bottom,
297 AssistantDockPosition::Right => DockPosition::Right,
298 }
299 }
300
301 fn position_is_valid(&self, _: DockPosition) -> bool {
302 true
303 }
304
305 fn set_position(&mut self, position: DockPosition, cx: &mut ViewContext<Self>) {
306 settings::update_settings_file::<AssistantSettings>(self.fs.clone(), cx, move |settings| {
307 let dock = match position {
308 DockPosition::Left => AssistantDockPosition::Left,
309 DockPosition::Bottom => AssistantDockPosition::Bottom,
310 DockPosition::Right => AssistantDockPosition::Right,
311 };
312 settings.dock = Some(dock);
313 });
314 }
315
316 fn size(&self, cx: &WindowContext) -> f32 {
317 let settings = settings::get::<AssistantSettings>(cx);
318 match self.position(cx) {
319 DockPosition::Left | DockPosition::Right => {
320 self.width.unwrap_or_else(|| settings.default_width)
321 }
322 DockPosition::Bottom => self.height.unwrap_or_else(|| settings.default_height),
323 }
324 }
325
326 fn set_size(&mut self, size: f32, cx: &mut ViewContext<Self>) {
327 match self.position(cx) {
328 DockPosition::Left | DockPosition::Right => self.width = Some(size),
329 DockPosition::Bottom => self.height = Some(size),
330 }
331 cx.notify();
332 }
333
334 fn should_zoom_in_on_event(event: &AssistantPanelEvent) -> bool {
335 matches!(event, AssistantPanelEvent::ZoomIn)
336 }
337
338 fn should_zoom_out_on_event(event: &AssistantPanelEvent) -> bool {
339 matches!(event, AssistantPanelEvent::ZoomOut)
340 }
341
342 fn is_zoomed(&self, cx: &WindowContext) -> bool {
343 self.pane.read(cx).is_zoomed()
344 }
345
346 fn set_zoomed(&mut self, zoomed: bool, cx: &mut ViewContext<Self>) {
347 self.pane.update(cx, |pane, cx| pane.set_zoomed(zoomed, cx));
348 }
349
350 fn set_active(&mut self, active: bool, cx: &mut ViewContext<Self>) {
351 if active {
352 if self.api_key.borrow().is_none() && !self.has_read_credentials {
353 self.has_read_credentials = true;
354 let api_key = if let Some((_, api_key)) = cx
355 .platform()
356 .read_credentials(OPENAI_API_URL)
357 .log_err()
358 .flatten()
359 {
360 String::from_utf8(api_key).log_err()
361 } else {
362 None
363 };
364 if let Some(api_key) = api_key {
365 *self.api_key.borrow_mut() = Some(api_key);
366 } else if self.api_key_editor.is_none() {
367 self.api_key_editor = Some(build_api_key_editor(cx));
368 cx.notify();
369 }
370 }
371
372 if self.pane.read(cx).items_len() == 0 {
373 self.add_context(cx);
374 }
375 }
376 }
377
378 fn icon_path(&self) -> &'static str {
379 "icons/speech_bubble_12.svg"
380 }
381
382 fn icon_tooltip(&self) -> (String, Option<Box<dyn Action>>) {
383 ("Assistant Panel".into(), Some(Box::new(ToggleFocus)))
384 }
385
386 fn should_change_position_on_event(event: &Self::Event) -> bool {
387 matches!(event, AssistantPanelEvent::DockPositionChanged)
388 }
389
390 fn should_activate_on_event(_: &Self::Event) -> bool {
391 false
392 }
393
394 fn should_close_on_event(event: &AssistantPanelEvent) -> bool {
395 matches!(event, AssistantPanelEvent::Close)
396 }
397
398 fn has_focus(&self, cx: &WindowContext) -> bool {
399 self.pane.read(cx).has_focus()
400 || self
401 .api_key_editor
402 .as_ref()
403 .map_or(false, |editor| editor.is_focused(cx))
404 }
405
406 fn is_focus_event(event: &Self::Event) -> bool {
407 matches!(event, AssistantPanelEvent::Focus)
408 }
409}
410
/// Events emitted by the [`Assistant`] model to its editor view.
enum AssistantEvent {
    // The listed message excerpts were edited; empty ones may be pruned.
    MessagesEdited { ids: Vec<ExcerptId> },
    // The auto-generated conversation title changed.
    SummaryChanged,
}
415
/// Model for one conversation: an ordered list of messages, each backed
/// by its own buffer and stitched together in a multibuffer.
struct Assistant {
    buffer: ModelHandle<MultiBuffer>,
    messages: Vec<Message>,
    // Per-message role/timestamp/error, keyed by the message's excerpt.
    messages_metadata: HashMap<ExcerptId, MessageMetadata>,
    // Auto-generated tab title; `None` until `summarize` produces one.
    summary: Option<String>,
    pending_summary: Task<Option<()>>,
    // Monotonic id source for `PendingCompletion`s.
    completion_count: usize,
    pending_completions: Vec<PendingCompletion>,
    languages: Arc<LanguageRegistry>,
    model: String,
    token_count: Option<usize>,
    max_token_count: usize,
    // Debounced background token-count task; dropped/replaced on re-count.
    pending_token_count: Task<Option<()>>,
    // Shared with the panel; read at request time.
    api_key: Rc<RefCell<Option<String>>>,
    _subscriptions: Vec<Subscription>,
}
432
// Declares which event type this model emits to observers.
impl Entity for Assistant {
    type Event = AssistantEvent;
}
436
437impl Assistant {
438 fn new(
439 api_key: Rc<RefCell<Option<String>>>,
440 language_registry: Arc<LanguageRegistry>,
441 cx: &mut ModelContext<Self>,
442 ) -> Self {
443 let model = "gpt-3.5-turbo";
444 let buffer = cx.add_model(|_| MultiBuffer::new(0));
445 let mut this = Self {
446 messages: Default::default(),
447 messages_metadata: Default::default(),
448 summary: None,
449 pending_summary: Task::ready(None),
450 completion_count: Default::default(),
451 pending_completions: Default::default(),
452 languages: language_registry,
453 token_count: None,
454 max_token_count: tiktoken_rs::model::get_context_size(model),
455 pending_token_count: Task::ready(None),
456 model: model.into(),
457 _subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)],
458 api_key,
459 buffer,
460 };
461 this.push_message(Role::User, cx);
462 this.count_remaining_tokens(cx);
463 this
464 }
465
466 fn handle_buffer_event(
467 &mut self,
468 _: ModelHandle<MultiBuffer>,
469 event: &editor::multi_buffer::Event,
470 cx: &mut ModelContext<Self>,
471 ) {
472 match event {
473 editor::multi_buffer::Event::ExcerptsAdded { .. }
474 | editor::multi_buffer::Event::ExcerptsRemoved { .. }
475 | editor::multi_buffer::Event::Edited => self.count_remaining_tokens(cx),
476 editor::multi_buffer::Event::ExcerptsEdited { ids } => {
477 cx.emit(AssistantEvent::MessagesEdited { ids: ids.clone() });
478 }
479 _ => {}
480 }
481 }
482
483 fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
484 let messages = self
485 .messages
486 .iter()
487 .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
488 role: match message.role {
489 Role::User => "user".into(),
490 Role::Assistant => "assistant".into(),
491 Role::System => "system".into(),
492 },
493 content: message.content.read(cx).text(),
494 name: None,
495 })
496 .collect::<Vec<_>>();
497 let model = self.model.clone();
498 self.pending_token_count = cx.spawn(|this, mut cx| {
499 async move {
500 cx.background().timer(Duration::from_millis(200)).await;
501 let token_count = cx
502 .background()
503 .spawn(async move { tiktoken_rs::num_tokens_from_messages(&model, &messages) })
504 .await?;
505
506 this.update(&mut cx, |this, cx| {
507 this.max_token_count = tiktoken_rs::model::get_context_size(&this.model);
508 this.token_count = Some(token_count);
509 cx.notify()
510 });
511 anyhow::Ok(())
512 }
513 .log_err()
514 });
515 }
516
517 fn remaining_tokens(&self) -> Option<isize> {
518 Some(self.max_token_count as isize - self.token_count? as isize)
519 }
520
521 fn set_model(&mut self, model: String, cx: &mut ModelContext<Self>) {
522 self.model = model;
523 self.count_remaining_tokens(cx);
524 cx.notify();
525 }
526
527 fn assist(&mut self, cx: &mut ModelContext<Self>) {
528 let messages = self
529 .messages
530 .iter()
531 .map(|message| RequestMessage {
532 role: message.role,
533 content: message.content.read(cx).text(),
534 })
535 .collect();
536 let request = OpenAIRequest {
537 model: self.model.clone(),
538 messages,
539 stream: true,
540 };
541
542 let api_key = self.api_key.borrow().clone();
543 if let Some(api_key) = api_key {
544 let stream = stream_completion(api_key, cx.background().clone(), request);
545 let (excerpt_id, content) = self.push_message(Role::Assistant, cx);
546 self.push_message(Role::User, cx);
547 let task = cx.spawn(|this, mut cx| async move {
548 let stream_completion = async {
549 let mut messages = stream.await?;
550
551 while let Some(message) = messages.next().await {
552 let mut message = message?;
553 if let Some(choice) = message.choices.pop() {
554 content.update(&mut cx, |content, cx| {
555 let text: Arc<str> = choice.delta.content?.into();
556 content.edit([(content.len()..content.len(), text)], None, cx);
557 Some(())
558 });
559 }
560 }
561
562 this.update(&mut cx, |this, cx| {
563 this.pending_completions
564 .retain(|completion| completion.id != this.completion_count);
565 this.summarize(cx);
566 });
567
568 anyhow::Ok(())
569 };
570
571 if let Err(error) = stream_completion.await {
572 this.update(&mut cx, |this, cx| {
573 if let Some(metadata) = this.messages_metadata.get_mut(&excerpt_id) {
574 metadata.error = Some(error.to_string().trim().into());
575 cx.notify();
576 }
577 })
578 }
579 });
580
581 self.pending_completions.push(PendingCompletion {
582 id: post_inc(&mut self.completion_count),
583 _task: task,
584 });
585 }
586 }
587
588 fn cancel_last_assist(&mut self) -> bool {
589 self.pending_completions.pop().is_some()
590 }
591
592 fn remove_empty_messages<'a>(
593 &mut self,
594 excerpts: HashSet<ExcerptId>,
595 protected_offsets: HashSet<usize>,
596 cx: &mut ModelContext<Self>,
597 ) {
598 let mut offset = 0;
599 let mut excerpts_to_remove = Vec::new();
600 self.messages.retain(|message| {
601 let range = offset..offset + message.content.read(cx).len();
602 offset = range.end + 1;
603 if range.is_empty()
604 && !protected_offsets.contains(&range.start)
605 && excerpts.contains(&message.excerpt_id)
606 {
607 excerpts_to_remove.push(message.excerpt_id);
608 self.messages_metadata.remove(&message.excerpt_id);
609 false
610 } else {
611 true
612 }
613 });
614
615 if !excerpts_to_remove.is_empty() {
616 self.buffer.update(cx, |buffer, cx| {
617 buffer.remove_excerpts(excerpts_to_remove, cx)
618 });
619 cx.notify();
620 }
621 }
622
623 fn push_message(
624 &mut self,
625 role: Role,
626 cx: &mut ModelContext<Self>,
627 ) -> (ExcerptId, ModelHandle<Buffer>) {
628 let content = cx.add_model(|cx| {
629 let mut buffer = Buffer::new(0, "", cx);
630 let markdown = self.languages.language_for_name("Markdown");
631 cx.spawn_weak(|buffer, mut cx| async move {
632 let markdown = markdown.await?;
633 let buffer = buffer
634 .upgrade(&cx)
635 .ok_or_else(|| anyhow!("buffer was dropped"))?;
636 buffer.update(&mut cx, |buffer, cx| {
637 buffer.set_language(Some(markdown), cx)
638 });
639 anyhow::Ok(())
640 })
641 .detach_and_log_err(cx);
642 buffer.set_language_registry(self.languages.clone());
643 buffer
644 });
645 let excerpt_id = self.buffer.update(cx, |buffer, cx| {
646 buffer
647 .push_excerpts(
648 content.clone(),
649 vec![ExcerptRange {
650 context: 0..0,
651 primary: None,
652 }],
653 cx,
654 )
655 .pop()
656 .unwrap()
657 });
658
659 self.messages.push(Message {
660 excerpt_id,
661 role,
662 content: content.clone(),
663 });
664 self.messages_metadata.insert(
665 excerpt_id,
666 MessageMetadata {
667 role,
668 sent_at: Local::now(),
669 error: None,
670 },
671 );
672 (excerpt_id, content)
673 }
674
675 fn summarize(&mut self, cx: &mut ModelContext<Self>) {
676 if self.messages.len() >= 2 && self.summary.is_none() {
677 let api_key = self.api_key.borrow().clone();
678 if let Some(api_key) = api_key {
679 let messages = self
680 .messages
681 .iter()
682 .take(2)
683 .map(|message| RequestMessage {
684 role: message.role,
685 content: message.content.read(cx).text(),
686 })
687 .chain(Some(RequestMessage {
688 role: Role::User,
689 content:
690 "Summarize the conversation into a short title without punctuation"
691 .into(),
692 }))
693 .collect();
694 let request = OpenAIRequest {
695 model: self.model.clone(),
696 messages,
697 stream: true,
698 };
699
700 let stream = stream_completion(api_key, cx.background().clone(), request);
701 self.pending_summary = cx.spawn(|this, mut cx| {
702 async move {
703 let mut messages = stream.await?;
704
705 while let Some(message) = messages.next().await {
706 let mut message = message?;
707 if let Some(choice) = message.choices.pop() {
708 let text = choice.delta.content.unwrap_or_default();
709 this.update(&mut cx, |this, cx| {
710 this.summary.get_or_insert(String::new()).push_str(&text);
711 cx.emit(AssistantEvent::SummaryChanged);
712 });
713 }
714 }
715
716 anyhow::Ok(())
717 }
718 .log_err()
719 });
720 }
721 }
722 }
723}
724
/// Handle to an in-flight completion; dropping it cancels the task.
struct PendingCompletion {
    id: usize,
    _task: Task<()>,
}
729
/// Events emitted by [`AssistantEditor`] to the panel (e.g. when the
/// conversation summary — and thus the tab title — changes).
enum AssistantEditorEvent {
    TabContentChanged,
}
733
/// View for one conversation: an editor over the assistant's multibuffer.
struct AssistantEditor {
    assistant: ModelHandle<Assistant>,
    editor: ViewHandle<Editor>,
    _subscriptions: Vec<Subscription>,
}
739
impl AssistantEditor {
    /// Creates the conversation view: a gutter-less, soft-wrapped editor
    /// over the assistant's multibuffer, with a custom excerpt header
    /// showing each message's sender, timestamp, and any error.
    fn new(
        api_key: Rc<RefCell<Option<String>>>,
        language_registry: Arc<LanguageRegistry>,
        cx: &mut ViewContext<Self>,
    ) -> Self {
        let assistant = cx.add_model(|cx| Assistant::new(api_key, language_registry, cx));
        let editor = cx.add_view(|cx| {
            let mut editor = Editor::for_multibuffer(assistant.read(cx).buffer.clone(), None, cx);
            editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx);
            editor.set_show_gutter(false, cx);
            editor.set_render_excerpt_header(
                {
                    let assistant = assistant.clone();
                    move |_editor, params: editor::RenderExcerptHeaderParams, cx| {
                        enum ErrorTooltip {}

                        let theme = theme::current(cx);
                        let style = &theme.assistant;
                        if let Some(metadata) = assistant.read(cx).messages_metadata.get(&params.id)
                        {
                            let sender = match metadata.role {
                                Role::User => Label::new("You", style.user_sender.text.clone())
                                    .contained()
                                    .with_style(style.user_sender.container),
                                Role::Assistant => {
                                    Label::new("Assistant", style.assistant_sender.text.clone())
                                        .contained()
                                        .with_style(style.assistant_sender.container)
                                }
                                Role::System => {
                                    Label::new("System", style.assistant_sender.text.clone())
                                        .contained()
                                        .with_style(style.assistant_sender.container)
                                }
                            };

                            Flex::row()
                                .with_child(sender.aligned())
                                .with_child(
                                    Label::new(
                                        metadata.sent_at.format("%I:%M%P").to_string(),
                                        style.sent_at.text.clone(),
                                    )
                                    .contained()
                                    .with_style(style.sent_at.container)
                                    .aligned(),
                                )
                                // Error icon with a tooltip, shown only when the
                                // message's completion failed.
                                .with_children(metadata.error.clone().map(|error| {
                                    Svg::new("icons/circle_x_mark_12.svg")
                                        .with_color(style.error_icon.color)
                                        .constrained()
                                        .with_width(style.error_icon.width)
                                        .contained()
                                        .with_style(style.error_icon.container)
                                        .with_tooltip::<ErrorTooltip>(
                                            params.id.into(),
                                            error,
                                            None,
                                            theme.tooltip.clone(),
                                            cx,
                                        )
                                        .aligned()
                                }))
                                .aligned()
                                .left()
                                .contained()
                                .with_style(style.header)
                                .into_any()
                        } else {
                            Empty::new().into_any()
                        }
                    }
                },
                cx,
            );
            editor
        });

        let _subscriptions = vec![
            cx.observe(&assistant, |_, _, cx| cx.notify()),
            cx.subscribe(&assistant, Self::handle_assistant_event),
        ];

        Self {
            assistant,
            editor,
            _subscriptions,
        }
    }

    /// Handler for the `Assist` action. If the cursor sits in an
    /// assistant message, just open a new user message; otherwise send
    /// the conversation for completion.
    fn assist(&mut self, _: &Assist, cx: &mut ViewContext<Self>) {
        self.assistant.update(cx, |assistant, cx| {
            // Determine which message the newest cursor is in: the
            // buffer's extremes map to the first/last message.
            let editor = self.editor.read(cx);
            let newest_selection = editor.selections.newest_anchor();
            let role = if newest_selection.head() == Anchor::min() {
                assistant.messages.first().map(|message| message.role)
            } else if newest_selection.head() == Anchor::max() {
                assistant.messages.last().map(|message| message.role)
            } else {
                assistant
                    .messages_metadata
                    .get(&newest_selection.head().excerpt_id())
                    .map(|message| message.role)
            };

            if role.map_or(false, |role| role == Role::Assistant) {
                assistant.push_message(Role::User, cx);
            } else {
                assistant.assist(cx);
            }
        });
    }

    /// Cancels the latest in-flight completion; lets `Cancel` propagate
    /// to the editor when there was nothing to cancel.
    fn cancel_last_assist(&mut self, _: &editor::Cancel, cx: &mut ViewContext<Self>) {
        if !self
            .assistant
            .update(cx, |assistant, _| assistant.cancel_last_assist())
        {
            cx.propagate_action();
        }
    }

    fn handle_assistant_event(
        &mut self,
        assistant: ModelHandle<Assistant>,
        event: &AssistantEvent,
        cx: &mut ViewContext<Self>,
    ) {
        match event {
            AssistantEvent::MessagesEdited { ids } => {
                // Prune emptied messages, but protect any a cursor is on.
                let selections = self.editor.read(cx).selections.all::<usize>(cx);
                let selection_heads = selections
                    .iter()
                    .map(|selection| selection.head())
                    .collect::<HashSet<usize>>();
                let ids = ids.iter().copied().collect::<HashSet<_>>();
                assistant.update(cx, |assistant, cx| {
                    assistant.remove_empty_messages(ids, selection_heads, cx)
                });
            }
            AssistantEvent::SummaryChanged => {
                cx.emit(AssistantEditorEvent::TabContentChanged);
            }
        }
    }

    /// Workspace action: copies the active editor's selection into the
    /// active conversation, as a Markdown quote or a fenced code block
    /// tagged with the selection's language.
    fn quote_selection(
        workspace: &mut Workspace,
        _: &QuoteSelection,
        cx: &mut ViewContext<Workspace>,
    ) {
        let Some(panel) = workspace.panel::<AssistantPanel>(cx) else {
            return;
        };
        let Some(editor) = workspace.active_item(cx).and_then(|item| item.downcast::<Editor>()) else {
            return;
        };

        let text = editor.read_with(cx, |editor, cx| {
            let range = editor.selections.newest::<usize>(cx).range();
            let buffer = editor.buffer().read(cx).snapshot(cx);
            // Only tag the code fence when both selection ends agree on
            // the language.
            let start_language = buffer.language_at(range.start);
            let end_language = buffer.language_at(range.end);
            let language_name = if start_language == end_language {
                start_language.map(|language| language.name())
            } else {
                None
            };
            let language_name = language_name.as_deref().unwrap_or("").to_lowercase();

            let selected_text = buffer.text_for_range(range).collect::<String>();
            if selected_text.is_empty() {
                None
            } else {
                // Markdown can't be fenced as Markdown usefully; quote it.
                Some(if language_name == "markdown" {
                    selected_text
                        .lines()
                        .map(|line| format!("> {}", line))
                        .collect::<Vec<_>>()
                        .join("\n")
                } else {
                    format!("```{language_name}\n{selected_text}\n```")
                })
            }
        });

        // Activate the panel
        if !panel.read(cx).has_focus(cx) {
            workspace.toggle_panel_focus::<AssistantPanel>(cx);
        }

        if let Some(text) = text {
            panel.update(cx, |panel, cx| {
                if let Some(assistant) = panel
                    .pane
                    .read(cx)
                    .active_item()
                    .and_then(|item| item.downcast::<AssistantEditor>())
                    .ok_or_else(|| anyhow!("no active context"))
                    .log_err()
                {
                    assistant.update(cx, |assistant, cx| {
                        assistant
                            .editor
                            .update(cx, |editor, cx| editor.insert(&text, cx))
                    });
                }
            });
        }
    }

    /// Toggles the conversation between the two supported models.
    fn cycle_model(&mut self, cx: &mut ViewContext<Self>) {
        self.assistant.update(cx, |assistant, cx| {
            let new_model = match assistant.model.as_str() {
                "gpt-4" => "gpt-3.5-turbo",
                _ => "gpt-4",
            };
            assistant.set_model(new_model.into(), cx);
        });
    }

    /// Tab title: the generated summary, or a placeholder until one exists.
    fn title(&self, cx: &AppContext) -> String {
        self.assistant
            .read(cx)
            .summary
            .clone()
            .unwrap_or_else(|| "New Context".into())
    }
}
970
// Declares which event type this view emits to the panel.
impl Entity for AssistantEditor {
    type Event = AssistantEditorEvent;
}
974
impl View for AssistantEditor {
    fn ui_name() -> &'static str {
        "AssistantEditor"
    }

    /// Renders the conversation editor with a small overlay in the
    /// top-right showing the current model (click to cycle) and the
    /// remaining token budget.
    fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> {
        enum Model {}
        let theme = &theme::current(cx).assistant;
        let assistant = &self.assistant.read(cx);
        let model = assistant.model.clone();
        let remaining_tokens = assistant.remaining_tokens().map(|remaining_tokens| {
            // Warning style once the budget is exhausted (<= 0).
            let remaining_tokens_style = if remaining_tokens <= 0 {
                &theme.no_remaining_tokens
            } else {
                &theme.remaining_tokens
            };
            Label::new(
                remaining_tokens.to_string(),
                remaining_tokens_style.text.clone(),
            )
            .contained()
            .with_style(remaining_tokens_style.container)
        });

        Stack::new()
            .with_child(
                ChildView::new(&self.editor, cx)
                    .contained()
                    .with_style(theme.container),
            )
            .with_child(
                Flex::row()
                    .with_child(
                        MouseEventHandler::<Model, _>::new(0, cx, |state, _| {
                            let style = theme.model.style_for(state, false);
                            Label::new(model, style.text.clone())
                                .contained()
                                .with_style(style.container)
                        })
                        .with_cursor_style(CursorStyle::PointingHand)
                        .on_click(MouseButton::Left, |_, this, cx| this.cycle_model(cx)),
                    )
                    .with_children(remaining_tokens)
                    .contained()
                    .with_style(theme.model_info_container)
                    .aligned()
                    .top()
                    .right(),
            )
            .into_any()
    }

    /// Forwards focus straight to the inner editor.
    fn focus_in(&mut self, _: gpui::AnyViewHandle, cx: &mut ViewContext<Self>) {
        if cx.is_self_focused() {
            cx.focus(&self.editor);
        }
    }
}
1033
impl Item for AssistantEditor {
    /// Tab label: the conversation title, truncated to the editor's
    /// maximum tab-title length.
    fn tab_content<V: View>(
        &self,
        _: Option<usize>,
        style: &theme::Tab,
        cx: &gpui::AppContext,
    ) -> AnyElement<V> {
        let title = truncate_and_trailoff(&self.title(cx), editor::MAX_TAB_TITLE_LEN);
        Label::new(title, style.label.clone()).into_any()
    }

    /// Full (untruncated) title as the tab's hover tooltip.
    fn tab_tooltip_text(&self, cx: &AppContext) -> Option<Cow<str>> {
        Some(self.title(cx).into())
    }
}
1049
/// One conversation message; its text lives in its own buffer, which is
/// also embedded in the conversation multibuffer as `excerpt_id`.
#[derive(Debug)]
struct Message {
    excerpt_id: ExcerptId,
    role: Role,
    content: ModelHandle<Buffer>,
}
1056
/// Header data rendered above each message excerpt.
#[derive(Debug)]
struct MessageMetadata {
    role: Role,
    sent_at: DateTime<Local>,
    // Set when the completion that produced this message failed.
    error: Option<String>,
}
1063
1064async fn stream_completion(
1065 api_key: String,
1066 executor: Arc<Background>,
1067 mut request: OpenAIRequest,
1068) -> Result<impl Stream<Item = Result<OpenAIResponseStreamEvent>>> {
1069 request.stream = true;
1070
1071 let (tx, rx) = futures::channel::mpsc::unbounded::<Result<OpenAIResponseStreamEvent>>();
1072
1073 let json_data = serde_json::to_string(&request)?;
1074 let mut response = Request::post(format!("{OPENAI_API_URL}/chat/completions"))
1075 .header("Content-Type", "application/json")
1076 .header("Authorization", format!("Bearer {}", api_key))
1077 .body(json_data)?
1078 .send_async()
1079 .await?;
1080
1081 let status = response.status();
1082 if status == StatusCode::OK {
1083 executor
1084 .spawn(async move {
1085 let mut lines = BufReader::new(response.body_mut()).lines();
1086
1087 fn parse_line(
1088 line: Result<String, io::Error>,
1089 ) -> Result<Option<OpenAIResponseStreamEvent>> {
1090 if let Some(data) = line?.strip_prefix("data: ") {
1091 let event = serde_json::from_str(&data)?;
1092 Ok(Some(event))
1093 } else {
1094 Ok(None)
1095 }
1096 }
1097
1098 while let Some(line) = lines.next().await {
1099 if let Some(event) = parse_line(line).transpose() {
1100 let done = event.as_ref().map_or(false, |event| {
1101 event
1102 .choices
1103 .last()
1104 .map_or(false, |choice| choice.finish_reason.is_some())
1105 });
1106 if tx.unbounded_send(event).is_err() {
1107 break;
1108 }
1109
1110 if done {
1111 break;
1112 }
1113 }
1114 }
1115
1116 anyhow::Ok(())
1117 })
1118 .detach();
1119
1120 Ok(rx)
1121 } else {
1122 let mut body = String::new();
1123 response.body_mut().read_to_string(&mut body).await?;
1124
1125 #[derive(Deserialize)]
1126 struct OpenAIResponse {
1127 error: OpenAIError,
1128 }
1129
1130 #[derive(Deserialize)]
1131 struct OpenAIError {
1132 message: String,
1133 }
1134
1135 match serde_json::from_str::<OpenAIResponse>(&body) {
1136 Ok(response) if !response.error.message.is_empty() => Err(anyhow!(
1137 "Failed to connect to OpenAI API: {}",
1138 response.error.message,
1139 )),
1140
1141 _ => Err(anyhow!(
1142 "Failed to connect to OpenAI API: {} {}",
1143 response.status(),
1144 body,
1145 )),
1146 }
1147 }
1148}