#[cfg(test)]
mod context_tests;

use crate::{
    prompts::PromptBuilder, slash_command::SlashCommandLine, workflow::WorkflowStep, MessageId,
    MessageStatus,
};
use anyhow::{anyhow, Context as _, Result};
use assistant_slash_command::{
    SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry,
};
use client::{self, proto, telemetry::Telemetry};
use clock::ReplicaId;
use collections::{HashMap, HashSet};
use fs::{Fs, RemoveOptions};
use futures::{future::Shared, stream::FuturesUnordered, FutureExt, StreamExt};
use gpui::{
    AppContext, Context as _, EventEmitter, Image, Model, ModelContext, RenderImage, SharedString,
    Subscription, Task,
};

use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset};
use language_model::{
    LanguageModel, LanguageModelCacheConfiguration, LanguageModelImage, LanguageModelRegistry,
    LanguageModelRequest, LanguageModelRequestMessage, MessageContent, Role,
};
use open_ai::Model as OpenAiModel;
use paths::{context_images_dir, contexts_dir};
use project::Project;
use serde::{Deserialize, Serialize};
use smallvec::SmallVec;
use std::{
    cmp::{max, Ordering},
    collections::hash_map,
    fmt::Debug,
    iter, mem,
    ops::Range,
    path::{Path, PathBuf},
    sync::Arc,
    time::{Duration, Instant},
};
use telemetry_events::AssistantKind;
use util::{post_inc, ResultExt, TryFutureExt};
use uuid::Uuid;

46#[derive(Clone, Eq, PartialEq, Hash, PartialOrd, Ord, Serialize, Deserialize)]
47pub struct ContextId(String);
48
49impl ContextId {
50 pub fn new() -> Self {
51 Self(Uuid::new_v4().to_string())
52 }
53
54 pub fn from_proto(id: String) -> Self {
55 Self(id)
56 }
57
58 pub fn to_proto(&self) -> String {
59 self.0.clone()
60 }
61}
62
63#[derive(Clone, Debug)]
64pub enum ContextOperation {
65 InsertMessage {
66 anchor: MessageAnchor,
67 metadata: MessageMetadata,
68 version: clock::Global,
69 },
70 UpdateMessage {
71 message_id: MessageId,
72 metadata: MessageMetadata,
73 version: clock::Global,
74 },
75 UpdateSummary {
76 summary: ContextSummary,
77 version: clock::Global,
78 },
79 SlashCommandFinished {
80 id: SlashCommandId,
81 output_range: Range<language::Anchor>,
82 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
83 version: clock::Global,
84 },
85 BufferOperation(language::Operation),
86}
87
88impl ContextOperation {
89 pub fn from_proto(op: proto::ContextOperation) -> Result<Self> {
90 match op.variant.context("invalid variant")? {
91 proto::context_operation::Variant::InsertMessage(insert) => {
92 let message = insert.message.context("invalid message")?;
93 let id = MessageId(language::proto::deserialize_timestamp(
94 message.id.context("invalid id")?,
95 ));
96 Ok(Self::InsertMessage {
97 anchor: MessageAnchor {
98 id,
99 start: language::proto::deserialize_anchor(
100 message.start.context("invalid anchor")?,
101 )
102 .context("invalid anchor")?,
103 },
104 metadata: MessageMetadata {
105 role: Role::from_proto(message.role),
106 status: MessageStatus::from_proto(
107 message.status.context("invalid status")?,
108 ),
109 timestamp: id.0,
110 should_cache: false,
111 is_cache_anchor: false,
112 },
113 version: language::proto::deserialize_version(&insert.version),
114 })
115 }
116 proto::context_operation::Variant::UpdateMessage(update) => Ok(Self::UpdateMessage {
117 message_id: MessageId(language::proto::deserialize_timestamp(
118 update.message_id.context("invalid message id")?,
119 )),
120 metadata: MessageMetadata {
121 role: Role::from_proto(update.role),
122 status: MessageStatus::from_proto(update.status.context("invalid status")?),
123 timestamp: language::proto::deserialize_timestamp(
124 update.timestamp.context("invalid timestamp")?,
125 ),
126 should_cache: false,
127 is_cache_anchor: false,
128 },
129 version: language::proto::deserialize_version(&update.version),
130 }),
131 proto::context_operation::Variant::UpdateSummary(update) => Ok(Self::UpdateSummary {
132 summary: ContextSummary {
133 text: update.summary,
134 done: update.done,
135 timestamp: language::proto::deserialize_timestamp(
136 update.timestamp.context("invalid timestamp")?,
137 ),
138 },
139 version: language::proto::deserialize_version(&update.version),
140 }),
141 proto::context_operation::Variant::SlashCommandFinished(finished) => {
142 Ok(Self::SlashCommandFinished {
143 id: SlashCommandId(language::proto::deserialize_timestamp(
144 finished.id.context("invalid id")?,
145 )),
146 output_range: language::proto::deserialize_anchor_range(
147 finished.output_range.context("invalid range")?,
148 )?,
149 sections: finished
150 .sections
151 .into_iter()
152 .map(|section| {
153 Ok(SlashCommandOutputSection {
154 range: language::proto::deserialize_anchor_range(
155 section.range.context("invalid range")?,
156 )?,
157 icon: section.icon_name.parse()?,
158 label: section.label.into(),
159 })
160 })
161 .collect::<Result<Vec<_>>>()?,
162 version: language::proto::deserialize_version(&finished.version),
163 })
164 }
165 proto::context_operation::Variant::BufferOperation(op) => Ok(Self::BufferOperation(
166 language::proto::deserialize_operation(
167 op.operation.context("invalid buffer operation")?,
168 )?,
169 )),
170 }
171 }
172
173 pub fn to_proto(&self) -> proto::ContextOperation {
174 match self {
175 Self::InsertMessage {
176 anchor,
177 metadata,
178 version,
179 } => proto::ContextOperation {
180 variant: Some(proto::context_operation::Variant::InsertMessage(
181 proto::context_operation::InsertMessage {
182 message: Some(proto::ContextMessage {
183 id: Some(language::proto::serialize_timestamp(anchor.id.0)),
184 start: Some(language::proto::serialize_anchor(&anchor.start)),
185 role: metadata.role.to_proto() as i32,
186 status: Some(metadata.status.to_proto()),
187 }),
188 version: language::proto::serialize_version(version),
189 },
190 )),
191 },
192 Self::UpdateMessage {
193 message_id,
194 metadata,
195 version,
196 } => proto::ContextOperation {
197 variant: Some(proto::context_operation::Variant::UpdateMessage(
198 proto::context_operation::UpdateMessage {
199 message_id: Some(language::proto::serialize_timestamp(message_id.0)),
200 role: metadata.role.to_proto() as i32,
201 status: Some(metadata.status.to_proto()),
202 timestamp: Some(language::proto::serialize_timestamp(metadata.timestamp)),
203 version: language::proto::serialize_version(version),
204 },
205 )),
206 },
207 Self::UpdateSummary { summary, version } => proto::ContextOperation {
208 variant: Some(proto::context_operation::Variant::UpdateSummary(
209 proto::context_operation::UpdateSummary {
210 summary: summary.text.clone(),
211 done: summary.done,
212 timestamp: Some(language::proto::serialize_timestamp(summary.timestamp)),
213 version: language::proto::serialize_version(version),
214 },
215 )),
216 },
217 Self::SlashCommandFinished {
218 id,
219 output_range,
220 sections,
221 version,
222 } => proto::ContextOperation {
223 variant: Some(proto::context_operation::Variant::SlashCommandFinished(
224 proto::context_operation::SlashCommandFinished {
225 id: Some(language::proto::serialize_timestamp(id.0)),
226 output_range: Some(language::proto::serialize_anchor_range(
227 output_range.clone(),
228 )),
229 sections: sections
230 .iter()
231 .map(|section| {
232 let icon_name: &'static str = section.icon.into();
233 proto::SlashCommandOutputSection {
234 range: Some(language::proto::serialize_anchor_range(
235 section.range.clone(),
236 )),
237 icon_name: icon_name.to_string(),
238 label: section.label.to_string(),
239 }
240 })
241 .collect(),
242 version: language::proto::serialize_version(version),
243 },
244 )),
245 },
246 Self::BufferOperation(operation) => proto::ContextOperation {
247 variant: Some(proto::context_operation::Variant::BufferOperation(
248 proto::context_operation::BufferOperation {
249 operation: Some(language::proto::serialize_operation(operation)),
250 },
251 )),
252 },
253 }
254 }
255
256 fn timestamp(&self) -> clock::Lamport {
257 match self {
258 Self::InsertMessage { anchor, .. } => anchor.id.0,
259 Self::UpdateMessage { metadata, .. } => metadata.timestamp,
260 Self::UpdateSummary { summary, .. } => summary.timestamp,
261 Self::SlashCommandFinished { id, .. } => id.0,
262 Self::BufferOperation(_) => {
263 panic!("reading the timestamp of a buffer operation is not supported")
264 }
265 }
266 }
267
268 /// Returns the current version of the context operation.
269 pub fn version(&self) -> &clock::Global {
270 match self {
271 Self::InsertMessage { version, .. }
272 | Self::UpdateMessage { version, .. }
273 | Self::UpdateSummary { version, .. }
274 | Self::SlashCommandFinished { version, .. } => version,
275 Self::BufferOperation(_) => {
276 panic!("reading the version of a buffer operation is not supported")
277 }
278 }
279 }
280}
281
282#[derive(Debug, Clone)]
283pub enum ContextEvent {
284 ShowAssistError(SharedString),
285 MessagesEdited,
286 SummaryChanged,
287 WorkflowStepsRemoved(Vec<Range<language::Anchor>>),
288 WorkflowStepUpdated(Range<language::Anchor>),
289 StreamedCompletion,
290 PendingSlashCommandsUpdated {
291 removed: Vec<Range<language::Anchor>>,
292 updated: Vec<PendingSlashCommand>,
293 },
294 SlashCommandFinished {
295 output_range: Range<language::Anchor>,
296 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
297 run_commands_in_output: bool,
298 },
299 Operation(ContextOperation),
300}
301
302#[derive(Clone, Default, Debug)]
303pub struct ContextSummary {
304 pub text: String,
305 done: bool,
306 timestamp: clock::Lamport,
307}
308
309#[derive(Clone, Debug, Eq, PartialEq)]
310pub struct MessageAnchor {
311 pub id: MessageId,
312 pub start: language::Anchor,
313}
314
315#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
316pub struct MessageMetadata {
317 pub role: Role,
318 pub status: MessageStatus,
319 timestamp: clock::Lamport,
320 should_cache: bool,
321 is_cache_anchor: bool,
322}
323
324#[derive(Clone, Debug)]
325pub struct MessageImage {
326 image_id: u64,
327 image: Shared<Task<Option<LanguageModelImage>>>,
328}
329
330impl PartialEq for MessageImage {
331 fn eq(&self, other: &Self) -> bool {
332 self.image_id == other.image_id
333 }
334}
335
336impl Eq for MessageImage {}
337
338#[derive(Clone, Debug)]
339pub struct Message {
340 pub image_offsets: SmallVec<[(usize, MessageImage); 1]>,
341 pub offset_range: Range<usize>,
342 pub index_range: Range<usize>,
343 pub id: MessageId,
344 pub anchor: language::Anchor,
345 pub role: Role,
346 pub status: MessageStatus,
347 pub cache: bool,
348}
349
350impl Message {
351 fn to_request_message(&self, buffer: &Buffer) -> Option<LanguageModelRequestMessage> {
352 let mut content = Vec::new();
353
354 let mut range_start = self.offset_range.start;
355 for (image_offset, message_image) in self.image_offsets.iter() {
356 if *image_offset != range_start {
357 if let Some(text) = Self::collect_text_content(buffer, range_start..*image_offset) {
358 content.push(text);
359 }
360 }
361
362 if let Some(image) = message_image.image.clone().now_or_never().flatten() {
363 content.push(language_model::MessageContent::Image(image));
364 }
365
366 range_start = *image_offset;
367 }
368 if range_start != self.offset_range.end {
369 if let Some(text) =
370 Self::collect_text_content(buffer, range_start..self.offset_range.end)
371 {
372 content.push(text);
373 }
374 }
375
376 if content.is_empty() {
377 return None;
378 }
379
380 Some(LanguageModelRequestMessage {
381 role: self.role,
382 content,
383 cache: self.cache,
384 })
385 }
386
387 fn collect_text_content(buffer: &Buffer, range: Range<usize>) -> Option<MessageContent> {
388 let text: String = buffer.text_for_range(range.clone()).collect();
389 if text.trim().is_empty() {
390 None
391 } else {
392 Some(MessageContent::Text(text))
393 }
394 }
395}
396
397#[derive(Clone, Debug)]
398pub struct ImageAnchor {
399 pub anchor: language::Anchor,
400 pub image_id: u64,
401 pub render_image: Arc<RenderImage>,
402 pub image: Shared<Task<Option<LanguageModelImage>>>,
403}
404
405struct PendingCompletion {
406 id: usize,
407 assistant_message_id: MessageId,
408 _task: Task<()>,
409}
410
411#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
412pub struct SlashCommandId(clock::Lamport);
413
414struct WorkflowStepEntry {
415 range: Range<language::Anchor>,
416 step: Model<WorkflowStep>,
417}
418
419pub struct Context {
420 id: ContextId,
421 timestamp: clock::Lamport,
422 version: clock::Global,
423 pending_ops: Vec<ContextOperation>,
424 operations: Vec<ContextOperation>,
425 buffer: Model<Buffer>,
426 pending_slash_commands: Vec<PendingSlashCommand>,
427 edits_since_last_slash_command_parse: language::Subscription,
428 finished_slash_commands: HashSet<SlashCommandId>,
429 slash_command_output_sections: Vec<SlashCommandOutputSection<language::Anchor>>,
430 message_anchors: Vec<MessageAnchor>,
431 images: HashMap<u64, (Arc<RenderImage>, Shared<Task<Option<LanguageModelImage>>>)>,
432 image_anchors: Vec<ImageAnchor>,
433 messages_metadata: HashMap<MessageId, MessageMetadata>,
434 summary: Option<ContextSummary>,
435 pending_summary: Task<Option<()>>,
436 completion_count: usize,
437 pending_completions: Vec<PendingCompletion>,
438 token_count: Option<usize>,
439 pending_token_count: Task<Option<()>>,
440 pending_save: Task<Result<()>>,
441 pending_cache_warming_task: Task<Option<()>>,
442 path: Option<PathBuf>,
443 _subscriptions: Vec<Subscription>,
444 telemetry: Option<Arc<Telemetry>>,
445 language_registry: Arc<LanguageRegistry>,
446 workflow_steps: Vec<WorkflowStepEntry>,
447 edits_since_last_workflow_step_prune: language::Subscription,
448 project: Option<Model<Project>>,
449 prompt_builder: Arc<PromptBuilder>,
450}
451
452impl EventEmitter<ContextEvent> for Context {}
453
454impl Context {
455 pub fn local(
456 language_registry: Arc<LanguageRegistry>,
457 project: Option<Model<Project>>,
458 telemetry: Option<Arc<Telemetry>>,
459 prompt_builder: Arc<PromptBuilder>,
460 cx: &mut ModelContext<Self>,
461 ) -> Self {
462 Self::new(
463 ContextId::new(),
464 ReplicaId::default(),
465 language::Capability::ReadWrite,
466 language_registry,
467 prompt_builder,
468 project,
469 telemetry,
470 cx,
471 )
472 }
473
474 #[allow(clippy::too_many_arguments)]
475 pub fn new(
476 id: ContextId,
477 replica_id: ReplicaId,
478 capability: language::Capability,
479 language_registry: Arc<LanguageRegistry>,
480 prompt_builder: Arc<PromptBuilder>,
481 project: Option<Model<Project>>,
482 telemetry: Option<Arc<Telemetry>>,
483 cx: &mut ModelContext<Self>,
484 ) -> Self {
485 let buffer = cx.new_model(|_cx| {
486 let mut buffer = Buffer::remote(
487 language::BufferId::new(1).unwrap(),
488 replica_id,
489 capability,
490 "",
491 );
492 buffer.set_language_registry(language_registry.clone());
493 buffer
494 });
495 let edits_since_last_slash_command_parse =
496 buffer.update(cx, |buffer, _| buffer.subscribe());
497 let edits_since_last_workflow_step_prune =
498 buffer.update(cx, |buffer, _| buffer.subscribe());
499 let mut this = Self {
500 id,
501 timestamp: clock::Lamport::new(replica_id),
502 version: clock::Global::new(),
503 pending_ops: Vec::new(),
504 operations: Vec::new(),
505 message_anchors: Default::default(),
506 image_anchors: Default::default(),
507 images: Default::default(),
508 messages_metadata: Default::default(),
509 pending_slash_commands: Vec::new(),
510 finished_slash_commands: HashSet::default(),
511 slash_command_output_sections: Vec::new(),
512 edits_since_last_slash_command_parse,
513 summary: None,
514 pending_summary: Task::ready(None),
515 completion_count: Default::default(),
516 pending_completions: Default::default(),
517 token_count: None,
518 pending_token_count: Task::ready(None),
519 pending_cache_warming_task: Task::ready(None),
520 _subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)],
521 pending_save: Task::ready(Ok(())),
522 path: None,
523 buffer,
524 telemetry,
525 project,
526 language_registry,
527 workflow_steps: Vec::new(),
528 edits_since_last_workflow_step_prune,
529 prompt_builder,
530 };
531
532 let first_message_id = MessageId(clock::Lamport {
533 replica_id: 0,
534 value: 0,
535 });
536 let message = MessageAnchor {
537 id: first_message_id,
538 start: language::Anchor::MIN,
539 };
540 this.messages_metadata.insert(
541 first_message_id,
542 MessageMetadata {
543 role: Role::User,
544 status: MessageStatus::Done,
545 timestamp: first_message_id.0,
546 should_cache: false,
547 is_cache_anchor: false,
548 },
549 );
550 this.message_anchors.push(message);
551
552 this.set_language(cx);
553 this.count_remaining_tokens(cx);
554 this
555 }
556
557 pub(crate) fn serialize(&self, cx: &AppContext) -> SavedContext {
558 let buffer = self.buffer.read(cx);
559 SavedContext {
560 id: Some(self.id.clone()),
561 zed: "context".into(),
562 version: SavedContext::VERSION.into(),
563 text: buffer.text(),
564 messages: self
565 .messages(cx)
566 .map(|message| SavedMessage {
567 id: message.id,
568 start: message.offset_range.start,
569 metadata: self.messages_metadata[&message.id].clone(),
570 image_offsets: message
571 .image_offsets
572 .iter()
573 .map(|image_offset| (image_offset.0, image_offset.1.image_id))
574 .collect(),
575 })
576 .collect(),
577 summary: self
578 .summary
579 .as_ref()
580 .map(|summary| summary.text.clone())
581 .unwrap_or_default(),
582 slash_command_output_sections: self
583 .slash_command_output_sections
584 .iter()
585 .filter_map(|section| {
586 let range = section.range.to_offset(buffer);
587 if section.range.start.is_valid(buffer) && !range.is_empty() {
588 Some(assistant_slash_command::SlashCommandOutputSection {
589 range,
590 icon: section.icon,
591 label: section.label.clone(),
592 })
593 } else {
594 None
595 }
596 })
597 .collect(),
598 }
599 }
600
601 #[allow(clippy::too_many_arguments)]
602 pub fn deserialize(
603 saved_context: SavedContext,
604 path: PathBuf,
605 language_registry: Arc<LanguageRegistry>,
606 prompt_builder: Arc<PromptBuilder>,
607 project: Option<Model<Project>>,
608 telemetry: Option<Arc<Telemetry>>,
609 cx: &mut ModelContext<Self>,
610 ) -> Self {
        let id = saved_context.id.clone().unwrap_or_else(ContextId::new);
612 let mut this = Self::new(
613 id,
614 ReplicaId::default(),
615 language::Capability::ReadWrite,
616 language_registry,
617 prompt_builder,
618 project,
619 telemetry,
620 cx,
621 );
622 this.path = Some(path);
623 this.buffer.update(cx, |buffer, cx| {
624 buffer.set_text(saved_context.text.as_str(), cx)
625 });
626 let operations = saved_context.into_ops(&this.buffer, cx);
627 this.apply_ops(operations, cx).unwrap();
628 this
629 }
630
631 pub fn id(&self) -> &ContextId {
632 &self.id
633 }
634
635 pub fn replica_id(&self) -> ReplicaId {
636 self.timestamp.replica_id
637 }
638
639 pub fn version(&self, cx: &AppContext) -> ContextVersion {
640 ContextVersion {
641 context: self.version.clone(),
642 buffer: self.buffer.read(cx).version(),
643 }
644 }
645
646 pub fn set_capability(
647 &mut self,
648 capability: language::Capability,
649 cx: &mut ModelContext<Self>,
650 ) {
651 self.buffer
652 .update(cx, |buffer, cx| buffer.set_capability(capability, cx));
653 }
654
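    /// Ticks the local Lamport clock and records the new timestamp in the
    /// context's version vector.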
655 fn next_timestamp(&mut self) -> clock::Lamport {
656 let timestamp = self.timestamp.tick();
657 self.version.observe(timestamp);
658 timestamp
659 }
660
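    /// Serializes every buffer and context operation that `since` has not yet
    /// observed, so a collaborator can catch up to the current state.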
661 pub fn serialize_ops(
662 &self,
663 since: &ContextVersion,
664 cx: &AppContext,
665 ) -> Task<Vec<proto::ContextOperation>> {
666 let buffer_ops = self
667 .buffer
668 .read(cx)
669 .serialize_ops(Some(since.buffer.clone()), cx);
670
671 let mut context_ops = self
672 .operations
673 .iter()
674 .filter(|op| !since.context.observed(op.timestamp()))
675 .cloned()
676 .collect::<Vec<_>>();
677 context_ops.extend(self.pending_ops.iter().cloned());
678
679 cx.background_executor().spawn(async move {
680 let buffer_ops = buffer_ops.await;
681 context_ops.sort_unstable_by_key(|op| op.timestamp());
682 buffer_ops
683 .into_iter()
684 .map(|op| proto::ContextOperation {
685 variant: Some(proto::context_operation::Variant::BufferOperation(
686 proto::context_operation::BufferOperation {
687 operation: Some(op),
688 },
689 )),
690 })
691 .chain(context_ops.into_iter().map(|op| op.to_proto()))
692 .collect()
693 })
694 }
695
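    /// Applies incoming operations: buffer operations are forwarded to the
    /// underlying buffer immediately, while context operations are queued and
    /// flushed once their causal dependencies have been observed.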
    pub fn apply_ops(
        &mut self,
        ops: impl IntoIterator<Item = ContextOperation>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let mut buffer_ops = Vec::new();
        for op in ops {
            match op {
                ContextOperation::BufferOperation(buffer_op) => buffer_ops.push(buffer_op),
                op => self.pending_ops.push(op),
            }
        }
        self.buffer
            .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx))?;
        self.flush_ops(cx);

        Ok(())
    }
714
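    /// Drains the pending operation queue, applying every operation whose
    /// dependencies are satisfied, re-queueing the rest, and emitting change
    /// events for messages and the summary as needed.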
    fn flush_ops(&mut self, cx: &mut ModelContext<Context>) {
        let mut messages_changed = false;
        let mut summary_changed = false;

        self.pending_ops.sort_unstable_by_key(|op| op.timestamp());
        for op in mem::take(&mut self.pending_ops) {
            if !self.can_apply_op(&op, cx) {
                self.pending_ops.push(op);
                continue;
            }

            let timestamp = op.timestamp();
            match op.clone() {
                ContextOperation::InsertMessage {
                    anchor, metadata, ..
                } => {
                    if self.messages_metadata.contains_key(&anchor.id) {
                        // We already applied this operation.
                    } else {
                        self.insert_message(anchor, metadata, cx);
                        messages_changed = true;
                    }
                }
                ContextOperation::UpdateMessage {
                    message_id,
                    metadata: new_metadata,
                    ..
                } => {
                    let metadata = self.messages_metadata.get_mut(&message_id).unwrap();
                    if new_metadata.timestamp > metadata.timestamp {
                        *metadata = new_metadata;
                        messages_changed = true;
                    }
                }
                ContextOperation::UpdateSummary {
                    summary: new_summary,
                    ..
                } => {
                    if self
                        .summary
                        .as_ref()
                        .map_or(true, |summary| new_summary.timestamp > summary.timestamp)
                    {
                        self.summary = Some(new_summary);
                        summary_changed = true;
                    }
                }
                ContextOperation::SlashCommandFinished {
                    id,
                    output_range,
                    sections,
                    ..
                } => {
                    if self.finished_slash_commands.insert(id) {
                        let buffer = self.buffer.read(cx);
                        self.slash_command_output_sections
                            .extend(sections.iter().cloned());
                        self.slash_command_output_sections
                            .sort_by(|a, b| a.range.cmp(&b.range, buffer));
                        cx.emit(ContextEvent::SlashCommandFinished {
                            output_range,
                            sections,
                            run_commands_in_output: false,
                        });
                    }
                }
                ContextOperation::BufferOperation(_) => unreachable!(),
            }

            self.version.observe(timestamp);
            self.timestamp.observe(timestamp);
            self.operations.push(op);
        }

        if messages_changed {
            cx.emit(ContextEvent::MessagesEdited);
            cx.notify();
        }

        if summary_changed {
            cx.emit(ContextEvent::SummaryChanged);
            cx.notify();
        }
    }
799
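    /// Returns true if all of the operation's causal dependencies (the context
    /// version and any referenced buffer anchors) have been observed locally.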
    fn can_apply_op(&self, op: &ContextOperation, cx: &AppContext) -> bool {
        if !self.version.observed_all(op.version()) {
            return false;
        }

        match op {
            ContextOperation::InsertMessage { anchor, .. } => self
                .buffer
                .read(cx)
                .version
                .observed(anchor.start.timestamp),
            ContextOperation::UpdateMessage { message_id, .. } => {
                self.messages_metadata.contains_key(message_id)
            }
            ContextOperation::UpdateSummary { .. } => true,
            ContextOperation::SlashCommandFinished {
                output_range,
                sections,
                ..
            } => {
                let version = &self.buffer.read(cx).version;
                sections
                    .iter()
                    .map(|section| &section.range)
                    .chain([output_range])
                    .all(|range| {
                        let observed_start = range.start == language::Anchor::MIN
                            || range.start == language::Anchor::MAX
                            || version.observed(range.start.timestamp);
                        let observed_end = range.end == language::Anchor::MIN
                            || range.end == language::Anchor::MAX
                            || version.observed(range.end.timestamp);
                        observed_start && observed_end
                    })
            }
            ContextOperation::BufferOperation(_) => {
                panic!("buffer operations should always be applied")
            }
        }
    }
840
841 fn push_op(&mut self, op: ContextOperation, cx: &mut ModelContext<Self>) {
842 self.operations.push(op.clone());
843 cx.emit(ContextEvent::Operation(op));
844 }
845
846 pub fn buffer(&self) -> &Model<Buffer> {
847 &self.buffer
848 }
849
850 pub fn language_registry(&self) -> Arc<LanguageRegistry> {
851 self.language_registry.clone()
852 }
853
854 pub fn project(&self) -> Option<Model<Project>> {
855 self.project.clone()
856 }
857
858 pub fn prompt_builder(&self) -> Arc<PromptBuilder> {
859 self.prompt_builder.clone()
860 }
861
862 pub fn path(&self) -> Option<&Path> {
863 self.path.as_deref()
864 }
865
866 pub fn summary(&self) -> Option<&ContextSummary> {
867 self.summary.as_ref()
868 }
869
870 pub fn workflow_step_containing(
871 &self,
872 offset: usize,
873 cx: &AppContext,
874 ) -> Option<(Range<language::Anchor>, Model<WorkflowStep>)> {
875 let buffer = self.buffer.read(cx);
876 let index = self
877 .workflow_steps
878 .binary_search_by(|step| {
879 let step_range = step.range.to_offset(&buffer);
880 if offset < step_range.start {
881 Ordering::Greater
882 } else if offset > step_range.end {
883 Ordering::Less
884 } else {
885 Ordering::Equal
886 }
887 })
888 .ok()?;
889 let step = &self.workflow_steps[index];
890 Some((step.range.clone(), step.step.clone()))
891 }
892
893 pub fn workflow_step_for_range(
894 &self,
895 range: Range<language::Anchor>,
896 cx: &AppContext,
897 ) -> Option<Model<WorkflowStep>> {
898 let buffer = self.buffer.read(cx);
899 let index = self.workflow_step_index_for_range(&range, buffer).ok()?;
900 Some(self.workflow_steps[index].step.clone())
901 }
902
903 pub fn workflow_step_index_for_range(
904 &self,
905 tagged_range: &Range<text::Anchor>,
906 buffer: &text::BufferSnapshot,
907 ) -> Result<usize, usize> {
908 self.workflow_steps
909 .binary_search_by(|probe| probe.range.cmp(&tagged_range, buffer))
910 }
911
912 pub fn pending_slash_commands(&self) -> &[PendingSlashCommand] {
913 &self.pending_slash_commands
914 }
915
916 pub fn slash_command_output_sections(&self) -> &[SlashCommandOutputSection<language::Anchor>] {
917 &self.slash_command_output_sections
918 }
919
920 fn set_language(&mut self, cx: &mut ModelContext<Self>) {
921 let markdown = self.language_registry.language_for_name("Markdown");
922 cx.spawn(|this, mut cx| async move {
923 let markdown = markdown.await?;
924 this.update(&mut cx, |this, cx| {
925 this.buffer
926 .update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx));
927 })
928 })
929 .detach_and_log_err(cx);
930 }
931
932 fn handle_buffer_event(
933 &mut self,
934 _: Model<Buffer>,
935 event: &language::Event,
936 cx: &mut ModelContext<Self>,
937 ) {
938 match event {
939 language::Event::Operation(operation) => cx.emit(ContextEvent::Operation(
940 ContextOperation::BufferOperation(operation.clone()),
941 )),
942 language::Event::Edited => {
943 self.count_remaining_tokens(cx);
944 self.reparse_slash_commands(cx);
945 // Use `inclusive = true` to invalidate a step when an edit occurs
946 // at the start/end of a parsed step.
947 self.prune_invalid_workflow_steps(true, cx);
948 cx.emit(ContextEvent::MessagesEdited);
949 }
950 _ => {}
951 }
952 }
953
954 pub(crate) fn token_count(&self) -> Option<usize> {
955 self.token_count
956 }
957
958 pub(crate) fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
959 let request = self.to_completion_request(cx);
960 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
961 return;
962 };
963 self.pending_token_count = cx.spawn(|this, mut cx| {
964 async move {
965 cx.background_executor()
966 .timer(Duration::from_millis(200))
967 .await;
968
969 let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
970 this.update(&mut cx, |this, cx| {
971 this.token_count = Some(token_count);
972 this.start_cache_warming(&model, cx);
973 cx.notify()
974 })
975 }
976 .log_err()
977 });
978 }
979
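    /// Marks the largest messages (at least 5,000 characters) as candidates for
    /// provider-side caching, bounded by the model's cache configuration, and
    /// returns whether any message was newly marked.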
980 pub fn mark_longest_messages_for_cache(
981 &mut self,
982 cache_configuration: &Option<LanguageModelCacheConfiguration>,
983 speculative: bool,
984 cx: &mut ModelContext<Self>,
985 ) -> bool {
986 let cache_configuration =
987 cache_configuration
988 .as_ref()
989 .unwrap_or(&LanguageModelCacheConfiguration {
990 max_cache_anchors: 0,
991 should_speculate: false,
992 min_total_token: 0,
993 });
994
995 let messages: Vec<Message> = self
996 .messages_from_anchors(
997 self.message_anchors.iter().take(if speculative {
998 self.message_anchors.len().saturating_sub(1)
999 } else {
1000 self.message_anchors.len()
1001 }),
1002 cx,
1003 )
1004 .filter(|message| message.offset_range.len() >= 5_000)
1005 .collect();
1006
1007 let mut sorted_messages = messages.clone();
1008 sorted_messages.sort_by(|a, b| b.offset_range.len().cmp(&a.offset_range.len()));
1009 if cache_configuration.max_cache_anchors == 0 && cache_configuration.should_speculate {
1010 // Some models support caching, but don't support anchors. In that case we want to
1011 // mark the largest message as needing to be cached, but we will not mark it as an
1012 // anchor.
1013 sorted_messages.truncate(1);
1014 } else {
1015 // Save 1 anchor for the inline assistant.
1016 sorted_messages.truncate(max(cache_configuration.max_cache_anchors, 1) - 1);
1017 }
1018
1019 let longest_message_ids: HashSet<MessageId> = sorted_messages
1020 .into_iter()
1021 .map(|message| message.id)
1022 .collect();
1023
1024 let cache_deltas: HashSet<MessageId> = self
1025 .messages_metadata
1026 .iter()
1027 .filter_map(|(id, metadata)| {
1028 let should_cache = longest_message_ids.contains(id);
1029 let should_be_anchor = should_cache && cache_configuration.max_cache_anchors > 0;
1030 if metadata.should_cache != should_cache
1031 || metadata.is_cache_anchor != should_be_anchor
1032 {
1033 Some(*id)
1034 } else {
1035 None
1036 }
1037 })
1038 .collect();
1039
1040 let mut newly_cached_item = false;
1041 for id in cache_deltas {
1042 newly_cached_item = newly_cached_item || longest_message_ids.contains(&id);
1043 self.update_metadata(id, cx, |metadata| {
1044 metadata.should_cache = longest_message_ids.contains(&id);
1045 metadata.is_cache_anchor =
1046 metadata.should_cache && (cache_configuration.max_cache_anchors > 0);
1047 });
1048 }
1049 newly_cached_item
1050 }
1051
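    /// Speculatively sends a minimal completion request ("Respond only with OK")
    /// so the provider can warm its prompt cache for the newly marked messages.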
1052 fn start_cache_warming(&mut self, model: &Arc<dyn LanguageModel>, cx: &mut ModelContext<Self>) {
1053 let cache_configuration = model.cache_configuration();
1054 if !self.mark_longest_messages_for_cache(&cache_configuration, true, cx) {
1055 return;
1056 }
1057 if let Some(cache_configuration) = cache_configuration {
1058 if !cache_configuration.should_speculate {
1059 return;
1060 }
1061 }
1062
1063 let request = {
1064 let mut req = self.to_completion_request(cx);
1065 // Skip the last message because it's likely to change and
1066 // therefore would be a waste to cache.
1067 req.messages.pop();
1068 req.messages.push(LanguageModelRequestMessage {
1069 role: Role::User,
1070 content: vec!["Respond only with OK, nothing else.".into()],
1071 cache: false,
1072 });
1073 req
1074 };
1075
1076 let model = Arc::clone(model);
1077 self.pending_cache_warming_task = cx.spawn(|_, cx| {
1078 async move {
1079 match model.stream_completion(request, &cx).await {
1080 Ok(mut stream) => {
1081 stream.next().await;
1082 log::info!("Cache warming completed successfully");
1083 }
1084 Err(e) => {
1085 log::warn!("Cache warming failed: {}", e);
1086 }
1087 };
1088
1089 anyhow::Ok(())
1090 }
1091 .log_err()
1092 });
1093 }
1094
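    /// Re-parses slash commands on the rows edited since the last parse and
    /// emits `PendingSlashCommandsUpdated` with the removed and updated commands.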
1095 pub fn reparse_slash_commands(&mut self, cx: &mut ModelContext<Self>) {
1096 let buffer = self.buffer.read(cx);
1097 let mut row_ranges = self
1098 .edits_since_last_slash_command_parse
1099 .consume()
1100 .into_iter()
1101 .map(|edit| {
1102 let start_row = buffer.offset_to_point(edit.new.start).row;
1103 let end_row = buffer.offset_to_point(edit.new.end).row + 1;
1104 start_row..end_row
1105 })
1106 .peekable();
1107
1108 let mut removed = Vec::new();
1109 let mut updated = Vec::new();
1110 while let Some(mut row_range) = row_ranges.next() {
1111 while let Some(next_row_range) = row_ranges.peek() {
1112 if row_range.end >= next_row_range.start {
1113 row_range.end = next_row_range.end;
1114 row_ranges.next();
1115 } else {
1116 break;
1117 }
1118 }
1119
1120 let start = buffer.anchor_before(Point::new(row_range.start, 0));
1121 let end = buffer.anchor_after(Point::new(
1122 row_range.end - 1,
1123 buffer.line_len(row_range.end - 1),
1124 ));
1125
1126 let old_range = self.pending_command_indices_for_range(start..end, cx);
1127
1128 let mut new_commands = Vec::new();
1129 let mut lines = buffer.text_for_range(start..end).lines();
1130 let mut offset = lines.offset();
1131 while let Some(line) = lines.next() {
1132 if let Some(command_line) = SlashCommandLine::parse(line) {
1133 let name = &line[command_line.name.clone()];
1134 let arguments = command_line
1135 .arguments
1136 .iter()
1137 .filter_map(|argument_range| {
1138 if argument_range.is_empty() {
1139 None
1140 } else {
1141 line.get(argument_range.clone())
1142 }
1143 })
1144 .map(ToOwned::to_owned)
1145 .collect::<SmallVec<_>>();
1146 if let Some(command) = SlashCommandRegistry::global(cx).command(name) {
1147 if !command.requires_argument() || !arguments.is_empty() {
1148 let start_ix = offset + command_line.name.start - 1;
1149 let end_ix = offset
1150 + command_line
1151 .arguments
1152 .last()
1153 .map_or(command_line.name.end, |argument| argument.end);
1154 let source_range =
1155 buffer.anchor_after(start_ix)..buffer.anchor_after(end_ix);
1156 let pending_command = PendingSlashCommand {
1157 name: name.to_string(),
1158 arguments,
1159 source_range,
1160 status: PendingSlashCommandStatus::Idle,
1161 };
1162 updated.push(pending_command.clone());
1163 new_commands.push(pending_command);
1164 }
1165 }
1166 }
1167
1168 offset = lines.offset();
1169 }
1170
1171 let removed_commands = self.pending_slash_commands.splice(old_range, new_commands);
1172 removed.extend(removed_commands.map(|command| command.source_range));
1173 }
1174
1175 if !updated.is_empty() || !removed.is_empty() {
1176 cx.emit(ContextEvent::PendingSlashCommandsUpdated { removed, updated });
1177 }
1178 }
1179
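    /// Removes workflow steps whose tagged ranges intersect edits made since the
    /// last prune, emitting `WorkflowStepsRemoved` for any that are dropped.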
1180 fn prune_invalid_workflow_steps(&mut self, inclusive: bool, cx: &mut ModelContext<Self>) {
1181 let mut removed = Vec::new();
1182
1183 for edit_range in self.edits_since_last_workflow_step_prune.consume() {
1184 let intersecting_range = self.find_intersecting_steps(edit_range.new, inclusive, cx);
1185 removed.extend(
1186 self.workflow_steps
1187 .drain(intersecting_range)
1188 .map(|step| step.range),
1189 );
1190 }
1191
1192 if !removed.is_empty() {
1193 cx.emit(ContextEvent::WorkflowStepsRemoved(removed));
1194 cx.notify();
1195 }
1196 }
1197
1198 fn find_intersecting_steps(
1199 &self,
1200 range: Range<usize>,
1201 inclusive: bool,
1202 cx: &AppContext,
1203 ) -> Range<usize> {
1204 let buffer = self.buffer.read(cx);
1205 let start_ix = match self.workflow_steps.binary_search_by(|probe| {
1206 probe
1207 .range
1208 .end
1209 .to_offset(buffer)
1210 .cmp(&range.start)
1211 .then(if inclusive {
1212 Ordering::Greater
1213 } else {
1214 Ordering::Less
1215 })
1216 }) {
1217 Ok(ix) | Err(ix) => ix,
1218 };
1219 let end_ix = match self.workflow_steps.binary_search_by(|probe| {
1220 probe
1221 .range
1222 .start
1223 .to_offset(buffer)
1224 .cmp(&range.end)
1225 .then(if inclusive {
1226 Ordering::Less
1227 } else {
1228 Ordering::Greater
1229 })
1230 }) {
1231 Ok(ix) | Err(ix) => ix,
1232 };
1233 start_ix..end_ix
1234 }
1235
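    /// Scans the given range for `<step>`/`</step>` tags, registers a workflow
    /// step for each newly tagged range, and deletes the tags from the buffer.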
1236 fn parse_workflow_steps_in_range(&mut self, range: Range<usize>, cx: &mut ModelContext<Self>) {
1237 let weak_self = cx.weak_model();
1238 let mut new_edit_steps = Vec::new();
1239 let mut edits = Vec::new();
1240
1241 let buffer = self.buffer.read(cx).snapshot();
1242 let mut message_lines = buffer.as_rope().chunks_in_range(range).lines();
1243 let mut in_step = false;
1244 let mut step_open_tag_start_ix = 0;
1245 let mut line_start_offset = message_lines.offset();
1246
1247 while let Some(line) = message_lines.next() {
1248 if let Some(step_start_index) = line.find("<step>") {
1249 if !in_step {
1250 in_step = true;
1251 step_open_tag_start_ix = line_start_offset + step_start_index;
1252 }
1253 }
1254
1255 if let Some(step_end_index) = line.find("</step>") {
1256 if in_step {
1257 let mut step_open_tag_end_ix = step_open_tag_start_ix + "<step>".len();
1258 if buffer.chars_at(step_open_tag_end_ix).next() == Some('\n') {
1259 step_open_tag_end_ix += 1;
1260 }
1261 let mut step_end_tag_start_ix = line_start_offset + step_end_index;
1262 let step_end_tag_end_ix = step_end_tag_start_ix + "</step>".len();
1263 if buffer.reversed_chars_at(step_end_tag_start_ix).next() == Some('\n') {
1264 step_end_tag_start_ix -= 1;
1265 }
1266 edits.push((step_open_tag_start_ix..step_open_tag_end_ix, ""));
1267 edits.push((step_end_tag_start_ix..step_end_tag_end_ix, ""));
1268 let tagged_range = buffer.anchor_after(step_open_tag_end_ix)
1269 ..buffer.anchor_before(step_end_tag_start_ix);
1270
1271 // Check if a step with the same range already exists
1272 let existing_step_index =
1273 self.workflow_step_index_for_range(&tagged_range, &buffer);
1274
1275 if let Err(ix) = existing_step_index {
1276 new_edit_steps.push((
1277 ix,
1278 WorkflowStepEntry {
1279 step: cx.new_model(|_| {
1280 WorkflowStep::new(tagged_range.clone(), weak_self.clone())
1281 }),
1282 range: tagged_range,
1283 },
1284 ));
1285 }
1286
1287 in_step = false;
1288 }
1289 }
1290
1291 line_start_offset = message_lines.offset();
1292 }
1293
1294 let mut updated = Vec::new();
1295 for (index, step) in new_edit_steps.into_iter().rev() {
1296 let step_range = step.range.clone();
1297 updated.push(step_range.clone());
1298 self.workflow_steps.insert(index, step);
1299 self.resolve_workflow_step(step_range, cx);
1300 }
1301
1302 // Delete <step> tags, making sure we don't accidentally invalidate
1303 // the step we just parsed.
1304 self.buffer
1305 .update(cx, |buffer, cx| buffer.edit(edits, None, cx));
1306 self.edits_since_last_workflow_step_prune.consume();
1307 }
1308
1309 pub fn resolve_workflow_step(
1310 &mut self,
1311 tagged_range: Range<language::Anchor>,
1312 cx: &mut ModelContext<Self>,
1313 ) {
1314 let Ok(step_index) = self
1315 .workflow_steps
1316 .binary_search_by(|step| step.range.cmp(&tagged_range, self.buffer.read(cx)))
1317 else {
1318 return;
1319 };
1320
1321 cx.emit(ContextEvent::WorkflowStepUpdated(tagged_range.clone()));
1322 cx.notify();
1323
1324 let resolution = self.workflow_steps[step_index].step.clone();
1325 cx.defer(move |cx| {
1326 resolution.update(cx, |resolution, cx| resolution.resolve(cx));
1327 });
1328 }
1329
1330 pub fn workflow_step_updated(
1331 &mut self,
1332 range: Range<language::Anchor>,
1333 cx: &mut ModelContext<Self>,
1334 ) {
1335 cx.emit(ContextEvent::WorkflowStepUpdated(range));
1336 cx.notify();
1337 }
1338
1339 pub fn pending_command_for_position(
1340 &mut self,
1341 position: language::Anchor,
1342 cx: &mut ModelContext<Self>,
1343 ) -> Option<&mut PendingSlashCommand> {
1344 let buffer = self.buffer.read(cx);
1345 match self
1346 .pending_slash_commands
1347 .binary_search_by(|probe| probe.source_range.end.cmp(&position, buffer))
1348 {
1349 Ok(ix) => Some(&mut self.pending_slash_commands[ix]),
1350 Err(ix) => {
1351 let cmd = self.pending_slash_commands.get_mut(ix)?;
1352 if position.cmp(&cmd.source_range.start, buffer).is_ge()
1353 && position.cmp(&cmd.source_range.end, buffer).is_le()
1354 {
1355 Some(cmd)
1356 } else {
1357 None
1358 }
1359 }
1360 }
1361 }
1362
1363 pub fn pending_commands_for_range(
1364 &self,
1365 range: Range<language::Anchor>,
1366 cx: &AppContext,
1367 ) -> &[PendingSlashCommand] {
1368 let range = self.pending_command_indices_for_range(range, cx);
1369 &self.pending_slash_commands[range]
1370 }
1371
1372 fn pending_command_indices_for_range(
1373 &self,
1374 range: Range<language::Anchor>,
1375 cx: &AppContext,
1376 ) -> Range<usize> {
1377 let buffer = self.buffer.read(cx);
1378 let start_ix = match self
1379 .pending_slash_commands
1380 .binary_search_by(|probe| probe.source_range.end.cmp(&range.start, &buffer))
1381 {
1382 Ok(ix) | Err(ix) => ix,
1383 };
1384 let end_ix = match self
1385 .pending_slash_commands
1386 .binary_search_by(|probe| probe.source_range.start.cmp(&range.end, &buffer))
1387 {
1388 Ok(ix) => ix + 1,
1389 Err(ix) => ix,
1390 };
1391 start_ix..end_ix
1392 }
1393
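    /// Replaces the slash command's source range with its output once the task
    /// resolves, recording the result as a `SlashCommandFinished` operation; on
    /// failure the pending command's status is set to `Error`.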
1394 pub fn insert_command_output(
1395 &mut self,
1396 command_range: Range<language::Anchor>,
1397 output: Task<Result<SlashCommandOutput>>,
1398 ensure_trailing_newline: bool,
1399 cx: &mut ModelContext<Self>,
1400 ) {
1401 self.reparse_slash_commands(cx);
1402
1403 let insert_output_task = cx.spawn(|this, mut cx| {
1404 let command_range = command_range.clone();
1405 async move {
1406 let output = output.await;
1407 this.update(&mut cx, |this, cx| match output {
1408 Ok(mut output) => {
1409 // Ensure section ranges are valid.
1410 for section in &mut output.sections {
1411 section.range.start = section.range.start.min(output.text.len());
1412 section.range.end = section.range.end.min(output.text.len());
1413 while !output.text.is_char_boundary(section.range.start) {
1414 section.range.start -= 1;
1415 }
1416 while !output.text.is_char_boundary(section.range.end) {
1417 section.range.end += 1;
1418 }
1419 }
1420
1421 // Ensure there is a newline after the last section.
1422 if ensure_trailing_newline {
1423 let has_newline_after_last_section =
1424 output.sections.last().map_or(false, |last_section| {
1425 output.text[last_section.range.end..].ends_with('\n')
1426 });
1427 if !has_newline_after_last_section {
1428 output.text.push('\n');
1429 }
1430 }
1431
1432 let version = this.version.clone();
1433 let command_id = SlashCommandId(this.next_timestamp());
1434 let (operation, event) = this.buffer.update(cx, |buffer, cx| {
1435 let start = command_range.start.to_offset(buffer);
1436 let old_end = command_range.end.to_offset(buffer);
1437 let new_end = start + output.text.len();
1438 buffer.edit([(start..old_end, output.text)], None, cx);
1439
1440 let mut sections = output
1441 .sections
1442 .into_iter()
1443 .map(|section| SlashCommandOutputSection {
1444 range: buffer.anchor_after(start + section.range.start)
1445 ..buffer.anchor_before(start + section.range.end),
1446 icon: section.icon,
1447 label: section.label,
1448 })
1449 .collect::<Vec<_>>();
1450 sections.sort_by(|a, b| a.range.cmp(&b.range, buffer));
1451
1452 this.slash_command_output_sections
1453 .extend(sections.iter().cloned());
1454 this.slash_command_output_sections
1455 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
1456
1457 let output_range =
1458 buffer.anchor_after(start)..buffer.anchor_before(new_end);
1459 this.finished_slash_commands.insert(command_id);
1460
1461 (
1462 ContextOperation::SlashCommandFinished {
1463 id: command_id,
1464 output_range: output_range.clone(),
1465 sections: sections.clone(),
1466 version,
1467 },
1468 ContextEvent::SlashCommandFinished {
1469 output_range,
1470 sections,
1471 run_commands_in_output: output.run_commands_in_text,
1472 },
1473 )
1474 });
1475
1476 this.push_op(operation, cx);
1477 cx.emit(event);
1478 }
1479 Err(error) => {
1480 if let Some(pending_command) =
1481 this.pending_command_for_position(command_range.start, cx)
1482 {
1483 pending_command.status =
1484 PendingSlashCommandStatus::Error(error.to_string());
1485 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1486 removed: vec![pending_command.source_range.clone()],
1487 updated: vec![pending_command.clone()],
1488 });
1489 }
1490 }
1491 })
1492 .ok();
1493 }
1494 });
1495
1496 if let Some(pending_command) = self.pending_command_for_position(command_range.start, cx) {
1497 pending_command.status = PendingSlashCommandStatus::Running {
1498 _task: insert_output_task.shared(),
1499 };
1500 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1501 removed: vec![pending_command.source_range.clone()],
1502 updated: vec![pending_command.clone()],
1503 });
1504 }
1505 }
1506
1507 pub fn completion_provider_changed(&mut self, cx: &mut ModelContext<Self>) {
1508 self.count_remaining_tokens(cx);
1509 }
1510
1511 fn get_last_valid_message_id(&self, cx: &ModelContext<Self>) -> Option<MessageId> {
1512 self.message_anchors.iter().rev().find_map(|message| {
1513 message
1514 .start
1515 .is_valid(self.buffer.read(cx))
1516 .then_some(message.id)
1517 })
1518 }
1519
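    /// Starts streaming a completion from the active model into a new pending
    /// assistant message, and queues an empty user message for the next reply.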
1520 pub fn assist(&mut self, cx: &mut ModelContext<Self>) -> Option<MessageAnchor> {
1521 let provider = LanguageModelRegistry::read_global(cx).active_provider()?;
1522 let model = LanguageModelRegistry::read_global(cx).active_model()?;
1523 let last_message_id = self.get_last_valid_message_id(cx)?;
1524
1525 if !provider.is_authenticated(cx) {
1526 log::info!("completion provider has no credentials");
1527 return None;
1528 }
1529 // Compute which messages to cache, including the last one.
1530 self.mark_longest_messages_for_cache(&model.cache_configuration(), false, cx);
1531
1532 let request = self.to_completion_request(cx);
1533 let assistant_message = self
1534 .insert_message_after(last_message_id, Role::Assistant, MessageStatus::Pending, cx)
1535 .unwrap();
1536
1537 // Queue up the user's next reply.
1538 let user_message = self
1539 .insert_message_after(assistant_message.id, Role::User, MessageStatus::Done, cx)
1540 .unwrap();
1541
1542 let pending_completion_id = post_inc(&mut self.completion_count);
1543
1544 let task = cx.spawn({
1545 |this, mut cx| async move {
1546 let stream = model.stream_completion(request, &cx);
1547 let assistant_message_id = assistant_message.id;
1548 let mut response_latency = None;
1549 let stream_completion = async {
1550 let request_start = Instant::now();
1551 let mut chunks = stream.await?;
1552
1553 while let Some(chunk) = chunks.next().await {
1554 if response_latency.is_none() {
1555 response_latency = Some(request_start.elapsed());
1556 }
1557 let chunk = chunk?;
1558
1559 this.update(&mut cx, |this, cx| {
1560 let message_ix = this
1561 .message_anchors
1562 .iter()
1563 .position(|message| message.id == assistant_message_id)?;
1564 let message_range = this.buffer.update(cx, |buffer, cx| {
1565 let message_start_offset =
1566 this.message_anchors[message_ix].start.to_offset(buffer);
1567 let message_old_end_offset = this.message_anchors[message_ix + 1..]
1568 .iter()
1569 .find(|message| message.start.is_valid(buffer))
1570 .map_or(buffer.len(), |message| {
1571 message.start.to_offset(buffer).saturating_sub(1)
1572 });
1573 let message_new_end_offset = message_old_end_offset + chunk.len();
1574 buffer.edit(
1575 [(message_old_end_offset..message_old_end_offset, chunk)],
1576 None,
1577 cx,
1578 );
1579 message_start_offset..message_new_end_offset
1580 });
1581
1582 // Use `inclusive = false` as edits might occur at the end of a parsed step.
1583 this.prune_invalid_workflow_steps(false, cx);
1584 this.parse_workflow_steps_in_range(message_range, cx);
1585 cx.emit(ContextEvent::StreamedCompletion);
1586
1587 Some(())
1588 })?;
1589 smol::future::yield_now().await;
1590 }
1591 this.update(&mut cx, |this, cx| {
1592 this.pending_completions
1593 .retain(|completion| completion.id != pending_completion_id);
1594 this.summarize(false, cx);
1595 })?;
1596
1597 anyhow::Ok(())
1598 };
1599
1600 let result = stream_completion.await;
1601
1602 this.update(&mut cx, |this, cx| {
1603 let error_message = result
1604 .err()
1605 .map(|error| error.to_string().trim().to_string());
1606
1607 if let Some(error_message) = error_message.as_ref() {
1608 cx.emit(ContextEvent::ShowAssistError(SharedString::from(
1609 error_message.clone(),
1610 )));
1611 }
1612
1613 this.update_metadata(assistant_message_id, cx, |metadata| {
1614 if let Some(error_message) = error_message.as_ref() {
1615 metadata.status =
1616 MessageStatus::Error(SharedString::from(error_message.clone()));
1617 } else {
1618 metadata.status = MessageStatus::Done;
1619 }
1620 });
1621
1622 if let Some(telemetry) = this.telemetry.as_ref() {
1623 telemetry.report_assistant_event(
1624 Some(this.id.0.clone()),
1625 AssistantKind::Panel,
1626 model.telemetry_id(),
1627 response_latency,
1628 error_message,
1629 );
1630 }
1631 })
1632 .ok();
1633 }
1634 });
1635
1636 self.pending_completions.push(PendingCompletion {
1637 id: pending_completion_id,
1638 assistant_message_id: assistant_message.id,
1639 _task: task,
1640 });
1641
1642 Some(user_message)
1643 }
1644
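    /// Builds a `LanguageModelRequest` from all messages whose status is `Done`,
    /// skipping messages that have no text or image content.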
1645 pub fn to_completion_request(&self, cx: &AppContext) -> LanguageModelRequest {
1646 let buffer = self.buffer.read(cx);
1647 let request_messages = self
1648 .messages(cx)
1649 .filter(|message| message.status == MessageStatus::Done)
1650 .filter_map(|message| message.to_request_message(&buffer))
1651 .collect();
1652
1653 LanguageModelRequest {
1654 messages: request_messages,
1655 stop: vec![],
1656 temperature: 1.0,
1657 }
1658 }
1659
1660 pub fn cancel_last_assist(&mut self, cx: &mut ModelContext<Self>) -> bool {
1661 if let Some(pending_completion) = self.pending_completions.pop() {
1662 self.update_metadata(pending_completion.assistant_message_id, cx, |metadata| {
1663 if metadata.status == MessageStatus::Pending {
1664 metadata.status = MessageStatus::Canceled;
1665 }
1666 });
1667 true
1668 } else {
1669 false
1670 }
1671 }
1672
1673 pub fn cycle_message_roles(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
1674 for id in ids {
1675 if let Some(metadata) = self.messages_metadata.get(&id) {
1676 let role = metadata.role.cycle();
1677 self.update_metadata(id, cx, |metadata| metadata.role = role);
1678 }
1679 }
1680 }
1681
1682 pub fn update_metadata(
1683 &mut self,
1684 id: MessageId,
1685 cx: &mut ModelContext<Self>,
1686 f: impl FnOnce(&mut MessageMetadata),
1687 ) {
1688 let version = self.version.clone();
1689 let timestamp = self.next_timestamp();
1690 if let Some(metadata) = self.messages_metadata.get_mut(&id) {
1691 f(metadata);
1692 metadata.timestamp = timestamp;
1693 let operation = ContextOperation::UpdateMessage {
1694 message_id: id,
1695 metadata: metadata.clone(),
1696 version,
1697 };
1698 self.push_op(operation, cx);
1699 cx.emit(ContextEvent::MessagesEdited);
1700 cx.notify();
1701 }
1702 }
1703
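    /// Inserts a new message anchored immediately after the given message,
    /// returning `None` if that message no longer exists.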
1704 pub fn insert_message_after(
1705 &mut self,
1706 message_id: MessageId,
1707 role: Role,
1708 status: MessageStatus,
1709 cx: &mut ModelContext<Self>,
1710 ) -> Option<MessageAnchor> {
1711 if let Some(prev_message_ix) = self
1712 .message_anchors
1713 .iter()
1714 .position(|message| message.id == message_id)
1715 {
1716 // Find the next valid message after the one we were given.
1717 let mut next_message_ix = prev_message_ix + 1;
1718 while let Some(next_message) = self.message_anchors.get(next_message_ix) {
1719 if next_message.start.is_valid(self.buffer.read(cx)) {
1720 break;
1721 }
1722 next_message_ix += 1;
1723 }
1724
1725 let start = self.buffer.update(cx, |buffer, cx| {
1726 let offset = self
1727 .message_anchors
1728 .get(next_message_ix)
1729 .map_or(buffer.len(), |message| {
1730 buffer.clip_offset(message.start.to_offset(buffer) - 1, Bias::Left)
1731 });
1732 buffer.edit([(offset..offset, "\n")], None, cx);
1733 buffer.anchor_before(offset + 1)
1734 });
1735
1736 let version = self.version.clone();
1737 let anchor = MessageAnchor {
1738 id: MessageId(self.next_timestamp()),
1739 start,
1740 };
1741 let metadata = MessageMetadata {
1742 role,
1743 status,
1744 timestamp: anchor.id.0,
1745 should_cache: false,
1746 is_cache_anchor: false,
1747 };
1748 self.insert_message(anchor.clone(), metadata.clone(), cx);
1749 self.push_op(
1750 ContextOperation::InsertMessage {
1751 anchor: anchor.clone(),
1752 metadata,
1753 version,
1754 },
1755 cx,
1756 );
1757 Some(anchor)
1758 } else {
1759 None
1760 }
1761 }
1762
1763 pub fn insert_image(&mut self, image: Image, cx: &mut ModelContext<Self>) -> Option<()> {
1764 if let hash_map::Entry::Vacant(entry) = self.images.entry(image.id()) {
1765 entry.insert((
1766 image.to_image_data(cx).log_err()?,
1767 LanguageModelImage::from_image(image, cx).shared(),
1768 ));
1769 }
1770
1771 Some(())
1772 }
1773
1774 pub fn insert_image_anchor(
1775 &mut self,
1776 image_id: u64,
1777 anchor: language::Anchor,
1778 cx: &mut ModelContext<Self>,
1779 ) -> bool {
1780 cx.emit(ContextEvent::MessagesEdited);
1781
1782 let buffer = self.buffer.read(cx);
1783 let insertion_ix = match self
1784 .image_anchors
1785 .binary_search_by(|existing_anchor| anchor.cmp(&existing_anchor.anchor, buffer))
1786 {
1787 Ok(ix) => ix,
1788 Err(ix) => ix,
1789 };
1790
1791 if let Some((render_image, image)) = self.images.get(&image_id) {
1792 self.image_anchors.insert(
1793 insertion_ix,
1794 ImageAnchor {
1795 anchor,
1796 image_id,
1797 image: image.clone(),
1798 render_image: render_image.clone(),
1799 },
1800 );
1801
1802 true
1803 } else {
1804 false
1805 }
1806 }
1807
1808 pub fn images<'a>(&'a self, _cx: &'a AppContext) -> impl 'a + Iterator<Item = ImageAnchor> {
1809 self.image_anchors.iter().cloned()
1810 }
1811
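    /// Splits the message containing `range` into up to two new messages at the
    /// range boundaries; returns `(None, None)` if the range spans multiple
    /// messages.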
1812 pub fn split_message(
1813 &mut self,
1814 range: Range<usize>,
1815 cx: &mut ModelContext<Self>,
1816 ) -> (Option<MessageAnchor>, Option<MessageAnchor>) {
1817 let start_message = self.message_for_offset(range.start, cx);
1818 let end_message = self.message_for_offset(range.end, cx);
1819 if let Some((start_message, end_message)) = start_message.zip(end_message) {
1820 // Prevent splitting when range spans multiple messages.
1821 if start_message.id != end_message.id {
1822 return (None, None);
1823 }
1824
1825 let message = start_message;
1826 let role = message.role;
1827 let mut edited_buffer = false;
1828
1829 let mut suffix_start = None;
1830
1831 // TODO: why did this start panicking?
1832 if range.start > message.offset_range.start
1833 && range.end < message.offset_range.end.saturating_sub(1)
1834 {
1835 if self.buffer.read(cx).chars_at(range.end).next() == Some('\n') {
1836 suffix_start = Some(range.end + 1);
1837 } else if self.buffer.read(cx).reversed_chars_at(range.end).next() == Some('\n') {
1838 suffix_start = Some(range.end);
1839 }
1840 }
1841
1842 let version = self.version.clone();
1843 let suffix = if let Some(suffix_start) = suffix_start {
1844 MessageAnchor {
1845 id: MessageId(self.next_timestamp()),
1846 start: self.buffer.read(cx).anchor_before(suffix_start),
1847 }
1848 } else {
1849 self.buffer.update(cx, |buffer, cx| {
1850 buffer.edit([(range.end..range.end, "\n")], None, cx);
1851 });
1852 edited_buffer = true;
1853 MessageAnchor {
1854 id: MessageId(self.next_timestamp()),
1855 start: self.buffer.read(cx).anchor_before(range.end + 1),
1856 }
1857 };
1858
1859 let suffix_metadata = MessageMetadata {
1860 role,
1861 status: MessageStatus::Done,
1862 timestamp: suffix.id.0,
1863 should_cache: false,
1864 is_cache_anchor: false,
1865 };
1866 self.insert_message(suffix.clone(), suffix_metadata.clone(), cx);
1867 self.push_op(
1868 ContextOperation::InsertMessage {
1869 anchor: suffix.clone(),
1870 metadata: suffix_metadata,
1871 version,
1872 },
1873 cx,
1874 );
1875
1876 let new_messages =
1877 if range.start == range.end || range.start == message.offset_range.start {
1878 (None, Some(suffix))
1879 } else {
1880 let mut prefix_end = None;
1881 if range.start > message.offset_range.start
1882 && range.end < message.offset_range.end - 1
1883 {
1884 if self.buffer.read(cx).chars_at(range.start).next() == Some('\n') {
1885 prefix_end = Some(range.start + 1);
1886 } else if self.buffer.read(cx).reversed_chars_at(range.start).next()
1887 == Some('\n')
1888 {
1889 prefix_end = Some(range.start);
1890 }
1891 }
1892
1893 let version = self.version.clone();
1894 let selection = if let Some(prefix_end) = prefix_end {
1895 MessageAnchor {
1896 id: MessageId(self.next_timestamp()),
1897 start: self.buffer.read(cx).anchor_before(prefix_end),
1898 }
1899 } else {
1900 self.buffer.update(cx, |buffer, cx| {
1901 buffer.edit([(range.start..range.start, "\n")], None, cx)
1902 });
1903 edited_buffer = true;
1904 MessageAnchor {
1905 id: MessageId(self.next_timestamp()),
1906 start: self.buffer.read(cx).anchor_before(range.end + 1),
1907 }
1908 };
1909
1910 let selection_metadata = MessageMetadata {
1911 role,
1912 status: MessageStatus::Done,
1913 timestamp: selection.id.0,
1914 should_cache: false,
1915 is_cache_anchor: false,
1916 };
1917 self.insert_message(selection.clone(), selection_metadata.clone(), cx);
1918 self.push_op(
1919 ContextOperation::InsertMessage {
1920 anchor: selection.clone(),
1921 metadata: selection_metadata,
1922 version,
1923 },
1924 cx,
1925 );
1926
1927 (Some(selection), Some(suffix))
1928 };
1929
1930 if !edited_buffer {
1931 cx.emit(ContextEvent::MessagesEdited);
1932 }
1933 new_messages
1934 } else {
1935 (None, None)
1936 }
1937 }
1938
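/// Records a new message anchor and its metadata, inserting the anchor so that
/// `message_anchors` stays ordered by buffer position (ties broken by message id),
/// and notifies listeners that messages were edited.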
1939 fn insert_message(
1940 &mut self,
1941 new_anchor: MessageAnchor,
1942 new_metadata: MessageMetadata,
1943 cx: &mut ModelContext<Self>,
1944 ) {
1945 cx.emit(ContextEvent::MessagesEdited);
1946
1947 self.messages_metadata.insert(new_anchor.id, new_metadata);
1948
1949 let buffer = self.buffer.read(cx);
1950 let insertion_ix = self
1951 .message_anchors
1952 .iter()
1953 .position(|anchor| {
1954 let comparison = new_anchor.start.cmp(&anchor.start, buffer);
1955 comparison.is_lt() || (comparison.is_eq() && new_anchor.id > anchor.id)
1956 })
1957 .unwrap_or(self.message_anchors.len());
1958 self.message_anchors.insert(insertion_ix, new_anchor);
1959 }
1960
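/// Asks the active language model to produce a short title for the context. A summary is
/// generated when `replace_old` is true, or when the context has at least two messages and
/// no summary yet; the provider must be authenticated. The streamed response is truncated
/// to its first line, and the summary is marked done when the stream ends.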
1961 pub(super) fn summarize(&mut self, replace_old: bool, cx: &mut ModelContext<Self>) {
1962 let Some(provider) = LanguageModelRegistry::read_global(cx).active_provider() else {
1963 return;
1964 };
1965 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
1966 return;
1967 };
1968
1969 if replace_old || (self.message_anchors.len() >= 2 && self.summary.is_none()) {
1970 if !provider.is_authenticated(cx) {
1971 return;
1972 }
1973
1974 let messages = self
1975 .messages(cx)
1976 .filter_map(|message| message.to_request_message(self.buffer.read(cx)))
1977 .chain(Some(LanguageModelRequestMessage {
1978 role: Role::User,
1979 content: vec![
1980 "Summarize the context into a short title without punctuation.".into(),
1981 ],
1982 cache: false,
1983 }));
1984 let request = LanguageModelRequest {
1985 messages: messages.collect(),
1986 stop: vec![],
1987 temperature: 1.0,
1988 };
1989
1990 self.pending_summary = cx.spawn(|this, mut cx| {
1991 async move {
1992 let stream = model.stream_completion(request, &cx);
1993 let mut messages = stream.await?;
1994
1995 let mut replaced = !replace_old;
1996 while let Some(message) = messages.next().await {
1997 let text = message?;
1998 let mut lines = text.lines();
1999 this.update(&mut cx, |this, cx| {
2000 let version = this.version.clone();
2001 let timestamp = this.next_timestamp();
2002 let summary = this.summary.get_or_insert(ContextSummary::default());
2003 if !replaced && replace_old {
2004 summary.text.clear();
2005 replaced = true;
2006 }
2007 summary.text.extend(lines.next());
2008 summary.timestamp = timestamp;
2009 let operation = ContextOperation::UpdateSummary {
2010 summary: summary.clone(),
2011 version,
2012 };
2013 this.push_op(operation, cx);
2014 cx.emit(ContextEvent::SummaryChanged);
2015 })?;
2016
2017 // Stop if the LLM generated multiple lines.
2018 if lines.next().is_some() {
2019 break;
2020 }
2021 }
2022
2023 this.update(&mut cx, |this, cx| {
2024 let version = this.version.clone();
2025 let timestamp = this.next_timestamp();
2026 if let Some(summary) = this.summary.as_mut() {
2027 summary.done = true;
2028 summary.timestamp = timestamp;
2029 let operation = ContextOperation::UpdateSummary {
2030 summary: summary.clone(),
2031 version,
2032 };
2033 this.push_op(operation, cx);
2034 cx.emit(ContextEvent::SummaryChanged);
2035 }
2036 })?;
2037
2038 anyhow::Ok(())
2039 }
2040 .log_err()
2041 });
2042 }
2043 }
2044
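/// Returns the message containing the given buffer offset, if any.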
2045 fn message_for_offset(&self, offset: usize, cx: &AppContext) -> Option<Message> {
2046 self.messages_for_offsets([offset], cx).pop()
2047 }
2048
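/// Returns the messages containing the given offsets, which are expected in ascending order.
/// Offsets that fall inside the same message produce a single entry (e.g. offsets `[0, 1, 42]`
/// where the first two land in the same message yield two messages).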
2049 pub fn messages_for_offsets(
2050 &self,
2051 offsets: impl IntoIterator<Item = usize>,
2052 cx: &AppContext,
2053 ) -> Vec<Message> {
2054 let mut result = Vec::new();
2055
2056 let mut messages = self.messages(cx).peekable();
2057 let mut offsets = offsets.into_iter().peekable();
2058 let mut current_message = messages.next();
2059 while let Some(offset) = offsets.next() {
2060 // Locate the message that contains the offset.
2061 while current_message.as_ref().map_or(false, |message| {
2062 !message.offset_range.contains(&offset) && messages.peek().is_some()
2063 }) {
2064 current_message = messages.next();
2065 }
2066 let Some(message) = current_message.as_ref() else {
2067 break;
2068 };
2069
2070 // Skip offsets that are in the same message.
2071 while offsets.peek().map_or(false, |offset| {
2072 message.offset_range.contains(offset) || messages.peek().is_none()
2073 }) {
2074 offsets.next();
2075 }
2076
2077 result.push(message.clone());
2078 }
2079 result
2080 }
2081
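/// Resolves the given message anchors into `Message` values using the current buffer
/// contents and the context's image anchors.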
2082 fn messages_from_anchors<'a>(
2083 &'a self,
2084 message_anchors: impl Iterator<Item = &'a MessageAnchor> + 'a,
2085 cx: &'a AppContext,
2086 ) -> impl 'a + Iterator<Item = Message> {
2087 let buffer = self.buffer.read(cx);
2088 let messages = message_anchors.enumerate();
2089 let images = self.image_anchors.iter();
2090
2091 Self::messages_from_iters(buffer, &self.messages_metadata, messages, images)
2092 }
2093
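/// Returns an iterator over all messages in the context, in buffer order.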
2094 pub fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Message> {
2095 self.messages_from_anchors(self.message_anchors.iter(), cx)
2096 }
2097
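/// Builds messages by walking the anchors in buffer order: each message runs from its anchor
/// to the next valid anchor (invalid anchors are folded into the preceding message's
/// `index_range`) or to the end of the buffer, and images anchored within that span are
/// attached as `image_offsets`.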
2098 pub fn messages_from_iters<'a>(
2099 buffer: &'a Buffer,
2100 metadata: &'a HashMap<MessageId, MessageMetadata>,
2101 messages: impl Iterator<Item = (usize, &'a MessageAnchor)> + 'a,
2102 images: impl Iterator<Item = &'a ImageAnchor> + 'a,
2103 ) -> impl 'a + Iterator<Item = Message> {
2104 let mut messages = messages.peekable();
2105 let mut images = images.peekable();
2106
2107 iter::from_fn(move || {
2108 if let Some((start_ix, message_anchor)) = messages.next() {
2109 let metadata = metadata.get(&message_anchor.id)?;
2110
2111 let message_start = message_anchor.start.to_offset(buffer);
2112 let mut message_end = None;
2113 let mut end_ix = start_ix;
2114 while let Some((_, next_message)) = messages.peek() {
2115 if next_message.start.is_valid(buffer) {
2116 message_end = Some(next_message.start);
2117 break;
2118 } else {
2119 end_ix += 1;
2120 messages.next();
2121 }
2122 }
2123 let message_end_anchor = message_end.unwrap_or(language::Anchor::MAX);
2124 let message_end = message_end_anchor.to_offset(buffer);
2125
2126 let mut image_offsets = SmallVec::new();
2127 while let Some(image_anchor) = images.peek() {
2128 if image_anchor.anchor.cmp(&message_end_anchor, buffer).is_lt() {
2129 image_offsets.push((
2130 image_anchor.anchor.to_offset(buffer),
2131 MessageImage {
2132 image_id: image_anchor.image_id,
2133 image: image_anchor.image.clone(),
2134 },
2135 ));
2136 images.next();
2137 } else {
2138 break;
2139 }
2140 }
2141
2142 return Some(Message {
2143 index_range: start_ix..end_ix,
2144 offset_range: message_start..message_end,
2145 id: message_anchor.id,
2146 anchor: message_anchor.start,
2147 role: metadata.role,
2148 status: metadata.status.clone(),
2149 cache: metadata.is_cache_anchor,
2150 image_offsets,
2151 });
2152 }
2153 None
2154 })
2155 }
2156
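/// Serializes the context to disk after an optional debounce. Only the host replica saves,
/// and only once the summary is complete: images are written first, then the context is
/// saved to `contexts_dir()` as `"<summary> - <n>.zed.json"` (e.g. `"My summary - 1.zed.json"`),
/// picking the first unused discriminant and removing any previously saved file with a
/// different name.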
2157 pub fn save(
2158 &mut self,
2159 debounce: Option<Duration>,
2160 fs: Arc<dyn Fs>,
2161 cx: &mut ModelContext<Context>,
2162 ) {
2163 if self.replica_id() != ReplicaId::default() {
2164 // Prevent saving a remote context for now.
2165 return;
2166 }
2167
2168 self.pending_save = cx.spawn(|this, mut cx| async move {
2169 if let Some(debounce) = debounce {
2170 cx.background_executor().timer(debounce).await;
2171 }
2172
2173 let (old_path, summary) = this.read_with(&cx, |this, _| {
2174 let path = this.path.clone();
2175 let summary = if let Some(summary) = this.summary.as_ref() {
2176 if summary.done {
2177 Some(summary.text.clone())
2178 } else {
2179 None
2180 }
2181 } else {
2182 None
2183 };
2184 (path, summary)
2185 })?;
2186
2187 if let Some(summary) = summary {
2188 this.read_with(&cx, |this, cx| this.serialize_images(fs.clone(), cx))?
2189 .await;
2190
2191 let context = this.read_with(&cx, |this, cx| this.serialize(cx))?;
2192 let mut discriminant = 1;
2193 let mut new_path;
2194 loop {
2195 new_path = contexts_dir().join(&format!(
2196 "{} - {}.zed.json",
2197 summary.trim(),
2198 discriminant
2199 ));
2200 if fs.is_file(&new_path).await {
2201 discriminant += 1;
2202 } else {
2203 break;
2204 }
2205 }
2206
2207 fs.create_dir(contexts_dir().as_ref()).await?;
2208 fs.atomic_write(new_path.clone(), serde_json::to_string(&context).unwrap())
2209 .await?;
2210 if let Some(old_path) = old_path {
2211 if new_path != old_path {
2212 fs.remove_file(
2213 &old_path,
2214 RemoveOptions {
2215 recursive: false,
2216 ignore_if_not_exists: true,
2217 },
2218 )
2219 .await?;
2220 }
2221 }
2222
2223 this.update(&mut cx, |this, _| this.path = Some(new_path))?;
2224 }
2225
2226 Ok(())
2227 });
2228 }
2229
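/// Writes each image attached to the context into `context_images_dir()` as a
/// `<id>.png.base64` file containing its base64 source, skipping images already on disk.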
2230 pub fn serialize_images(&self, fs: Arc<dyn Fs>, cx: &AppContext) -> Task<()> {
2231 let mut images_to_save = self
2232 .images
2233 .iter()
2234 .map(|(id, (_, llm_image))| {
2235 let fs = fs.clone();
2236 let llm_image = llm_image.clone();
2237 let id = *id;
2238 async move {
2239 if let Some(llm_image) = llm_image.await {
2240 let path: PathBuf =
2241 context_images_dir().join(&format!("{}.png.base64", id));
2242 if fs
2243 .metadata(path.as_path())
2244 .await
2245 .log_err()
2246 .flatten()
2247 .is_none()
2248 {
2249 fs.atomic_write(path, llm_image.source.to_string())
2250 .await
2251 .log_err();
2252 }
2253 }
2254 }
2255 })
2256 .collect::<FuturesUnordered<_>>();
2257 cx.background_executor().spawn(async move {
2258 if fs
2259 .create_dir(context_images_dir().as_ref())
2260 .await
2261 .log_err()
2262 .is_some()
2263 {
2264 while images_to_save.next().await.is_some() {}
2265 }
2266 })
2267 }
2268
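/// Replaces the context's summary with user-provided text and marks it as done.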
2269 pub(crate) fn custom_summary(&mut self, custom_summary: String, cx: &mut ModelContext<Self>) {
2270 let timestamp = self.next_timestamp();
2271 let summary = self.summary.get_or_insert(ContextSummary::default());
2272 summary.timestamp = timestamp;
2273 summary.done = true;
2274 summary.text = custom_summary;
2275 cx.emit(ContextEvent::SummaryChanged);
2276 }
2277}
2278
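/// A pair of version vectors for a context: one for its operation log and one for its
/// underlying text buffer.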
2279#[derive(Debug, Default)]
2280pub struct ContextVersion {
2281 context: clock::Global,
2282 buffer: clock::Global,
2283}
2284
2285impl ContextVersion {
2286 pub fn from_proto(proto: &proto::ContextVersion) -> Self {
2287 Self {
2288 context: language::proto::deserialize_version(&proto.context_version),
2289 buffer: language::proto::deserialize_version(&proto.buffer_version),
2290 }
2291 }
2292
2293 pub fn to_proto(&self, context_id: ContextId) -> proto::ContextVersion {
2294 proto::ContextVersion {
2295 context_id: context_id.to_proto(),
2296 context_version: language::proto::serialize_version(&self.context),
2297 buffer_version: language::proto::serialize_version(&self.buffer),
2298 }
2299 }
2300}
2301
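/// A slash command parsed from the context buffer, along with its arguments, current
/// execution status, and the range of text it occupies.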
2302#[derive(Debug, Clone)]
2303pub struct PendingSlashCommand {
2304 pub name: String,
2305 pub arguments: SmallVec<[String; 3]>,
2306 pub status: PendingSlashCommandStatus,
2307 pub source_range: Range<language::Anchor>,
2308}
2309
2310#[derive(Debug, Clone)]
2311pub enum PendingSlashCommandStatus {
2312 Idle,
2313 Running { _task: Shared<Task<()>> },
2314 Error(String),
2315}
2316
2317#[derive(Serialize, Deserialize)]
2318pub struct SavedMessage {
2319 pub id: MessageId,
2320 pub start: usize,
2321 pub metadata: MessageMetadata,
2322 // Defaulted for backwards compatibility: JSON files created before August 2024 did not include this field.
2323 #[serde(default)]
2324 pub image_offsets: Vec<(usize, u64)>,
2325}
2326
2327#[derive(Serialize, Deserialize)]
2328pub struct SavedContext {
2329 pub id: Option<ContextId>,
2330 pub zed: String,
2331 pub version: String,
2332 pub text: String,
2333 pub messages: Vec<SavedMessage>,
2334 pub summary: String,
2335 pub slash_command_output_sections:
2336 Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
2337}
2338
2339impl SavedContext {
2340 pub const VERSION: &'static str = "0.4.0";
2341
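/// Deserializes a context from JSON, transparently upgrading contexts saved in older
/// formats (0.1.0 through 0.3.0) to the current `VERSION`.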
2342 pub fn from_json(json: &str) -> Result<Self> {
2343 let saved_context_json = serde_json::from_str::<serde_json::Value>(json)?;
2344 match saved_context_json
2345 .get("version")
2346 .ok_or_else(|| anyhow!("version not found"))?
2347 {
2348 serde_json::Value::String(version) => match version.as_str() {
2349 SavedContext::VERSION => {
2350 Ok(serde_json::from_value::<SavedContext>(saved_context_json)?)
2351 }
2352 SavedContextV0_3_0::VERSION => {
2353 let saved_context =
2354 serde_json::from_value::<SavedContextV0_3_0>(saved_context_json)?;
2355 Ok(saved_context.upgrade())
2356 }
2357 SavedContextV0_2_0::VERSION => {
2358 let saved_context =
2359 serde_json::from_value::<SavedContextV0_2_0>(saved_context_json)?;
2360 Ok(saved_context.upgrade())
2361 }
2362 SavedContextV0_1_0::VERSION => {
2363 let saved_context =
2364 serde_json::from_value::<SavedContextV0_1_0>(saved_context_json)?;
2365 Ok(saved_context.upgrade())
2366 }
2367 _ => Err(anyhow!("unrecognized saved context version: {}", version)),
2368 },
2369 _ => Err(anyhow!("version field on saved context is not a string")),
2370 }
2371 }
2372
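/// Converts this saved context into the sequence of context operations needed to replay it
/// onto a fresh buffer: an `InsertMessage` for every message except the initial one (which
/// every context already contains and is updated in place instead), followed by the restored
/// slash command sections and the saved summary.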
2373 fn into_ops(
2374 self,
2375 buffer: &Model<Buffer>,
2376 cx: &mut ModelContext<Context>,
2377 ) -> Vec<ContextOperation> {
2378 let mut operations = Vec::new();
2379 let mut version = clock::Global::new();
2380 let mut next_timestamp = clock::Lamport::new(ReplicaId::default());
2381
2382 let mut first_message_metadata = None;
2383 for message in self.messages {
2384 if message.id == MessageId(clock::Lamport::default()) {
2385 first_message_metadata = Some(message.metadata);
2386 } else {
2387 operations.push(ContextOperation::InsertMessage {
2388 anchor: MessageAnchor {
2389 id: message.id,
2390 start: buffer.read(cx).anchor_before(message.start),
2391 },
2392 metadata: MessageMetadata {
2393 role: message.metadata.role,
2394 status: message.metadata.status,
2395 timestamp: message.metadata.timestamp,
2396 should_cache: false,
2397 is_cache_anchor: false,
2398 },
2399 version: version.clone(),
2400 });
2401 version.observe(message.id.0);
2402 next_timestamp.observe(message.id.0);
2403 }
2404 }
2405
2406 if let Some(metadata) = first_message_metadata {
2407 let timestamp = next_timestamp.tick();
2408 operations.push(ContextOperation::UpdateMessage {
2409 message_id: MessageId(clock::Lamport::default()),
2410 metadata: MessageMetadata {
2411 role: metadata.role,
2412 status: metadata.status,
2413 timestamp,
2414 should_cache: false,
2415 is_cache_anchor: false,
2416 },
2417 version: version.clone(),
2418 });
2419 version.observe(timestamp);
2420 }
2421
2422 let timestamp = next_timestamp.tick();
2423 operations.push(ContextOperation::SlashCommandFinished {
2424 id: SlashCommandId(timestamp),
2425 output_range: language::Anchor::MIN..language::Anchor::MAX,
2426 sections: self
2427 .slash_command_output_sections
2428 .into_iter()
2429 .map(|section| {
2430 let buffer = buffer.read(cx);
2431 SlashCommandOutputSection {
2432 range: buffer.anchor_after(section.range.start)
2433 ..buffer.anchor_before(section.range.end),
2434 icon: section.icon,
2435 label: section.label,
2436 }
2437 })
2438 .collect(),
2439 version: version.clone(),
2440 });
2441 version.observe(timestamp);
2442
2443 let timestamp = next_timestamp.tick();
2444 operations.push(ContextOperation::UpdateSummary {
2445 summary: ContextSummary {
2446 text: self.summary,
2447 done: true,
2448 timestamp,
2449 },
2450 version: version.clone(),
2451 });
2452 version.observe(timestamp);
2453
2454 operations
2455 }
2456}
2457
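// Older on-disk context formats. Each version's `upgrade` converts it to the next newer
// format, chaining up to the current `SavedContext`.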
2458#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
2459struct SavedMessageIdPreV0_4_0(usize);
2460
2461#[derive(Serialize, Deserialize)]
2462struct SavedMessagePreV0_4_0 {
2463 id: SavedMessageIdPreV0_4_0,
2464 start: usize,
2465}
2466
2467#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
2468struct SavedMessageMetadataPreV0_4_0 {
2469 role: Role,
2470 status: MessageStatus,
2471}
2472
2473#[derive(Serialize, Deserialize)]
2474struct SavedContextV0_3_0 {
2475 id: Option<ContextId>,
2476 zed: String,
2477 version: String,
2478 text: String,
2479 messages: Vec<SavedMessagePreV0_4_0>,
2480 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
2481 summary: String,
2482 slash_command_output_sections: Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
2483}
2484
2485impl SavedContextV0_3_0 {
2486 const VERSION: &'static str = "0.3.0";
2487
2488 fn upgrade(self) -> SavedContext {
2489 SavedContext {
2490 id: self.id,
2491 zed: self.zed,
2492 version: SavedContext::VERSION.into(),
2493 text: self.text,
2494 messages: self
2495 .messages
2496 .into_iter()
2497 .filter_map(|message| {
2498 let metadata = self.message_metadata.get(&message.id)?;
2499 let timestamp = clock::Lamport {
2500 replica_id: ReplicaId::default(),
2501 value: message.id.0 as u32,
2502 };
2503 Some(SavedMessage {
2504 id: MessageId(timestamp),
2505 start: message.start,
2506 metadata: MessageMetadata {
2507 role: metadata.role,
2508 status: metadata.status.clone(),
2509 timestamp,
2510 should_cache: false,
2511 is_cache_anchor: false,
2512 },
2513 image_offsets: Vec::new(),
2514 })
2515 })
2516 .collect(),
2517 summary: self.summary,
2518 slash_command_output_sections: self.slash_command_output_sections,
2519 }
2520 }
2521}
2522
2523#[derive(Serialize, Deserialize)]
2524struct SavedContextV0_2_0 {
2525 id: Option<ContextId>,
2526 zed: String,
2527 version: String,
2528 text: String,
2529 messages: Vec<SavedMessagePreV0_4_0>,
2530 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
2531 summary: String,
2532}
2533
2534impl SavedContextV0_2_0 {
2535 const VERSION: &'static str = "0.2.0";
2536
2537 fn upgrade(self) -> SavedContext {
2538 SavedContextV0_3_0 {
2539 id: self.id,
2540 zed: self.zed,
2541 version: SavedContextV0_3_0::VERSION.to_string(),
2542 text: self.text,
2543 messages: self.messages,
2544 message_metadata: self.message_metadata,
2545 summary: self.summary,
2546 slash_command_output_sections: Vec::new(),
2547 }
2548 .upgrade()
2549 }
2550}
2551
2552#[derive(Serialize, Deserialize)]
2553struct SavedContextV0_1_0 {
2554 id: Option<ContextId>,
2555 zed: String,
2556 version: String,
2557 text: String,
2558 messages: Vec<SavedMessagePreV0_4_0>,
2559 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
2560 summary: String,
2561 api_url: Option<String>,
2562 model: OpenAiModel,
2563}
2564
2565impl SavedContextV0_1_0 {
2566 const VERSION: &'static str = "0.1.0";
2567
2568 fn upgrade(self) -> SavedContext {
2569 SavedContextV0_2_0 {
2570 id: self.id,
2571 zed: self.zed,
2572 version: SavedContextV0_2_0::VERSION.to_string(),
2573 text: self.text,
2574 messages: self.messages,
2575 message_metadata: self.message_metadata,
2576 summary: self.summary,
2577 }
2578 .upgrade()
2579 }
2580}
2581
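/// The title, file path, and modification time of a context saved on disk.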
2582#[derive(Clone)]
2583pub struct SavedContextMetadata {
2584 pub title: String,
2585 pub path: PathBuf,
2586 pub mtime: chrono::DateTime<chrono::Local>,
2587}