1#[cfg(test)]
2mod context_tests;
3
4use crate::{
5 prompts::PromptBuilder, slash_command::SlashCommandLine, MessageId, MessageStatus,
6 WorkflowStep, WorkflowStepEdit, WorkflowStepResolution, WorkflowSuggestionGroup,
7};
8use anyhow::{anyhow, Context as _, Result};
9use assistant_slash_command::{
10 SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry,
11};
12use client::{self, proto, telemetry::Telemetry};
13use clock::ReplicaId;
14use collections::{HashMap, HashSet};
15use fs::{Fs, RemoveOptions};
16use futures::{
17 future::{self, Shared},
18 stream::FuturesUnordered,
19 FutureExt, StreamExt,
20};
21use gpui::{
22 AppContext, AsyncAppContext, Context as _, EventEmitter, Image, Model, ModelContext,
23 RenderImage, SharedString, Subscription, Task,
24};
25
26use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset};
27use language_model::{
28 LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionEvent,
29 LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
30 MessageContent, Role,
31};
32use open_ai::Model as OpenAiModel;
33use paths::{context_images_dir, contexts_dir};
34use project::Project;
35use serde::{Deserialize, Serialize};
36use smallvec::SmallVec;
37use std::{
38 cmp::{self, max, Ordering},
39 collections::hash_map,
40 fmt::Debug,
41 iter, mem,
42 ops::Range,
43 path::{Path, PathBuf},
44 str::FromStr as _,
45 sync::Arc,
46 time::{Duration, Instant},
47};
48use telemetry_events::AssistantKind;
49use text::BufferSnapshot;
50use util::{post_inc, ResultExt, TryFutureExt};
51use uuid::Uuid;
52
53#[derive(Clone, Eq, PartialEq, Hash, PartialOrd, Ord, Serialize, Deserialize)]
54pub struct ContextId(String);
55
56impl ContextId {
57 pub fn new() -> Self {
58 Self(Uuid::new_v4().to_string())
59 }
60
61 pub fn from_proto(id: String) -> Self {
62 Self(id)
63 }
64
65 pub fn to_proto(&self) -> String {
66 self.0.clone()
67 }
68}
69
70#[derive(Clone, Debug)]
71pub enum ContextOperation {
72 InsertMessage {
73 anchor: MessageAnchor,
74 metadata: MessageMetadata,
75 version: clock::Global,
76 },
77 UpdateMessage {
78 message_id: MessageId,
79 metadata: MessageMetadata,
80 version: clock::Global,
81 },
82 UpdateSummary {
83 summary: ContextSummary,
84 version: clock::Global,
85 },
86 SlashCommandFinished {
87 id: SlashCommandId,
88 output_range: Range<language::Anchor>,
89 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
90 version: clock::Global,
91 },
92 BufferOperation(language::Operation),
93}
94
95impl ContextOperation {
96 pub fn from_proto(op: proto::ContextOperation) -> Result<Self> {
97 match op.variant.context("invalid variant")? {
98 proto::context_operation::Variant::InsertMessage(insert) => {
99 let message = insert.message.context("invalid message")?;
100 let id = MessageId(language::proto::deserialize_timestamp(
101 message.id.context("invalid id")?,
102 ));
103 Ok(Self::InsertMessage {
104 anchor: MessageAnchor {
105 id,
106 start: language::proto::deserialize_anchor(
107 message.start.context("invalid anchor")?,
108 )
109 .context("invalid anchor")?,
110 },
111 metadata: MessageMetadata {
112 role: Role::from_proto(message.role),
113 status: MessageStatus::from_proto(
114 message.status.context("invalid status")?,
115 ),
116 timestamp: id.0,
117 cache: None,
118 },
119 version: language::proto::deserialize_version(&insert.version),
120 })
121 }
122 proto::context_operation::Variant::UpdateMessage(update) => Ok(Self::UpdateMessage {
123 message_id: MessageId(language::proto::deserialize_timestamp(
124 update.message_id.context("invalid message id")?,
125 )),
126 metadata: MessageMetadata {
127 role: Role::from_proto(update.role),
128 status: MessageStatus::from_proto(update.status.context("invalid status")?),
129 timestamp: language::proto::deserialize_timestamp(
130 update.timestamp.context("invalid timestamp")?,
131 ),
132 cache: None,
133 },
134 version: language::proto::deserialize_version(&update.version),
135 }),
136 proto::context_operation::Variant::UpdateSummary(update) => Ok(Self::UpdateSummary {
137 summary: ContextSummary {
138 text: update.summary,
139 done: update.done,
140 timestamp: language::proto::deserialize_timestamp(
141 update.timestamp.context("invalid timestamp")?,
142 ),
143 },
144 version: language::proto::deserialize_version(&update.version),
145 }),
146 proto::context_operation::Variant::SlashCommandFinished(finished) => {
147 Ok(Self::SlashCommandFinished {
148 id: SlashCommandId(language::proto::deserialize_timestamp(
149 finished.id.context("invalid id")?,
150 )),
151 output_range: language::proto::deserialize_anchor_range(
152 finished.output_range.context("invalid range")?,
153 )?,
154 sections: finished
155 .sections
156 .into_iter()
157 .map(|section| {
158 Ok(SlashCommandOutputSection {
159 range: language::proto::deserialize_anchor_range(
160 section.range.context("invalid range")?,
161 )?,
162 icon: section.icon_name.parse()?,
163 label: section.label.into(),
164 })
165 })
166 .collect::<Result<Vec<_>>>()?,
167 version: language::proto::deserialize_version(&finished.version),
168 })
169 }
170 proto::context_operation::Variant::BufferOperation(op) => Ok(Self::BufferOperation(
171 language::proto::deserialize_operation(
172 op.operation.context("invalid buffer operation")?,
173 )?,
174 )),
175 }
176 }
177
178 pub fn to_proto(&self) -> proto::ContextOperation {
179 match self {
180 Self::InsertMessage {
181 anchor,
182 metadata,
183 version,
184 } => proto::ContextOperation {
185 variant: Some(proto::context_operation::Variant::InsertMessage(
186 proto::context_operation::InsertMessage {
187 message: Some(proto::ContextMessage {
188 id: Some(language::proto::serialize_timestamp(anchor.id.0)),
189 start: Some(language::proto::serialize_anchor(&anchor.start)),
190 role: metadata.role.to_proto() as i32,
191 status: Some(metadata.status.to_proto()),
192 }),
193 version: language::proto::serialize_version(version),
194 },
195 )),
196 },
197 Self::UpdateMessage {
198 message_id,
199 metadata,
200 version,
201 } => proto::ContextOperation {
202 variant: Some(proto::context_operation::Variant::UpdateMessage(
203 proto::context_operation::UpdateMessage {
204 message_id: Some(language::proto::serialize_timestamp(message_id.0)),
205 role: metadata.role.to_proto() as i32,
206 status: Some(metadata.status.to_proto()),
207 timestamp: Some(language::proto::serialize_timestamp(metadata.timestamp)),
208 version: language::proto::serialize_version(version),
209 },
210 )),
211 },
212 Self::UpdateSummary { summary, version } => proto::ContextOperation {
213 variant: Some(proto::context_operation::Variant::UpdateSummary(
214 proto::context_operation::UpdateSummary {
215 summary: summary.text.clone(),
216 done: summary.done,
217 timestamp: Some(language::proto::serialize_timestamp(summary.timestamp)),
218 version: language::proto::serialize_version(version),
219 },
220 )),
221 },
222 Self::SlashCommandFinished {
223 id,
224 output_range,
225 sections,
226 version,
227 } => proto::ContextOperation {
228 variant: Some(proto::context_operation::Variant::SlashCommandFinished(
229 proto::context_operation::SlashCommandFinished {
230 id: Some(language::proto::serialize_timestamp(id.0)),
231 output_range: Some(language::proto::serialize_anchor_range(
232 output_range.clone(),
233 )),
234 sections: sections
235 .iter()
236 .map(|section| {
237 let icon_name: &'static str = section.icon.into();
238 proto::SlashCommandOutputSection {
239 range: Some(language::proto::serialize_anchor_range(
240 section.range.clone(),
241 )),
242 icon_name: icon_name.to_string(),
243 label: section.label.to_string(),
244 }
245 })
246 .collect(),
247 version: language::proto::serialize_version(version),
248 },
249 )),
250 },
251 Self::BufferOperation(operation) => proto::ContextOperation {
252 variant: Some(proto::context_operation::Variant::BufferOperation(
253 proto::context_operation::BufferOperation {
254 operation: Some(language::proto::serialize_operation(operation)),
255 },
256 )),
257 },
258 }
259 }
260
261 fn timestamp(&self) -> clock::Lamport {
262 match self {
263 Self::InsertMessage { anchor, .. } => anchor.id.0,
264 Self::UpdateMessage { metadata, .. } => metadata.timestamp,
265 Self::UpdateSummary { summary, .. } => summary.timestamp,
266 Self::SlashCommandFinished { id, .. } => id.0,
267 Self::BufferOperation(_) => {
268 panic!("reading the timestamp of a buffer operation is not supported")
269 }
270 }
271 }
272
273 /// Returns the current version of the context operation.
274 pub fn version(&self) -> &clock::Global {
275 match self {
276 Self::InsertMessage { version, .. }
277 | Self::UpdateMessage { version, .. }
278 | Self::UpdateSummary { version, .. }
279 | Self::SlashCommandFinished { version, .. } => version,
280 Self::BufferOperation(_) => {
281 panic!("reading the version of a buffer operation is not supported")
282 }
283 }
284 }
285}
286
287#[derive(Debug, Clone)]
288pub enum ContextEvent {
289 ShowAssistError(SharedString),
290 MessagesEdited,
291 SummaryChanged,
292 StreamedCompletion,
293 WorkflowStepsUpdated {
294 removed: Vec<Range<language::Anchor>>,
295 updated: Vec<Range<language::Anchor>>,
296 },
297 PendingSlashCommandsUpdated {
298 removed: Vec<Range<language::Anchor>>,
299 updated: Vec<PendingSlashCommand>,
300 },
301 SlashCommandFinished {
302 output_range: Range<language::Anchor>,
303 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
304 run_commands_in_output: bool,
305 expand_result: bool,
306 },
307 Operation(ContextOperation),
308}
309
310#[derive(Clone, Default, Debug)]
311pub struct ContextSummary {
312 pub text: String,
313 done: bool,
314 timestamp: clock::Lamport,
315}
316
317#[derive(Clone, Debug, Eq, PartialEq)]
318pub struct MessageAnchor {
319 pub id: MessageId,
320 pub start: language::Anchor,
321}
322
323#[derive(Clone, Debug, Eq, PartialEq)]
324pub enum CacheStatus {
325 Pending,
326 Cached,
327}
328
329#[derive(Clone, Debug, Eq, PartialEq)]
330pub struct MessageCacheMetadata {
331 pub is_anchor: bool,
332 pub is_final_anchor: bool,
333 pub status: CacheStatus,
334 pub cached_at: clock::Global,
335}
336
337#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
338pub struct MessageMetadata {
339 pub role: Role,
340 pub status: MessageStatus,
341 pub(crate) timestamp: clock::Lamport,
342 #[serde(skip)]
343 pub cache: Option<MessageCacheMetadata>,
344}
345
346impl From<&Message> for MessageMetadata {
347 fn from(message: &Message) -> Self {
348 Self {
349 role: message.role,
350 status: message.status.clone(),
351 timestamp: message.id.0,
352 cache: message.cache.clone(),
353 }
354 }
355}
356
357impl MessageMetadata {
358 pub fn is_cache_valid(&self, buffer: &BufferSnapshot, range: &Range<usize>) -> bool {
359 match &self.cache {
360 Some(MessageCacheMetadata { cached_at, .. }) => !buffer.has_edits_since_in_range(
361 cached_at,
362 Range {
363 start: buffer.anchor_at(range.start, Bias::Right),
364 end: buffer.anchor_at(range.end, Bias::Left),
365 },
366 ),
367 _ => false,
368 }
370 }
371}
372
373#[derive(Clone, Debug)]
374pub struct MessageImage {
375 image_id: u64,
376 image: Shared<Task<Option<LanguageModelImage>>>,
377}
378
379impl PartialEq for MessageImage {
380 fn eq(&self, other: &Self) -> bool {
381 self.image_id == other.image_id
382 }
383}
384
385impl Eq for MessageImage {}
386
387#[derive(Clone, Debug)]
388pub struct Message {
389 pub image_offsets: SmallVec<[(usize, MessageImage); 1]>,
390 pub offset_range: Range<usize>,
391 pub index_range: Range<usize>,
392 pub anchor_range: Range<language::Anchor>,
393 pub id: MessageId,
394 pub role: Role,
395 pub status: MessageStatus,
396 pub cache: Option<MessageCacheMetadata>,
397}
398
399impl Message {
400 fn to_request_message(&self, buffer: &Buffer) -> Option<LanguageModelRequestMessage> {
401 let mut content = Vec::new();
402
403 let mut range_start = self.offset_range.start;
404 for (image_offset, message_image) in self.image_offsets.iter() {
405 if *image_offset != range_start {
406 if let Some(text) = Self::collect_text_content(buffer, range_start..*image_offset) {
407 content.push(text);
408 }
409 }
410
411 if let Some(image) = message_image.image.clone().now_or_never().flatten() {
412 content.push(language_model::MessageContent::Image(image));
413 }
414
415 range_start = *image_offset;
416 }
417 if range_start != self.offset_range.end {
418 if let Some(text) =
419 Self::collect_text_content(buffer, range_start..self.offset_range.end)
420 {
421 content.push(text);
422 }
423 }
424
425 if content.is_empty() {
426 return None;
427 }
428
429 Some(LanguageModelRequestMessage {
430 role: self.role,
431 content,
432 cache: self.cache.as_ref().map_or(false, |cache| cache.is_anchor),
433 })
434 }
435
436 fn collect_text_content(buffer: &Buffer, range: Range<usize>) -> Option<MessageContent> {
437 let text: String = buffer.text_for_range(range.clone()).collect();
438 if text.trim().is_empty() {
439 None
440 } else {
441 Some(MessageContent::Text(text))
442 }
443 }
444}
445
446#[derive(Clone, Debug)]
447pub struct ImageAnchor {
448 pub anchor: language::Anchor,
449 pub image_id: u64,
450 pub render_image: Arc<RenderImage>,
451 pub image: Shared<Task<Option<LanguageModelImage>>>,
452}
453
454struct PendingCompletion {
455 id: usize,
456 assistant_message_id: MessageId,
457 _task: Task<()>,
458}
459
460#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
461pub struct SlashCommandId(clock::Lamport);
462
463#[derive(Clone, Debug)]
464pub struct XmlTag {
465 pub kind: XmlTagKind,
466 pub range: Range<text::Anchor>,
467 pub is_open_tag: bool,
468}
469
470#[derive(Copy, Clone, Debug, strum::EnumString, PartialEq, Eq, strum::AsRefStr)]
471#[strum(serialize_all = "snake_case")]
472pub enum XmlTagKind {
473 Step,
474 Edit,
475 Path,
476 Search,
477 Within,
478 Operation,
479 Description,
480}
481
482pub struct Context {
483 id: ContextId,
484 timestamp: clock::Lamport,
485 version: clock::Global,
486 pending_ops: Vec<ContextOperation>,
487 operations: Vec<ContextOperation>,
488 buffer: Model<Buffer>,
489 pending_slash_commands: Vec<PendingSlashCommand>,
490 edits_since_last_parse: language::Subscription,
491 finished_slash_commands: HashSet<SlashCommandId>,
492 slash_command_output_sections: Vec<SlashCommandOutputSection<language::Anchor>>,
493 pending_tool_uses_by_id: HashMap<String, PendingToolUse>,
494 message_anchors: Vec<MessageAnchor>,
495 images: HashMap<u64, (Arc<RenderImage>, Shared<Task<Option<LanguageModelImage>>>)>,
496 image_anchors: Vec<ImageAnchor>,
497 messages_metadata: HashMap<MessageId, MessageMetadata>,
498 summary: Option<ContextSummary>,
499 pending_summary: Task<Option<()>>,
500 completion_count: usize,
501 pending_completions: Vec<PendingCompletion>,
502 token_count: Option<usize>,
503 pending_token_count: Task<Option<()>>,
504 pending_save: Task<Result<()>>,
505 pending_cache_warming_task: Task<Option<()>>,
506 path: Option<PathBuf>,
507 _subscriptions: Vec<Subscription>,
508 telemetry: Option<Arc<Telemetry>>,
509 language_registry: Arc<LanguageRegistry>,
510 workflow_steps: Vec<WorkflowStep>,
511 xml_tags: Vec<XmlTag>,
512 project: Option<Model<Project>>,
513 prompt_builder: Arc<PromptBuilder>,
514}
515
516trait ContextAnnotation {
517 fn range(&self) -> &Range<language::Anchor>;
518}
519
520impl ContextAnnotation for PendingSlashCommand {
521 fn range(&self) -> &Range<language::Anchor> {
522 &self.source_range
523 }
524}
525
526impl ContextAnnotation for WorkflowStep {
527 fn range(&self) -> &Range<language::Anchor> {
528 &self.range
529 }
530}
531
532impl ContextAnnotation for XmlTag {
533 fn range(&self) -> &Range<language::Anchor> {
534 &self.range
535 }
536}
537
538impl EventEmitter<ContextEvent> for Context {}
539
540impl Context {
541 pub fn local(
542 language_registry: Arc<LanguageRegistry>,
543 project: Option<Model<Project>>,
544 telemetry: Option<Arc<Telemetry>>,
545 prompt_builder: Arc<PromptBuilder>,
546 cx: &mut ModelContext<Self>,
547 ) -> Self {
548 Self::new(
549 ContextId::new(),
550 ReplicaId::default(),
551 language::Capability::ReadWrite,
552 language_registry,
553 prompt_builder,
554 project,
555 telemetry,
556 cx,
557 )
558 }
559
560 #[allow(clippy::too_many_arguments)]
561 pub fn new(
562 id: ContextId,
563 replica_id: ReplicaId,
564 capability: language::Capability,
565 language_registry: Arc<LanguageRegistry>,
566 prompt_builder: Arc<PromptBuilder>,
567 project: Option<Model<Project>>,
568 telemetry: Option<Arc<Telemetry>>,
569 cx: &mut ModelContext<Self>,
570 ) -> Self {
571 let buffer = cx.new_model(|_cx| {
572 let mut buffer = Buffer::remote(
573 language::BufferId::new(1).unwrap(),
574 replica_id,
575 capability,
576 "",
577 );
578 buffer.set_language_registry(language_registry.clone());
579 buffer
580 });
581 let edits_since_last_slash_command_parse =
582 buffer.update(cx, |buffer, _| buffer.subscribe());
583 let mut this = Self {
584 id,
585 timestamp: clock::Lamport::new(replica_id),
586 version: clock::Global::new(),
587 pending_ops: Vec::new(),
588 operations: Vec::new(),
589 message_anchors: Default::default(),
590 image_anchors: Default::default(),
591 images: Default::default(),
592 messages_metadata: Default::default(),
593 pending_slash_commands: Vec::new(),
594 finished_slash_commands: HashSet::default(),
595 pending_tool_uses_by_id: HashMap::default(),
596 slash_command_output_sections: Vec::new(),
597 edits_since_last_parse: edits_since_last_slash_command_parse,
598 summary: None,
599 pending_summary: Task::ready(None),
600 completion_count: Default::default(),
601 pending_completions: Default::default(),
602 token_count: None,
603 pending_token_count: Task::ready(None),
604 pending_cache_warming_task: Task::ready(None),
605 _subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)],
606 pending_save: Task::ready(Ok(())),
607 path: None,
608 buffer,
609 telemetry,
610 project,
611 language_registry,
612 workflow_steps: Vec::new(),
613 xml_tags: Vec::new(),
614 prompt_builder,
615 };
616
617 let first_message_id = MessageId(clock::Lamport {
618 replica_id: 0,
619 value: 0,
620 });
621 let message = MessageAnchor {
622 id: first_message_id,
623 start: language::Anchor::MIN,
624 };
625 this.messages_metadata.insert(
626 first_message_id,
627 MessageMetadata {
628 role: Role::User,
629 status: MessageStatus::Done,
630 timestamp: first_message_id.0,
631 cache: None,
632 },
633 );
634 this.message_anchors.push(message);
635
636 this.set_language(cx);
637 this.count_remaining_tokens(cx);
638 this
639 }
640
641 pub(crate) fn serialize(&self, cx: &AppContext) -> SavedContext {
642 let buffer = self.buffer.read(cx);
643 SavedContext {
644 id: Some(self.id.clone()),
645 zed: "context".into(),
646 version: SavedContext::VERSION.into(),
647 text: buffer.text(),
648 messages: self
649 .messages(cx)
650 .map(|message| SavedMessage {
651 id: message.id,
652 start: message.offset_range.start,
653 metadata: self.messages_metadata[&message.id].clone(),
654 image_offsets: message
655 .image_offsets
656 .iter()
657 .map(|image_offset| (image_offset.0, image_offset.1.image_id))
658 .collect(),
659 })
660 .collect(),
661 summary: self
662 .summary
663 .as_ref()
664 .map(|summary| summary.text.clone())
665 .unwrap_or_default(),
666 slash_command_output_sections: self
667 .slash_command_output_sections
668 .iter()
669 .filter_map(|section| {
670 let range = section.range.to_offset(buffer);
671 if section.range.start.is_valid(buffer) && !range.is_empty() {
672 Some(assistant_slash_command::SlashCommandOutputSection {
673 range,
674 icon: section.icon,
675 label: section.label.clone(),
676 })
677 } else {
678 None
679 }
680 })
681 .collect(),
682 }
683 }
684
685 #[allow(clippy::too_many_arguments)]
686 pub fn deserialize(
687 saved_context: SavedContext,
688 path: PathBuf,
689 language_registry: Arc<LanguageRegistry>,
690 prompt_builder: Arc<PromptBuilder>,
691 project: Option<Model<Project>>,
692 telemetry: Option<Arc<Telemetry>>,
693 cx: &mut ModelContext<Self>,
694 ) -> Self {
695 let id = saved_context.id.clone().unwrap_or_else(ContextId::new);
696 let mut this = Self::new(
697 id,
698 ReplicaId::default(),
699 language::Capability::ReadWrite,
700 language_registry,
701 prompt_builder,
702 project,
703 telemetry,
704 cx,
705 );
706 this.path = Some(path);
707 this.buffer.update(cx, |buffer, cx| {
708 buffer.set_text(saved_context.text.as_str(), cx)
709 });
710 let operations = saved_context.into_ops(&this.buffer, cx);
711 this.apply_ops(operations, cx).unwrap();
712 this
713 }
714
715 pub fn id(&self) -> &ContextId {
716 &self.id
717 }
718
719 pub fn replica_id(&self) -> ReplicaId {
720 self.timestamp.replica_id
721 }
722
723 pub fn version(&self, cx: &AppContext) -> ContextVersion {
724 ContextVersion {
725 context: self.version.clone(),
726 buffer: self.buffer.read(cx).version(),
727 }
728 }
729
730 pub fn set_capability(
731 &mut self,
732 capability: language::Capability,
733 cx: &mut ModelContext<Self>,
734 ) {
735 self.buffer
736 .update(cx, |buffer, cx| buffer.set_capability(capability, cx));
737 }
738
739 fn next_timestamp(&mut self) -> clock::Lamport {
740 let timestamp = self.timestamp.tick();
741 self.version.observe(timestamp);
742 timestamp
743 }
744
745 pub fn serialize_ops(
746 &self,
747 since: &ContextVersion,
748 cx: &AppContext,
749 ) -> Task<Vec<proto::ContextOperation>> {
750 let buffer_ops = self
751 .buffer
752 .read(cx)
753 .serialize_ops(Some(since.buffer.clone()), cx);
754
755 let mut context_ops = self
756 .operations
757 .iter()
758 .filter(|op| !since.context.observed(op.timestamp()))
759 .cloned()
760 .collect::<Vec<_>>();
761 context_ops.extend(self.pending_ops.iter().cloned());
762
763 cx.background_executor().spawn(async move {
764 let buffer_ops = buffer_ops.await;
765 context_ops.sort_unstable_by_key(|op| op.timestamp());
766 buffer_ops
767 .into_iter()
768 .map(|op| proto::ContextOperation {
769 variant: Some(proto::context_operation::Variant::BufferOperation(
770 proto::context_operation::BufferOperation {
771 operation: Some(op),
772 },
773 )),
774 })
775 .chain(context_ops.into_iter().map(|op| op.to_proto()))
776 .collect()
777 })
778 }
779
780 pub fn apply_ops(
781 &mut self,
782 ops: impl IntoIterator<Item = ContextOperation>,
783 cx: &mut ModelContext<Self>,
784 ) -> Result<()> {
785 let mut buffer_ops = Vec::new();
786 for op in ops {
787 match op {
788 ContextOperation::BufferOperation(buffer_op) => buffer_ops.push(buffer_op),
789 op => self.pending_ops.push(op),
790 }
791 }
792 self.buffer
793 .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx))?;
794 self.flush_ops(cx);
795
796 Ok(())
797 }
798
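    /// Applies any queued operations whose dependencies (context version and
    /// referenced messages or anchors) have been observed, re-queueing the rest
    /// and emitting events for any messages or summary that changed.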
799 fn flush_ops(&mut self, cx: &mut ModelContext<Context>) {
800 let mut changed_messages = HashSet::default();
801 let mut summary_changed = false;
802
803 self.pending_ops.sort_unstable_by_key(|op| op.timestamp());
804 for op in mem::take(&mut self.pending_ops) {
805 if !self.can_apply_op(&op, cx) {
806 self.pending_ops.push(op);
807 continue;
808 }
809
810 let timestamp = op.timestamp();
811 match op.clone() {
812 ContextOperation::InsertMessage {
813 anchor, metadata, ..
814 } => {
815 if self.messages_metadata.contains_key(&anchor.id) {
816 // We already applied this operation.
817 } else {
818 changed_messages.insert(anchor.id);
819 self.insert_message(anchor, metadata, cx);
820 }
821 }
822 ContextOperation::UpdateMessage {
823 message_id,
824 metadata: new_metadata,
825 ..
826 } => {
827 let metadata = self.messages_metadata.get_mut(&message_id).unwrap();
828 if new_metadata.timestamp > metadata.timestamp {
829 *metadata = new_metadata;
830 changed_messages.insert(message_id);
831 }
832 }
833 ContextOperation::UpdateSummary {
834 summary: new_summary,
835 ..
836 } => {
837 if self
838 .summary
839 .as_ref()
840 .map_or(true, |summary| new_summary.timestamp > summary.timestamp)
841 {
842 self.summary = Some(new_summary);
843 summary_changed = true;
844 }
845 }
846 ContextOperation::SlashCommandFinished {
847 id,
848 output_range,
849 sections,
850 ..
851 } => {
852 if self.finished_slash_commands.insert(id) {
853 let buffer = self.buffer.read(cx);
854 self.slash_command_output_sections
855 .extend(sections.iter().cloned());
856 self.slash_command_output_sections
857 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
858 cx.emit(ContextEvent::SlashCommandFinished {
859 output_range,
860 sections,
861 expand_result: false,
862 run_commands_in_output: false,
863 });
864 }
865 }
866 ContextOperation::BufferOperation(_) => unreachable!(),
867 }
868
869 self.version.observe(timestamp);
870 self.timestamp.observe(timestamp);
871 self.operations.push(op);
872 }
873
874 if !changed_messages.is_empty() {
875 self.message_roles_updated(changed_messages, cx);
876 cx.emit(ContextEvent::MessagesEdited);
877 cx.notify();
878 }
879
880 if summary_changed {
881 cx.emit(ContextEvent::SummaryChanged);
882 cx.notify();
883 }
884 }
885
886 fn can_apply_op(&self, op: &ContextOperation, cx: &AppContext) -> bool {
887 if !self.version.observed_all(op.version()) {
888 return false;
889 }
890
891 match op {
892 ContextOperation::InsertMessage { anchor, .. } => self
893 .buffer
894 .read(cx)
895 .version
896 .observed(anchor.start.timestamp),
897 ContextOperation::UpdateMessage { message_id, .. } => {
898 self.messages_metadata.contains_key(message_id)
899 }
900 ContextOperation::UpdateSummary { .. } => true,
901 ContextOperation::SlashCommandFinished {
902 output_range,
903 sections,
904 ..
905 } => {
906 let version = &self.buffer.read(cx).version;
907 sections
908 .iter()
909 .map(|section| &section.range)
910 .chain([output_range])
911 .all(|range| {
912 let observed_start = range.start == language::Anchor::MIN
913 || range.start == language::Anchor::MAX
914 || version.observed(range.start.timestamp);
915 let observed_end = range.end == language::Anchor::MIN
916 || range.end == language::Anchor::MAX
917 || version.observed(range.end.timestamp);
918 observed_start && observed_end
919 })
920 }
921 ContextOperation::BufferOperation(_) => {
922 panic!("buffer operations should always be applied")
923 }
924 }
925 }
926
927 fn push_op(&mut self, op: ContextOperation, cx: &mut ModelContext<Self>) {
928 self.operations.push(op.clone());
929 cx.emit(ContextEvent::Operation(op));
930 }
931
932 pub fn buffer(&self) -> &Model<Buffer> {
933 &self.buffer
934 }
935
936 pub fn language_registry(&self) -> Arc<LanguageRegistry> {
937 self.language_registry.clone()
938 }
939
940 pub fn project(&self) -> Option<Model<Project>> {
941 self.project.clone()
942 }
943
944 pub fn prompt_builder(&self) -> Arc<PromptBuilder> {
945 self.prompt_builder.clone()
946 }
947
948 pub fn path(&self) -> Option<&Path> {
949 self.path.as_deref()
950 }
951
952 pub fn summary(&self) -> Option<&ContextSummary> {
953 self.summary.as_ref()
954 }
955
956 pub(crate) fn workflow_step_containing(
957 &self,
958 offset: usize,
959 cx: &AppContext,
960 ) -> Option<&WorkflowStep> {
961 let buffer = self.buffer.read(cx);
962 let index = self
963 .workflow_steps
964 .binary_search_by(|step| {
965 let step_range = step.range.to_offset(&buffer);
966 if offset < step_range.start {
967 Ordering::Greater
968 } else if offset > step_range.end {
969 Ordering::Less
970 } else {
971 Ordering::Equal
972 }
973 })
974 .ok()?;
975 Some(&self.workflow_steps[index])
976 }
977
978 pub fn workflow_step_ranges(&self) -> impl Iterator<Item = Range<language::Anchor>> + '_ {
979 self.workflow_steps.iter().map(|step| step.range.clone())
980 }
981
982 pub(crate) fn workflow_step_for_range(
983 &self,
984 range: &Range<language::Anchor>,
985 cx: &AppContext,
986 ) -> Option<&WorkflowStep> {
987 let buffer = self.buffer.read(cx);
988 let index = self.workflow_step_index_for_range(range, buffer).ok()?;
989 Some(&self.workflow_steps[index])
990 }
991
992 fn workflow_step_index_for_range(
993 &self,
994 tagged_range: &Range<text::Anchor>,
995 buffer: &text::BufferSnapshot,
996 ) -> Result<usize, usize> {
997 self.workflow_steps
998 .binary_search_by(|probe| probe.range.cmp(&tagged_range, buffer))
999 }
1000
1001 pub fn pending_slash_commands(&self) -> &[PendingSlashCommand] {
1002 &self.pending_slash_commands
1003 }
1004
1005 pub fn slash_command_output_sections(&self) -> &[SlashCommandOutputSection<language::Anchor>] {
1006 &self.slash_command_output_sections
1007 }
1008
1009 pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> {
1010 self.pending_tool_uses_by_id.values().collect()
1011 }
1012
1013 pub fn get_tool_use_by_id(&self, id: &String) -> Option<&PendingToolUse> {
1014 self.pending_tool_uses_by_id.get(id)
1015 }
1016
1017 fn set_language(&mut self, cx: &mut ModelContext<Self>) {
1018 let markdown = self.language_registry.language_for_name("Markdown");
1019 cx.spawn(|this, mut cx| async move {
1020 let markdown = markdown.await?;
1021 this.update(&mut cx, |this, cx| {
1022 this.buffer
1023 .update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx));
1024 })
1025 })
1026 .detach_and_log_err(cx);
1027 }
1028
1029 fn handle_buffer_event(
1030 &mut self,
1031 _: Model<Buffer>,
1032 event: &language::Event,
1033 cx: &mut ModelContext<Self>,
1034 ) {
1035 match event {
1036 language::Event::Operation(operation) => cx.emit(ContextEvent::Operation(
1037 ContextOperation::BufferOperation(operation.clone()),
1038 )),
1039 language::Event::Edited => {
1040 self.count_remaining_tokens(cx);
1041 self.reparse(cx);
1042 // Use `inclusive = true` to invalidate a step when an edit occurs
1043 // at the start/end of a parsed step.
1044 cx.emit(ContextEvent::MessagesEdited);
1045 }
1046 _ => {}
1047 }
1048 }
1049
1050 pub(crate) fn token_count(&self) -> Option<usize> {
1051 self.token_count
1052 }
1053
1054 pub(crate) fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
1055 let request = self.to_completion_request(cx);
1056 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
1057 return;
1058 };
1059 self.pending_token_count = cx.spawn(|this, mut cx| {
1060 async move {
1061 cx.background_executor()
1062 .timer(Duration::from_millis(200))
1063 .await;
1064
1065 let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
1066 this.update(&mut cx, |this, cx| {
1067 this.token_count = Some(token_count);
1068 this.start_cache_warming(&model, cx);
1069 cx.notify()
1070 })
1071 }
1072 .log_err()
1073 });
1074 }
1075
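    /// Selects which user messages should act as prompt-cache anchors (largest
    /// user messages first, reserving one of the model's anchors for the inline
    /// assistant) and updates each message's cache metadata. Returns true if an
    /// anchored message's cache was invalidated and needs to be warmed again.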
1076 pub fn mark_cache_anchors(
1077 &mut self,
1078 cache_configuration: &Option<LanguageModelCacheConfiguration>,
1079 speculative: bool,
1080 cx: &mut ModelContext<Self>,
1081 ) -> bool {
1082 let cache_configuration =
1083 cache_configuration
1084 .as_ref()
1085 .unwrap_or(&LanguageModelCacheConfiguration {
1086 max_cache_anchors: 0,
1087 should_speculate: false,
1088 min_total_token: 0,
1089 });
1090
1091 let messages: Vec<Message> = self.messages(cx).collect();
1092
1093 let mut sorted_messages = messages.clone();
1094 if speculative {
1095 // Avoid caching the last message if this is a speculative cache fetch as
1096 // it's likely to change.
1097 sorted_messages.pop();
1098 }
1099 sorted_messages.retain(|m| m.role == Role::User);
1100 sorted_messages.sort_by(|a, b| b.offset_range.len().cmp(&a.offset_range.len()));
1101
1102 let cache_anchors = if self.token_count.unwrap_or(0) < cache_configuration.min_total_token {
1103 // If we haven't hit the minimum threshold to enable caching, don't cache anything.
1104 0
1105 } else {
1106 // Save 1 anchor for the inline assistant to use.
1107 max(cache_configuration.max_cache_anchors, 1) - 1
1108 };
1109 sorted_messages.truncate(cache_anchors);
1110
1111 let anchors: HashSet<MessageId> = sorted_messages
1112 .into_iter()
1113 .map(|message| message.id)
1114 .collect();
1115
1116 let buffer = self.buffer.read(cx).snapshot();
1117 let invalidated_caches: HashSet<MessageId> = messages
1118 .iter()
1119 .scan(false, |encountered_invalid, message| {
1120 let message_id = message.id;
1121 let is_invalid = self
1122 .messages_metadata
1123 .get(&message_id)
1124 .map_or(true, |metadata| {
1125 !metadata.is_cache_valid(&buffer, &message.offset_range)
1126 || *encountered_invalid
1127 });
1128 *encountered_invalid |= is_invalid;
1129 Some(if is_invalid { Some(message_id) } else { None })
1130 })
1131 .flatten()
1132 .collect();
1133
1134 let last_anchor = messages.iter().rev().find_map(|message| {
1135 if anchors.contains(&message.id) {
1136 Some(message.id)
1137 } else {
1138 None
1139 }
1140 });
1141
1142 let mut new_anchor_needs_caching = false;
1143 let current_version = &buffer.version;
1144 // If we have no anchors, mark all messages as not being cached.
1145 let mut hit_last_anchor = last_anchor.is_none();
1146
1147 for message in messages.iter() {
1148 if hit_last_anchor {
1149 self.update_metadata(message.id, cx, |metadata| metadata.cache = None);
1150 continue;
1151 }
1152
1153 if let Some(last_anchor) = last_anchor {
1154 if message.id == last_anchor {
1155 hit_last_anchor = true;
1156 }
1157 }
1158
1159 new_anchor_needs_caching = new_anchor_needs_caching
1160 || (invalidated_caches.contains(&message.id) && anchors.contains(&message.id));
1161
1162 self.update_metadata(message.id, cx, |metadata| {
1163 let cache_status = if invalidated_caches.contains(&message.id) {
1164 CacheStatus::Pending
1165 } else {
1166 metadata
1167 .cache
1168 .as_ref()
1169 .map_or(CacheStatus::Pending, |cm| cm.status.clone())
1170 };
1171 metadata.cache = Some(MessageCacheMetadata {
1172 is_anchor: anchors.contains(&message.id),
1173 is_final_anchor: hit_last_anchor,
1174 status: cache_status,
1175 cached_at: current_version.clone(),
1176 });
1177 });
1178 }
1179 new_anchor_needs_caching
1180 }
1181
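    /// Speculatively issues a minimal completion request so the provider caches
    /// the current prompt prefix before the user sends their next message. Does
    /// nothing if no new anchor needs caching, a completion is already pending,
    /// or the model's cache configuration disallows speculation.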
1182 fn start_cache_warming(&mut self, model: &Arc<dyn LanguageModel>, cx: &mut ModelContext<Self>) {
1183 let cache_configuration = model.cache_configuration();
1184
1185 if !self.mark_cache_anchors(&cache_configuration, true, cx) {
1186 return;
1187 }
1188 if !self.pending_completions.is_empty() {
1189 return;
1190 }
1191 if let Some(cache_configuration) = cache_configuration {
1192 if !cache_configuration.should_speculate {
1193 return;
1194 }
1195 }
1196
1197 let request = {
1198 let mut req = self.to_completion_request(cx);
1199 // Skip the last message because it's likely to change and
1200 // therefore would be a waste to cache.
1201 req.messages.pop();
1202 req.messages.push(LanguageModelRequestMessage {
1203 role: Role::User,
1204 content: vec!["Respond only with OK, nothing else.".into()],
1205 cache: false,
1206 });
1207 req
1208 };
1209
1210 let model = Arc::clone(model);
1211 self.pending_cache_warming_task = cx.spawn(|this, mut cx| {
1212 async move {
1213 match model.stream_completion(request, &cx).await {
1214 Ok(mut stream) => {
1215 stream.next().await;
1216 log::info!("Cache warming completed successfully");
1217 }
1218 Err(e) => {
1219 log::warn!("Cache warming failed: {}", e);
1220 }
1221 };
1222 this.update(&mut cx, |this, cx| {
1223 this.update_cache_status_for_completion(cx);
1224 })
1225 .ok();
1226 anyhow::Ok(())
1227 }
1228 .log_err()
1229 });
1230 }
1231
1232 pub fn update_cache_status_for_completion(&mut self, cx: &mut ModelContext<Self>) {
1233 let cached_message_ids: Vec<MessageId> = self
1234 .messages_metadata
1235 .iter()
1236 .filter_map(|(message_id, metadata)| {
1237 metadata.cache.as_ref().and_then(|cache| {
1238 if cache.status == CacheStatus::Pending {
1239 Some(*message_id)
1240 } else {
1241 None
1242 }
1243 })
1244 })
1245 .collect();
1246
1247 for message_id in cached_message_ids {
1248 self.update_metadata(message_id, cx, |metadata| {
1249 if let Some(cache) = &mut metadata.cache {
1250 cache.status = CacheStatus::Cached;
1251 }
1252 });
1253 }
1254 cx.notify();
1255 }
1256
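    /// Reparses slash commands and workflow steps in the rows touched by edits
    /// since the last parse, emitting update events for anything that changed.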
1257 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1258 let buffer = self.buffer.read(cx).text_snapshot();
1259 let mut row_ranges = self
1260 .edits_since_last_parse
1261 .consume()
1262 .into_iter()
1263 .map(|edit| {
1264 let start_row = buffer.offset_to_point(edit.new.start).row;
1265 let end_row = buffer.offset_to_point(edit.new.end).row + 1;
1266 start_row..end_row
1267 })
1268 .peekable();
1269
1270 let mut removed_slash_command_ranges = Vec::new();
1271 let mut updated_slash_commands = Vec::new();
1272 let mut removed_steps = Vec::new();
1273 let mut updated_steps = Vec::new();
1274 while let Some(mut row_range) = row_ranges.next() {
1275 while let Some(next_row_range) = row_ranges.peek() {
1276 if row_range.end >= next_row_range.start {
1277 row_range.end = next_row_range.end;
1278 row_ranges.next();
1279 } else {
1280 break;
1281 }
1282 }
1283
1284 let start = buffer.anchor_before(Point::new(row_range.start, 0));
1285 let end = buffer.anchor_after(Point::new(
1286 row_range.end - 1,
1287 buffer.line_len(row_range.end - 1),
1288 ));
1289
1290 self.reparse_slash_commands_in_range(
1291 start..end,
1292 &buffer,
1293 &mut updated_slash_commands,
1294 &mut removed_slash_command_ranges,
1295 cx,
1296 );
1297 self.reparse_workflow_steps_in_range(
1298 start..end,
1299 &buffer,
1300 &mut updated_steps,
1301 &mut removed_steps,
1302 cx,
1303 );
1304 }
1305
1306 if !updated_slash_commands.is_empty() || !removed_slash_command_ranges.is_empty() {
1307 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1308 removed: removed_slash_command_ranges,
1309 updated: updated_slash_commands,
1310 });
1311 }
1312
1313 if !updated_steps.is_empty() || !removed_steps.is_empty() {
1314 cx.emit(ContextEvent::WorkflowStepsUpdated {
1315 removed: removed_steps,
1316 updated: updated_steps,
1317 });
1318 }
1319 }
1320
1321 fn reparse_slash_commands_in_range(
1322 &mut self,
1323 range: Range<text::Anchor>,
1324 buffer: &BufferSnapshot,
1325 updated: &mut Vec<PendingSlashCommand>,
1326 removed: &mut Vec<Range<text::Anchor>>,
1327 cx: &AppContext,
1328 ) {
1329 let old_range = self.pending_command_indices_for_range(range.clone(), cx);
1330
1331 let mut new_commands = Vec::new();
1332 let mut lines = buffer.text_for_range(range).lines();
1333 let mut offset = lines.offset();
1334 while let Some(line) = lines.next() {
1335 if let Some(command_line) = SlashCommandLine::parse(line) {
1336 let name = &line[command_line.name.clone()];
1337 let arguments = command_line
1338 .arguments
1339 .iter()
1340 .filter_map(|argument_range| {
1341 if argument_range.is_empty() {
1342 None
1343 } else {
1344 line.get(argument_range.clone())
1345 }
1346 })
1347 .map(ToOwned::to_owned)
1348 .collect::<SmallVec<_>>();
1349 if let Some(command) = SlashCommandRegistry::global(cx).command(name) {
1350 if !command.requires_argument() || !arguments.is_empty() {
1351 let start_ix = offset + command_line.name.start - 1;
1352 let end_ix = offset
1353 + command_line
1354 .arguments
1355 .last()
1356 .map_or(command_line.name.end, |argument| argument.end);
1357 let source_range =
1358 buffer.anchor_after(start_ix)..buffer.anchor_after(end_ix);
1359 let pending_command = PendingSlashCommand {
1360 name: name.to_string(),
1361 arguments,
1362 source_range,
1363 status: PendingSlashCommandStatus::Idle,
1364 };
1365 updated.push(pending_command.clone());
1366 new_commands.push(pending_command);
1367 }
1368 }
1369 }
1370
1371 offset = lines.offset();
1372 }
1373
1374 let removed_commands = self.pending_slash_commands.splice(old_range, new_commands);
1375 removed.extend(removed_commands.map(|command| command.source_range));
1376 }
1377
1378 fn reparse_workflow_steps_in_range(
1379 &mut self,
1380 range: Range<text::Anchor>,
1381 buffer: &BufferSnapshot,
1382 updated: &mut Vec<Range<text::Anchor>>,
1383 removed: &mut Vec<Range<text::Anchor>>,
1384 cx: &mut ModelContext<Self>,
1385 ) {
1386 // Rebuild the XML tags in the edited range.
1387 let intersecting_tags_range =
1388 self.indices_intersecting_buffer_range(&self.xml_tags, range.clone(), cx);
1389 let new_tags = self.parse_xml_tags_in_range(buffer, range.clone(), cx);
1390 self.xml_tags
1391 .splice(intersecting_tags_range.clone(), new_tags);
1392
1393 // Find which steps intersect the changed range.
1394 let intersecting_steps_range =
1395 self.indices_intersecting_buffer_range(&self.workflow_steps, range.clone(), cx);
1396
1397 // Reparse all tags after the last unchanged step before the change.
1398 let mut tags_start_ix = 0;
1399 if let Some(preceding_unchanged_step) =
1400 self.workflow_steps[..intersecting_steps_range.start].last()
1401 {
1402 tags_start_ix = match self.xml_tags.binary_search_by(|tag| {
1403 tag.range
1404 .start
1405 .cmp(&preceding_unchanged_step.range.end, buffer)
1406 .then(Ordering::Less)
1407 }) {
1408 Ok(ix) | Err(ix) => ix,
1409 };
1410 }
1411
1412 // Rebuild the edit suggestions in the range.
1413 let mut new_steps = self.parse_steps(tags_start_ix, range.end, buffer);
1414
1415 if let Some(project) = self.project() {
1416 for step in &mut new_steps {
1417 Self::resolve_workflow_step_internal(step, &project, cx);
1418 }
1419 }
1420
1421 updated.extend(new_steps.iter().map(|step| step.range.clone()));
1422 let removed_steps = self
1423 .workflow_steps
1424 .splice(intersecting_steps_range, new_steps);
1425 removed.extend(
1426 removed_steps
1427 .map(|step| step.range)
1428 .filter(|range| !updated.contains(&range)),
1429 );
1430 }
1431
1432 fn parse_xml_tags_in_range(
1433 &self,
1434 buffer: &BufferSnapshot,
1435 range: Range<text::Anchor>,
1436 cx: &AppContext,
1437 ) -> Vec<XmlTag> {
1438 let mut messages = self.messages(cx).peekable();
1439
1440 let mut tags = Vec::new();
1441 let mut lines = buffer.text_for_range(range).lines();
1442 let mut offset = lines.offset();
1443
1444 while let Some(line) = lines.next() {
1445 while let Some(message) = messages.peek() {
1446 if offset < message.offset_range.end {
1447 break;
1448 } else {
1449 messages.next();
1450 }
1451 }
1452
1453 let is_assistant_message = messages
1454 .peek()
1455 .map_or(false, |message| message.role == Role::Assistant);
1456 if is_assistant_message {
1457 for (start_ix, _) in line.match_indices('<') {
1458 let mut name_start_ix = start_ix + 1;
1459 let closing_bracket_ix = line[start_ix..].find('>').map(|i| start_ix + i);
1460 if let Some(closing_bracket_ix) = closing_bracket_ix {
1461 let end_ix = closing_bracket_ix + 1;
1462 let mut is_open_tag = true;
1463 if line[name_start_ix..closing_bracket_ix].starts_with('/') {
1464 name_start_ix += 1;
1465 is_open_tag = false;
1466 }
1467 let tag_inner = &line[name_start_ix..closing_bracket_ix];
1468 let tag_name_len = tag_inner
1469 .find(|c: char| c.is_whitespace())
1470 .unwrap_or(tag_inner.len());
1471 if let Ok(kind) = XmlTagKind::from_str(&tag_inner[..tag_name_len]) {
1472 tags.push(XmlTag {
1473 range: buffer.anchor_after(offset + start_ix)
1474 ..buffer.anchor_before(offset + end_ix),
1475 is_open_tag,
1476 kind,
1477 });
1478 };
1479 }
1480 }
1481 }
1482
1483 offset = lines.offset();
1484 }
1485 tags
1486 }
1487
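    /// Builds workflow steps from the XML tags starting at `tags_start_ix`,
    /// stopping once a tag begins after `buffer_end` outside of an open step.
    /// Nested <edit> tags populate each step's edits, and an unclosed <step>
    /// is extended to the end of the buffer.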
1488 fn parse_steps(
1489 &mut self,
1490 tags_start_ix: usize,
1491 buffer_end: text::Anchor,
1492 buffer: &BufferSnapshot,
1493 ) -> Vec<WorkflowStep> {
1494 let mut new_steps = Vec::new();
1495 let mut pending_step = None;
1496 let mut edit_step_depth = 0;
1497 let mut tags = self.xml_tags[tags_start_ix..].iter().peekable();
1498 'tags: while let Some(tag) = tags.next() {
1499 if tag.range.start.cmp(&buffer_end, buffer).is_gt() && edit_step_depth == 0 {
1500 break;
1501 }
1502
1503 if tag.kind == XmlTagKind::Step && tag.is_open_tag {
1504 edit_step_depth += 1;
1505 let edit_start = tag.range.start;
1506 let mut edits = Vec::new();
1507 let mut step = WorkflowStep {
1508 range: edit_start..edit_start,
1509 leading_tags_end: tag.range.end,
1510 trailing_tag_start: None,
1511 edits: Default::default(),
1512 resolution: None,
1513 resolution_task: None,
1514 };
1515
1516 while let Some(tag) = tags.next() {
1517 step.trailing_tag_start.get_or_insert(tag.range.start);
1518
1519 if tag.kind == XmlTagKind::Step && !tag.is_open_tag {
1520 // step.trailing_tag_start = Some(tag.range.start);
1521 edit_step_depth -= 1;
1522 if edit_step_depth == 0 {
1523 step.range.end = tag.range.end;
1524 step.edits = edits.into();
1525 new_steps.push(step);
1526 continue 'tags;
1527 }
1528 }
1529
1530 if tag.kind == XmlTagKind::Edit && tag.is_open_tag {
1531 let mut path = None;
1532 let mut search = None;
1533 let mut operation = None;
1534 let mut description = None;
1535
1536 while let Some(tag) = tags.next() {
1537 if tag.kind == XmlTagKind::Edit && !tag.is_open_tag {
1538 edits.push(WorkflowStepEdit::new(
1539 path,
1540 operation,
1541 search,
1542 description,
1543 ));
1544 break;
1545 }
1546
1547 if tag.is_open_tag
1548 && [
1549 XmlTagKind::Path,
1550 XmlTagKind::Search,
1551 XmlTagKind::Operation,
1552 XmlTagKind::Description,
1553 ]
1554 .contains(&tag.kind)
1555 {
1556 let kind = tag.kind;
1557 let content_start = tag.range.end;
1558 if let Some(tag) = tags.peek() {
1559 if tag.kind == kind && !tag.is_open_tag {
1560 let tag = tags.next().unwrap();
1561 let content_end = tag.range.start;
1562 let mut content = buffer
1563 .text_for_range(content_start..content_end)
1564 .collect::<String>();
1565 content.truncate(content.trim_end().len());
1566 match kind {
1567 XmlTagKind::Path => path = Some(content),
1568 XmlTagKind::Operation => operation = Some(content),
1569 XmlTagKind::Search => {
1570 search = Some(content).filter(|s| !s.is_empty())
1571 }
1572 XmlTagKind::Description => {
1573 description =
1574 Some(content).filter(|s| !s.is_empty())
1575 }
1576 _ => {}
1577 }
1578 }
1579 }
1580 }
1581 }
1582 }
1583 }
1584
1585 pending_step = Some(step);
1586 }
1587 }
1588
1589 if let Some(mut pending_step) = pending_step {
1590 pending_step.range.end = text::Anchor::MAX;
1591 new_steps.push(pending_step);
1592 }
1593
1594 new_steps
1595 }
1596
1597 pub fn resolve_workflow_step(
1598 &mut self,
1599 tagged_range: Range<text::Anchor>,
1600 cx: &mut ModelContext<Self>,
1601 ) -> Option<()> {
1602 let index = self
1603 .workflow_step_index_for_range(&tagged_range, self.buffer.read(cx))
1604 .ok()?;
1605 let step = &mut self.workflow_steps[index];
1606 let project = self.project.as_ref()?;
1607 step.resolution.take();
1608 Self::resolve_workflow_step_internal(step, project, cx);
1609 None
1610 }
1611
1612 fn resolve_workflow_step_internal(
1613 step: &mut WorkflowStep,
1614 project: &Model<Project>,
1615 cx: &mut ModelContext<'_, Context>,
1616 ) {
1617 step.resolution_task = Some(cx.spawn({
1618 let range = step.range.clone();
1619 let edits = step.edits.clone();
1620 let project = project.clone();
1621 |this, mut cx| async move {
1622 let suggestion_groups =
1623 Self::compute_step_resolution(project, edits, &mut cx).await;
1624
1625 this.update(&mut cx, |this, cx| {
1626 let buffer = this.buffer.read(cx).text_snapshot();
1627 let ix = this.workflow_step_index_for_range(&range, &buffer).ok();
1628 if let Some(ix) = ix {
1629 let step = &mut this.workflow_steps[ix];
1630
1631 let resolution = suggestion_groups.map(|suggestion_groups| {
1632 let mut title = String::new();
1633 for mut chunk in buffer.text_for_range(
1634 step.leading_tags_end
1635 ..step.trailing_tag_start.unwrap_or(step.range.end),
1636 ) {
1637 if title.is_empty() {
1638 chunk = chunk.trim_start();
1639 }
1640 if let Some((prefix, _)) = chunk.split_once('\n') {
1641 title.push_str(prefix);
1642 break;
1643 } else {
1644 title.push_str(chunk);
1645 }
1646 }
1647
1648 WorkflowStepResolution {
1649 title,
1650 suggestion_groups,
1651 }
1652 });
1653
1654 step.resolution = Some(Arc::new(resolution));
1655 cx.emit(ContextEvent::WorkflowStepsUpdated {
1656 removed: vec![],
1657 updated: vec![range],
1658 })
1659 }
1660 })
1661 .ok();
1662 }
1663 }));
1664 }
1665
1666 async fn compute_step_resolution(
1667 project: Model<Project>,
1668 edits: Arc<[Result<WorkflowStepEdit>]>,
1669 cx: &mut AsyncAppContext,
1670 ) -> Result<HashMap<Model<Buffer>, Vec<WorkflowSuggestionGroup>>> {
1671 let mut suggestion_tasks = Vec::new();
1672 for edit in edits.iter() {
1673 let edit = edit.as_ref().map_err(|e| anyhow!("{e}"))?;
1674 suggestion_tasks.push(edit.resolve(project.clone(), cx.clone()));
1675 }
1676
1677 // Expand the context ranges of each suggestion and group suggestions with overlapping context ranges.
1678 let suggestions = future::try_join_all(suggestion_tasks).await?;
1679
1680 let mut suggestions_by_buffer = HashMap::default();
1681 for (buffer, suggestion) in suggestions {
1682 suggestions_by_buffer
1683 .entry(buffer)
1684 .or_insert_with(Vec::new)
1685 .push(suggestion);
1686 }
1687
1688 let mut suggestion_groups_by_buffer = HashMap::default();
1689 for (buffer, mut suggestions) in suggestions_by_buffer {
1690 let mut suggestion_groups = Vec::<WorkflowSuggestionGroup>::new();
1691 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
1692 // Sort suggestions by their range so that earlier, larger ranges come first
1693 suggestions.sort_by(|a, b| a.range().cmp(&b.range(), &snapshot));
1694
1695 // Merge overlapping suggestions
1696 suggestions.dedup_by(|a, b| b.try_merge(a, &snapshot));
1697
1698 // Create context ranges for each suggestion
1699 for suggestion in suggestions {
1700 let context_range = {
1701 let suggestion_point_range = suggestion.range().to_point(&snapshot);
1702 let start_row = suggestion_point_range.start.row.saturating_sub(5);
1703 let end_row =
1704 cmp::min(suggestion_point_range.end.row + 5, snapshot.max_point().row);
1705 let start = snapshot.anchor_before(Point::new(start_row, 0));
1706 let end =
1707 snapshot.anchor_after(Point::new(end_row, snapshot.line_len(end_row)));
1708 start..end
1709 };
1710
1711 if let Some(last_group) = suggestion_groups.last_mut() {
1712 if last_group
1713 .context_range
1714 .end
1715 .cmp(&context_range.start, &snapshot)
1716 .is_ge()
1717 {
1718 // Merge with the previous group if context ranges overlap
1719 last_group.context_range.end = context_range.end;
1720 last_group.suggestions.push(suggestion);
1721 } else {
1722 // Create a new group
1723 suggestion_groups.push(WorkflowSuggestionGroup {
1724 context_range,
1725 suggestions: vec![suggestion],
1726 });
1727 }
1728 } else {
1729 // Create the first group
1730 suggestion_groups.push(WorkflowSuggestionGroup {
1731 context_range,
1732 suggestions: vec![suggestion],
1733 });
1734 }
1735 }
1736
1737 suggestion_groups_by_buffer.insert(buffer, suggestion_groups);
1738 }
1739
1740 Ok(suggestion_groups_by_buffer)
1741 }
1742
1743 pub fn pending_command_for_position(
1744 &mut self,
1745 position: language::Anchor,
1746 cx: &mut ModelContext<Self>,
1747 ) -> Option<&mut PendingSlashCommand> {
1748 let buffer = self.buffer.read(cx);
1749 match self
1750 .pending_slash_commands
1751 .binary_search_by(|probe| probe.source_range.end.cmp(&position, buffer))
1752 {
1753 Ok(ix) => Some(&mut self.pending_slash_commands[ix]),
1754 Err(ix) => {
1755 let cmd = self.pending_slash_commands.get_mut(ix)?;
1756 if position.cmp(&cmd.source_range.start, buffer).is_ge()
1757 && position.cmp(&cmd.source_range.end, buffer).is_le()
1758 {
1759 Some(cmd)
1760 } else {
1761 None
1762 }
1763 }
1764 }
1765 }
1766
1767 pub fn pending_commands_for_range(
1768 &self,
1769 range: Range<language::Anchor>,
1770 cx: &AppContext,
1771 ) -> &[PendingSlashCommand] {
1772 let range = self.pending_command_indices_for_range(range, cx);
1773 &self.pending_slash_commands[range]
1774 }
1775
1776 fn pending_command_indices_for_range(
1777 &self,
1778 range: Range<language::Anchor>,
1779 cx: &AppContext,
1780 ) -> Range<usize> {
1781 self.indices_intersecting_buffer_range(&self.pending_slash_commands, range, cx)
1782 }
1783
1784 fn indices_intersecting_buffer_range<T: ContextAnnotation>(
1785 &self,
1786 all_annotations: &[T],
1787 range: Range<language::Anchor>,
1788 cx: &AppContext,
1789 ) -> Range<usize> {
1790 let buffer = self.buffer.read(cx);
1791 let start_ix = match all_annotations
1792 .binary_search_by(|probe| probe.range().end.cmp(&range.start, &buffer))
1793 {
1794 Ok(ix) | Err(ix) => ix,
1795 };
1796 let end_ix = match all_annotations
1797 .binary_search_by(|probe| probe.range().start.cmp(&range.end, &buffer))
1798 {
1799 Ok(ix) => ix + 1,
1800 Err(ix) => ix,
1801 };
1802 start_ix..end_ix
1803 }
1804
1805 pub fn insert_command_output(
1806 &mut self,
1807 command_range: Range<language::Anchor>,
1808 output: Task<Result<SlashCommandOutput>>,
1809 ensure_trailing_newline: bool,
1810 expand_result: bool,
1811 cx: &mut ModelContext<Self>,
1812 ) {
1813 self.reparse(cx);
1814
1815 let insert_output_task = cx.spawn(|this, mut cx| {
1816 let command_range = command_range.clone();
1817 async move {
1818 let output = output.await;
1819 this.update(&mut cx, |this, cx| match output {
1820 Ok(mut output) => {
1821 // Ensure section ranges are valid.
1822 for section in &mut output.sections {
1823 section.range.start = section.range.start.min(output.text.len());
1824 section.range.end = section.range.end.min(output.text.len());
1825 while !output.text.is_char_boundary(section.range.start) {
1826 section.range.start -= 1;
1827 }
1828 while !output.text.is_char_boundary(section.range.end) {
1829 section.range.end += 1;
1830 }
1831 }
1832
1833 // Ensure there is a newline after the last section.
1834 if ensure_trailing_newline {
1835 let has_newline_after_last_section =
1836 output.sections.last().map_or(false, |last_section| {
1837 output.text[last_section.range.end..].ends_with('\n')
1838 });
1839 if !has_newline_after_last_section {
1840 output.text.push('\n');
1841 }
1842 }
1843
1844 let version = this.version.clone();
1845 let command_id = SlashCommandId(this.next_timestamp());
1846 let (operation, event) = this.buffer.update(cx, |buffer, cx| {
1847 let start = command_range.start.to_offset(buffer);
1848 let old_end = command_range.end.to_offset(buffer);
1849 let new_end = start + output.text.len();
1850 buffer.edit([(start..old_end, output.text)], None, cx);
1851
1852 let mut sections = output
1853 .sections
1854 .into_iter()
1855 .map(|section| SlashCommandOutputSection {
1856 range: buffer.anchor_after(start + section.range.start)
1857 ..buffer.anchor_before(start + section.range.end),
1858 icon: section.icon,
1859 label: section.label,
1860 })
1861 .collect::<Vec<_>>();
1862 sections.sort_by(|a, b| a.range.cmp(&b.range, buffer));
1863
1864 this.slash_command_output_sections
1865 .extend(sections.iter().cloned());
1866 this.slash_command_output_sections
1867 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
1868
1869 let output_range =
1870 buffer.anchor_after(start)..buffer.anchor_before(new_end);
1871 this.finished_slash_commands.insert(command_id);
1872
1873 (
1874 ContextOperation::SlashCommandFinished {
1875 id: command_id,
1876 output_range: output_range.clone(),
1877 sections: sections.clone(),
1878 version,
1879 },
1880 ContextEvent::SlashCommandFinished {
1881 output_range,
1882 sections,
1883 run_commands_in_output: output.run_commands_in_text,
1884 expand_result,
1885 },
1886 )
1887 });
1888
1889 this.push_op(operation, cx);
1890 cx.emit(event);
1891 }
1892 Err(error) => {
1893 if let Some(pending_command) =
1894 this.pending_command_for_position(command_range.start, cx)
1895 {
1896 pending_command.status =
1897 PendingSlashCommandStatus::Error(error.to_string());
1898 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1899 removed: vec![pending_command.source_range.clone()],
1900 updated: vec![pending_command.clone()],
1901 });
1902 }
1903 }
1904 })
1905 .ok();
1906 }
1907 });
1908
1909 if let Some(pending_command) = self.pending_command_for_position(command_range.start, cx) {
1910 pending_command.status = PendingSlashCommandStatus::Running {
1911 _task: insert_output_task.shared(),
1912 };
1913 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1914 removed: vec![pending_command.source_range.clone()],
1915 updated: vec![pending_command.clone()],
1916 });
1917 }
1918 }
1919
1920 pub fn completion_provider_changed(&mut self, cx: &mut ModelContext<Self>) {
1921 self.count_remaining_tokens(cx);
1922 }
1923
1924 fn get_last_valid_message_id(&self, cx: &ModelContext<Self>) -> Option<MessageId> {
1925 self.message_anchors.iter().rev().find_map(|message| {
1926 message
1927 .start
1928 .is_valid(self.buffer.read(cx))
1929 .then_some(message.id)
1930 })
1931 }
1932
1933 pub fn assist(&mut self, cx: &mut ModelContext<Self>) -> Option<MessageAnchor> {
1934 let provider = LanguageModelRegistry::read_global(cx).active_provider()?;
1935 let model = LanguageModelRegistry::read_global(cx).active_model()?;
1936 let last_message_id = self.get_last_valid_message_id(cx)?;
1937
1938 if !provider.is_authenticated(cx) {
1939 log::info!("completion provider has no credentials");
1940 return None;
1941 }
1942 // Compute which messages to cache, including the last one.
1943 self.mark_cache_anchors(&model.cache_configuration(), false, cx);
1944
1945 let request = self.to_completion_request(cx);
1946 let assistant_message = self
1947 .insert_message_after(last_message_id, Role::Assistant, MessageStatus::Pending, cx)
1948 .unwrap();
1949
1950 // Queue up the user's next reply.
1951 let user_message = self
1952 .insert_message_after(assistant_message.id, Role::User, MessageStatus::Done, cx)
1953 .unwrap();
1954
1955 let pending_completion_id = post_inc(&mut self.completion_count);
1956
1957 let task = cx.spawn({
1958 |this, mut cx| async move {
1959 let stream = model.stream_completion(request, &cx);
1960 let assistant_message_id = assistant_message.id;
1961 let mut response_latency = None;
1962 let stream_completion = async {
1963 let request_start = Instant::now();
1964 let mut events = stream.await?;
1965
1966 while let Some(event) = events.next().await {
1967 if response_latency.is_none() {
1968 response_latency = Some(request_start.elapsed());
1969 }
1970 let event = event?;
1971
1972 this.update(&mut cx, |this, cx| {
1973 let message_ix = this
1974 .message_anchors
1975 .iter()
1976 .position(|message| message.id == assistant_message_id)?;
1977 this.buffer.update(cx, |buffer, cx| {
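                                // Find where this assistant message currently ends: just before
                                // the next valid message's start, or at the end of the buffer if
                                // this is the last message.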
1978 let message_old_end_offset = this.message_anchors[message_ix + 1..]
1979 .iter()
1980 .find(|message| message.start.is_valid(buffer))
1981 .map_or(buffer.len(), |message| {
1982 message.start.to_offset(buffer).saturating_sub(1)
1983 });
1984
1985 match event {
1986 LanguageModelCompletionEvent::Text(chunk) => {
1987 buffer.edit(
1988 [(
1989 message_old_end_offset..message_old_end_offset,
1990 chunk,
1991 )],
1992 None,
1993 cx,
1994 );
1995 }
1996 LanguageModelCompletionEvent::ToolUse(tool_use) => {
1997 const NEWLINE: char = '\n';
1998
1999 let mut text = String::new();
2000 text.push(NEWLINE);
2001 text.push_str(
2002 &serde_json::to_string_pretty(&tool_use)
2003 .expect("failed to serialize tool use to JSON"),
2004 );
2005 text.push(NEWLINE);
2006 let text_len = text.len();
2007
2008 buffer.edit(
2009 [(
2010 message_old_end_offset..message_old_end_offset,
2011 text,
2012 )],
2013 None,
2014 cx,
2015 );
2016
2017 let start_ix = message_old_end_offset + NEWLINE.len_utf8();
2018 let end_ix =
2019 message_old_end_offset + text_len - NEWLINE.len_utf8();
2020 let source_range = buffer.anchor_after(start_ix)
2021 ..buffer.anchor_after(end_ix);
2022
2023 this.pending_tool_uses_by_id.insert(
2024 tool_use.id.clone(),
2025 PendingToolUse {
2026 id: tool_use.id,
2027 name: tool_use.name,
2028 input: tool_use.input,
2029 status: PendingToolUseStatus::Idle,
2030 source_range,
2031 },
2032 );
2033 }
2034 }
2035 });
2036
2037 cx.emit(ContextEvent::StreamedCompletion);
2038
2039 Some(())
2040 })?;
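                        // Yield so other tasks can make progress between streamed events.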
2041 smol::future::yield_now().await;
2042 }
2043 this.update(&mut cx, |this, cx| {
2044 this.pending_completions
2045 .retain(|completion| completion.id != pending_completion_id);
2046 this.summarize(false, cx);
2047 this.update_cache_status_for_completion(cx);
2048 })?;
2049
2050 anyhow::Ok(())
2051 };
2052
2053 let result = stream_completion.await;
2054
2055 this.update(&mut cx, |this, cx| {
2056 let error_message = result
2057 .err()
2058 .map(|error| error.to_string().trim().to_string());
2059
2060 if let Some(error_message) = error_message.as_ref() {
2061 cx.emit(ContextEvent::ShowAssistError(SharedString::from(
2062 error_message.clone(),
2063 )));
2064 }
2065
2066 this.update_metadata(assistant_message_id, cx, |metadata| {
2067 if let Some(error_message) = error_message.as_ref() {
2068 metadata.status =
2069 MessageStatus::Error(SharedString::from(error_message.clone()));
2070 } else {
2071 metadata.status = MessageStatus::Done;
2072 }
2073 });
2074
2075 if let Some(telemetry) = this.telemetry.as_ref() {
2076 telemetry.report_assistant_event(
2077 Some(this.id.0.clone()),
2078 AssistantKind::Panel,
2079 model.telemetry_id(),
2080 response_latency,
2081 error_message,
2082 );
2083 }
2084 })
2085 .ok();
2086 }
2087 });
2088
2089 self.pending_completions.push(PendingCompletion {
2090 id: pending_completion_id,
2091 assistant_message_id: assistant_message.id,
2092 _task: task,
2093 });
2094
2095 Some(user_message)
2096 }
2097
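    // Builds a language model request from every message whose status is `Done`.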
2098 pub fn to_completion_request(&self, cx: &AppContext) -> LanguageModelRequest {
2099 let buffer = self.buffer.read(cx);
2100 let request_messages = self
2101 .messages(cx)
2102 .filter(|message| message.status == MessageStatus::Done)
2103 .filter_map(|message| message.to_request_message(&buffer))
2104 .collect();
2105
2106 LanguageModelRequest {
2107 messages: request_messages,
2108 tools: Vec::new(),
2109 stop: Vec::new(),
2110 temperature: 1.0,
2111 }
2112 }
2113
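    // Cancels the most recent pending completion, marking its assistant message as canceled
    // if it was still pending. Returns whether a completion was actually canceled.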
2114 pub fn cancel_last_assist(&mut self, cx: &mut ModelContext<Self>) -> bool {
2115 if let Some(pending_completion) = self.pending_completions.pop() {
2116 self.update_metadata(pending_completion.assistant_message_id, cx, |metadata| {
2117 if metadata.status == MessageStatus::Pending {
2118 metadata.status = MessageStatus::Canceled;
2119 }
2120 });
2121 true
2122 } else {
2123 false
2124 }
2125 }
2126
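    // Cycles the role of each of the given messages and reparses workflow steps in the
    // affected ranges.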
2127 pub fn cycle_message_roles(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
2128 for id in &ids {
2129 if let Some(metadata) = self.messages_metadata.get(id) {
2130 let role = metadata.role.cycle();
2131 self.update_metadata(*id, cx, |metadata| metadata.role = role);
2132 }
2133 }
2134
2135 self.message_roles_updated(ids, cx);
2136 }
2137
2138 fn message_roles_updated(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
2139 let mut ranges = Vec::new();
2140 for message in self.messages(cx) {
2141 if ids.contains(&message.id) {
2142 ranges.push(message.anchor_range.clone());
2143 }
2144 }
2145
2146 let buffer = self.buffer.read(cx).text_snapshot();
2147 let mut updated = Vec::new();
2148 let mut removed = Vec::new();
2149 for range in ranges {
2150 self.reparse_workflow_steps_in_range(range, &buffer, &mut updated, &mut removed, cx);
2151 }
2152
2153 if !updated.is_empty() || !removed.is_empty() {
2154 cx.emit(ContextEvent::WorkflowStepsUpdated { removed, updated })
2155 }
2156 }
2157
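    // Applies `f` to the message's metadata, stamps it with a fresh timestamp, and
    // broadcasts the change as an `UpdateMessage` operation.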
2158 pub fn update_metadata(
2159 &mut self,
2160 id: MessageId,
2161 cx: &mut ModelContext<Self>,
2162 f: impl FnOnce(&mut MessageMetadata),
2163 ) {
2164 let version = self.version.clone();
2165 let timestamp = self.next_timestamp();
2166 if let Some(metadata) = self.messages_metadata.get_mut(&id) {
2167 f(metadata);
2168 metadata.timestamp = timestamp;
2169 let operation = ContextOperation::UpdateMessage {
2170 message_id: id,
2171 metadata: metadata.clone(),
2172 version,
2173 };
2174 self.push_op(operation, cx);
2175 cx.emit(ContextEvent::MessagesEdited);
2176 cx.notify();
2177 }
2178 }
2179
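    // Inserts a new message just before the next valid message that follows `message_id`
    // (or at the end of the buffer), separating it with a newline. Returns `None` if
    // `message_id` does not exist.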
2180 pub fn insert_message_after(
2181 &mut self,
2182 message_id: MessageId,
2183 role: Role,
2184 status: MessageStatus,
2185 cx: &mut ModelContext<Self>,
2186 ) -> Option<MessageAnchor> {
2187 if let Some(prev_message_ix) = self
2188 .message_anchors
2189 .iter()
2190 .position(|message| message.id == message_id)
2191 {
2192 // Find the next valid message after the one we were given.
2193 let mut next_message_ix = prev_message_ix + 1;
2194 while let Some(next_message) = self.message_anchors.get(next_message_ix) {
2195 if next_message.start.is_valid(self.buffer.read(cx)) {
2196 break;
2197 }
2198 next_message_ix += 1;
2199 }
2200
2201 let start = self.buffer.update(cx, |buffer, cx| {
2202 let offset = self
2203 .message_anchors
2204 .get(next_message_ix)
2205 .map_or(buffer.len(), |message| {
2206 buffer.clip_offset(message.start.to_offset(buffer) - 1, Bias::Left)
2207 });
2208 buffer.edit([(offset..offset, "\n")], None, cx);
2209 buffer.anchor_before(offset + 1)
2210 });
2211
2212 let version = self.version.clone();
2213 let anchor = MessageAnchor {
2214 id: MessageId(self.next_timestamp()),
2215 start,
2216 };
2217 let metadata = MessageMetadata {
2218 role,
2219 status,
2220 timestamp: anchor.id.0,
2221 cache: None,
2222 };
2223 self.insert_message(anchor.clone(), metadata.clone(), cx);
2224 self.push_op(
2225 ContextOperation::InsertMessage {
2226 anchor: anchor.clone(),
2227 metadata,
2228 version,
2229 },
2230 cx,
2231 );
2232 Some(anchor)
2233 } else {
2234 None
2235 }
2236 }
2237
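    // Caches the image's render data alongside a shared task that converts it into a
    // language-model image, keyed by the image id.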
2238 pub fn insert_image(&mut self, image: Image, cx: &mut ModelContext<Self>) -> Option<()> {
2239 if let hash_map::Entry::Vacant(entry) = self.images.entry(image.id()) {
2240 entry.insert((
2241 image.to_image_data(cx).log_err()?,
2242 LanguageModelImage::from_image(image, cx).shared(),
2243 ));
2244 }
2245
2246 Some(())
2247 }
2248
2249 pub fn insert_image_anchor(
2250 &mut self,
2251 image_id: u64,
2252 anchor: language::Anchor,
2253 cx: &mut ModelContext<Self>,
2254 ) -> bool {
2255 cx.emit(ContextEvent::MessagesEdited);
2256
2257 let buffer = self.buffer.read(cx);
2258 let insertion_ix = match self
2259 .image_anchors
2260 .binary_search_by(|existing_anchor| anchor.cmp(&existing_anchor.anchor, buffer))
2261 {
2262 Ok(ix) => ix,
2263 Err(ix) => ix,
2264 };
2265
2266 if let Some((render_image, image)) = self.images.get(&image_id) {
2267 self.image_anchors.insert(
2268 insertion_ix,
2269 ImageAnchor {
2270 anchor,
2271 image_id,
2272 image: image.clone(),
2273 render_image: render_image.clone(),
2274 },
2275 );
2276
2277 true
2278 } else {
2279 false
2280 }
2281 }
2282
2283 pub fn images<'a>(&'a self, _cx: &'a AppContext) -> impl 'a + Iterator<Item = ImageAnchor> {
2284 self.image_anchors.iter().cloned()
2285 }
2286
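    // Splits the message containing `range` into up to three parts: the existing prefix,
    // an optional message covering the selection, and a suffix after the range. Returns
    // the newly created selection and suffix message anchors.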
2287 pub fn split_message(
2288 &mut self,
2289 range: Range<usize>,
2290 cx: &mut ModelContext<Self>,
2291 ) -> (Option<MessageAnchor>, Option<MessageAnchor>) {
2292 let start_message = self.message_for_offset(range.start, cx);
2293 let end_message = self.message_for_offset(range.end, cx);
2294 if let Some((start_message, end_message)) = start_message.zip(end_message) {
2295 // Prevent splitting when range spans multiple messages.
2296 if start_message.id != end_message.id {
2297 return (None, None);
2298 }
2299
2300 let message = start_message;
2301 let role = message.role;
2302 let mut edited_buffer = false;
2303
2304 let mut suffix_start = None;
2305
2306 // TODO: why did this start panicking?
2307 if range.start > message.offset_range.start
2308 && range.end < message.offset_range.end.saturating_sub(1)
2309 {
2310 if self.buffer.read(cx).chars_at(range.end).next() == Some('\n') {
2311 suffix_start = Some(range.end + 1);
2312 } else if self.buffer.read(cx).reversed_chars_at(range.end).next() == Some('\n') {
2313 suffix_start = Some(range.end);
2314 }
2315 }
2316
2317 let version = self.version.clone();
2318 let suffix = if let Some(suffix_start) = suffix_start {
2319 MessageAnchor {
2320 id: MessageId(self.next_timestamp()),
2321 start: self.buffer.read(cx).anchor_before(suffix_start),
2322 }
2323 } else {
2324 self.buffer.update(cx, |buffer, cx| {
2325 buffer.edit([(range.end..range.end, "\n")], None, cx);
2326 });
2327 edited_buffer = true;
2328 MessageAnchor {
2329 id: MessageId(self.next_timestamp()),
2330 start: self.buffer.read(cx).anchor_before(range.end + 1),
2331 }
2332 };
2333
2334 let suffix_metadata = MessageMetadata {
2335 role,
2336 status: MessageStatus::Done,
2337 timestamp: suffix.id.0,
2338 cache: None,
2339 };
2340 self.insert_message(suffix.clone(), suffix_metadata.clone(), cx);
2341 self.push_op(
2342 ContextOperation::InsertMessage {
2343 anchor: suffix.clone(),
2344 metadata: suffix_metadata,
2345 version,
2346 },
2347 cx,
2348 );
2349
2350 let new_messages =
2351 if range.start == range.end || range.start == message.offset_range.start {
2352 (None, Some(suffix))
2353 } else {
2354 let mut prefix_end = None;
2355 if range.start > message.offset_range.start
2356 && range.end < message.offset_range.end - 1
2357 {
2358 if self.buffer.read(cx).chars_at(range.start).next() == Some('\n') {
2359 prefix_end = Some(range.start + 1);
2360 } else if self.buffer.read(cx).reversed_chars_at(range.start).next()
2361 == Some('\n')
2362 {
2363 prefix_end = Some(range.start);
2364 }
2365 }
2366
2367 let version = self.version.clone();
2368 let selection = if let Some(prefix_end) = prefix_end {
2369 MessageAnchor {
2370 id: MessageId(self.next_timestamp()),
2371 start: self.buffer.read(cx).anchor_before(prefix_end),
2372 }
2373 } else {
2374 self.buffer.update(cx, |buffer, cx| {
2375 buffer.edit([(range.start..range.start, "\n")], None, cx)
2376 });
2377 edited_buffer = true;
2378 MessageAnchor {
2379 id: MessageId(self.next_timestamp()),
                            start: self.buffer.read(cx).anchor_before(range.start + 1),
2381 }
2382 };
2383
2384 let selection_metadata = MessageMetadata {
2385 role,
2386 status: MessageStatus::Done,
2387 timestamp: selection.id.0,
2388 cache: None,
2389 };
2390 self.insert_message(selection.clone(), selection_metadata.clone(), cx);
2391 self.push_op(
2392 ContextOperation::InsertMessage {
2393 anchor: selection.clone(),
2394 metadata: selection_metadata,
2395 version,
2396 },
2397 cx,
2398 );
2399
2400 (Some(selection), Some(suffix))
2401 };
2402
2403 if !edited_buffer {
2404 cx.emit(ContextEvent::MessagesEdited);
2405 }
2406 new_messages
2407 } else {
2408 (None, None)
2409 }
2410 }
2411
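    // Records the message's metadata and inserts its anchor so that `message_anchors`
    // stays ordered by buffer position (ties broken by message id).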
2412 fn insert_message(
2413 &mut self,
2414 new_anchor: MessageAnchor,
2415 new_metadata: MessageMetadata,
2416 cx: &mut ModelContext<Self>,
2417 ) {
2418 cx.emit(ContextEvent::MessagesEdited);
2419
2420 self.messages_metadata.insert(new_anchor.id, new_metadata);
2421
2422 let buffer = self.buffer.read(cx);
2423 let insertion_ix = self
2424 .message_anchors
2425 .iter()
2426 .position(|anchor| {
2427 let comparison = new_anchor.start.cmp(&anchor.start, buffer);
2428 comparison.is_lt() || (comparison.is_eq() && new_anchor.id > anchor.id)
2429 })
2430 .unwrap_or(self.message_anchors.len());
2431 self.message_anchors.insert(insertion_ix, new_anchor);
2432 }
2433
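    // Generates a short title for the context by asking the model to summarize the
    // conversation. Runs when there are at least two messages and no summary yet, or
    // unconditionally when `replace_old` is true.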
2434 pub(super) fn summarize(&mut self, replace_old: bool, cx: &mut ModelContext<Self>) {
2435 let Some(provider) = LanguageModelRegistry::read_global(cx).active_provider() else {
2436 return;
2437 };
2438 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
2439 return;
2440 };
2441
2442 if replace_old || (self.message_anchors.len() >= 2 && self.summary.is_none()) {
2443 if !provider.is_authenticated(cx) {
2444 return;
2445 }
2446
2447 let messages = self
2448 .messages(cx)
2449 .filter_map(|message| message.to_request_message(self.buffer.read(cx)))
2450 .chain(Some(LanguageModelRequestMessage {
2451 role: Role::User,
2452 content: vec![
2453 "Summarize the context into a short title without punctuation.".into(),
2454 ],
2455 cache: false,
2456 }));
2457 let request = LanguageModelRequest {
2458 messages: messages.collect(),
2459 tools: Vec::new(),
2460 stop: Vec::new(),
2461 temperature: 1.0,
2462 };
2463
2464 self.pending_summary = cx.spawn(|this, mut cx| {
2465 async move {
2466 let stream = model.stream_completion_text(request, &cx);
2467 let mut messages = stream.await?;
2468
2469 let mut replaced = !replace_old;
2470 while let Some(message) = messages.next().await {
2471 let text = message?;
2472 let mut lines = text.lines();
2473 this.update(&mut cx, |this, cx| {
2474 let version = this.version.clone();
2475 let timestamp = this.next_timestamp();
2476 let summary = this.summary.get_or_insert(ContextSummary::default());
2477 if !replaced && replace_old {
2478 summary.text.clear();
2479 replaced = true;
2480 }
2481 summary.text.extend(lines.next());
2482 summary.timestamp = timestamp;
2483 let operation = ContextOperation::UpdateSummary {
2484 summary: summary.clone(),
2485 version,
2486 };
2487 this.push_op(operation, cx);
2488 cx.emit(ContextEvent::SummaryChanged);
2489 })?;
2490
2491 // Stop if the LLM generated multiple lines.
2492 if lines.next().is_some() {
2493 break;
2494 }
2495 }
2496
2497 this.update(&mut cx, |this, cx| {
2498 let version = this.version.clone();
2499 let timestamp = this.next_timestamp();
2500 if let Some(summary) = this.summary.as_mut() {
2501 summary.done = true;
2502 summary.timestamp = timestamp;
2503 let operation = ContextOperation::UpdateSummary {
2504 summary: summary.clone(),
2505 version,
2506 };
2507 this.push_op(operation, cx);
2508 cx.emit(ContextEvent::SummaryChanged);
2509 }
2510 })?;
2511
2512 anyhow::Ok(())
2513 }
2514 .log_err()
2515 });
2516 }
2517 }
2518
2519 fn message_for_offset(&self, offset: usize, cx: &AppContext) -> Option<Message> {
2520 self.messages_for_offsets([offset], cx).pop()
2521 }
2522
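    // Returns the messages containing the given offsets. Assumes the offsets are in
    // ascending order; each message is returned at most once.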
2523 pub fn messages_for_offsets(
2524 &self,
2525 offsets: impl IntoIterator<Item = usize>,
2526 cx: &AppContext,
2527 ) -> Vec<Message> {
2528 let mut result = Vec::new();
2529
2530 let mut messages = self.messages(cx).peekable();
2531 let mut offsets = offsets.into_iter().peekable();
2532 let mut current_message = messages.next();
2533 while let Some(offset) = offsets.next() {
2534 // Locate the message that contains the offset.
2535 while current_message.as_ref().map_or(false, |message| {
2536 !message.offset_range.contains(&offset) && messages.peek().is_some()
2537 }) {
2538 current_message = messages.next();
2539 }
2540 let Some(message) = current_message.as_ref() else {
2541 break;
2542 };
2543
2544 // Skip offsets that are in the same message.
2545 while offsets.peek().map_or(false, |offset| {
2546 message.offset_range.contains(offset) || messages.peek().is_none()
2547 }) {
2548 offsets.next();
2549 }
2550
2551 result.push(message.clone());
2552 }
2553 result
2554 }
2555
2556 fn messages_from_anchors<'a>(
2557 &'a self,
2558 message_anchors: impl Iterator<Item = &'a MessageAnchor> + 'a,
2559 cx: &'a AppContext,
2560 ) -> impl 'a + Iterator<Item = Message> {
2561 let buffer = self.buffer.read(cx);
2562 let messages = message_anchors.enumerate();
2563 let images = self.image_anchors.iter();
2564
2565 Self::messages_from_iters(buffer, &self.messages_metadata, messages, images)
2566 }
2567
2568 pub fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Message> {
2569 self.messages_from_anchors(self.message_anchors.iter(), cx)
2570 }
2571
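    // Assembles `Message` values from anchors and metadata, folding invalid anchors into
    // the preceding message and attaching images that fall within each message's range.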
2572 pub fn messages_from_iters<'a>(
2573 buffer: &'a Buffer,
2574 metadata: &'a HashMap<MessageId, MessageMetadata>,
2575 messages: impl Iterator<Item = (usize, &'a MessageAnchor)> + 'a,
2576 images: impl Iterator<Item = &'a ImageAnchor> + 'a,
2577 ) -> impl 'a + Iterator<Item = Message> {
2578 let mut messages = messages.peekable();
2579 let mut images = images.peekable();
2580
2581 iter::from_fn(move || {
2582 if let Some((start_ix, message_anchor)) = messages.next() {
2583 let metadata = metadata.get(&message_anchor.id)?;
2584
2585 let message_start = message_anchor.start.to_offset(buffer);
2586 let mut message_end = None;
2587 let mut end_ix = start_ix;
2588 while let Some((_, next_message)) = messages.peek() {
2589 if next_message.start.is_valid(buffer) {
2590 message_end = Some(next_message.start);
2591 break;
2592 } else {
2593 end_ix += 1;
2594 messages.next();
2595 }
2596 }
2597 let message_end_anchor = message_end.unwrap_or(language::Anchor::MAX);
2598 let message_end = message_end_anchor.to_offset(buffer);
2599
2600 let mut image_offsets = SmallVec::new();
2601 while let Some(image_anchor) = images.peek() {
2602 if image_anchor.anchor.cmp(&message_end_anchor, buffer).is_lt() {
2603 image_offsets.push((
2604 image_anchor.anchor.to_offset(buffer),
2605 MessageImage {
2606 image_id: image_anchor.image_id,
2607 image: image_anchor.image.clone(),
2608 },
2609 ));
2610 images.next();
2611 } else {
2612 break;
2613 }
2614 }
2615
2616 return Some(Message {
2617 index_range: start_ix..end_ix,
2618 offset_range: message_start..message_end,
2619 anchor_range: message_anchor.start..message_end_anchor,
2620 id: message_anchor.id,
2621 role: metadata.role,
2622 status: metadata.status.clone(),
2623 cache: metadata.cache.clone(),
2624 image_offsets,
2625 });
2626 }
2627 None
2628 })
2629 }
2630
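    // Saves the context to disk (optionally after a debounce), writing it under a filename
    // derived from its summary and removing the previous file if the name changed.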
2631 pub fn save(
2632 &mut self,
2633 debounce: Option<Duration>,
2634 fs: Arc<dyn Fs>,
2635 cx: &mut ModelContext<Context>,
2636 ) {
2637 if self.replica_id() != ReplicaId::default() {
2638 // Prevent saving a remote context for now.
2639 return;
2640 }
2641
2642 self.pending_save = cx.spawn(|this, mut cx| async move {
2643 if let Some(debounce) = debounce {
2644 cx.background_executor().timer(debounce).await;
2645 }
2646
2647 let (old_path, summary) = this.read_with(&cx, |this, _| {
2648 let path = this.path.clone();
2649 let summary = if let Some(summary) = this.summary.as_ref() {
2650 if summary.done {
2651 Some(summary.text.clone())
2652 } else {
2653 None
2654 }
2655 } else {
2656 None
2657 };
2658 (path, summary)
2659 })?;
2660
2661 if let Some(summary) = summary {
2662 this.read_with(&cx, |this, cx| this.serialize_images(fs.clone(), cx))?
2663 .await;
2664
2665 let context = this.read_with(&cx, |this, cx| this.serialize(cx))?;
2666 let mut discriminant = 1;
2667 let mut new_path;
2668 loop {
2669 new_path = contexts_dir().join(&format!(
2670 "{} - {}.zed.json",
2671 summary.trim(),
2672 discriminant
2673 ));
2674 if fs.is_file(&new_path).await {
2675 discriminant += 1;
2676 } else {
2677 break;
2678 }
2679 }
2680
2681 fs.create_dir(contexts_dir().as_ref()).await?;
2682 fs.atomic_write(new_path.clone(), serde_json::to_string(&context).unwrap())
2683 .await?;
2684 if let Some(old_path) = old_path {
2685 if new_path != old_path {
2686 fs.remove_file(
2687 &old_path,
2688 RemoveOptions {
2689 recursive: false,
2690 ignore_if_not_exists: true,
2691 },
2692 )
2693 .await?;
2694 }
2695 }
2696
2697 this.update(&mut cx, |this, _| this.path = Some(new_path))?;
2698 }
2699
2700 Ok(())
2701 });
2702 }
2703
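    // Writes each image's encoded data to the context images directory as
    // `<id>.png.base64`, skipping files that already exist.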
2704 pub fn serialize_images(&self, fs: Arc<dyn Fs>, cx: &AppContext) -> Task<()> {
2705 let mut images_to_save = self
2706 .images
2707 .iter()
2708 .map(|(id, (_, llm_image))| {
2709 let fs = fs.clone();
2710 let llm_image = llm_image.clone();
2711 let id = *id;
2712 async move {
2713 if let Some(llm_image) = llm_image.await {
2714 let path: PathBuf =
2715 context_images_dir().join(&format!("{}.png.base64", id));
2716 if fs
2717 .metadata(path.as_path())
2718 .await
2719 .log_err()
2720 .flatten()
2721 .is_none()
2722 {
2723 fs.atomic_write(path, llm_image.source.to_string())
2724 .await
2725 .log_err();
2726 }
2727 }
2728 }
2729 })
2730 .collect::<FuturesUnordered<_>>();
2731 cx.background_executor().spawn(async move {
2732 if fs
2733 .create_dir(context_images_dir().as_ref())
2734 .await
2735 .log_err()
2736 .is_some()
2737 {
2738 while let Some(_) = images_to_save.next().await {}
2739 }
2740 })
2741 }
2742
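    // Overwrites the summary with user-provided text and marks it as done.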
2743 pub(crate) fn custom_summary(&mut self, custom_summary: String, cx: &mut ModelContext<Self>) {
2744 let timestamp = self.next_timestamp();
2745 let summary = self.summary.get_or_insert(ContextSummary::default());
2746 summary.timestamp = timestamp;
2747 summary.done = true;
2748 summary.text = custom_summary;
2749 cx.emit(ContextEvent::SummaryChanged);
2750 }
2751}
2752
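// Version vectors for a context's operation log and its underlying text buffer.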
2753#[derive(Debug, Default)]
2754pub struct ContextVersion {
2755 context: clock::Global,
2756 buffer: clock::Global,
2757}
2758
2759impl ContextVersion {
2760 pub fn from_proto(proto: &proto::ContextVersion) -> Self {
2761 Self {
2762 context: language::proto::deserialize_version(&proto.context_version),
2763 buffer: language::proto::deserialize_version(&proto.buffer_version),
2764 }
2765 }
2766
2767 pub fn to_proto(&self, context_id: ContextId) -> proto::ContextVersion {
2768 proto::ContextVersion {
2769 context_id: context_id.to_proto(),
2770 context_version: language::proto::serialize_version(&self.context),
2771 buffer_version: language::proto::serialize_version(&self.buffer),
2772 }
2773 }
2774}
2775
2776#[derive(Debug, Clone)]
2777pub struct PendingSlashCommand {
2778 pub name: String,
2779 pub arguments: SmallVec<[String; 3]>,
2780 pub status: PendingSlashCommandStatus,
2781 pub source_range: Range<language::Anchor>,
2782}
2783
2784#[derive(Debug, Clone)]
2785pub enum PendingSlashCommandStatus {
2786 Idle,
2787 Running { _task: Shared<Task<()>> },
2788 Error(String),
2789}
2790
2791#[derive(Debug, Clone)]
2792pub struct PendingToolUse {
2793 pub id: String,
2794 pub name: String,
2795 pub input: serde_json::Value,
2796 pub status: PendingToolUseStatus,
2797 pub source_range: Range<language::Anchor>,
2798}
2799
2800#[derive(Debug, Clone)]
2801pub enum PendingToolUseStatus {
2802 Idle,
2803 Running { _task: Shared<Task<()>> },
2804 Error(String),
2805}
2806
2807#[derive(Serialize, Deserialize)]
2808pub struct SavedMessage {
2809 pub id: MessageId,
2810 pub start: usize,
2811 pub metadata: MessageMetadata,
2812 #[serde(default)]
2813 // This is defaulted for backwards compatibility with JSON files created before August 2024. We didn't always have this field.
2814 pub image_offsets: Vec<(usize, u64)>,
2815}
2816
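// Serialized form of a context as written to disk (current format version 0.4.0).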
2817#[derive(Serialize, Deserialize)]
2818pub struct SavedContext {
2819 pub id: Option<ContextId>,
2820 pub zed: String,
2821 pub version: String,
2822 pub text: String,
2823 pub messages: Vec<SavedMessage>,
2824 pub summary: String,
2825 pub slash_command_output_sections:
2826 Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
2827}
2828
2829impl SavedContext {
2830 pub const VERSION: &'static str = "0.4.0";
2831
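    // Deserializes a saved context, upgrading older on-disk formats (0.1.0 through 0.3.0)
    // to the current version.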
2832 pub fn from_json(json: &str) -> Result<Self> {
2833 let saved_context_json = serde_json::from_str::<serde_json::Value>(json)?;
2834 match saved_context_json
2835 .get("version")
2836 .ok_or_else(|| anyhow!("version not found"))?
2837 {
2838 serde_json::Value::String(version) => match version.as_str() {
2839 SavedContext::VERSION => {
2840 Ok(serde_json::from_value::<SavedContext>(saved_context_json)?)
2841 }
2842 SavedContextV0_3_0::VERSION => {
2843 let saved_context =
2844 serde_json::from_value::<SavedContextV0_3_0>(saved_context_json)?;
2845 Ok(saved_context.upgrade())
2846 }
2847 SavedContextV0_2_0::VERSION => {
2848 let saved_context =
2849 serde_json::from_value::<SavedContextV0_2_0>(saved_context_json)?;
2850 Ok(saved_context.upgrade())
2851 }
2852 SavedContextV0_1_0::VERSION => {
2853 let saved_context =
2854 serde_json::from_value::<SavedContextV0_1_0>(saved_context_json)?;
2855 Ok(saved_context.upgrade())
2856 }
2857 _ => Err(anyhow!("unrecognized saved context version: {}", version)),
2858 },
2859 _ => Err(anyhow!("version not found on saved context")),
2860 }
2861 }
2862
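    // Converts the saved context into the sequence of context operations needed to
    // reconstruct it: message insertions, metadata for the first message, the slash
    // command output sections, and the summary.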
2863 fn into_ops(
2864 self,
2865 buffer: &Model<Buffer>,
2866 cx: &mut ModelContext<Context>,
2867 ) -> Vec<ContextOperation> {
2868 let mut operations = Vec::new();
2869 let mut version = clock::Global::new();
2870 let mut next_timestamp = clock::Lamport::new(ReplicaId::default());
2871
2872 let mut first_message_metadata = None;
2873 for message in self.messages {
2874 if message.id == MessageId(clock::Lamport::default()) {
2875 first_message_metadata = Some(message.metadata);
2876 } else {
2877 operations.push(ContextOperation::InsertMessage {
2878 anchor: MessageAnchor {
2879 id: message.id,
2880 start: buffer.read(cx).anchor_before(message.start),
2881 },
2882 metadata: MessageMetadata {
2883 role: message.metadata.role,
2884 status: message.metadata.status,
2885 timestamp: message.metadata.timestamp,
2886 cache: None,
2887 },
2888 version: version.clone(),
2889 });
2890 version.observe(message.id.0);
2891 next_timestamp.observe(message.id.0);
2892 }
2893 }
2894
2895 if let Some(metadata) = first_message_metadata {
2896 let timestamp = next_timestamp.tick();
2897 operations.push(ContextOperation::UpdateMessage {
2898 message_id: MessageId(clock::Lamport::default()),
2899 metadata: MessageMetadata {
2900 role: metadata.role,
2901 status: metadata.status,
2902 timestamp,
2903 cache: None,
2904 },
2905 version: version.clone(),
2906 });
2907 version.observe(timestamp);
2908 }
2909
2910 let timestamp = next_timestamp.tick();
2911 operations.push(ContextOperation::SlashCommandFinished {
2912 id: SlashCommandId(timestamp),
2913 output_range: language::Anchor::MIN..language::Anchor::MAX,
2914 sections: self
2915 .slash_command_output_sections
2916 .into_iter()
2917 .map(|section| {
2918 let buffer = buffer.read(cx);
2919 SlashCommandOutputSection {
2920 range: buffer.anchor_after(section.range.start)
2921 ..buffer.anchor_before(section.range.end),
2922 icon: section.icon,
2923 label: section.label,
2924 }
2925 })
2926 .collect(),
2927 version: version.clone(),
2928 });
2929 version.observe(timestamp);
2930
2931 let timestamp = next_timestamp.tick();
2932 operations.push(ContextOperation::UpdateSummary {
2933 summary: ContextSummary {
2934 text: self.summary,
2935 done: true,
2936 timestamp,
2937 },
2938 version: version.clone(),
2939 });
2940 version.observe(timestamp);
2941
2942 operations
2943 }
2944}
2945
2946#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
2947struct SavedMessageIdPreV0_4_0(usize);
2948
2949#[derive(Serialize, Deserialize)]
2950struct SavedMessagePreV0_4_0 {
2951 id: SavedMessageIdPreV0_4_0,
2952 start: usize,
2953}
2954
2955#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
2956struct SavedMessageMetadataPreV0_4_0 {
2957 role: Role,
2958 status: MessageStatus,
2959}
2960
2961#[derive(Serialize, Deserialize)]
2962struct SavedContextV0_3_0 {
2963 id: Option<ContextId>,
2964 zed: String,
2965 version: String,
2966 text: String,
2967 messages: Vec<SavedMessagePreV0_4_0>,
2968 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
2969 summary: String,
2970 slash_command_output_sections: Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
2971}
2972
2973impl SavedContextV0_3_0 {
2974 const VERSION: &'static str = "0.3.0";
2975
2976 fn upgrade(self) -> SavedContext {
2977 SavedContext {
2978 id: self.id,
2979 zed: self.zed,
2980 version: SavedContext::VERSION.into(),
2981 text: self.text,
2982 messages: self
2983 .messages
2984 .into_iter()
2985 .filter_map(|message| {
2986 let metadata = self.message_metadata.get(&message.id)?;
2987 let timestamp = clock::Lamport {
2988 replica_id: ReplicaId::default(),
2989 value: message.id.0 as u32,
2990 };
2991 Some(SavedMessage {
2992 id: MessageId(timestamp),
2993 start: message.start,
2994 metadata: MessageMetadata {
2995 role: metadata.role,
2996 status: metadata.status.clone(),
2997 timestamp,
2998 cache: None,
2999 },
3000 image_offsets: Vec::new(),
3001 })
3002 })
3003 .collect(),
3004 summary: self.summary,
3005 slash_command_output_sections: self.slash_command_output_sections,
3006 }
3007 }
3008}
3009
3010#[derive(Serialize, Deserialize)]
3011struct SavedContextV0_2_0 {
3012 id: Option<ContextId>,
3013 zed: String,
3014 version: String,
3015 text: String,
3016 messages: Vec<SavedMessagePreV0_4_0>,
3017 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3018 summary: String,
3019}
3020
3021impl SavedContextV0_2_0 {
3022 const VERSION: &'static str = "0.2.0";
3023
3024 fn upgrade(self) -> SavedContext {
3025 SavedContextV0_3_0 {
3026 id: self.id,
3027 zed: self.zed,
3028 version: SavedContextV0_3_0::VERSION.to_string(),
3029 text: self.text,
3030 messages: self.messages,
3031 message_metadata: self.message_metadata,
3032 summary: self.summary,
3033 slash_command_output_sections: Vec::new(),
3034 }
3035 .upgrade()
3036 }
3037}
3038
3039#[derive(Serialize, Deserialize)]
3040struct SavedContextV0_1_0 {
3041 id: Option<ContextId>,
3042 zed: String,
3043 version: String,
3044 text: String,
3045 messages: Vec<SavedMessagePreV0_4_0>,
3046 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3047 summary: String,
3048 api_url: Option<String>,
3049 model: OpenAiModel,
3050}
3051
3052impl SavedContextV0_1_0 {
3053 const VERSION: &'static str = "0.1.0";
3054
3055 fn upgrade(self) -> SavedContext {
3056 SavedContextV0_2_0 {
3057 id: self.id,
3058 zed: self.zed,
3059 version: SavedContextV0_2_0::VERSION.to_string(),
3060 text: self.text,
3061 messages: self.messages,
3062 message_metadata: self.message_metadata,
3063 summary: self.summary,
3064 }
3065 .upgrade()
3066 }
3067}
3068
3069#[derive(Clone)]
3070pub struct SavedContextMetadata {
3071 pub title: String,
3072 pub path: PathBuf,
3073 pub mtime: chrono::DateTime<chrono::Local>,
3074}