1#[cfg(test)]
2mod context_tests;
3
4use crate::{
5 prompts::PromptBuilder, slash_command::SlashCommandLine, MessageId, MessageStatus,
6 WorkflowStep, WorkflowStepEdit, WorkflowStepResolution, WorkflowSuggestionGroup,
7};
8use anyhow::{anyhow, Context as _, Result};
9use assistant_slash_command::{
10 SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry,
11};
12use assistant_tool::ToolRegistry;
13use client::{self, proto, telemetry::Telemetry};
14use clock::ReplicaId;
15use collections::{HashMap, HashSet};
16use feature_flags::{FeatureFlag, FeatureFlagAppExt};
17use fs::{Fs, RemoveOptions};
18use futures::{
19 future::{self, Shared},
20 stream::FuturesUnordered,
21 FutureExt, StreamExt,
22};
23use gpui::{
24 AppContext, AsyncAppContext, Context as _, EventEmitter, Image, Model, ModelContext,
25 RenderImage, SharedString, Subscription, Task,
26};
27
28use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset};
29use language_model::{
30 LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionEvent,
31 LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
32 LanguageModelRequestTool, MessageContent, Role,
33};
34use open_ai::Model as OpenAiModel;
35use paths::{context_images_dir, contexts_dir};
36use project::Project;
37use serde::{Deserialize, Serialize};
38use smallvec::SmallVec;
39use std::{
40 cmp::{self, max, Ordering},
41 collections::hash_map,
42 fmt::Debug,
43 iter, mem,
44 ops::Range,
45 path::{Path, PathBuf},
46 str::FromStr as _,
47 sync::Arc,
48 time::{Duration, Instant},
49};
50use telemetry_events::AssistantKind;
51use text::BufferSnapshot;
52use util::{post_inc, ResultExt, TryFutureExt};
53use uuid::Uuid;
54
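/// A stable identifier for a context, backed by a freshly generated UUID and
/// shared with collaborators through its proto representation.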
55#[derive(Clone, Eq, PartialEq, Hash, PartialOrd, Ord, Serialize, Deserialize)]
56pub struct ContextId(String);
57
58impl ContextId {
59 pub fn new() -> Self {
60 Self(Uuid::new_v4().to_string())
61 }
62
63 pub fn from_proto(id: String) -> Self {
64 Self(id)
65 }
66
67 pub fn to_proto(&self) -> String {
68 self.0.clone()
69 }
70}
71
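/// An operation that replicates a change to a context between collaborators.
/// Every non-buffer variant carries the context version at which it was
/// created, so replicas can defer it until its dependencies have been applied.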
72#[derive(Clone, Debug)]
73pub enum ContextOperation {
74 InsertMessage {
75 anchor: MessageAnchor,
76 metadata: MessageMetadata,
77 version: clock::Global,
78 },
79 UpdateMessage {
80 message_id: MessageId,
81 metadata: MessageMetadata,
82 version: clock::Global,
83 },
84 UpdateSummary {
85 summary: ContextSummary,
86 version: clock::Global,
87 },
88 SlashCommandFinished {
89 id: SlashCommandId,
90 output_range: Range<language::Anchor>,
91 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
92 version: clock::Global,
93 },
94 BufferOperation(language::Operation),
95}
96
97impl ContextOperation {
98 pub fn from_proto(op: proto::ContextOperation) -> Result<Self> {
99 match op.variant.context("invalid variant")? {
100 proto::context_operation::Variant::InsertMessage(insert) => {
101 let message = insert.message.context("invalid message")?;
102 let id = MessageId(language::proto::deserialize_timestamp(
103 message.id.context("invalid id")?,
104 ));
105 Ok(Self::InsertMessage {
106 anchor: MessageAnchor {
107 id,
108 start: language::proto::deserialize_anchor(
109 message.start.context("invalid anchor")?,
110 )
111 .context("invalid anchor")?,
112 },
113 metadata: MessageMetadata {
114 role: Role::from_proto(message.role),
115 status: MessageStatus::from_proto(
116 message.status.context("invalid status")?,
117 ),
118 timestamp: id.0,
119 cache: None,
120 },
121 version: language::proto::deserialize_version(&insert.version),
122 })
123 }
124 proto::context_operation::Variant::UpdateMessage(update) => Ok(Self::UpdateMessage {
125 message_id: MessageId(language::proto::deserialize_timestamp(
126 update.message_id.context("invalid message id")?,
127 )),
128 metadata: MessageMetadata {
129 role: Role::from_proto(update.role),
130 status: MessageStatus::from_proto(update.status.context("invalid status")?),
131 timestamp: language::proto::deserialize_timestamp(
132 update.timestamp.context("invalid timestamp")?,
133 ),
134 cache: None,
135 },
136 version: language::proto::deserialize_version(&update.version),
137 }),
138 proto::context_operation::Variant::UpdateSummary(update) => Ok(Self::UpdateSummary {
139 summary: ContextSummary {
140 text: update.summary,
141 done: update.done,
142 timestamp: language::proto::deserialize_timestamp(
143 update.timestamp.context("invalid timestamp")?,
144 ),
145 },
146 version: language::proto::deserialize_version(&update.version),
147 }),
148 proto::context_operation::Variant::SlashCommandFinished(finished) => {
149 Ok(Self::SlashCommandFinished {
150 id: SlashCommandId(language::proto::deserialize_timestamp(
151 finished.id.context("invalid id")?,
152 )),
153 output_range: language::proto::deserialize_anchor_range(
154 finished.output_range.context("invalid range")?,
155 )?,
156 sections: finished
157 .sections
158 .into_iter()
159 .map(|section| {
160 Ok(SlashCommandOutputSection {
161 range: language::proto::deserialize_anchor_range(
162 section.range.context("invalid range")?,
163 )?,
164 icon: section.icon_name.parse()?,
165 label: section.label.into(),
166 })
167 })
168 .collect::<Result<Vec<_>>>()?,
169 version: language::proto::deserialize_version(&finished.version),
170 })
171 }
172 proto::context_operation::Variant::BufferOperation(op) => Ok(Self::BufferOperation(
173 language::proto::deserialize_operation(
174 op.operation.context("invalid buffer operation")?,
175 )?,
176 )),
177 }
178 }
179
180 pub fn to_proto(&self) -> proto::ContextOperation {
181 match self {
182 Self::InsertMessage {
183 anchor,
184 metadata,
185 version,
186 } => proto::ContextOperation {
187 variant: Some(proto::context_operation::Variant::InsertMessage(
188 proto::context_operation::InsertMessage {
189 message: Some(proto::ContextMessage {
190 id: Some(language::proto::serialize_timestamp(anchor.id.0)),
191 start: Some(language::proto::serialize_anchor(&anchor.start)),
192 role: metadata.role.to_proto() as i32,
193 status: Some(metadata.status.to_proto()),
194 }),
195 version: language::proto::serialize_version(version),
196 },
197 )),
198 },
199 Self::UpdateMessage {
200 message_id,
201 metadata,
202 version,
203 } => proto::ContextOperation {
204 variant: Some(proto::context_operation::Variant::UpdateMessage(
205 proto::context_operation::UpdateMessage {
206 message_id: Some(language::proto::serialize_timestamp(message_id.0)),
207 role: metadata.role.to_proto() as i32,
208 status: Some(metadata.status.to_proto()),
209 timestamp: Some(language::proto::serialize_timestamp(metadata.timestamp)),
210 version: language::proto::serialize_version(version),
211 },
212 )),
213 },
214 Self::UpdateSummary { summary, version } => proto::ContextOperation {
215 variant: Some(proto::context_operation::Variant::UpdateSummary(
216 proto::context_operation::UpdateSummary {
217 summary: summary.text.clone(),
218 done: summary.done,
219 timestamp: Some(language::proto::serialize_timestamp(summary.timestamp)),
220 version: language::proto::serialize_version(version),
221 },
222 )),
223 },
224 Self::SlashCommandFinished {
225 id,
226 output_range,
227 sections,
228 version,
229 } => proto::ContextOperation {
230 variant: Some(proto::context_operation::Variant::SlashCommandFinished(
231 proto::context_operation::SlashCommandFinished {
232 id: Some(language::proto::serialize_timestamp(id.0)),
233 output_range: Some(language::proto::serialize_anchor_range(
234 output_range.clone(),
235 )),
236 sections: sections
237 .iter()
238 .map(|section| {
239 let icon_name: &'static str = section.icon.into();
240 proto::SlashCommandOutputSection {
241 range: Some(language::proto::serialize_anchor_range(
242 section.range.clone(),
243 )),
244 icon_name: icon_name.to_string(),
245 label: section.label.to_string(),
246 }
247 })
248 .collect(),
249 version: language::proto::serialize_version(version),
250 },
251 )),
252 },
253 Self::BufferOperation(operation) => proto::ContextOperation {
254 variant: Some(proto::context_operation::Variant::BufferOperation(
255 proto::context_operation::BufferOperation {
256 operation: Some(language::proto::serialize_operation(operation)),
257 },
258 )),
259 },
260 }
261 }
262
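    /// Returns the Lamport timestamp that identifies this operation. Buffer
    /// operations are ordered by the underlying text buffer and therefore have
    /// no timestamp of their own.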
263 fn timestamp(&self) -> clock::Lamport {
264 match self {
265 Self::InsertMessage { anchor, .. } => anchor.id.0,
266 Self::UpdateMessage { metadata, .. } => metadata.timestamp,
267 Self::UpdateSummary { summary, .. } => summary.timestamp,
268 Self::SlashCommandFinished { id, .. } => id.0,
269 Self::BufferOperation(_) => {
270 panic!("reading the timestamp of a buffer operation is not supported")
271 }
272 }
273 }
274
    /// Returns the context version at which this operation was created, used to
    /// decide when the operation can be applied.
276 pub fn version(&self) -> &clock::Global {
277 match self {
278 Self::InsertMessage { version, .. }
279 | Self::UpdateMessage { version, .. }
280 | Self::UpdateSummary { version, .. }
281 | Self::SlashCommandFinished { version, .. } => version,
282 Self::BufferOperation(_) => {
283 panic!("reading the version of a buffer operation is not supported")
284 }
285 }
286 }
287}
288
289#[derive(Debug, Clone)]
290pub enum ContextEvent {
291 ShowAssistError(SharedString),
292 MessagesEdited,
293 SummaryChanged,
294 StreamedCompletion,
295 WorkflowStepsUpdated {
296 removed: Vec<Range<language::Anchor>>,
297 updated: Vec<Range<language::Anchor>>,
298 },
299 PendingSlashCommandsUpdated {
300 removed: Vec<Range<language::Anchor>>,
301 updated: Vec<PendingSlashCommand>,
302 },
303 SlashCommandFinished {
304 output_range: Range<language::Anchor>,
305 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
306 run_commands_in_output: bool,
307 expand_result: bool,
308 },
309 Operation(ContextOperation),
310}
311
312#[derive(Clone, Default, Debug)]
313pub struct ContextSummary {
314 pub text: String,
315 done: bool,
316 timestamp: clock::Lamport,
317}
318
319#[derive(Clone, Debug, Eq, PartialEq)]
320pub struct MessageAnchor {
321 pub id: MessageId,
322 pub start: language::Anchor,
323}
324
325#[derive(Clone, Debug, Eq, PartialEq)]
326pub enum CacheStatus {
327 Pending,
328 Cached,
329}
330
331#[derive(Clone, Debug, Eq, PartialEq)]
332pub struct MessageCacheMetadata {
333 pub is_anchor: bool,
334 pub is_final_anchor: bool,
335 pub status: CacheStatus,
336 pub cached_at: clock::Global,
337}
338
339#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
340pub struct MessageMetadata {
341 pub role: Role,
342 pub status: MessageStatus,
343 pub(crate) timestamp: clock::Lamport,
344 #[serde(skip)]
345 pub cache: Option<MessageCacheMetadata>,
346}
347
348impl From<&Message> for MessageMetadata {
349 fn from(message: &Message) -> Self {
350 Self {
351 role: message.role,
352 status: message.status.clone(),
353 timestamp: message.id.0,
354 cache: message.cache.clone(),
355 }
356 }
357}
358
359impl MessageMetadata {
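    /// Reports whether this message's cached prefix is still valid, i.e. the
    /// buffer has seen no edits within the given range since the cache was
    /// recorded.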
    pub fn is_cache_valid(&self, buffer: &BufferSnapshot, range: &Range<usize>) -> bool {
        match &self.cache {
            Some(MessageCacheMetadata { cached_at, .. }) => !buffer.has_edits_since_in_range(
                cached_at,
                Range {
                    start: buffer.anchor_at(range.start, Bias::Right),
                    end: buffer.anchor_at(range.end, Bias::Left),
                },
            ),
            _ => false,
        }
    }
373}
374
375#[derive(Clone, Debug)]
376pub struct MessageImage {
377 image_id: u64,
378 image: Shared<Task<Option<LanguageModelImage>>>,
379}
380
381impl PartialEq for MessageImage {
382 fn eq(&self, other: &Self) -> bool {
383 self.image_id == other.image_id
384 }
385}
386
387impl Eq for MessageImage {}
388
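/// A message resolved against the buffer: its offset and anchor ranges, role,
/// status, cache state, and the offsets of any inline images.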
389#[derive(Clone, Debug)]
390pub struct Message {
391 pub image_offsets: SmallVec<[(usize, MessageImage); 1]>,
392 pub offset_range: Range<usize>,
393 pub index_range: Range<usize>,
394 pub anchor_range: Range<language::Anchor>,
395 pub id: MessageId,
396 pub role: Role,
397 pub status: MessageStatus,
398 pub cache: Option<MessageCacheMetadata>,
399}
400
401impl Message {
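    /// Converts this message into a request message for the language model,
    /// interleaving buffer text with any images that have finished loading.
    /// Returns `None` if the message has no non-empty content.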
402 fn to_request_message(&self, buffer: &Buffer) -> Option<LanguageModelRequestMessage> {
403 let mut content = Vec::new();
404
405 let mut range_start = self.offset_range.start;
406 for (image_offset, message_image) in self.image_offsets.iter() {
407 if *image_offset != range_start {
408 if let Some(text) = Self::collect_text_content(buffer, range_start..*image_offset) {
409 content.push(text);
410 }
411 }
412
413 if let Some(image) = message_image.image.clone().now_or_never().flatten() {
414 content.push(language_model::MessageContent::Image(image));
415 }
416
417 range_start = *image_offset;
418 }
419 if range_start != self.offset_range.end {
420 if let Some(text) =
421 Self::collect_text_content(buffer, range_start..self.offset_range.end)
422 {
423 content.push(text);
424 }
425 }
426
427 if content.is_empty() {
428 return None;
429 }
430
431 Some(LanguageModelRequestMessage {
432 role: self.role,
433 content,
434 cache: self.cache.as_ref().map_or(false, |cache| cache.is_anchor),
435 })
436 }
437
438 fn collect_text_content(buffer: &Buffer, range: Range<usize>) -> Option<MessageContent> {
439 let text: String = buffer.text_for_range(range.clone()).collect();
440 if text.trim().is_empty() {
441 None
442 } else {
443 Some(MessageContent::Text(text))
444 }
445 }
446}
447
448#[derive(Clone, Debug)]
449pub struct ImageAnchor {
450 pub anchor: language::Anchor,
451 pub image_id: u64,
452 pub render_image: Arc<RenderImage>,
453 pub image: Shared<Task<Option<LanguageModelImage>>>,
454}
455
456struct PendingCompletion {
457 id: usize,
458 assistant_message_id: MessageId,
459 _task: Task<()>,
460}
461
462#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
463pub struct SlashCommandId(clock::Lamport);
464
465#[derive(Clone, Debug)]
466pub struct XmlTag {
467 pub kind: XmlTagKind,
468 pub range: Range<text::Anchor>,
469 pub is_open_tag: bool,
470}
471
472#[derive(Copy, Clone, Debug, strum::EnumString, PartialEq, Eq, strum::AsRefStr)]
473#[strum(serialize_all = "snake_case")]
474pub enum XmlTagKind {
475 Step,
476 Edit,
477 Path,
478 Search,
479 Within,
480 Operation,
481 Description,
482}
483
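/// The state of a single assistant conversation: a Markdown buffer annotated
/// with message boundaries, plus slash commands, workflow steps, pending tool
/// uses, a summary, and the versioning data needed for collaboration.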
484pub struct Context {
485 id: ContextId,
486 timestamp: clock::Lamport,
487 version: clock::Global,
488 pending_ops: Vec<ContextOperation>,
489 operations: Vec<ContextOperation>,
490 buffer: Model<Buffer>,
491 pending_slash_commands: Vec<PendingSlashCommand>,
492 edits_since_last_parse: language::Subscription,
493 finished_slash_commands: HashSet<SlashCommandId>,
494 slash_command_output_sections: Vec<SlashCommandOutputSection<language::Anchor>>,
495 pending_tool_uses_by_id: HashMap<String, PendingToolUse>,
496 message_anchors: Vec<MessageAnchor>,
497 images: HashMap<u64, (Arc<RenderImage>, Shared<Task<Option<LanguageModelImage>>>)>,
498 image_anchors: Vec<ImageAnchor>,
499 messages_metadata: HashMap<MessageId, MessageMetadata>,
500 summary: Option<ContextSummary>,
501 pending_summary: Task<Option<()>>,
502 completion_count: usize,
503 pending_completions: Vec<PendingCompletion>,
504 token_count: Option<usize>,
505 pending_token_count: Task<Option<()>>,
506 pending_save: Task<Result<()>>,
507 pending_cache_warming_task: Task<Option<()>>,
508 path: Option<PathBuf>,
509 _subscriptions: Vec<Subscription>,
510 telemetry: Option<Arc<Telemetry>>,
511 language_registry: Arc<LanguageRegistry>,
512 workflow_steps: Vec<WorkflowStep>,
513 xml_tags: Vec<XmlTag>,
514 project: Option<Model<Project>>,
515 prompt_builder: Arc<PromptBuilder>,
516}
517
518trait ContextAnnotation {
519 fn range(&self) -> &Range<language::Anchor>;
520}
521
522impl ContextAnnotation for PendingSlashCommand {
523 fn range(&self) -> &Range<language::Anchor> {
524 &self.source_range
525 }
526}
527
528impl ContextAnnotation for WorkflowStep {
529 fn range(&self) -> &Range<language::Anchor> {
530 &self.range
531 }
532}
533
534impl ContextAnnotation for XmlTag {
535 fn range(&self) -> &Range<language::Anchor> {
536 &self.range
537 }
538}
539
540impl EventEmitter<ContextEvent> for Context {}
541
542impl Context {
543 pub fn local(
544 language_registry: Arc<LanguageRegistry>,
545 project: Option<Model<Project>>,
546 telemetry: Option<Arc<Telemetry>>,
547 prompt_builder: Arc<PromptBuilder>,
548 cx: &mut ModelContext<Self>,
549 ) -> Self {
550 Self::new(
551 ContextId::new(),
552 ReplicaId::default(),
553 language::Capability::ReadWrite,
554 language_registry,
555 prompt_builder,
556 project,
557 telemetry,
558 cx,
559 )
560 }
561
562 #[allow(clippy::too_many_arguments)]
563 pub fn new(
564 id: ContextId,
565 replica_id: ReplicaId,
566 capability: language::Capability,
567 language_registry: Arc<LanguageRegistry>,
568 prompt_builder: Arc<PromptBuilder>,
569 project: Option<Model<Project>>,
570 telemetry: Option<Arc<Telemetry>>,
571 cx: &mut ModelContext<Self>,
572 ) -> Self {
573 let buffer = cx.new_model(|_cx| {
574 let mut buffer = Buffer::remote(
575 language::BufferId::new(1).unwrap(),
576 replica_id,
577 capability,
578 "",
579 );
580 buffer.set_language_registry(language_registry.clone());
581 buffer
582 });
583 let edits_since_last_slash_command_parse =
584 buffer.update(cx, |buffer, _| buffer.subscribe());
585 let mut this = Self {
586 id,
587 timestamp: clock::Lamport::new(replica_id),
588 version: clock::Global::new(),
589 pending_ops: Vec::new(),
590 operations: Vec::new(),
591 message_anchors: Default::default(),
592 image_anchors: Default::default(),
593 images: Default::default(),
594 messages_metadata: Default::default(),
595 pending_slash_commands: Vec::new(),
596 finished_slash_commands: HashSet::default(),
597 pending_tool_uses_by_id: HashMap::default(),
598 slash_command_output_sections: Vec::new(),
599 edits_since_last_parse: edits_since_last_slash_command_parse,
600 summary: None,
601 pending_summary: Task::ready(None),
602 completion_count: Default::default(),
603 pending_completions: Default::default(),
604 token_count: None,
605 pending_token_count: Task::ready(None),
606 pending_cache_warming_task: Task::ready(None),
607 _subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)],
608 pending_save: Task::ready(Ok(())),
609 path: None,
610 buffer,
611 telemetry,
612 project,
613 language_registry,
614 workflow_steps: Vec::new(),
615 xml_tags: Vec::new(),
616 prompt_builder,
617 };
618
619 let first_message_id = MessageId(clock::Lamport {
620 replica_id: 0,
621 value: 0,
622 });
623 let message = MessageAnchor {
624 id: first_message_id,
625 start: language::Anchor::MIN,
626 };
627 this.messages_metadata.insert(
628 first_message_id,
629 MessageMetadata {
630 role: Role::User,
631 status: MessageStatus::Done,
632 timestamp: first_message_id.0,
633 cache: None,
634 },
635 );
636 this.message_anchors.push(message);
637
638 this.set_language(cx);
639 this.count_remaining_tokens(cx);
640 this
641 }
642
643 pub(crate) fn serialize(&self, cx: &AppContext) -> SavedContext {
644 let buffer = self.buffer.read(cx);
645 SavedContext {
646 id: Some(self.id.clone()),
647 zed: "context".into(),
648 version: SavedContext::VERSION.into(),
649 text: buffer.text(),
650 messages: self
651 .messages(cx)
652 .map(|message| SavedMessage {
653 id: message.id,
654 start: message.offset_range.start,
655 metadata: self.messages_metadata[&message.id].clone(),
656 image_offsets: message
657 .image_offsets
658 .iter()
659 .map(|image_offset| (image_offset.0, image_offset.1.image_id))
660 .collect(),
661 })
662 .collect(),
663 summary: self
664 .summary
665 .as_ref()
666 .map(|summary| summary.text.clone())
667 .unwrap_or_default(),
668 slash_command_output_sections: self
669 .slash_command_output_sections
670 .iter()
671 .filter_map(|section| {
672 let range = section.range.to_offset(buffer);
673 if section.range.start.is_valid(buffer) && !range.is_empty() {
674 Some(assistant_slash_command::SlashCommandOutputSection {
675 range,
676 icon: section.icon,
677 label: section.label.clone(),
678 })
679 } else {
680 None
681 }
682 })
683 .collect(),
684 }
685 }
686
687 #[allow(clippy::too_many_arguments)]
688 pub fn deserialize(
689 saved_context: SavedContext,
690 path: PathBuf,
691 language_registry: Arc<LanguageRegistry>,
692 prompt_builder: Arc<PromptBuilder>,
693 project: Option<Model<Project>>,
694 telemetry: Option<Arc<Telemetry>>,
695 cx: &mut ModelContext<Self>,
696 ) -> Self {
        let id = saved_context.id.clone().unwrap_or_else(ContextId::new);
698 let mut this = Self::new(
699 id,
700 ReplicaId::default(),
701 language::Capability::ReadWrite,
702 language_registry,
703 prompt_builder,
704 project,
705 telemetry,
706 cx,
707 );
708 this.path = Some(path);
709 this.buffer.update(cx, |buffer, cx| {
710 buffer.set_text(saved_context.text.as_str(), cx)
711 });
712 let operations = saved_context.into_ops(&this.buffer, cx);
713 this.apply_ops(operations, cx).unwrap();
714 this
715 }
716
717 pub fn id(&self) -> &ContextId {
718 &self.id
719 }
720
721 pub fn replica_id(&self) -> ReplicaId {
722 self.timestamp.replica_id
723 }
724
725 pub fn version(&self, cx: &AppContext) -> ContextVersion {
726 ContextVersion {
727 context: self.version.clone(),
728 buffer: self.buffer.read(cx).version(),
729 }
730 }
731
732 pub fn set_capability(
733 &mut self,
734 capability: language::Capability,
735 cx: &mut ModelContext<Self>,
736 ) {
737 self.buffer
738 .update(cx, |buffer, cx| buffer.set_capability(capability, cx));
739 }
740
741 fn next_timestamp(&mut self) -> clock::Lamport {
742 let timestamp = self.timestamp.tick();
743 self.version.observe(timestamp);
744 timestamp
745 }
746
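    /// Serializes the buffer and context operations that the given version has
    /// not yet observed, so they can be sent to collaborators.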
747 pub fn serialize_ops(
748 &self,
749 since: &ContextVersion,
750 cx: &AppContext,
751 ) -> Task<Vec<proto::ContextOperation>> {
752 let buffer_ops = self
753 .buffer
754 .read(cx)
755 .serialize_ops(Some(since.buffer.clone()), cx);
756
757 let mut context_ops = self
758 .operations
759 .iter()
760 .filter(|op| !since.context.observed(op.timestamp()))
761 .cloned()
762 .collect::<Vec<_>>();
763 context_ops.extend(self.pending_ops.iter().cloned());
764
765 cx.background_executor().spawn(async move {
766 let buffer_ops = buffer_ops.await;
767 context_ops.sort_unstable_by_key(|op| op.timestamp());
768 buffer_ops
769 .into_iter()
770 .map(|op| proto::ContextOperation {
771 variant: Some(proto::context_operation::Variant::BufferOperation(
772 proto::context_operation::BufferOperation {
773 operation: Some(op),
774 },
775 )),
776 })
777 .chain(context_ops.into_iter().map(|op| op.to_proto()))
778 .collect()
779 })
780 }
781
782 pub fn apply_ops(
783 &mut self,
784 ops: impl IntoIterator<Item = ContextOperation>,
785 cx: &mut ModelContext<Self>,
786 ) -> Result<()> {
787 let mut buffer_ops = Vec::new();
788 for op in ops {
789 match op {
790 ContextOperation::BufferOperation(buffer_op) => buffer_ops.push(buffer_op),
                op => self.pending_ops.push(op),
792 }
793 }
794 self.buffer
795 .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx))?;
796 self.flush_ops(cx);
797
798 Ok(())
799 }
800
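    /// Applies any pending operations whose dependencies have been satisfied,
    /// in timestamp order, and re-queues the ones that still cannot be applied.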
801 fn flush_ops(&mut self, cx: &mut ModelContext<Context>) {
802 let mut changed_messages = HashSet::default();
803 let mut summary_changed = false;
804
805 self.pending_ops.sort_unstable_by_key(|op| op.timestamp());
806 for op in mem::take(&mut self.pending_ops) {
807 if !self.can_apply_op(&op, cx) {
808 self.pending_ops.push(op);
809 continue;
810 }
811
812 let timestamp = op.timestamp();
813 match op.clone() {
814 ContextOperation::InsertMessage {
815 anchor, metadata, ..
816 } => {
817 if self.messages_metadata.contains_key(&anchor.id) {
818 // We already applied this operation.
819 } else {
820 changed_messages.insert(anchor.id);
821 self.insert_message(anchor, metadata, cx);
822 }
823 }
824 ContextOperation::UpdateMessage {
825 message_id,
826 metadata: new_metadata,
827 ..
828 } => {
829 let metadata = self.messages_metadata.get_mut(&message_id).unwrap();
830 if new_metadata.timestamp > metadata.timestamp {
831 *metadata = new_metadata;
832 changed_messages.insert(message_id);
833 }
834 }
835 ContextOperation::UpdateSummary {
836 summary: new_summary,
837 ..
838 } => {
839 if self
840 .summary
841 .as_ref()
842 .map_or(true, |summary| new_summary.timestamp > summary.timestamp)
843 {
844 self.summary = Some(new_summary);
845 summary_changed = true;
846 }
847 }
848 ContextOperation::SlashCommandFinished {
849 id,
850 output_range,
851 sections,
852 ..
853 } => {
854 if self.finished_slash_commands.insert(id) {
855 let buffer = self.buffer.read(cx);
856 self.slash_command_output_sections
857 .extend(sections.iter().cloned());
858 self.slash_command_output_sections
859 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
860 cx.emit(ContextEvent::SlashCommandFinished {
861 output_range,
862 sections,
863 expand_result: false,
864 run_commands_in_output: false,
865 });
866 }
867 }
868 ContextOperation::BufferOperation(_) => unreachable!(),
869 }
870
871 self.version.observe(timestamp);
872 self.timestamp.observe(timestamp);
873 self.operations.push(op);
874 }
875
876 if !changed_messages.is_empty() {
877 self.message_roles_updated(changed_messages, cx);
878 cx.emit(ContextEvent::MessagesEdited);
879 cx.notify();
880 }
881
882 if summary_changed {
883 cx.emit(ContextEvent::SummaryChanged);
884 cx.notify();
885 }
886 }
887
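    /// Returns whether all of an operation's dependencies have been observed
    /// locally: its context version, plus any buffer anchors it refers to.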
888 fn can_apply_op(&self, op: &ContextOperation, cx: &AppContext) -> bool {
889 if !self.version.observed_all(op.version()) {
890 return false;
891 }
892
893 match op {
894 ContextOperation::InsertMessage { anchor, .. } => self
895 .buffer
896 .read(cx)
897 .version
898 .observed(anchor.start.timestamp),
899 ContextOperation::UpdateMessage { message_id, .. } => {
900 self.messages_metadata.contains_key(message_id)
901 }
902 ContextOperation::UpdateSummary { .. } => true,
903 ContextOperation::SlashCommandFinished {
904 output_range,
905 sections,
906 ..
907 } => {
908 let version = &self.buffer.read(cx).version;
909 sections
910 .iter()
                    .map(|section| &section.range)
912 .chain([output_range])
913 .all(|range| {
914 let observed_start = range.start == language::Anchor::MIN
915 || range.start == language::Anchor::MAX
916 || version.observed(range.start.timestamp);
917 let observed_end = range.end == language::Anchor::MIN
918 || range.end == language::Anchor::MAX
919 || version.observed(range.end.timestamp);
920 observed_start && observed_end
921 })
922 }
923 ContextOperation::BufferOperation(_) => {
924 panic!("buffer operations should always be applied")
925 }
926 }
927 }
928
929 fn push_op(&mut self, op: ContextOperation, cx: &mut ModelContext<Self>) {
930 self.operations.push(op.clone());
931 cx.emit(ContextEvent::Operation(op));
932 }
933
934 pub fn buffer(&self) -> &Model<Buffer> {
935 &self.buffer
936 }
937
938 pub fn language_registry(&self) -> Arc<LanguageRegistry> {
939 self.language_registry.clone()
940 }
941
942 pub fn project(&self) -> Option<Model<Project>> {
943 self.project.clone()
944 }
945
946 pub fn prompt_builder(&self) -> Arc<PromptBuilder> {
947 self.prompt_builder.clone()
948 }
949
950 pub fn path(&self) -> Option<&Path> {
951 self.path.as_deref()
952 }
953
954 pub fn summary(&self) -> Option<&ContextSummary> {
955 self.summary.as_ref()
956 }
957
958 pub(crate) fn workflow_step_containing(
959 &self,
960 offset: usize,
961 cx: &AppContext,
962 ) -> Option<&WorkflowStep> {
963 let buffer = self.buffer.read(cx);
964 let index = self
965 .workflow_steps
966 .binary_search_by(|step| {
967 let step_range = step.range.to_offset(&buffer);
968 if offset < step_range.start {
969 Ordering::Greater
970 } else if offset > step_range.end {
971 Ordering::Less
972 } else {
973 Ordering::Equal
974 }
975 })
976 .ok()?;
977 Some(&self.workflow_steps[index])
978 }
979
980 pub fn workflow_step_ranges(&self) -> impl Iterator<Item = Range<language::Anchor>> + '_ {
981 self.workflow_steps.iter().map(|step| step.range.clone())
982 }
983
984 pub(crate) fn workflow_step_for_range(
985 &self,
986 range: &Range<language::Anchor>,
987 cx: &AppContext,
988 ) -> Option<&WorkflowStep> {
989 let buffer = self.buffer.read(cx);
990 let index = self.workflow_step_index_for_range(range, buffer).ok()?;
991 Some(&self.workflow_steps[index])
992 }
993
994 fn workflow_step_index_for_range(
995 &self,
996 tagged_range: &Range<text::Anchor>,
997 buffer: &text::BufferSnapshot,
998 ) -> Result<usize, usize> {
999 self.workflow_steps
1000 .binary_search_by(|probe| probe.range.cmp(&tagged_range, buffer))
1001 }
1002
1003 pub fn pending_slash_commands(&self) -> &[PendingSlashCommand] {
1004 &self.pending_slash_commands
1005 }
1006
1007 pub fn slash_command_output_sections(&self) -> &[SlashCommandOutputSection<language::Anchor>] {
1008 &self.slash_command_output_sections
1009 }
1010
1011 pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> {
1012 self.pending_tool_uses_by_id.values().collect()
1013 }
1014
1015 pub fn get_tool_use_by_id(&self, id: &String) -> Option<&PendingToolUse> {
1016 self.pending_tool_uses_by_id.get(id)
1017 }
1018
1019 fn set_language(&mut self, cx: &mut ModelContext<Self>) {
1020 let markdown = self.language_registry.language_for_name("Markdown");
1021 cx.spawn(|this, mut cx| async move {
1022 let markdown = markdown.await?;
1023 this.update(&mut cx, |this, cx| {
1024 this.buffer
1025 .update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx));
1026 })
1027 })
1028 .detach_and_log_err(cx);
1029 }
1030
1031 fn handle_buffer_event(
1032 &mut self,
1033 _: Model<Buffer>,
1034 event: &language::Event,
1035 cx: &mut ModelContext<Self>,
1036 ) {
1037 match event {
1038 language::Event::Operation(operation) => cx.emit(ContextEvent::Operation(
1039 ContextOperation::BufferOperation(operation.clone()),
1040 )),
1041 language::Event::Edited => {
1042 self.count_remaining_tokens(cx);
1043 self.reparse(cx);
1044 // Use `inclusive = true` to invalidate a step when an edit occurs
1045 // at the start/end of a parsed step.
1046 cx.emit(ContextEvent::MessagesEdited);
1047 }
1048 _ => {}
1049 }
1050 }
1051
1052 pub(crate) fn token_count(&self) -> Option<usize> {
1053 self.token_count
1054 }
1055
1056 pub(crate) fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
1057 let request = self.to_completion_request(cx);
1058 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
1059 return;
1060 };
1061 self.pending_token_count = cx.spawn(|this, mut cx| {
1062 async move {
1063 cx.background_executor()
1064 .timer(Duration::from_millis(200))
1065 .await;
1066
1067 let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
1068 this.update(&mut cx, |this, cx| {
1069 this.token_count = Some(token_count);
1070 this.start_cache_warming(&model, cx);
1071 cx.notify()
1072 })
1073 }
1074 .log_err()
1075 });
1076 }
1077
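    /// Selects which user messages should act as prompt cache anchors
    /// (preferring the longest ones), marks invalidated caches as pending, and
    /// returns whether a newly selected anchor still needs to be cached.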
1078 pub fn mark_cache_anchors(
1079 &mut self,
1080 cache_configuration: &Option<LanguageModelCacheConfiguration>,
1081 speculative: bool,
1082 cx: &mut ModelContext<Self>,
1083 ) -> bool {
1084 let cache_configuration =
1085 cache_configuration
1086 .as_ref()
1087 .unwrap_or(&LanguageModelCacheConfiguration {
1088 max_cache_anchors: 0,
1089 should_speculate: false,
1090 min_total_token: 0,
1091 });
1092
1093 let messages: Vec<Message> = self.messages(cx).collect();
1094
1095 let mut sorted_messages = messages.clone();
1096 if speculative {
1097 // Avoid caching the last message if this is a speculative cache fetch as
1098 // it's likely to change.
1099 sorted_messages.pop();
1100 }
1101 sorted_messages.retain(|m| m.role == Role::User);
1102 sorted_messages.sort_by(|a, b| b.offset_range.len().cmp(&a.offset_range.len()));
1103
1104 let cache_anchors = if self.token_count.unwrap_or(0) < cache_configuration.min_total_token {
            // If we haven't hit the minimum threshold to enable caching, don't cache anything.
1106 0
1107 } else {
1108 // Save 1 anchor for the inline assistant to use.
1109 max(cache_configuration.max_cache_anchors, 1) - 1
1110 };
1111 sorted_messages.truncate(cache_anchors);
1112
1113 let anchors: HashSet<MessageId> = sorted_messages
1114 .into_iter()
1115 .map(|message| message.id)
1116 .collect();
1117
1118 let buffer = self.buffer.read(cx).snapshot();
1119 let invalidated_caches: HashSet<MessageId> = messages
1120 .iter()
1121 .scan(false, |encountered_invalid, message| {
1122 let message_id = message.id;
1123 let is_invalid = self
1124 .messages_metadata
1125 .get(&message_id)
1126 .map_or(true, |metadata| {
1127 !metadata.is_cache_valid(&buffer, &message.offset_range)
1128 || *encountered_invalid
1129 });
1130 *encountered_invalid |= is_invalid;
1131 Some(if is_invalid { Some(message_id) } else { None })
1132 })
1133 .flatten()
1134 .collect();
1135
1136 let last_anchor = messages.iter().rev().find_map(|message| {
1137 if anchors.contains(&message.id) {
1138 Some(message.id)
1139 } else {
1140 None
1141 }
1142 });
1143
1144 let mut new_anchor_needs_caching = false;
1145 let current_version = &buffer.version;
1146 // If we have no anchors, mark all messages as not being cached.
1147 let mut hit_last_anchor = last_anchor.is_none();
1148
1149 for message in messages.iter() {
1150 if hit_last_anchor {
1151 self.update_metadata(message.id, cx, |metadata| metadata.cache = None);
1152 continue;
1153 }
1154
1155 if let Some(last_anchor) = last_anchor {
1156 if message.id == last_anchor {
1157 hit_last_anchor = true;
1158 }
1159 }
1160
1161 new_anchor_needs_caching = new_anchor_needs_caching
1162 || (invalidated_caches.contains(&message.id) && anchors.contains(&message.id));
1163
1164 self.update_metadata(message.id, cx, |metadata| {
1165 let cache_status = if invalidated_caches.contains(&message.id) {
1166 CacheStatus::Pending
1167 } else {
1168 metadata
1169 .cache
1170 .as_ref()
1171 .map_or(CacheStatus::Pending, |cm| cm.status.clone())
1172 };
1173 metadata.cache = Some(MessageCacheMetadata {
1174 is_anchor: anchors.contains(&message.id),
1175 is_final_anchor: hit_last_anchor,
1176 status: cache_status,
1177 cached_at: current_version.clone(),
1178 });
1179 });
1180 }
1181 new_anchor_needs_caching
1182 }
1183
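    /// Speculatively warms the model's prompt cache with a minimal request, but
    /// only when the cache configuration allows speculation, the anchors
    /// actually changed, and no completion is already in flight.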
1184 fn start_cache_warming(&mut self, model: &Arc<dyn LanguageModel>, cx: &mut ModelContext<Self>) {
1185 let cache_configuration = model.cache_configuration();
1186
1187 if !self.mark_cache_anchors(&cache_configuration, true, cx) {
1188 return;
1189 }
1190 if !self.pending_completions.is_empty() {
1191 return;
1192 }
1193 if let Some(cache_configuration) = cache_configuration {
1194 if !cache_configuration.should_speculate {
1195 return;
1196 }
1197 }
1198
1199 let request = {
1200 let mut req = self.to_completion_request(cx);
1201 // Skip the last message because it's likely to change and
1202 // therefore would be a waste to cache.
1203 req.messages.pop();
1204 req.messages.push(LanguageModelRequestMessage {
1205 role: Role::User,
1206 content: vec!["Respond only with OK, nothing else.".into()],
1207 cache: false,
1208 });
1209 req
1210 };
1211
1212 let model = Arc::clone(model);
1213 self.pending_cache_warming_task = cx.spawn(|this, mut cx| {
1214 async move {
1215 match model.stream_completion(request, &cx).await {
1216 Ok(mut stream) => {
1217 stream.next().await;
1218 log::info!("Cache warming completed successfully");
1219 }
1220 Err(e) => {
1221 log::warn!("Cache warming failed: {}", e);
1222 }
1223 };
1224 this.update(&mut cx, |this, cx| {
1225 this.update_cache_status_for_completion(cx);
1226 })
1227 .ok();
1228 anyhow::Ok(())
1229 }
1230 .log_err()
1231 });
1232 }
1233
1234 pub fn update_cache_status_for_completion(&mut self, cx: &mut ModelContext<Self>) {
1235 let cached_message_ids: Vec<MessageId> = self
1236 .messages_metadata
1237 .iter()
1238 .filter_map(|(message_id, metadata)| {
1239 metadata.cache.as_ref().and_then(|cache| {
1240 if cache.status == CacheStatus::Pending {
1241 Some(*message_id)
1242 } else {
1243 None
1244 }
1245 })
1246 })
1247 .collect();
1248
1249 for message_id in cached_message_ids {
1250 self.update_metadata(message_id, cx, |metadata| {
1251 if let Some(cache) = &mut metadata.cache {
1252 cache.status = CacheStatus::Cached;
1253 }
1254 });
1255 }
1256 cx.notify();
1257 }
1258
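    /// Reparses the rows touched by edits since the last parse, coalescing
    /// overlapping row ranges and updating the slash commands and workflow
    /// steps found in the affected regions.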
1259 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1260 let buffer = self.buffer.read(cx).text_snapshot();
1261 let mut row_ranges = self
1262 .edits_since_last_parse
1263 .consume()
1264 .into_iter()
1265 .map(|edit| {
1266 let start_row = buffer.offset_to_point(edit.new.start).row;
1267 let end_row = buffer.offset_to_point(edit.new.end).row + 1;
1268 start_row..end_row
1269 })
1270 .peekable();
1271
1272 let mut removed_slash_command_ranges = Vec::new();
1273 let mut updated_slash_commands = Vec::new();
1274 let mut removed_steps = Vec::new();
1275 let mut updated_steps = Vec::new();
1276 while let Some(mut row_range) = row_ranges.next() {
1277 while let Some(next_row_range) = row_ranges.peek() {
1278 if row_range.end >= next_row_range.start {
1279 row_range.end = next_row_range.end;
1280 row_ranges.next();
1281 } else {
1282 break;
1283 }
1284 }
1285
1286 let start = buffer.anchor_before(Point::new(row_range.start, 0));
1287 let end = buffer.anchor_after(Point::new(
1288 row_range.end - 1,
1289 buffer.line_len(row_range.end - 1),
1290 ));
1291
1292 self.reparse_slash_commands_in_range(
1293 start..end,
1294 &buffer,
1295 &mut updated_slash_commands,
1296 &mut removed_slash_command_ranges,
1297 cx,
1298 );
1299 self.reparse_workflow_steps_in_range(
1300 start..end,
1301 &buffer,
1302 &mut updated_steps,
1303 &mut removed_steps,
1304 cx,
1305 );
1306 }
1307
1308 if !updated_slash_commands.is_empty() || !removed_slash_command_ranges.is_empty() {
1309 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1310 removed: removed_slash_command_ranges,
1311 updated: updated_slash_commands,
1312 });
1313 }
1314
1315 if !updated_steps.is_empty() || !removed_steps.is_empty() {
1316 cx.emit(ContextEvent::WorkflowStepsUpdated {
1317 removed: removed_steps,
1318 updated: updated_steps,
1319 });
1320 }
1321 }
1322
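    /// Rescans the given range for slash command invocations and replaces the
    /// previously parsed commands that intersected it, reporting which commands
    /// were added and which ranges were removed.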
1323 fn reparse_slash_commands_in_range(
1324 &mut self,
1325 range: Range<text::Anchor>,
1326 buffer: &BufferSnapshot,
1327 updated: &mut Vec<PendingSlashCommand>,
1328 removed: &mut Vec<Range<text::Anchor>>,
1329 cx: &AppContext,
1330 ) {
1331 let old_range = self.pending_command_indices_for_range(range.clone(), cx);
1332
1333 let mut new_commands = Vec::new();
1334 let mut lines = buffer.text_for_range(range).lines();
1335 let mut offset = lines.offset();
1336 while let Some(line) = lines.next() {
1337 if let Some(command_line) = SlashCommandLine::parse(line) {
1338 let name = &line[command_line.name.clone()];
1339 let arguments = command_line
1340 .arguments
1341 .iter()
1342 .filter_map(|argument_range| {
1343 if argument_range.is_empty() {
1344 None
1345 } else {
1346 line.get(argument_range.clone())
1347 }
1348 })
1349 .map(ToOwned::to_owned)
1350 .collect::<SmallVec<_>>();
1351 if let Some(command) = SlashCommandRegistry::global(cx).command(name) {
1352 if !command.requires_argument() || !arguments.is_empty() {
1353 let start_ix = offset + command_line.name.start - 1;
1354 let end_ix = offset
1355 + command_line
1356 .arguments
1357 .last()
1358 .map_or(command_line.name.end, |argument| argument.end);
1359 let source_range =
1360 buffer.anchor_after(start_ix)..buffer.anchor_after(end_ix);
1361 let pending_command = PendingSlashCommand {
1362 name: name.to_string(),
1363 arguments,
1364 source_range,
1365 status: PendingSlashCommandStatus::Idle,
1366 };
1367 updated.push(pending_command.clone());
1368 new_commands.push(pending_command);
1369 }
1370 }
1371 }
1372
1373 offset = lines.offset();
1374 }
1375
1376 let removed_commands = self.pending_slash_commands.splice(old_range, new_commands);
1377 removed.extend(removed_commands.map(|command| command.source_range));
1378 }
1379
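    /// Rebuilds the XML tags intersecting the edited range, reassembles the
    /// workflow steps they describe, and kicks off resolution of any new steps
    /// against the project.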
1380 fn reparse_workflow_steps_in_range(
1381 &mut self,
1382 range: Range<text::Anchor>,
1383 buffer: &BufferSnapshot,
1384 updated: &mut Vec<Range<text::Anchor>>,
1385 removed: &mut Vec<Range<text::Anchor>>,
1386 cx: &mut ModelContext<Self>,
1387 ) {
1388 // Rebuild the XML tags in the edited range.
1389 let intersecting_tags_range =
1390 self.indices_intersecting_buffer_range(&self.xml_tags, range.clone(), cx);
1391 let new_tags = self.parse_xml_tags_in_range(buffer, range.clone(), cx);
1392 self.xml_tags
1393 .splice(intersecting_tags_range.clone(), new_tags);
1394
1395 // Find which steps intersect the changed range.
1396 let intersecting_steps_range =
1397 self.indices_intersecting_buffer_range(&self.workflow_steps, range.clone(), cx);
1398
1399 // Reparse all tags after the last unchanged step before the change.
1400 let mut tags_start_ix = 0;
1401 if let Some(preceding_unchanged_step) =
1402 self.workflow_steps[..intersecting_steps_range.start].last()
1403 {
1404 tags_start_ix = match self.xml_tags.binary_search_by(|tag| {
1405 tag.range
1406 .start
1407 .cmp(&preceding_unchanged_step.range.end, buffer)
1408 .then(Ordering::Less)
1409 }) {
1410 Ok(ix) | Err(ix) => ix,
1411 };
1412 }
1413
        // Rebuild the workflow steps in the edited range.
1415 let mut new_steps = self.parse_steps(tags_start_ix, range.end, buffer);
1416
1417 if let Some(project) = self.project() {
1418 for step in &mut new_steps {
1419 Self::resolve_workflow_step_internal(step, &project, cx);
1420 }
1421 }
1422
1423 updated.extend(new_steps.iter().map(|step| step.range.clone()));
1424 let removed_steps = self
1425 .workflow_steps
1426 .splice(intersecting_steps_range, new_steps);
1427 removed.extend(
1428 removed_steps
1429 .map(|step| step.range)
1430 .filter(|range| !updated.contains(&range)),
1431 );
1432 }
1433
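    /// Scans assistant messages within the range for workflow XML tags such as
    /// `<step>` and `<edit>`, recording each tag's kind, anchor range, and
    /// whether it is an opening or closing tag.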
1434 fn parse_xml_tags_in_range(
1435 &self,
1436 buffer: &BufferSnapshot,
1437 range: Range<text::Anchor>,
1438 cx: &AppContext,
1439 ) -> Vec<XmlTag> {
1440 let mut messages = self.messages(cx).peekable();
1441
1442 let mut tags = Vec::new();
1443 let mut lines = buffer.text_for_range(range).lines();
1444 let mut offset = lines.offset();
1445
1446 while let Some(line) = lines.next() {
1447 while let Some(message) = messages.peek() {
1448 if offset < message.offset_range.end {
1449 break;
1450 } else {
1451 messages.next();
1452 }
1453 }
1454
1455 let is_assistant_message = messages
1456 .peek()
1457 .map_or(false, |message| message.role == Role::Assistant);
1458 if is_assistant_message {
1459 for (start_ix, _) in line.match_indices('<') {
1460 let mut name_start_ix = start_ix + 1;
1461 let closing_bracket_ix = line[start_ix..].find('>').map(|i| start_ix + i);
1462 if let Some(closing_bracket_ix) = closing_bracket_ix {
1463 let end_ix = closing_bracket_ix + 1;
1464 let mut is_open_tag = true;
1465 if line[name_start_ix..closing_bracket_ix].starts_with('/') {
1466 name_start_ix += 1;
1467 is_open_tag = false;
1468 }
1469 let tag_inner = &line[name_start_ix..closing_bracket_ix];
1470 let tag_name_len = tag_inner
1471 .find(|c: char| c.is_whitespace())
1472 .unwrap_or(tag_inner.len());
1473 if let Ok(kind) = XmlTagKind::from_str(&tag_inner[..tag_name_len]) {
1474 tags.push(XmlTag {
1475 range: buffer.anchor_after(offset + start_ix)
1476 ..buffer.anchor_before(offset + end_ix),
1477 is_open_tag,
1478 kind,
1479 });
1480 };
1481 }
1482 }
1483 }
1484
1485 offset = lines.offset();
1486 }
1487 tags
1488 }
1489
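    /// Walks the XML tags starting at `tags_start_ix` and assembles `<step>`
    /// blocks, along with the `<edit>` elements inside them, into workflow
    /// steps. A step whose closing tag hasn't streamed in yet is treated as
    /// extending to the end of the buffer.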
1490 fn parse_steps(
1491 &mut self,
1492 tags_start_ix: usize,
1493 buffer_end: text::Anchor,
1494 buffer: &BufferSnapshot,
1495 ) -> Vec<WorkflowStep> {
1496 let mut new_steps = Vec::new();
1497 let mut pending_step = None;
1498 let mut edit_step_depth = 0;
1499 let mut tags = self.xml_tags[tags_start_ix..].iter().peekable();
1500 'tags: while let Some(tag) = tags.next() {
1501 if tag.range.start.cmp(&buffer_end, buffer).is_gt() && edit_step_depth == 0 {
1502 break;
1503 }
1504
1505 if tag.kind == XmlTagKind::Step && tag.is_open_tag {
1506 edit_step_depth += 1;
1507 let edit_start = tag.range.start;
1508 let mut edits = Vec::new();
1509 let mut step = WorkflowStep {
1510 range: edit_start..edit_start,
1511 leading_tags_end: tag.range.end,
1512 trailing_tag_start: None,
1513 edits: Default::default(),
1514 resolution: None,
1515 resolution_task: None,
1516 };
1517
1518 while let Some(tag) = tags.next() {
1519 step.trailing_tag_start.get_or_insert(tag.range.start);
1520
1521 if tag.kind == XmlTagKind::Step && !tag.is_open_tag {
1523 edit_step_depth -= 1;
1524 if edit_step_depth == 0 {
1525 step.range.end = tag.range.end;
1526 step.edits = edits.into();
1527 new_steps.push(step);
1528 continue 'tags;
1529 }
1530 }
1531
1532 if tag.kind == XmlTagKind::Edit && tag.is_open_tag {
1533 let mut path = None;
1534 let mut search = None;
1535 let mut operation = None;
1536 let mut description = None;
1537
1538 while let Some(tag) = tags.next() {
1539 if tag.kind == XmlTagKind::Edit && !tag.is_open_tag {
1540 edits.push(WorkflowStepEdit::new(
1541 path,
1542 operation,
1543 search,
1544 description,
1545 ));
1546 break;
1547 }
1548
1549 if tag.is_open_tag
1550 && [
1551 XmlTagKind::Path,
1552 XmlTagKind::Search,
1553 XmlTagKind::Operation,
1554 XmlTagKind::Description,
1555 ]
1556 .contains(&tag.kind)
1557 {
1558 let kind = tag.kind;
1559 let content_start = tag.range.end;
1560 if let Some(tag) = tags.peek() {
1561 if tag.kind == kind && !tag.is_open_tag {
1562 let tag = tags.next().unwrap();
1563 let content_end = tag.range.start;
1564 let mut content = buffer
1565 .text_for_range(content_start..content_end)
1566 .collect::<String>();
1567 content.truncate(content.trim_end().len());
1568 match kind {
1569 XmlTagKind::Path => path = Some(content),
1570 XmlTagKind::Operation => operation = Some(content),
1571 XmlTagKind::Search => {
1572 search = Some(content).filter(|s| !s.is_empty())
1573 }
1574 XmlTagKind::Description => {
1575 description =
1576 Some(content).filter(|s| !s.is_empty())
1577 }
1578 _ => {}
1579 }
1580 }
1581 }
1582 }
1583 }
1584 }
1585 }
1586
1587 pending_step = Some(step);
1588 }
1589 }
1590
1591 if let Some(mut pending_step) = pending_step {
1592 pending_step.range.end = text::Anchor::MAX;
1593 new_steps.push(pending_step);
1594 }
1595
1596 new_steps
1597 }
1598
1599 pub fn resolve_workflow_step(
1600 &mut self,
1601 tagged_range: Range<text::Anchor>,
1602 cx: &mut ModelContext<Self>,
1603 ) -> Option<()> {
1604 let index = self
1605 .workflow_step_index_for_range(&tagged_range, self.buffer.read(cx))
1606 .ok()?;
1607 let step = &mut self.workflow_steps[index];
1608 let project = self.project.as_ref()?;
1609 step.resolution.take();
1610 Self::resolve_workflow_step_internal(step, project, cx);
1611 None
1612 }
1613
1614 fn resolve_workflow_step_internal(
1615 step: &mut WorkflowStep,
1616 project: &Model<Project>,
1617 cx: &mut ModelContext<'_, Context>,
1618 ) {
1619 step.resolution_task = Some(cx.spawn({
1620 let range = step.range.clone();
1621 let edits = step.edits.clone();
1622 let project = project.clone();
1623 |this, mut cx| async move {
1624 let suggestion_groups =
1625 Self::compute_step_resolution(project, edits, &mut cx).await;
1626
1627 this.update(&mut cx, |this, cx| {
1628 let buffer = this.buffer.read(cx).text_snapshot();
1629 let ix = this.workflow_step_index_for_range(&range, &buffer).ok();
1630 if let Some(ix) = ix {
1631 let step = &mut this.workflow_steps[ix];
1632
1633 let resolution = suggestion_groups.map(|suggestion_groups| {
1634 let mut title = String::new();
1635 for mut chunk in buffer.text_for_range(
1636 step.leading_tags_end
1637 ..step.trailing_tag_start.unwrap_or(step.range.end),
1638 ) {
1639 if title.is_empty() {
1640 chunk = chunk.trim_start();
1641 }
1642 if let Some((prefix, _)) = chunk.split_once('\n') {
1643 title.push_str(prefix);
1644 break;
1645 } else {
1646 title.push_str(chunk);
1647 }
1648 }
1649
1650 WorkflowStepResolution {
1651 title,
1652 suggestion_groups,
1653 }
1654 });
1655
1656 step.resolution = Some(Arc::new(resolution));
1657 cx.emit(ContextEvent::WorkflowStepsUpdated {
1658 removed: vec![],
1659 updated: vec![range],
1660 })
1661 }
1662 })
1663 .ok();
1664 }
1665 }));
1666 }
1667
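    /// Resolves a step's edits against the project's buffers and groups the
    /// resulting suggestions per buffer, merging suggestions whose surrounding
    /// context ranges overlap.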
1668 async fn compute_step_resolution(
1669 project: Model<Project>,
1670 edits: Arc<[Result<WorkflowStepEdit>]>,
1671 cx: &mut AsyncAppContext,
1672 ) -> Result<HashMap<Model<Buffer>, Vec<WorkflowSuggestionGroup>>> {
1673 let mut suggestion_tasks = Vec::new();
1674 for edit in edits.iter() {
1675 let edit = edit.as_ref().map_err(|e| anyhow!("{e}"))?;
1676 suggestion_tasks.push(edit.resolve(project.clone(), cx.clone()));
1677 }
1678
1679 // Expand the context ranges of each suggestion and group suggestions with overlapping context ranges.
1680 let suggestions = future::try_join_all(suggestion_tasks).await?;
1681
1682 let mut suggestions_by_buffer = HashMap::default();
1683 for (buffer, suggestion) in suggestions {
1684 suggestions_by_buffer
1685 .entry(buffer)
1686 .or_insert_with(Vec::new)
1687 .push(suggestion);
1688 }
1689
1690 let mut suggestion_groups_by_buffer = HashMap::default();
1691 for (buffer, mut suggestions) in suggestions_by_buffer {
1692 let mut suggestion_groups = Vec::<WorkflowSuggestionGroup>::new();
1693 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
1694 // Sort suggestions by their range so that earlier, larger ranges come first
1695 suggestions.sort_by(|a, b| a.range().cmp(&b.range(), &snapshot));
1696
1697 // Merge overlapping suggestions
1698 suggestions.dedup_by(|a, b| b.try_merge(a, &snapshot));
1699
1700 // Create context ranges for each suggestion
1701 for suggestion in suggestions {
1702 let context_range = {
1703 let suggestion_point_range = suggestion.range().to_point(&snapshot);
1704 let start_row = suggestion_point_range.start.row.saturating_sub(5);
1705 let end_row =
1706 cmp::min(suggestion_point_range.end.row + 5, snapshot.max_point().row);
1707 let start = snapshot.anchor_before(Point::new(start_row, 0));
1708 let end =
1709 snapshot.anchor_after(Point::new(end_row, snapshot.line_len(end_row)));
1710 start..end
1711 };
1712
1713 if let Some(last_group) = suggestion_groups.last_mut() {
1714 if last_group
1715 .context_range
1716 .end
1717 .cmp(&context_range.start, &snapshot)
1718 .is_ge()
1719 {
1720 // Merge with the previous group if context ranges overlap
1721 last_group.context_range.end = context_range.end;
1722 last_group.suggestions.push(suggestion);
1723 } else {
1724 // Create a new group
1725 suggestion_groups.push(WorkflowSuggestionGroup {
1726 context_range,
1727 suggestions: vec![suggestion],
1728 });
1729 }
1730 } else {
1731 // Create the first group
1732 suggestion_groups.push(WorkflowSuggestionGroup {
1733 context_range,
1734 suggestions: vec![suggestion],
1735 });
1736 }
1737 }
1738
1739 suggestion_groups_by_buffer.insert(buffer, suggestion_groups);
1740 }
1741
1742 Ok(suggestion_groups_by_buffer)
1743 }
1744
1745 pub fn pending_command_for_position(
1746 &mut self,
1747 position: language::Anchor,
1748 cx: &mut ModelContext<Self>,
1749 ) -> Option<&mut PendingSlashCommand> {
1750 let buffer = self.buffer.read(cx);
1751 match self
1752 .pending_slash_commands
1753 .binary_search_by(|probe| probe.source_range.end.cmp(&position, buffer))
1754 {
1755 Ok(ix) => Some(&mut self.pending_slash_commands[ix]),
1756 Err(ix) => {
1757 let cmd = self.pending_slash_commands.get_mut(ix)?;
1758 if position.cmp(&cmd.source_range.start, buffer).is_ge()
1759 && position.cmp(&cmd.source_range.end, buffer).is_le()
1760 {
1761 Some(cmd)
1762 } else {
1763 None
1764 }
1765 }
1766 }
1767 }
1768
1769 pub fn pending_commands_for_range(
1770 &self,
1771 range: Range<language::Anchor>,
1772 cx: &AppContext,
1773 ) -> &[PendingSlashCommand] {
1774 let range = self.pending_command_indices_for_range(range, cx);
1775 &self.pending_slash_commands[range]
1776 }
1777
1778 fn pending_command_indices_for_range(
1779 &self,
1780 range: Range<language::Anchor>,
1781 cx: &AppContext,
1782 ) -> Range<usize> {
1783 self.indices_intersecting_buffer_range(&self.pending_slash_commands, range, cx)
1784 }
1785
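    /// Returns the index range of annotations whose ranges intersect the given
    /// buffer range, assuming the annotations are sorted by position.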
1786 fn indices_intersecting_buffer_range<T: ContextAnnotation>(
1787 &self,
1788 all_annotations: &[T],
1789 range: Range<language::Anchor>,
1790 cx: &AppContext,
1791 ) -> Range<usize> {
1792 let buffer = self.buffer.read(cx);
1793 let start_ix = match all_annotations
1794 .binary_search_by(|probe| probe.range().end.cmp(&range.start, &buffer))
1795 {
1796 Ok(ix) | Err(ix) => ix,
1797 };
1798 let end_ix = match all_annotations
1799 .binary_search_by(|probe| probe.range().start.cmp(&range.end, &buffer))
1800 {
1801 Ok(ix) => ix + 1,
1802 Err(ix) => ix,
1803 };
1804 start_ix..end_ix
1805 }
1806
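    /// Awaits a slash command's output, replaces the command's text in the
    /// buffer with that output, records the resulting sections, and broadcasts
    /// the change as a `SlashCommandFinished` operation. Errors are surfaced on
    /// the pending command instead.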
1807 pub fn insert_command_output(
1808 &mut self,
1809 command_range: Range<language::Anchor>,
1810 output: Task<Result<SlashCommandOutput>>,
1811 ensure_trailing_newline: bool,
1812 expand_result: bool,
1813 cx: &mut ModelContext<Self>,
1814 ) {
1815 self.reparse(cx);
1816
1817 let insert_output_task = cx.spawn(|this, mut cx| {
1818 let command_range = command_range.clone();
1819 async move {
1820 let output = output.await;
1821 this.update(&mut cx, |this, cx| match output {
1822 Ok(mut output) => {
1823 // Ensure section ranges are valid.
1824 for section in &mut output.sections {
1825 section.range.start = section.range.start.min(output.text.len());
1826 section.range.end = section.range.end.min(output.text.len());
1827 while !output.text.is_char_boundary(section.range.start) {
1828 section.range.start -= 1;
1829 }
1830 while !output.text.is_char_boundary(section.range.end) {
1831 section.range.end += 1;
1832 }
1833 }
1834
1835 // Ensure there is a newline after the last section.
1836 if ensure_trailing_newline {
1837 let has_newline_after_last_section =
1838 output.sections.last().map_or(false, |last_section| {
1839 output.text[last_section.range.end..].ends_with('\n')
1840 });
1841 if !has_newline_after_last_section {
1842 output.text.push('\n');
1843 }
1844 }
1845
1846 let version = this.version.clone();
1847 let command_id = SlashCommandId(this.next_timestamp());
1848 let (operation, event) = this.buffer.update(cx, |buffer, cx| {
1849 let start = command_range.start.to_offset(buffer);
1850 let old_end = command_range.end.to_offset(buffer);
1851 let new_end = start + output.text.len();
1852 buffer.edit([(start..old_end, output.text)], None, cx);
1853
1854 let mut sections = output
1855 .sections
1856 .into_iter()
1857 .map(|section| SlashCommandOutputSection {
1858 range: buffer.anchor_after(start + section.range.start)
1859 ..buffer.anchor_before(start + section.range.end),
1860 icon: section.icon,
1861 label: section.label,
1862 })
1863 .collect::<Vec<_>>();
1864 sections.sort_by(|a, b| a.range.cmp(&b.range, buffer));
1865
1866 this.slash_command_output_sections
1867 .extend(sections.iter().cloned());
1868 this.slash_command_output_sections
1869 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
1870
1871 let output_range =
1872 buffer.anchor_after(start)..buffer.anchor_before(new_end);
1873 this.finished_slash_commands.insert(command_id);
1874
1875 (
1876 ContextOperation::SlashCommandFinished {
1877 id: command_id,
1878 output_range: output_range.clone(),
1879 sections: sections.clone(),
1880 version,
1881 },
1882 ContextEvent::SlashCommandFinished {
1883 output_range,
1884 sections,
1885 run_commands_in_output: output.run_commands_in_text,
1886 expand_result,
1887 },
1888 )
1889 });
1890
1891 this.push_op(operation, cx);
1892 cx.emit(event);
1893 }
1894 Err(error) => {
1895 if let Some(pending_command) =
1896 this.pending_command_for_position(command_range.start, cx)
1897 {
1898 pending_command.status =
1899 PendingSlashCommandStatus::Error(error.to_string());
1900 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1901 removed: vec![pending_command.source_range.clone()],
1902 updated: vec![pending_command.clone()],
1903 });
1904 }
1905 }
1906 })
1907 .ok();
1908 }
1909 });
1910
1911 if let Some(pending_command) = self.pending_command_for_position(command_range.start, cx) {
1912 pending_command.status = PendingSlashCommandStatus::Running {
1913 _task: insert_output_task.shared(),
1914 };
1915 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1916 removed: vec![pending_command.source_range.clone()],
1917 updated: vec![pending_command.clone()],
1918 });
1919 }
1920 }
1921
1922 pub fn completion_provider_changed(&mut self, cx: &mut ModelContext<Self>) {
1923 self.count_remaining_tokens(cx);
1924 }
1925
1926 fn get_last_valid_message_id(&self, cx: &ModelContext<Self>) -> Option<MessageId> {
1927 self.message_anchors.iter().rev().find_map(|message| {
1928 message
1929 .start
1930 .is_valid(self.buffer.read(cx))
1931 .then_some(message.id)
1932 })
1933 }
1934
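    /// Starts a new completion: inserts a pending assistant message followed by
    /// an empty user message for the next reply, then streams the model's
    /// response (text and tool uses) into the assistant message.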
1935 pub fn assist(&mut self, cx: &mut ModelContext<Self>) -> Option<MessageAnchor> {
1936 let provider = LanguageModelRegistry::read_global(cx).active_provider()?;
1937 let model = LanguageModelRegistry::read_global(cx).active_model()?;
1938 let last_message_id = self.get_last_valid_message_id(cx)?;
1939
1940 if !provider.is_authenticated(cx) {
1941 log::info!("completion provider has no credentials");
1942 return None;
1943 }
1944 // Compute which messages to cache, including the last one.
1945 self.mark_cache_anchors(&model.cache_configuration(), false, cx);
1946
1947 let mut request = self.to_completion_request(cx);
1948
1949 if cx.has_flag::<ToolUseFeatureFlag>() {
1950 let tool_registry = ToolRegistry::global(cx);
1951 request.tools = tool_registry
1952 .tools()
1953 .into_iter()
1954 .map(|tool| LanguageModelRequestTool {
1955 name: tool.name(),
1956 description: tool.description(),
1957 input_schema: tool.input_schema(),
1958 })
1959 .collect();
1960 }
1961
1962 let assistant_message = self
1963 .insert_message_after(last_message_id, Role::Assistant, MessageStatus::Pending, cx)
1964 .unwrap();
1965
1966 // Queue up the user's next reply.
1967 let user_message = self
1968 .insert_message_after(assistant_message.id, Role::User, MessageStatus::Done, cx)
1969 .unwrap();
1970
1971 let pending_completion_id = post_inc(&mut self.completion_count);
1972
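// Stream the completion asynchronously, appending text to the assistant message as events arrive.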
1973 let task = cx.spawn({
1974 |this, mut cx| async move {
1975 let stream = model.stream_completion(request, &cx);
1976 let assistant_message_id = assistant_message.id;
1977 let mut response_latency = None;
1978 let stream_completion = async {
1979 let request_start = Instant::now();
1980 let mut events = stream.await?;
1981
1982 while let Some(event) = events.next().await {
1983 if response_latency.is_none() {
1984 response_latency = Some(request_start.elapsed());
1985 }
1986 let event = event?;
1987
1988 this.update(&mut cx, |this, cx| {
1989 let message_ix = this
1990 .message_anchors
1991 .iter()
1992 .position(|message| message.id == assistant_message_id)?;
1993 this.buffer.update(cx, |buffer, cx| {
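// Compute the insertion point: just before the next valid message's separator newline,
// or at the end of the buffer if this is the last message.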
1994 let message_old_end_offset = this.message_anchors[message_ix + 1..]
1995 .iter()
1996 .find(|message| message.start.is_valid(buffer))
1997 .map_or(buffer.len(), |message| {
1998 message.start.to_offset(buffer).saturating_sub(1)
1999 });
2000
2001 match event {
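// Stop events need no buffer edits; the message status is finalized once the stream ends.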
2002 LanguageModelCompletionEvent::Stop(reason) => match reason {
2003 language_model::StopReason::ToolUse => {}
2004 language_model::StopReason::EndTurn => {}
2005 language_model::StopReason::MaxTokens => {}
2006 },
2007 LanguageModelCompletionEvent::Text(chunk) => {
2008 buffer.edit(
2009 [(
2010 message_old_end_offset..message_old_end_offset,
2011 chunk,
2012 )],
2013 None,
2014 cx,
2015 );
2016 }
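// Render the tool-use request as pretty-printed JSON and track it as a pending tool use.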
2017 LanguageModelCompletionEvent::ToolUse(tool_use) => {
2018 const NEWLINE: char = '\n';
2019
2020 let mut text = String::new();
2021 text.push(NEWLINE);
2022 text.push_str(
2023 &serde_json::to_string_pretty(&tool_use)
2024 .expect("failed to serialize tool use to JSON"),
2025 );
2026 text.push(NEWLINE);
2027 let text_len = text.len();
2028
2029 buffer.edit(
2030 [(
2031 message_old_end_offset..message_old_end_offset,
2032 text,
2033 )],
2034 None,
2035 cx,
2036 );
2037
2038 let start_ix = message_old_end_offset + NEWLINE.len_utf8();
2039 let end_ix =
2040 message_old_end_offset + text_len - NEWLINE.len_utf8();
2041 let source_range = buffer.anchor_after(start_ix)
2042 ..buffer.anchor_after(end_ix);
2043
2044 this.pending_tool_uses_by_id.insert(
2045 tool_use.id.clone(),
2046 PendingToolUse {
2047 id: tool_use.id,
2048 name: tool_use.name,
2049 input: tool_use.input,
2050 status: PendingToolUseStatus::Idle,
2051 source_range,
2052 },
2053 );
2054 }
2055 }
2056 });
2057
2058 cx.emit(ContextEvent::StreamedCompletion);
2059
2060 Some(())
2061 })?;
2062 smol::future::yield_now().await;
2063 }
2064 this.update(&mut cx, |this, cx| {
2065 this.pending_completions
2066 .retain(|completion| completion.id != pending_completion_id);
2067 this.summarize(false, cx);
2068 this.update_cache_status_for_completion(cx);
2069 })?;
2070
2071 anyhow::Ok(())
2072 };
2073
2074 let result = stream_completion.await;
2075
2076 this.update(&mut cx, |this, cx| {
2077 let error_message = result
2078 .err()
2079 .map(|error| error.to_string().trim().to_string());
2080
2081 if let Some(error_message) = error_message.as_ref() {
2082 cx.emit(ContextEvent::ShowAssistError(SharedString::from(
2083 error_message.clone(),
2084 )));
2085 }
2086
2087 this.update_metadata(assistant_message_id, cx, |metadata| {
2088 if let Some(error_message) = error_message.as_ref() {
2089 metadata.status =
2090 MessageStatus::Error(SharedString::from(error_message.clone()));
2091 } else {
2092 metadata.status = MessageStatus::Done;
2093 }
2094 });
2095
2096 if let Some(telemetry) = this.telemetry.as_ref() {
2097 telemetry.report_assistant_event(
2098 Some(this.id.0.clone()),
2099 AssistantKind::Panel,
2100 model.telemetry_id(),
2101 response_latency,
2102 error_message,
2103 );
2104 }
2105 })
2106 .ok();
2107 }
2108 });
2109
2110 self.pending_completions.push(PendingCompletion {
2111 id: pending_completion_id,
2112 assistant_message_id: assistant_message.id,
2113 _task: task,
2114 });
2115
2116 Some(user_message)
2117 }
2118
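/// Builds a completion request containing every message in this context whose status is `Done`.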
2119 pub fn to_completion_request(&self, cx: &AppContext) -> LanguageModelRequest {
2120 let buffer = self.buffer.read(cx);
2121 let request_messages = self
2122 .messages(cx)
2123 .filter(|message| message.status == MessageStatus::Done)
2124 .filter_map(|message| message.to_request_message(&buffer))
2125 .collect();
2126
2127 LanguageModelRequest {
2128 messages: request_messages,
2129 tools: Vec::new(),
2130 stop: Vec::new(),
2131 temperature: 1.0,
2132 }
2133 }
2134
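/// Cancels the most recent pending completion, marking its assistant message as canceled if it
/// was still pending. Returns `true` if there was a pending completion to cancel.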
2135 pub fn cancel_last_assist(&mut self, cx: &mut ModelContext<Self>) -> bool {
2136 if let Some(pending_completion) = self.pending_completions.pop() {
2137 self.update_metadata(pending_completion.assistant_message_id, cx, |metadata| {
2138 if metadata.status == MessageStatus::Pending {
2139 metadata.status = MessageStatus::Canceled;
2140 }
2141 });
2142 true
2143 } else {
2144 false
2145 }
2146 }
2147
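/// Cycles the role of each of the given messages and reparses workflow steps in the affected ranges.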
2148 pub fn cycle_message_roles(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
2149 for id in &ids {
2150 if let Some(metadata) = self.messages_metadata.get(id) {
2151 let role = metadata.role.cycle();
2152 self.update_metadata(*id, cx, |metadata| metadata.role = role);
2153 }
2154 }
2155
2156 self.message_roles_updated(ids, cx);
2157 }
2158
2159 fn message_roles_updated(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
2160 let mut ranges = Vec::new();
2161 for message in self.messages(cx) {
2162 if ids.contains(&message.id) {
2163 ranges.push(message.anchor_range.clone());
2164 }
2165 }
2166
2167 let buffer = self.buffer.read(cx).text_snapshot();
2168 let mut updated = Vec::new();
2169 let mut removed = Vec::new();
2170 for range in ranges {
2171 self.reparse_workflow_steps_in_range(range, &buffer, &mut updated, &mut removed, cx);
2172 }
2173
2174 if !updated.is_empty() || !removed.is_empty() {
2175 cx.emit(ContextEvent::WorkflowStepsUpdated { removed, updated })
2176 }
2177 }
2178
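/// Applies `f` to the given message's metadata (if it exists), bumps its timestamp, and
/// broadcasts the change as an `UpdateMessage` operation.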
2179 pub fn update_metadata(
2180 &mut self,
2181 id: MessageId,
2182 cx: &mut ModelContext<Self>,
2183 f: impl FnOnce(&mut MessageMetadata),
2184 ) {
2185 let version = self.version.clone();
2186 let timestamp = self.next_timestamp();
2187 if let Some(metadata) = self.messages_metadata.get_mut(&id) {
2188 f(metadata);
2189 metadata.timestamp = timestamp;
2190 let operation = ContextOperation::UpdateMessage {
2191 message_id: id,
2192 metadata: metadata.clone(),
2193 version,
2194 };
2195 self.push_op(operation, cx);
2196 cx.emit(ContextEvent::MessagesEdited);
2197 cx.notify();
2198 }
2199 }
2200
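/// Inserts a new message anchor (with a separating newline) immediately after the given message.
/// Returns `None` if the given message no longer exists.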
2201 pub fn insert_message_after(
2202 &mut self,
2203 message_id: MessageId,
2204 role: Role,
2205 status: MessageStatus,
2206 cx: &mut ModelContext<Self>,
2207 ) -> Option<MessageAnchor> {
2208 if let Some(prev_message_ix) = self
2209 .message_anchors
2210 .iter()
2211 .position(|message| message.id == message_id)
2212 {
2213 // Find the next valid message after the one we were given.
2214 let mut next_message_ix = prev_message_ix + 1;
2215 while let Some(next_message) = self.message_anchors.get(next_message_ix) {
2216 if next_message.start.is_valid(self.buffer.read(cx)) {
2217 break;
2218 }
2219 next_message_ix += 1;
2220 }
2221
2222 let start = self.buffer.update(cx, |buffer, cx| {
2223 let offset = self
2224 .message_anchors
2225 .get(next_message_ix)
2226 .map_or(buffer.len(), |message| {
2227 buffer.clip_offset(message.start.to_offset(buffer) - 1, Bias::Left)
2228 });
2229 buffer.edit([(offset..offset, "\n")], None, cx);
2230 buffer.anchor_before(offset + 1)
2231 });
2232
2233 let version = self.version.clone();
2234 let anchor = MessageAnchor {
2235 id: MessageId(self.next_timestamp()),
2236 start,
2237 };
2238 let metadata = MessageMetadata {
2239 role,
2240 status,
2241 timestamp: anchor.id.0,
2242 cache: None,
2243 };
2244 self.insert_message(anchor.clone(), metadata.clone(), cx);
2245 self.push_op(
2246 ContextOperation::InsertMessage {
2247 anchor: anchor.clone(),
2248 metadata,
2249 version,
2250 },
2251 cx,
2252 );
2253 Some(anchor)
2254 } else {
2255 None
2256 }
2257 }
2258
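/// Caches an image's render data and its language-model encoding so it can later be anchored into
/// the context. Returns `None` if the image data cannot be converted.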
2259 pub fn insert_image(&mut self, image: Image, cx: &mut ModelContext<Self>) -> Option<()> {
2260 if let hash_map::Entry::Vacant(entry) = self.images.entry(image.id()) {
2261 entry.insert((
2262 image.to_image_data(cx).log_err()?,
2263 LanguageModelImage::from_image(image, cx).shared(),
2264 ));
2265 }
2266
2267 Some(())
2268 }
2269
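/// Anchors a previously inserted image at the given buffer position, keeping the anchors sorted.
/// Returns `false` if the image ID is unknown.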
2270 pub fn insert_image_anchor(
2271 &mut self,
2272 image_id: u64,
2273 anchor: language::Anchor,
2274 cx: &mut ModelContext<Self>,
2275 ) -> bool {
2276 cx.emit(ContextEvent::MessagesEdited);
2277
2278 let buffer = self.buffer.read(cx);
2279 let insertion_ix = match self
2280 .image_anchors
2281 .binary_search_by(|existing_anchor| anchor.cmp(&existing_anchor.anchor, buffer))
2282 {
2283 Ok(ix) => ix,
2284 Err(ix) => ix,
2285 };
2286
2287 if let Some((render_image, image)) = self.images.get(&image_id) {
2288 self.image_anchors.insert(
2289 insertion_ix,
2290 ImageAnchor {
2291 anchor,
2292 image_id,
2293 image: image.clone(),
2294 render_image: render_image.clone(),
2295 },
2296 );
2297
2298 true
2299 } else {
2300 false
2301 }
2302 }
2303
2304 pub fn images<'a>(&'a self, _cx: &'a AppContext) -> impl 'a + Iterator<Item = ImageAnchor> {
2305 self.image_anchors.iter().cloned()
2306 }
2307
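/// Splits the message containing `range` at the range boundaries, inserting newline separators
/// where none exist. Returns the anchors of the newly created messages, if any.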
2308 pub fn split_message(
2309 &mut self,
2310 range: Range<usize>,
2311 cx: &mut ModelContext<Self>,
2312 ) -> (Option<MessageAnchor>, Option<MessageAnchor>) {
2313 let start_message = self.message_for_offset(range.start, cx);
2314 let end_message = self.message_for_offset(range.end, cx);
2315 if let Some((start_message, end_message)) = start_message.zip(end_message) {
2316 // Prevent splitting when range spans multiple messages.
2317 if start_message.id != end_message.id {
2318 return (None, None);
2319 }
2320
2321 let message = start_message;
2322 let role = message.role;
2323 let mut edited_buffer = false;
2324
2325 let mut suffix_start = None;
2326
2327 // TODO: why did this start panicking?
2328 if range.start > message.offset_range.start
2329 && range.end < message.offset_range.end.saturating_sub(1)
2330 {
2331 if self.buffer.read(cx).chars_at(range.end).next() == Some('\n') {
2332 suffix_start = Some(range.end + 1);
2333 } else if self.buffer.read(cx).reversed_chars_at(range.end).next() == Some('\n') {
2334 suffix_start = Some(range.end);
2335 }
2336 }
2337
2338 let version = self.version.clone();
2339 let suffix = if let Some(suffix_start) = suffix_start {
2340 MessageAnchor {
2341 id: MessageId(self.next_timestamp()),
2342 start: self.buffer.read(cx).anchor_before(suffix_start),
2343 }
2344 } else {
2345 self.buffer.update(cx, |buffer, cx| {
2346 buffer.edit([(range.end..range.end, "\n")], None, cx);
2347 });
2348 edited_buffer = true;
2349 MessageAnchor {
2350 id: MessageId(self.next_timestamp()),
2351 start: self.buffer.read(cx).anchor_before(range.end + 1),
2352 }
2353 };
2354
2355 let suffix_metadata = MessageMetadata {
2356 role,
2357 status: MessageStatus::Done,
2358 timestamp: suffix.id.0,
2359 cache: None,
2360 };
2361 self.insert_message(suffix.clone(), suffix_metadata.clone(), cx);
2362 self.push_op(
2363 ContextOperation::InsertMessage {
2364 anchor: suffix.clone(),
2365 metadata: suffix_metadata,
2366 version,
2367 },
2368 cx,
2369 );
2370
2371 let new_messages =
2372 if range.start == range.end || range.start == message.offset_range.start {
2373 (None, Some(suffix))
2374 } else {
2375 let mut prefix_end = None;
2376 if range.start > message.offset_range.start
2377 && range.end < message.offset_range.end.saturating_sub(1)
2378 {
2379 if self.buffer.read(cx).chars_at(range.start).next() == Some('\n') {
2380 prefix_end = Some(range.start + 1);
2381 } else if self.buffer.read(cx).reversed_chars_at(range.start).next()
2382 == Some('\n')
2383 {
2384 prefix_end = Some(range.start);
2385 }
2386 }
2387
2388 let version = self.version.clone();
2389 let selection = if let Some(prefix_end) = prefix_end {
2390 MessageAnchor {
2391 id: MessageId(self.next_timestamp()),
2392 start: self.buffer.read(cx).anchor_before(prefix_end),
2393 }
2394 } else {
2395 self.buffer.update(cx, |buffer, cx| {
2396 buffer.edit([(range.start..range.start, "\n")], None, cx)
2397 });
2398 edited_buffer = true;
2399 MessageAnchor {
2400 id: MessageId(self.next_timestamp()),
2401 start: self.buffer.read(cx).anchor_before(range.end + 1),
2402 }
2403 };
2404
2405 let selection_metadata = MessageMetadata {
2406 role,
2407 status: MessageStatus::Done,
2408 timestamp: selection.id.0,
2409 cache: None,
2410 };
2411 self.insert_message(selection.clone(), selection_metadata.clone(), cx);
2412 self.push_op(
2413 ContextOperation::InsertMessage {
2414 anchor: selection.clone(),
2415 metadata: selection_metadata,
2416 version,
2417 },
2418 cx,
2419 );
2420
2421 (Some(selection), Some(suffix))
2422 };
2423
2424 if !edited_buffer {
2425 cx.emit(ContextEvent::MessagesEdited);
2426 }
2427 new_messages
2428 } else {
2429 (None, None)
2430 }
2431 }
2432
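/// Records the metadata for a new message and inserts its anchor in buffer order.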
2433 fn insert_message(
2434 &mut self,
2435 new_anchor: MessageAnchor,
2436 new_metadata: MessageMetadata,
2437 cx: &mut ModelContext<Self>,
2438 ) {
2439 cx.emit(ContextEvent::MessagesEdited);
2440
2441 self.messages_metadata.insert(new_anchor.id, new_metadata);
2442
2443 let buffer = self.buffer.read(cx);
2444 let insertion_ix = self
2445 .message_anchors
2446 .iter()
2447 .position(|anchor| {
2448 let comparison = new_anchor.start.cmp(&anchor.start, buffer);
2449 comparison.is_lt() || (comparison.is_eq() && new_anchor.id > anchor.id)
2450 })
2451 .unwrap_or(self.message_anchors.len());
2452 self.message_anchors.insert(insertion_ix, new_anchor);
2453 }
2454
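/// Asks the active model to generate a short title for this context. Unless `replace_old` is
/// true, a summary is requested only once the context has at least two messages and no summary yet.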
2455 pub(super) fn summarize(&mut self, replace_old: bool, cx: &mut ModelContext<Self>) {
2456 let Some(provider) = LanguageModelRegistry::read_global(cx).active_provider() else {
2457 return;
2458 };
2459 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
2460 return;
2461 };
2462
2463 if replace_old || (self.message_anchors.len() >= 2 && self.summary.is_none()) {
2464 if !provider.is_authenticated(cx) {
2465 return;
2466 }
2467
2468 let messages = self
2469 .messages(cx)
2470 .filter_map(|message| message.to_request_message(self.buffer.read(cx)))
2471 .chain(Some(LanguageModelRequestMessage {
2472 role: Role::User,
2473 content: vec![
2474 "Summarize the context into a short title without punctuation.".into(),
2475 ],
2476 cache: false,
2477 }));
2478 let request = LanguageModelRequest {
2479 messages: messages.collect(),
2480 tools: Vec::new(),
2481 stop: Vec::new(),
2482 temperature: 1.0,
2483 };
2484
2485 self.pending_summary = cx.spawn(|this, mut cx| {
2486 async move {
2487 let stream = model.stream_completion_text(request, &cx);
2488 let mut messages = stream.await?;
2489
2490 let mut replaced = !replace_old;
2491 while let Some(message) = messages.next().await {
2492 let text = message?;
2493 let mut lines = text.lines();
2494 this.update(&mut cx, |this, cx| {
2495 let version = this.version.clone();
2496 let timestamp = this.next_timestamp();
2497 let summary = this.summary.get_or_insert(ContextSummary::default());
2498 if !replaced && replace_old {
2499 summary.text.clear();
2500 replaced = true;
2501 }
2502 summary.text.extend(lines.next());
2503 summary.timestamp = timestamp;
2504 let operation = ContextOperation::UpdateSummary {
2505 summary: summary.clone(),
2506 version,
2507 };
2508 this.push_op(operation, cx);
2509 cx.emit(ContextEvent::SummaryChanged);
2510 })?;
2511
2512 // Stop if the LLM generated multiple lines.
2513 if lines.next().is_some() {
2514 break;
2515 }
2516 }
2517
2518 this.update(&mut cx, |this, cx| {
2519 let version = this.version.clone();
2520 let timestamp = this.next_timestamp();
2521 if let Some(summary) = this.summary.as_mut() {
2522 summary.done = true;
2523 summary.timestamp = timestamp;
2524 let operation = ContextOperation::UpdateSummary {
2525 summary: summary.clone(),
2526 version,
2527 };
2528 this.push_op(operation, cx);
2529 cx.emit(ContextEvent::SummaryChanged);
2530 }
2531 })?;
2532
2533 anyhow::Ok(())
2534 }
2535 .log_err()
2536 });
2537 }
2538 }
2539
2540 fn message_for_offset(&self, offset: usize, cx: &AppContext) -> Option<Message> {
2541 self.messages_for_offsets([offset], cx).pop()
2542 }
2543
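/// Returns the messages containing the given offsets, with each message reported at most once.
/// Offsets are expected in ascending order.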
2544 pub fn messages_for_offsets(
2545 &self,
2546 offsets: impl IntoIterator<Item = usize>,
2547 cx: &AppContext,
2548 ) -> Vec<Message> {
2549 let mut result = Vec::new();
2550
2551 let mut messages = self.messages(cx).peekable();
2552 let mut offsets = offsets.into_iter().peekable();
2553 let mut current_message = messages.next();
2554 while let Some(offset) = offsets.next() {
2555 // Locate the message that contains the offset.
2556 while current_message.as_ref().map_or(false, |message| {
2557 !message.offset_range.contains(&offset) && messages.peek().is_some()
2558 }) {
2559 current_message = messages.next();
2560 }
2561 let Some(message) = current_message.as_ref() else {
2562 break;
2563 };
2564
2565 // Skip offsets that are in the same message.
2566 while offsets.peek().map_or(false, |offset| {
2567 message.offset_range.contains(offset) || messages.peek().is_none()
2568 }) {
2569 offsets.next();
2570 }
2571
2572 result.push(message.clone());
2573 }
2574 result
2575 }
2576
2577 fn messages_from_anchors<'a>(
2578 &'a self,
2579 message_anchors: impl Iterator<Item = &'a MessageAnchor> + 'a,
2580 cx: &'a AppContext,
2581 ) -> impl 'a + Iterator<Item = Message> {
2582 let buffer = self.buffer.read(cx);
2583 let messages = message_anchors.enumerate();
2584 let images = self.image_anchors.iter();
2585
2586 Self::messages_from_iters(buffer, &self.messages_metadata, messages, images)
2587 }
2588
2589 pub fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Message> {
2590 self.messages_from_anchors(self.message_anchors.iter(), cx)
2591 }
2592
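/// Builds `Message` values by pairing message anchors with their metadata, folding runs of
/// invalidated anchors into the preceding message and attaching any images anchored before each
/// message's end.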
2593 pub fn messages_from_iters<'a>(
2594 buffer: &'a Buffer,
2595 metadata: &'a HashMap<MessageId, MessageMetadata>,
2596 messages: impl Iterator<Item = (usize, &'a MessageAnchor)> + 'a,
2597 images: impl Iterator<Item = &'a ImageAnchor> + 'a,
2598 ) -> impl 'a + Iterator<Item = Message> {
2599 let mut messages = messages.peekable();
2600 let mut images = images.peekable();
2601
2602 iter::from_fn(move || {
2603 if let Some((start_ix, message_anchor)) = messages.next() {
2604 let metadata = metadata.get(&message_anchor.id)?;
2605
2606 let message_start = message_anchor.start.to_offset(buffer);
2607 let mut message_end = None;
2608 let mut end_ix = start_ix;
2609 while let Some((_, next_message)) = messages.peek() {
2610 if next_message.start.is_valid(buffer) {
2611 message_end = Some(next_message.start);
2612 break;
2613 } else {
2614 end_ix += 1;
2615 messages.next();
2616 }
2617 }
2618 let message_end_anchor = message_end.unwrap_or(language::Anchor::MAX);
2619 let message_end = message_end_anchor.to_offset(buffer);
2620
2621 let mut image_offsets = SmallVec::new();
2622 while let Some(image_anchor) = images.peek() {
2623 if image_anchor.anchor.cmp(&message_end_anchor, buffer).is_lt() {
2624 image_offsets.push((
2625 image_anchor.anchor.to_offset(buffer),
2626 MessageImage {
2627 image_id: image_anchor.image_id,
2628 image: image_anchor.image.clone(),
2629 },
2630 ));
2631 images.next();
2632 } else {
2633 break;
2634 }
2635 }
2636
2637 return Some(Message {
2638 index_range: start_ix..end_ix,
2639 offset_range: message_start..message_end,
2640 anchor_range: message_anchor.start..message_end_anchor,
2641 id: message_anchor.id,
2642 role: metadata.role,
2643 status: metadata.status.clone(),
2644 cache: metadata.cache.clone(),
2645 image_offsets,
2646 });
2647 }
2648 None
2649 })
2650 }
2651
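/// Serializes this context (and its images) to disk under `contexts_dir()`, naming the file after
/// the summary plus a numeric discriminant. Remote (non-host) contexts are not saved, and nothing
/// is written until the context has a completed summary.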
2652 pub fn save(
2653 &mut self,
2654 debounce: Option<Duration>,
2655 fs: Arc<dyn Fs>,
2656 cx: &mut ModelContext<Context>,
2657 ) {
2658 if self.replica_id() != ReplicaId::default() {
2659 // Prevent saving a remote context for now.
2660 return;
2661 }
2662
2663 self.pending_save = cx.spawn(|this, mut cx| async move {
2664 if let Some(debounce) = debounce {
2665 cx.background_executor().timer(debounce).await;
2666 }
2667
2668 let (old_path, summary) = this.read_with(&cx, |this, _| {
2669 let path = this.path.clone();
2670 let summary = if let Some(summary) = this.summary.as_ref() {
2671 if summary.done {
2672 Some(summary.text.clone())
2673 } else {
2674 None
2675 }
2676 } else {
2677 None
2678 };
2679 (path, summary)
2680 })?;
2681
2682 if let Some(summary) = summary {
2683 this.read_with(&cx, |this, cx| this.serialize_images(fs.clone(), cx))?
2684 .await;
2685
2686 let context = this.read_with(&cx, |this, cx| this.serialize(cx))?;
2687 let mut discriminant = 1;
2688 let mut new_path;
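// Find an unused file name of the form "<summary> - <n>.zed.json".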
2689 loop {
2690 new_path = contexts_dir().join(&format!(
2691 "{} - {}.zed.json",
2692 summary.trim(),
2693 discriminant
2694 ));
2695 if fs.is_file(&new_path).await {
2696 discriminant += 1;
2697 } else {
2698 break;
2699 }
2700 }
2701
2702 fs.create_dir(contexts_dir().as_ref()).await?;
2703 fs.atomic_write(new_path.clone(), serde_json::to_string(&context).unwrap())
2704 .await?;
2705 if let Some(old_path) = old_path {
2706 if new_path != old_path {
2707 fs.remove_file(
2708 &old_path,
2709 RemoveOptions {
2710 recursive: false,
2711 ignore_if_not_exists: true,
2712 },
2713 )
2714 .await?;
2715 }
2716 }
2717
2718 this.update(&mut cx, |this, _| this.path = Some(new_path))?;
2719 }
2720
2721 Ok(())
2722 });
2723 }
2724
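/// Writes each image's base64-encoded representation to `context_images_dir()`, skipping files
/// that already exist.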
2725 pub fn serialize_images(&self, fs: Arc<dyn Fs>, cx: &AppContext) -> Task<()> {
2726 let mut images_to_save = self
2727 .images
2728 .iter()
2729 .map(|(id, (_, llm_image))| {
2730 let fs = fs.clone();
2731 let llm_image = llm_image.clone();
2732 let id = *id;
2733 async move {
2734 if let Some(llm_image) = llm_image.await {
2735 let path: PathBuf =
2736 context_images_dir().join(&format!("{}.png.base64", id));
2737 if fs
2738 .metadata(path.as_path())
2739 .await
2740 .log_err()
2741 .flatten()
2742 .is_none()
2743 {
2744 fs.atomic_write(path, llm_image.source.to_string())
2745 .await
2746 .log_err();
2747 }
2748 }
2749 }
2750 })
2751 .collect::<FuturesUnordered<_>>();
2752 cx.background_executor().spawn(async move {
2753 if fs
2754 .create_dir(context_images_dir().as_ref())
2755 .await
2756 .log_err()
2757 .is_some()
2758 {
2759 while images_to_save.next().await.is_some() {}
2760 }
2761 })
2762 }
2763
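/// Replaces the summary with user-provided text and marks it as done.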
2764 pub(crate) fn custom_summary(&mut self, custom_summary: String, cx: &mut ModelContext<Self>) {
2765 let timestamp = self.next_timestamp();
2766 let summary = self.summary.get_or_insert(ContextSummary::default());
2767 summary.timestamp = timestamp;
2768 summary.done = true;
2769 summary.text = custom_summary;
2770 cx.emit(ContextEvent::SummaryChanged);
2771 }
2772}
2773
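/// Version vectors for a context's operations and its underlying buffer.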
2774#[derive(Debug, Default)]
2775pub struct ContextVersion {
2776 context: clock::Global,
2777 buffer: clock::Global,
2778}
2779
2780impl ContextVersion {
2781 pub fn from_proto(proto: &proto::ContextVersion) -> Self {
2782 Self {
2783 context: language::proto::deserialize_version(&proto.context_version),
2784 buffer: language::proto::deserialize_version(&proto.buffer_version),
2785 }
2786 }
2787
2788 pub fn to_proto(&self, context_id: ContextId) -> proto::ContextVersion {
2789 proto::ContextVersion {
2790 context_id: context_id.to_proto(),
2791 context_version: language::proto::serialize_version(&self.context),
2792 buffer_version: language::proto::serialize_version(&self.buffer),
2793 }
2794 }
2795}
2796
2797#[derive(Debug, Clone)]
2798pub struct PendingSlashCommand {
2799 pub name: String,
2800 pub arguments: SmallVec<[String; 3]>,
2801 pub status: PendingSlashCommandStatus,
2802 pub source_range: Range<language::Anchor>,
2803}
2804
2805#[derive(Debug, Clone)]
2806pub enum PendingSlashCommandStatus {
2807 Idle,
2808 Running { _task: Shared<Task<()>> },
2809 Error(String),
2810}
2811
2812pub(crate) struct ToolUseFeatureFlag;
2813
2814impl FeatureFlag for ToolUseFeatureFlag {
2815 const NAME: &'static str = "assistant-tool-use";
2816
2817 fn enabled_for_staff() -> bool {
2818 false
2819 }
2820}
2821
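/// A tool invocation requested by the model, anchored to the JSON rendered into the buffer.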
2822#[derive(Debug, Clone)]
2823pub struct PendingToolUse {
2824 pub id: String,
2825 pub name: String,
2826 pub input: serde_json::Value,
2827 pub status: PendingToolUseStatus,
2828 pub source_range: Range<language::Anchor>,
2829}
2830
2831#[derive(Debug, Clone)]
2832pub enum PendingToolUseStatus {
2833 Idle,
2834 Running { _task: Shared<Task<()>> },
2835 Error(String),
2836}
2837
2838#[derive(Serialize, Deserialize)]
2839pub struct SavedMessage {
2840 pub id: MessageId,
2841 pub start: usize,
2842 pub metadata: MessageMetadata,
2843 #[serde(default)]
2844 // Defaulted for backwards compatibility with JSON files created before August 2024, which predate this field.
2845 pub image_offsets: Vec<(usize, u64)>,
2846}
2847
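/// The current on-disk representation of a context (format version 0.4.0).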
2848#[derive(Serialize, Deserialize)]
2849pub struct SavedContext {
2850 pub id: Option<ContextId>,
2851 pub zed: String,
2852 pub version: String,
2853 pub text: String,
2854 pub messages: Vec<SavedMessage>,
2855 pub summary: String,
2856 pub slash_command_output_sections:
2857 Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
2858}
2859
2860impl SavedContext {
2861 pub const VERSION: &'static str = "0.4.0";
2862
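/// Parses a saved context from JSON, upgrading any of the older on-disk formats (0.1.0 through
/// 0.3.0) to the current version.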
2863 pub fn from_json(json: &str) -> Result<Self> {
2864 let saved_context_json = serde_json::from_str::<serde_json::Value>(json)?;
2865 match saved_context_json
2866 .get("version")
2867 .ok_or_else(|| anyhow!("version not found"))?
2868 {
2869 serde_json::Value::String(version) => match version.as_str() {
2870 SavedContext::VERSION => {
2871 Ok(serde_json::from_value::<SavedContext>(saved_context_json)?)
2872 }
2873 SavedContextV0_3_0::VERSION => {
2874 let saved_context =
2875 serde_json::from_value::<SavedContextV0_3_0>(saved_context_json)?;
2876 Ok(saved_context.upgrade())
2877 }
2878 SavedContextV0_2_0::VERSION => {
2879 let saved_context =
2880 serde_json::from_value::<SavedContextV0_2_0>(saved_context_json)?;
2881 Ok(saved_context.upgrade())
2882 }
2883 SavedContextV0_1_0::VERSION => {
2884 let saved_context =
2885 serde_json::from_value::<SavedContextV0_1_0>(saved_context_json)?;
2886 Ok(saved_context.upgrade())
2887 }
2888 _ => Err(anyhow!("unrecognized saved context version: {}", version)),
2889 },
2890 _ => Err(anyhow!("version not found on saved context")),
2891 }
2892 }
2893
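/// Converts this saved context into the sequence of context operations needed to reconstruct it
/// on top of the given buffer.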
2894 fn into_ops(
2895 self,
2896 buffer: &Model<Buffer>,
2897 cx: &mut ModelContext<Context>,
2898 ) -> Vec<ContextOperation> {
2899 let mut operations = Vec::new();
2900 let mut version = clock::Global::new();
2901 let mut next_timestamp = clock::Lamport::new(ReplicaId::default());
2902
2903 let mut first_message_metadata = None;
2904 for message in self.messages {
2905 if message.id == MessageId(clock::Lamport::default()) {
2906 first_message_metadata = Some(message.metadata);
2907 } else {
2908 operations.push(ContextOperation::InsertMessage {
2909 anchor: MessageAnchor {
2910 id: message.id,
2911 start: buffer.read(cx).anchor_before(message.start),
2912 },
2913 metadata: MessageMetadata {
2914 role: message.metadata.role,
2915 status: message.metadata.status,
2916 timestamp: message.metadata.timestamp,
2917 cache: None,
2918 },
2919 version: version.clone(),
2920 });
2921 version.observe(message.id.0);
2922 next_timestamp.observe(message.id.0);
2923 }
2924 }
2925
2926 if let Some(metadata) = first_message_metadata {
2927 let timestamp = next_timestamp.tick();
2928 operations.push(ContextOperation::UpdateMessage {
2929 message_id: MessageId(clock::Lamport::default()),
2930 metadata: MessageMetadata {
2931 role: metadata.role,
2932 status: metadata.status,
2933 timestamp,
2934 cache: None,
2935 },
2936 version: version.clone(),
2937 });
2938 version.observe(timestamp);
2939 }
2940
2941 let timestamp = next_timestamp.tick();
2942 operations.push(ContextOperation::SlashCommandFinished {
2943 id: SlashCommandId(timestamp),
2944 output_range: language::Anchor::MIN..language::Anchor::MAX,
2945 sections: self
2946 .slash_command_output_sections
2947 .into_iter()
2948 .map(|section| {
2949 let buffer = buffer.read(cx);
2950 SlashCommandOutputSection {
2951 range: buffer.anchor_after(section.range.start)
2952 ..buffer.anchor_before(section.range.end),
2953 icon: section.icon,
2954 label: section.label,
2955 }
2956 })
2957 .collect(),
2958 version: version.clone(),
2959 });
2960 version.observe(timestamp);
2961
2962 let timestamp = next_timestamp.tick();
2963 operations.push(ContextOperation::UpdateSummary {
2964 summary: ContextSummary {
2965 text: self.summary,
2966 done: true,
2967 timestamp,
2968 },
2969 version: version.clone(),
2970 });
2971 version.observe(timestamp);
2972
2973 operations
2974 }
2975}
2976
2977#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
2978struct SavedMessageIdPreV0_4_0(usize);
2979
2980#[derive(Serialize, Deserialize)]
2981struct SavedMessagePreV0_4_0 {
2982 id: SavedMessageIdPreV0_4_0,
2983 start: usize,
2984}
2985
2986#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
2987struct SavedMessageMetadataPreV0_4_0 {
2988 role: Role,
2989 status: MessageStatus,
2990}
2991
2992#[derive(Serialize, Deserialize)]
2993struct SavedContextV0_3_0 {
2994 id: Option<ContextId>,
2995 zed: String,
2996 version: String,
2997 text: String,
2998 messages: Vec<SavedMessagePreV0_4_0>,
2999 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3000 summary: String,
3001 slash_command_output_sections: Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
3002}
3003
3004impl SavedContextV0_3_0 {
3005 const VERSION: &'static str = "0.3.0";
3006
3007 fn upgrade(self) -> SavedContext {
3008 SavedContext {
3009 id: self.id,
3010 zed: self.zed,
3011 version: SavedContext::VERSION.into(),
3012 text: self.text,
3013 messages: self
3014 .messages
3015 .into_iter()
3016 .filter_map(|message| {
3017 let metadata = self.message_metadata.get(&message.id)?;
3018 let timestamp = clock::Lamport {
3019 replica_id: ReplicaId::default(),
3020 value: message.id.0 as u32,
3021 };
3022 Some(SavedMessage {
3023 id: MessageId(timestamp),
3024 start: message.start,
3025 metadata: MessageMetadata {
3026 role: metadata.role,
3027 status: metadata.status.clone(),
3028 timestamp,
3029 cache: None,
3030 },
3031 image_offsets: Vec::new(),
3032 })
3033 })
3034 .collect(),
3035 summary: self.summary,
3036 slash_command_output_sections: self.slash_command_output_sections,
3037 }
3038 }
3039}
3040
3041#[derive(Serialize, Deserialize)]
3042struct SavedContextV0_2_0 {
3043 id: Option<ContextId>,
3044 zed: String,
3045 version: String,
3046 text: String,
3047 messages: Vec<SavedMessagePreV0_4_0>,
3048 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3049 summary: String,
3050}
3051
3052impl SavedContextV0_2_0 {
3053 const VERSION: &'static str = "0.2.0";
3054
3055 fn upgrade(self) -> SavedContext {
3056 SavedContextV0_3_0 {
3057 id: self.id,
3058 zed: self.zed,
3059 version: SavedContextV0_3_0::VERSION.to_string(),
3060 text: self.text,
3061 messages: self.messages,
3062 message_metadata: self.message_metadata,
3063 summary: self.summary,
3064 slash_command_output_sections: Vec::new(),
3065 }
3066 .upgrade()
3067 }
3068}
3069
3070#[derive(Serialize, Deserialize)]
3071struct SavedContextV0_1_0 {
3072 id: Option<ContextId>,
3073 zed: String,
3074 version: String,
3075 text: String,
3076 messages: Vec<SavedMessagePreV0_4_0>,
3077 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3078 summary: String,
3079 api_url: Option<String>,
3080 model: OpenAiModel,
3081}
3082
3083impl SavedContextV0_1_0 {
3084 const VERSION: &'static str = "0.1.0";
3085
3086 fn upgrade(self) -> SavedContext {
3087 SavedContextV0_2_0 {
3088 id: self.id,
3089 zed: self.zed,
3090 version: SavedContextV0_2_0::VERSION.to_string(),
3091 text: self.text,
3092 messages: self.messages,
3093 message_metadata: self.message_metadata,
3094 summary: self.summary,
3095 }
3096 .upgrade()
3097 }
3098}
3099
3100#[derive(Clone)]
3101pub struct SavedContextMetadata {
3102 pub title: String,
3103 pub path: PathBuf,
3104 pub mtime: chrono::DateTime<chrono::Local>,
3105}