1#[cfg(test)]
2mod context_tests;
3
4use crate::{
5 prompts::PromptBuilder, slash_command::SlashCommandLine, MessageId, MessageStatus,
6 WorkflowStep, WorkflowStepEdit, WorkflowStepResolution, WorkflowSuggestionGroup,
7};
8use anyhow::{anyhow, Context as _, Result};
9use assistant_slash_command::{
10 SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry,
11};
12use assistant_tool::ToolRegistry;
13use client::{self, proto, telemetry::Telemetry};
14use clock::ReplicaId;
15use collections::{HashMap, HashSet};
16use feature_flags::{FeatureFlag, FeatureFlagAppExt};
17use fs::{Fs, RemoveOptions};
18use futures::{
19 future::{self, Shared},
20 stream::FuturesUnordered,
21 FutureExt, StreamExt,
22};
23use gpui::{
24 AppContext, AsyncAppContext, Context as _, EventEmitter, Image, Model, ModelContext,
25 RenderImage, SharedString, Subscription, Task,
26};
27
28use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset};
29use language_model::{
30 LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionEvent,
31 LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
32 LanguageModelRequestTool, MessageContent, Role, StopReason,
33};
34use open_ai::Model as OpenAiModel;
35use paths::{context_images_dir, contexts_dir};
36use project::Project;
37use serde::{Deserialize, Serialize};
38use smallvec::SmallVec;
39use std::{
40 cmp::{self, max, Ordering},
41 collections::hash_map,
42 fmt::Debug,
43 iter, mem,
44 ops::Range,
45 path::{Path, PathBuf},
46 str::FromStr as _,
47 sync::Arc,
48 time::{Duration, Instant},
49};
50use telemetry_events::AssistantKind;
51use text::BufferSnapshot;
52use util::{post_inc, ResultExt, TryFutureExt};
53use uuid::Uuid;
54
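/// A stable identifier for a [`Context`], stored as a UUID string so it can
/// round-trip through protobuf messages unchanged.
///
/// A minimal sketch of the round trip (not run as a doctest):
///
/// ```ignore
/// let id = ContextId::new();
/// let restored = ContextId::from_proto(id.to_proto());
/// assert!(restored == id);
/// ```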
55#[derive(Clone, Eq, PartialEq, Hash, PartialOrd, Ord, Serialize, Deserialize)]
56pub struct ContextId(String);
57
58impl ContextId {
59 pub fn new() -> Self {
60 Self(Uuid::new_v4().to_string())
61 }
62
63 pub fn from_proto(id: String) -> Self {
64 Self(id)
65 }
66
67 pub fn to_proto(&self) -> String {
68 self.0.clone()
69 }
70}
71
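/// An operation that is replicated between collaborators to keep their copies
/// of a context converged. Every variant except `BufferOperation` carries the
/// vector-clock version it was created at, so peers can defer applying it
/// until all of its dependencies have been observed.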
72#[derive(Clone, Debug)]
73pub enum ContextOperation {
74 InsertMessage {
75 anchor: MessageAnchor,
76 metadata: MessageMetadata,
77 version: clock::Global,
78 },
79 UpdateMessage {
80 message_id: MessageId,
81 metadata: MessageMetadata,
82 version: clock::Global,
83 },
84 UpdateSummary {
85 summary: ContextSummary,
86 version: clock::Global,
87 },
88 SlashCommandFinished {
89 id: SlashCommandId,
90 output_range: Range<language::Anchor>,
91 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
92 version: clock::Global,
93 },
94 BufferOperation(language::Operation),
95}
96
97impl ContextOperation {
98 pub fn from_proto(op: proto::ContextOperation) -> Result<Self> {
99 match op.variant.context("invalid variant")? {
100 proto::context_operation::Variant::InsertMessage(insert) => {
101 let message = insert.message.context("invalid message")?;
102 let id = MessageId(language::proto::deserialize_timestamp(
103 message.id.context("invalid id")?,
104 ));
105 Ok(Self::InsertMessage {
106 anchor: MessageAnchor {
107 id,
108 start: language::proto::deserialize_anchor(
109 message.start.context("invalid anchor")?,
110 )
111 .context("invalid anchor")?,
112 },
113 metadata: MessageMetadata {
114 role: Role::from_proto(message.role),
115 status: MessageStatus::from_proto(
116 message.status.context("invalid status")?,
117 ),
118 timestamp: id.0,
119 cache: None,
120 },
121 version: language::proto::deserialize_version(&insert.version),
122 })
123 }
124 proto::context_operation::Variant::UpdateMessage(update) => Ok(Self::UpdateMessage {
125 message_id: MessageId(language::proto::deserialize_timestamp(
126 update.message_id.context("invalid message id")?,
127 )),
128 metadata: MessageMetadata {
129 role: Role::from_proto(update.role),
130 status: MessageStatus::from_proto(update.status.context("invalid status")?),
131 timestamp: language::proto::deserialize_timestamp(
132 update.timestamp.context("invalid timestamp")?,
133 ),
134 cache: None,
135 },
136 version: language::proto::deserialize_version(&update.version),
137 }),
138 proto::context_operation::Variant::UpdateSummary(update) => Ok(Self::UpdateSummary {
139 summary: ContextSummary {
140 text: update.summary,
141 done: update.done,
142 timestamp: language::proto::deserialize_timestamp(
143 update.timestamp.context("invalid timestamp")?,
144 ),
145 },
146 version: language::proto::deserialize_version(&update.version),
147 }),
148 proto::context_operation::Variant::SlashCommandFinished(finished) => {
149 Ok(Self::SlashCommandFinished {
150 id: SlashCommandId(language::proto::deserialize_timestamp(
151 finished.id.context("invalid id")?,
152 )),
153 output_range: language::proto::deserialize_anchor_range(
154 finished.output_range.context("invalid range")?,
155 )?,
156 sections: finished
157 .sections
158 .into_iter()
159 .map(|section| {
160 Ok(SlashCommandOutputSection {
161 range: language::proto::deserialize_anchor_range(
162 section.range.context("invalid range")?,
163 )?,
164 icon: section.icon_name.parse()?,
165 label: section.label.into(),
166 })
167 })
168 .collect::<Result<Vec<_>>>()?,
169 version: language::proto::deserialize_version(&finished.version),
170 })
171 }
172 proto::context_operation::Variant::BufferOperation(op) => Ok(Self::BufferOperation(
173 language::proto::deserialize_operation(
174 op.operation.context("invalid buffer operation")?,
175 )?,
176 )),
177 }
178 }
179
180 pub fn to_proto(&self) -> proto::ContextOperation {
181 match self {
182 Self::InsertMessage {
183 anchor,
184 metadata,
185 version,
186 } => proto::ContextOperation {
187 variant: Some(proto::context_operation::Variant::InsertMessage(
188 proto::context_operation::InsertMessage {
189 message: Some(proto::ContextMessage {
190 id: Some(language::proto::serialize_timestamp(anchor.id.0)),
191 start: Some(language::proto::serialize_anchor(&anchor.start)),
192 role: metadata.role.to_proto() as i32,
193 status: Some(metadata.status.to_proto()),
194 }),
195 version: language::proto::serialize_version(version),
196 },
197 )),
198 },
199 Self::UpdateMessage {
200 message_id,
201 metadata,
202 version,
203 } => proto::ContextOperation {
204 variant: Some(proto::context_operation::Variant::UpdateMessage(
205 proto::context_operation::UpdateMessage {
206 message_id: Some(language::proto::serialize_timestamp(message_id.0)),
207 role: metadata.role.to_proto() as i32,
208 status: Some(metadata.status.to_proto()),
209 timestamp: Some(language::proto::serialize_timestamp(metadata.timestamp)),
210 version: language::proto::serialize_version(version),
211 },
212 )),
213 },
214 Self::UpdateSummary { summary, version } => proto::ContextOperation {
215 variant: Some(proto::context_operation::Variant::UpdateSummary(
216 proto::context_operation::UpdateSummary {
217 summary: summary.text.clone(),
218 done: summary.done,
219 timestamp: Some(language::proto::serialize_timestamp(summary.timestamp)),
220 version: language::proto::serialize_version(version),
221 },
222 )),
223 },
224 Self::SlashCommandFinished {
225 id,
226 output_range,
227 sections,
228 version,
229 } => proto::ContextOperation {
230 variant: Some(proto::context_operation::Variant::SlashCommandFinished(
231 proto::context_operation::SlashCommandFinished {
232 id: Some(language::proto::serialize_timestamp(id.0)),
233 output_range: Some(language::proto::serialize_anchor_range(
234 output_range.clone(),
235 )),
236 sections: sections
237 .iter()
238 .map(|section| {
239 let icon_name: &'static str = section.icon.into();
240 proto::SlashCommandOutputSection {
241 range: Some(language::proto::serialize_anchor_range(
242 section.range.clone(),
243 )),
244 icon_name: icon_name.to_string(),
245 label: section.label.to_string(),
246 }
247 })
248 .collect(),
249 version: language::proto::serialize_version(version),
250 },
251 )),
252 },
253 Self::BufferOperation(operation) => proto::ContextOperation {
254 variant: Some(proto::context_operation::Variant::BufferOperation(
255 proto::context_operation::BufferOperation {
256 operation: Some(language::proto::serialize_operation(operation)),
257 },
258 )),
259 },
260 }
261 }
262
263 fn timestamp(&self) -> clock::Lamport {
264 match self {
265 Self::InsertMessage { anchor, .. } => anchor.id.0,
266 Self::UpdateMessage { metadata, .. } => metadata.timestamp,
267 Self::UpdateSummary { summary, .. } => summary.timestamp,
268 Self::SlashCommandFinished { id, .. } => id.0,
269 Self::BufferOperation(_) => {
270 panic!("reading the timestamp of a buffer operation is not supported")
271 }
272 }
273 }
274
    /// Returns the version of the context at which this operation was created.
276 pub fn version(&self) -> &clock::Global {
277 match self {
278 Self::InsertMessage { version, .. }
279 | Self::UpdateMessage { version, .. }
280 | Self::UpdateSummary { version, .. }
281 | Self::SlashCommandFinished { version, .. } => version,
282 Self::BufferOperation(_) => {
283 panic!("reading the version of a buffer operation is not supported")
284 }
285 }
286 }
287}
288
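/// Events emitted by a [`Context`] so that views, and the replication layer
/// via [`ContextEvent::Operation`], can react to changes.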
289#[derive(Debug, Clone)]
290pub enum ContextEvent {
291 ShowAssistError(SharedString),
292 MessagesEdited,
293 SummaryChanged,
294 StreamedCompletion,
295 WorkflowStepsUpdated {
296 removed: Vec<Range<language::Anchor>>,
297 updated: Vec<Range<language::Anchor>>,
298 },
299 PendingSlashCommandsUpdated {
300 removed: Vec<Range<language::Anchor>>,
301 updated: Vec<PendingSlashCommand>,
302 },
303 SlashCommandFinished {
304 output_range: Range<language::Anchor>,
305 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
306 run_commands_in_output: bool,
307 expand_result: bool,
308 },
309 UsePendingTools,
310 ToolFinished {
311 tool_use_id: Arc<str>,
312 output_range: Range<language::Anchor>,
313 },
314 Operation(ContextOperation),
315}
316
317#[derive(Clone, Default, Debug)]
318pub struct ContextSummary {
319 pub text: String,
320 done: bool,
321 timestamp: clock::Lamport,
322}
323
324#[derive(Clone, Debug, Eq, PartialEq)]
325pub struct MessageAnchor {
326 pub id: MessageId,
327 pub start: language::Anchor,
328}
329
330#[derive(Clone, Debug, Eq, PartialEq)]
331pub enum CacheStatus {
332 Pending,
333 Cached,
334}
335
336#[derive(Clone, Debug, Eq, PartialEq)]
337pub struct MessageCacheMetadata {
338 pub is_anchor: bool,
339 pub is_final_anchor: bool,
340 pub status: CacheStatus,
341 pub cached_at: clock::Global,
342}
343
344#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
345pub struct MessageMetadata {
346 pub role: Role,
347 pub status: MessageStatus,
348 pub(crate) timestamp: clock::Lamport,
349 #[serde(skip)]
350 pub cache: Option<MessageCacheMetadata>,
351}
352
353impl From<&Message> for MessageMetadata {
354 fn from(message: &Message) -> Self {
355 Self {
356 role: message.role,
357 status: message.status.clone(),
358 timestamp: message.id.0,
359 cache: message.cache.clone(),
360 }
361 }
362}
363
364impl MessageMetadata {
365 pub fn is_cache_valid(&self, buffer: &BufferSnapshot, range: &Range<usize>) -> bool {
366 let result = match &self.cache {
367 Some(MessageCacheMetadata { cached_at, .. }) => !buffer.has_edits_since_in_range(
368 &cached_at,
369 Range {
370 start: buffer.anchor_at(range.start, Bias::Right),
371 end: buffer.anchor_at(range.end, Bias::Left),
372 },
373 ),
374 _ => false,
375 };
376 result
377 }
378}
379
380#[derive(Clone, Debug)]
381pub struct MessageImage {
382 image_id: u64,
383 image: Shared<Task<Option<LanguageModelImage>>>,
384}
385
386impl PartialEq for MessageImage {
387 fn eq(&self, other: &Self) -> bool {
388 self.image_id == other.image_id
389 }
390}
391
392impl Eq for MessageImage {}
393
394#[derive(Clone, Debug)]
395pub struct Message {
396 pub image_offsets: SmallVec<[(usize, MessageImage); 1]>,
397 pub offset_range: Range<usize>,
398 pub index_range: Range<usize>,
399 pub anchor_range: Range<language::Anchor>,
400 pub id: MessageId,
401 pub role: Role,
402 pub status: MessageStatus,
403 pub cache: Option<MessageCacheMetadata>,
404}
405
406impl Message {
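    /// Converts this message into a request message for the language model,
    /// interleaving the buffer text with any images anchored inside the
    /// message. Returns `None` if the message has no non-whitespace content.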
407 fn to_request_message(&self, buffer: &Buffer) -> Option<LanguageModelRequestMessage> {
408 let mut content = Vec::new();
409
410 let mut range_start = self.offset_range.start;
411 for (image_offset, message_image) in self.image_offsets.iter() {
412 if *image_offset != range_start {
413 if let Some(text) = Self::collect_text_content(buffer, range_start..*image_offset) {
414 content.push(text);
415 }
416 }
417
418 if let Some(image) = message_image.image.clone().now_or_never().flatten() {
419 content.push(language_model::MessageContent::Image(image));
420 }
421
422 range_start = *image_offset;
423 }
424
425 if range_start != self.offset_range.end {
426 if let Some(text) =
427 Self::collect_text_content(buffer, range_start..self.offset_range.end)
428 {
429 content.push(text);
430 }
431 }
432
433 if content.is_empty() {
434 return None;
435 }
436
437 Some(LanguageModelRequestMessage {
438 role: self.role,
439 content,
440 cache: self.cache.as_ref().map_or(false, |cache| cache.is_anchor),
441 })
442 }
443
444 fn collect_text_content(buffer: &Buffer, range: Range<usize>) -> Option<MessageContent> {
445 let text: String = buffer.text_for_range(range.clone()).collect();
446 if text.trim().is_empty() {
447 None
448 } else {
449 Some(MessageContent::Text(text))
450 }
451 }
452}
453
454#[derive(Clone, Debug)]
455pub struct ImageAnchor {
456 pub anchor: language::Anchor,
457 pub image_id: u64,
458 pub render_image: Arc<RenderImage>,
459 pub image: Shared<Task<Option<LanguageModelImage>>>,
460}
461
462struct PendingCompletion {
463 id: usize,
464 assistant_message_id: MessageId,
465 _task: Task<()>,
466}
467
468#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
469pub struct SlashCommandId(clock::Lamport);
470
471#[derive(Clone, Debug)]
472pub struct XmlTag {
473 pub kind: XmlTagKind,
474 pub range: Range<text::Anchor>,
475 pub is_open_tag: bool,
476}
477
478#[derive(Copy, Clone, Debug, strum::EnumString, PartialEq, Eq, strum::AsRefStr)]
479#[strum(serialize_all = "snake_case")]
480pub enum XmlTagKind {
481 Step,
482 Edit,
483 Path,
484 Search,
485 Within,
486 Operation,
487 Description,
488}
489
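/// A single assistant conversation: a Markdown buffer annotated with message
/// boundaries, slash command output, tool uses, and parsed workflow steps.
/// Changes are broadcast as [`ContextOperation`]s so the same context can be
/// kept in sync across replicas.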
490pub struct Context {
491 id: ContextId,
492 timestamp: clock::Lamport,
493 version: clock::Global,
494 pending_ops: Vec<ContextOperation>,
495 operations: Vec<ContextOperation>,
496 buffer: Model<Buffer>,
497 pending_slash_commands: Vec<PendingSlashCommand>,
498 edits_since_last_parse: language::Subscription,
499 finished_slash_commands: HashSet<SlashCommandId>,
500 slash_command_output_sections: Vec<SlashCommandOutputSection<language::Anchor>>,
501 pending_tool_uses_by_id: HashMap<Arc<str>, PendingToolUse>,
502 message_anchors: Vec<MessageAnchor>,
503 images: HashMap<u64, (Arc<RenderImage>, Shared<Task<Option<LanguageModelImage>>>)>,
504 image_anchors: Vec<ImageAnchor>,
505 messages_metadata: HashMap<MessageId, MessageMetadata>,
506 summary: Option<ContextSummary>,
507 pending_summary: Task<Option<()>>,
508 completion_count: usize,
509 pending_completions: Vec<PendingCompletion>,
510 token_count: Option<usize>,
511 pending_token_count: Task<Option<()>>,
512 pending_save: Task<Result<()>>,
513 pending_cache_warming_task: Task<Option<()>>,
514 path: Option<PathBuf>,
515 _subscriptions: Vec<Subscription>,
516 telemetry: Option<Arc<Telemetry>>,
517 language_registry: Arc<LanguageRegistry>,
518 workflow_steps: Vec<WorkflowStep>,
519 xml_tags: Vec<XmlTag>,
520 project: Option<Model<Project>>,
521 prompt_builder: Arc<PromptBuilder>,
522}
523
524trait ContextAnnotation {
525 fn range(&self) -> &Range<language::Anchor>;
526}
527
528impl ContextAnnotation for PendingSlashCommand {
529 fn range(&self) -> &Range<language::Anchor> {
530 &self.source_range
531 }
532}
533
534impl ContextAnnotation for WorkflowStep {
535 fn range(&self) -> &Range<language::Anchor> {
536 &self.range
537 }
538}
539
540impl ContextAnnotation for XmlTag {
541 fn range(&self) -> &Range<language::Anchor> {
542 &self.range
543 }
544}
545
546impl EventEmitter<ContextEvent> for Context {}
547
548impl Context {
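    /// Creates a new, non-shared context that lives entirely on this replica.
    ///
    /// A minimal sketch of constructing one inside a GPUI model, assuming a
    /// `LanguageRegistry` and `PromptBuilder` are already available (not run
    /// as a doctest):
    ///
    /// ```ignore
    /// let context = cx.new_model(|cx| {
    ///     Context::local(language_registry.clone(), None, None, prompt_builder.clone(), cx)
    /// });
    /// ```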
549 pub fn local(
550 language_registry: Arc<LanguageRegistry>,
551 project: Option<Model<Project>>,
552 telemetry: Option<Arc<Telemetry>>,
553 prompt_builder: Arc<PromptBuilder>,
554 cx: &mut ModelContext<Self>,
555 ) -> Self {
556 Self::new(
557 ContextId::new(),
558 ReplicaId::default(),
559 language::Capability::ReadWrite,
560 language_registry,
561 prompt_builder,
562 project,
563 telemetry,
564 cx,
565 )
566 }
567
568 #[allow(clippy::too_many_arguments)]
569 pub fn new(
570 id: ContextId,
571 replica_id: ReplicaId,
572 capability: language::Capability,
573 language_registry: Arc<LanguageRegistry>,
574 prompt_builder: Arc<PromptBuilder>,
575 project: Option<Model<Project>>,
576 telemetry: Option<Arc<Telemetry>>,
577 cx: &mut ModelContext<Self>,
578 ) -> Self {
579 let buffer = cx.new_model(|_cx| {
580 let mut buffer = Buffer::remote(
581 language::BufferId::new(1).unwrap(),
582 replica_id,
583 capability,
584 "",
585 );
586 buffer.set_language_registry(language_registry.clone());
587 buffer
588 });
589 let edits_since_last_slash_command_parse =
590 buffer.update(cx, |buffer, _| buffer.subscribe());
591 let mut this = Self {
592 id,
593 timestamp: clock::Lamport::new(replica_id),
594 version: clock::Global::new(),
595 pending_ops: Vec::new(),
596 operations: Vec::new(),
597 message_anchors: Default::default(),
598 image_anchors: Default::default(),
599 images: Default::default(),
600 messages_metadata: Default::default(),
601 pending_slash_commands: Vec::new(),
602 finished_slash_commands: HashSet::default(),
603 pending_tool_uses_by_id: HashMap::default(),
604 slash_command_output_sections: Vec::new(),
605 edits_since_last_parse: edits_since_last_slash_command_parse,
606 summary: None,
607 pending_summary: Task::ready(None),
608 completion_count: Default::default(),
609 pending_completions: Default::default(),
610 token_count: None,
611 pending_token_count: Task::ready(None),
612 pending_cache_warming_task: Task::ready(None),
613 _subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)],
614 pending_save: Task::ready(Ok(())),
615 path: None,
616 buffer,
617 telemetry,
618 project,
619 language_registry,
620 workflow_steps: Vec::new(),
621 xml_tags: Vec::new(),
622 prompt_builder,
623 };
624
625 let first_message_id = MessageId(clock::Lamport {
626 replica_id: 0,
627 value: 0,
628 });
629 let message = MessageAnchor {
630 id: first_message_id,
631 start: language::Anchor::MIN,
632 };
633 this.messages_metadata.insert(
634 first_message_id,
635 MessageMetadata {
636 role: Role::User,
637 status: MessageStatus::Done,
638 timestamp: first_message_id.0,
639 cache: None,
640 },
641 );
642 this.message_anchors.push(message);
643
644 this.set_language(cx);
645 this.count_remaining_tokens(cx);
646 this
647 }
648
649 pub(crate) fn serialize(&self, cx: &AppContext) -> SavedContext {
650 let buffer = self.buffer.read(cx);
651 SavedContext {
652 id: Some(self.id.clone()),
653 zed: "context".into(),
654 version: SavedContext::VERSION.into(),
655 text: buffer.text(),
656 messages: self
657 .messages(cx)
658 .map(|message| SavedMessage {
659 id: message.id,
660 start: message.offset_range.start,
661 metadata: self.messages_metadata[&message.id].clone(),
662 image_offsets: message
663 .image_offsets
664 .iter()
665 .map(|image_offset| (image_offset.0, image_offset.1.image_id))
666 .collect(),
667 })
668 .collect(),
669 summary: self
670 .summary
671 .as_ref()
672 .map(|summary| summary.text.clone())
673 .unwrap_or_default(),
674 slash_command_output_sections: self
675 .slash_command_output_sections
676 .iter()
677 .filter_map(|section| {
678 let range = section.range.to_offset(buffer);
679 if section.range.start.is_valid(buffer) && !range.is_empty() {
680 Some(assistant_slash_command::SlashCommandOutputSection {
681 range,
682 icon: section.icon,
683 label: section.label.clone(),
684 })
685 } else {
686 None
687 }
688 })
689 .collect(),
690 }
691 }
692
693 #[allow(clippy::too_many_arguments)]
694 pub fn deserialize(
695 saved_context: SavedContext,
696 path: PathBuf,
697 language_registry: Arc<LanguageRegistry>,
698 prompt_builder: Arc<PromptBuilder>,
699 project: Option<Model<Project>>,
700 telemetry: Option<Arc<Telemetry>>,
701 cx: &mut ModelContext<Self>,
702 ) -> Self {
703 let id = saved_context.id.clone().unwrap_or_else(ContextId::new);
704 let mut this = Self::new(
705 id,
706 ReplicaId::default(),
707 language::Capability::ReadWrite,
708 language_registry,
709 prompt_builder,
710 project,
711 telemetry,
712 cx,
713 );
714 this.path = Some(path);
715 this.buffer.update(cx, |buffer, cx| {
716 buffer.set_text(saved_context.text.as_str(), cx)
717 });
718 let operations = saved_context.into_ops(&this.buffer, cx);
719 this.apply_ops(operations, cx).unwrap();
720 this
721 }
722
723 pub fn id(&self) -> &ContextId {
724 &self.id
725 }
726
727 pub fn replica_id(&self) -> ReplicaId {
728 self.timestamp.replica_id
729 }
730
731 pub fn version(&self, cx: &AppContext) -> ContextVersion {
732 ContextVersion {
733 context: self.version.clone(),
734 buffer: self.buffer.read(cx).version(),
735 }
736 }
737
738 pub fn set_capability(
739 &mut self,
740 capability: language::Capability,
741 cx: &mut ModelContext<Self>,
742 ) {
743 self.buffer
744 .update(cx, |buffer, cx| buffer.set_capability(capability, cx));
745 }
746
747 fn next_timestamp(&mut self) -> clock::Lamport {
748 let timestamp = self.timestamp.tick();
749 self.version.observe(timestamp);
750 timestamp
751 }
752
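    /// Serializes every operation (context and buffer) that `since` has not yet
    /// observed, so a peer can catch up from that version.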
753 pub fn serialize_ops(
754 &self,
755 since: &ContextVersion,
756 cx: &AppContext,
757 ) -> Task<Vec<proto::ContextOperation>> {
758 let buffer_ops = self
759 .buffer
760 .read(cx)
761 .serialize_ops(Some(since.buffer.clone()), cx);
762
763 let mut context_ops = self
764 .operations
765 .iter()
766 .filter(|op| !since.context.observed(op.timestamp()))
767 .cloned()
768 .collect::<Vec<_>>();
769 context_ops.extend(self.pending_ops.iter().cloned());
770
771 cx.background_executor().spawn(async move {
772 let buffer_ops = buffer_ops.await;
773 context_ops.sort_unstable_by_key(|op| op.timestamp());
774 buffer_ops
775 .into_iter()
776 .map(|op| proto::ContextOperation {
777 variant: Some(proto::context_operation::Variant::BufferOperation(
778 proto::context_operation::BufferOperation {
779 operation: Some(op),
780 },
781 )),
782 })
783 .chain(context_ops.into_iter().map(|op| op.to_proto()))
784 .collect()
785 })
786 }
787
788 pub fn apply_ops(
789 &mut self,
790 ops: impl IntoIterator<Item = ContextOperation>,
791 cx: &mut ModelContext<Self>,
792 ) -> Result<()> {
793 let mut buffer_ops = Vec::new();
794 for op in ops {
795 match op {
796 ContextOperation::BufferOperation(buffer_op) => buffer_ops.push(buffer_op),
                op => self.pending_ops.push(op),
798 }
799 }
800 self.buffer
801 .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx))?;
802 self.flush_ops(cx);
803
804 Ok(())
805 }
806
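    /// Applies any pending operations whose dependencies have been observed,
    /// re-queueing the rest. Operations are applied in Lamport-timestamp order.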
807 fn flush_ops(&mut self, cx: &mut ModelContext<Context>) {
808 let mut changed_messages = HashSet::default();
809 let mut summary_changed = false;
810
811 self.pending_ops.sort_unstable_by_key(|op| op.timestamp());
812 for op in mem::take(&mut self.pending_ops) {
813 if !self.can_apply_op(&op, cx) {
814 self.pending_ops.push(op);
815 continue;
816 }
817
818 let timestamp = op.timestamp();
819 match op.clone() {
820 ContextOperation::InsertMessage {
821 anchor, metadata, ..
822 } => {
823 if self.messages_metadata.contains_key(&anchor.id) {
824 // We already applied this operation.
825 } else {
826 changed_messages.insert(anchor.id);
827 self.insert_message(anchor, metadata, cx);
828 }
829 }
830 ContextOperation::UpdateMessage {
831 message_id,
832 metadata: new_metadata,
833 ..
834 } => {
835 let metadata = self.messages_metadata.get_mut(&message_id).unwrap();
836 if new_metadata.timestamp > metadata.timestamp {
837 *metadata = new_metadata;
838 changed_messages.insert(message_id);
839 }
840 }
841 ContextOperation::UpdateSummary {
842 summary: new_summary,
843 ..
844 } => {
845 if self
846 .summary
847 .as_ref()
848 .map_or(true, |summary| new_summary.timestamp > summary.timestamp)
849 {
850 self.summary = Some(new_summary);
851 summary_changed = true;
852 }
853 }
854 ContextOperation::SlashCommandFinished {
855 id,
856 output_range,
857 sections,
858 ..
859 } => {
860 if self.finished_slash_commands.insert(id) {
861 let buffer = self.buffer.read(cx);
862 self.slash_command_output_sections
863 .extend(sections.iter().cloned());
864 self.slash_command_output_sections
865 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
866 cx.emit(ContextEvent::SlashCommandFinished {
867 output_range,
868 sections,
869 expand_result: false,
870 run_commands_in_output: false,
871 });
872 }
873 }
874 ContextOperation::BufferOperation(_) => unreachable!(),
875 }
876
877 self.version.observe(timestamp);
878 self.timestamp.observe(timestamp);
879 self.operations.push(op);
880 }
881
882 if !changed_messages.is_empty() {
883 self.message_roles_updated(changed_messages, cx);
884 cx.emit(ContextEvent::MessagesEdited);
885 cx.notify();
886 }
887
888 if summary_changed {
889 cx.emit(ContextEvent::SummaryChanged);
890 cx.notify();
891 }
892 }
893
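    /// Returns true if all of the operation's dependencies (its context version
    /// and any buffer anchors it refers to) have been observed locally.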
894 fn can_apply_op(&self, op: &ContextOperation, cx: &AppContext) -> bool {
895 if !self.version.observed_all(op.version()) {
896 return false;
897 }
898
899 match op {
900 ContextOperation::InsertMessage { anchor, .. } => self
901 .buffer
902 .read(cx)
903 .version
904 .observed(anchor.start.timestamp),
905 ContextOperation::UpdateMessage { message_id, .. } => {
906 self.messages_metadata.contains_key(message_id)
907 }
908 ContextOperation::UpdateSummary { .. } => true,
909 ContextOperation::SlashCommandFinished {
910 output_range,
911 sections,
912 ..
913 } => {
914 let version = &self.buffer.read(cx).version;
915 sections
916 .iter()
                    .map(|section| &section.range)
918 .chain([output_range])
919 .all(|range| {
920 let observed_start = range.start == language::Anchor::MIN
921 || range.start == language::Anchor::MAX
922 || version.observed(range.start.timestamp);
923 let observed_end = range.end == language::Anchor::MIN
924 || range.end == language::Anchor::MAX
925 || version.observed(range.end.timestamp);
926 observed_start && observed_end
927 })
928 }
929 ContextOperation::BufferOperation(_) => {
930 panic!("buffer operations should always be applied")
931 }
932 }
933 }
934
935 fn push_op(&mut self, op: ContextOperation, cx: &mut ModelContext<Self>) {
936 self.operations.push(op.clone());
937 cx.emit(ContextEvent::Operation(op));
938 }
939
940 pub fn buffer(&self) -> &Model<Buffer> {
941 &self.buffer
942 }
943
944 pub fn language_registry(&self) -> Arc<LanguageRegistry> {
945 self.language_registry.clone()
946 }
947
948 pub fn project(&self) -> Option<Model<Project>> {
949 self.project.clone()
950 }
951
952 pub fn prompt_builder(&self) -> Arc<PromptBuilder> {
953 self.prompt_builder.clone()
954 }
955
956 pub fn path(&self) -> Option<&Path> {
957 self.path.as_deref()
958 }
959
960 pub fn summary(&self) -> Option<&ContextSummary> {
961 self.summary.as_ref()
962 }
963
964 pub(crate) fn workflow_step_containing(
965 &self,
966 offset: usize,
967 cx: &AppContext,
968 ) -> Option<&WorkflowStep> {
969 let buffer = self.buffer.read(cx);
970 let index = self
971 .workflow_steps
972 .binary_search_by(|step| {
973 let step_range = step.range.to_offset(&buffer);
974 if offset < step_range.start {
975 Ordering::Greater
976 } else if offset > step_range.end {
977 Ordering::Less
978 } else {
979 Ordering::Equal
980 }
981 })
982 .ok()?;
983 Some(&self.workflow_steps[index])
984 }
985
986 pub fn workflow_step_ranges(&self) -> impl Iterator<Item = Range<language::Anchor>> + '_ {
987 self.workflow_steps.iter().map(|step| step.range.clone())
988 }
989
990 pub(crate) fn workflow_step_for_range(
991 &self,
992 range: &Range<language::Anchor>,
993 cx: &AppContext,
994 ) -> Option<&WorkflowStep> {
995 let buffer = self.buffer.read(cx);
996 let index = self.workflow_step_index_for_range(range, buffer).ok()?;
997 Some(&self.workflow_steps[index])
998 }
999
1000 fn workflow_step_index_for_range(
1001 &self,
1002 tagged_range: &Range<text::Anchor>,
1003 buffer: &text::BufferSnapshot,
1004 ) -> Result<usize, usize> {
1005 self.workflow_steps
1006 .binary_search_by(|probe| probe.range.cmp(&tagged_range, buffer))
1007 }
1008
1009 pub fn pending_slash_commands(&self) -> &[PendingSlashCommand] {
1010 &self.pending_slash_commands
1011 }
1012
1013 pub fn slash_command_output_sections(&self) -> &[SlashCommandOutputSection<language::Anchor>] {
1014 &self.slash_command_output_sections
1015 }
1016
1017 pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> {
1018 self.pending_tool_uses_by_id.values().collect()
1019 }
1020
1021 pub fn get_tool_use_by_id(&self, id: &Arc<str>) -> Option<&PendingToolUse> {
1022 self.pending_tool_uses_by_id.get(id)
1023 }
1024
1025 fn set_language(&mut self, cx: &mut ModelContext<Self>) {
1026 let markdown = self.language_registry.language_for_name("Markdown");
1027 cx.spawn(|this, mut cx| async move {
1028 let markdown = markdown.await?;
1029 this.update(&mut cx, |this, cx| {
1030 this.buffer
1031 .update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx));
1032 })
1033 })
1034 .detach_and_log_err(cx);
1035 }
1036
1037 fn handle_buffer_event(
1038 &mut self,
1039 _: Model<Buffer>,
1040 event: &language::Event,
1041 cx: &mut ModelContext<Self>,
1042 ) {
1043 match event {
1044 language::Event::Operation(operation) => cx.emit(ContextEvent::Operation(
1045 ContextOperation::BufferOperation(operation.clone()),
1046 )),
            language::Event::Edited => {
                self.count_remaining_tokens(cx);
                // Reparse so that an edit at the start or end of a parsed step
                // or slash command invalidates and rebuilds it.
                self.reparse(cx);
                cx.emit(ContextEvent::MessagesEdited);
            }
1054 _ => {}
1055 }
1056 }
1057
1058 pub(crate) fn token_count(&self) -> Option<usize> {
1059 self.token_count
1060 }
1061
1062 pub(crate) fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
1063 let request = self.to_completion_request(cx);
1064 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
1065 return;
1066 };
1067 self.pending_token_count = cx.spawn(|this, mut cx| {
1068 async move {
1069 cx.background_executor()
1070 .timer(Duration::from_millis(200))
1071 .await;
1072
1073 let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
1074 this.update(&mut cx, |this, cx| {
1075 this.token_count = Some(token_count);
1076 this.start_cache_warming(&model, cx);
1077 cx.notify()
1078 })
1079 }
1080 .log_err()
1081 });
1082 }
1083
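    /// Chooses which user messages should act as prompt-cache anchors (the
    /// longest ones, up to the model's configured limit, with one anchor
    /// reserved for the inline assistant) and updates each message's cache
    /// metadata accordingly. Returns true if a newly chosen anchor still needs
    /// to be written to the cache.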
1084 pub fn mark_cache_anchors(
1085 &mut self,
1086 cache_configuration: &Option<LanguageModelCacheConfiguration>,
1087 speculative: bool,
1088 cx: &mut ModelContext<Self>,
1089 ) -> bool {
1090 let cache_configuration =
1091 cache_configuration
1092 .as_ref()
1093 .unwrap_or(&LanguageModelCacheConfiguration {
1094 max_cache_anchors: 0,
1095 should_speculate: false,
1096 min_total_token: 0,
1097 });
1098
1099 let messages: Vec<Message> = self.messages(cx).collect();
1100
1101 let mut sorted_messages = messages.clone();
1102 if speculative {
1103 // Avoid caching the last message if this is a speculative cache fetch as
1104 // it's likely to change.
1105 sorted_messages.pop();
1106 }
1107 sorted_messages.retain(|m| m.role == Role::User);
1108 sorted_messages.sort_by(|a, b| b.offset_range.len().cmp(&a.offset_range.len()));
1109
1110 let cache_anchors = if self.token_count.unwrap_or(0) < cache_configuration.min_total_token {
            // If we haven't hit the minimum threshold to enable caching, don't cache anything.
1112 0
1113 } else {
1114 // Save 1 anchor for the inline assistant to use.
1115 max(cache_configuration.max_cache_anchors, 1) - 1
1116 };
1117 sorted_messages.truncate(cache_anchors);
1118
1119 let anchors: HashSet<MessageId> = sorted_messages
1120 .into_iter()
1121 .map(|message| message.id)
1122 .collect();
1123
1124 let buffer = self.buffer.read(cx).snapshot();
1125 let invalidated_caches: HashSet<MessageId> = messages
1126 .iter()
1127 .scan(false, |encountered_invalid, message| {
1128 let message_id = message.id;
1129 let is_invalid = self
1130 .messages_metadata
1131 .get(&message_id)
1132 .map_or(true, |metadata| {
1133 !metadata.is_cache_valid(&buffer, &message.offset_range)
1134 || *encountered_invalid
1135 });
1136 *encountered_invalid |= is_invalid;
1137 Some(if is_invalid { Some(message_id) } else { None })
1138 })
1139 .flatten()
1140 .collect();
1141
1142 let last_anchor = messages.iter().rev().find_map(|message| {
1143 if anchors.contains(&message.id) {
1144 Some(message.id)
1145 } else {
1146 None
1147 }
1148 });
1149
1150 let mut new_anchor_needs_caching = false;
1151 let current_version = &buffer.version;
1152 // If we have no anchors, mark all messages as not being cached.
1153 let mut hit_last_anchor = last_anchor.is_none();
1154
1155 for message in messages.iter() {
1156 if hit_last_anchor {
1157 self.update_metadata(message.id, cx, |metadata| metadata.cache = None);
1158 continue;
1159 }
1160
1161 if let Some(last_anchor) = last_anchor {
1162 if message.id == last_anchor {
1163 hit_last_anchor = true;
1164 }
1165 }
1166
1167 new_anchor_needs_caching = new_anchor_needs_caching
1168 || (invalidated_caches.contains(&message.id) && anchors.contains(&message.id));
1169
1170 self.update_metadata(message.id, cx, |metadata| {
1171 let cache_status = if invalidated_caches.contains(&message.id) {
1172 CacheStatus::Pending
1173 } else {
1174 metadata
1175 .cache
1176 .as_ref()
1177 .map_or(CacheStatus::Pending, |cm| cm.status.clone())
1178 };
1179 metadata.cache = Some(MessageCacheMetadata {
1180 is_anchor: anchors.contains(&message.id),
1181 is_final_anchor: hit_last_anchor,
1182 status: cache_status,
1183 cached_at: current_version.clone(),
1184 });
1185 });
1186 }
1187 new_anchor_needs_caching
1188 }
1189
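    /// Speculatively warms the prompt cache by sending a cheap completion
    /// request ("Respond only with OK") when the model supports speculation
    /// and no completion is already pending.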
1190 fn start_cache_warming(&mut self, model: &Arc<dyn LanguageModel>, cx: &mut ModelContext<Self>) {
1191 let cache_configuration = model.cache_configuration();
1192
1193 if !self.mark_cache_anchors(&cache_configuration, true, cx) {
1194 return;
1195 }
1196 if !self.pending_completions.is_empty() {
1197 return;
1198 }
1199 if let Some(cache_configuration) = cache_configuration {
1200 if !cache_configuration.should_speculate {
1201 return;
1202 }
1203 }
1204
1205 let request = {
1206 let mut req = self.to_completion_request(cx);
1207 // Skip the last message because it's likely to change and
1208 // therefore would be a waste to cache.
1209 req.messages.pop();
1210 req.messages.push(LanguageModelRequestMessage {
1211 role: Role::User,
1212 content: vec!["Respond only with OK, nothing else.".into()],
1213 cache: false,
1214 });
1215 req
1216 };
1217
1218 let model = Arc::clone(model);
1219 self.pending_cache_warming_task = cx.spawn(|this, mut cx| {
1220 async move {
1221 match model.stream_completion(request, &cx).await {
1222 Ok(mut stream) => {
1223 stream.next().await;
1224 log::info!("Cache warming completed successfully");
1225 }
1226 Err(e) => {
1227 log::warn!("Cache warming failed: {}", e);
1228 }
1229 };
1230 this.update(&mut cx, |this, cx| {
1231 this.update_cache_status_for_completion(cx);
1232 })
1233 .ok();
1234 anyhow::Ok(())
1235 }
1236 .log_err()
1237 });
1238 }
1239
1240 pub fn update_cache_status_for_completion(&mut self, cx: &mut ModelContext<Self>) {
1241 let cached_message_ids: Vec<MessageId> = self
1242 .messages_metadata
1243 .iter()
1244 .filter_map(|(message_id, metadata)| {
1245 metadata.cache.as_ref().and_then(|cache| {
1246 if cache.status == CacheStatus::Pending {
1247 Some(*message_id)
1248 } else {
1249 None
1250 }
1251 })
1252 })
1253 .collect();
1254
1255 for message_id in cached_message_ids {
1256 self.update_metadata(message_id, cx, |metadata| {
1257 if let Some(cache) = &mut metadata.cache {
1258 cache.status = CacheStatus::Cached;
1259 }
1260 });
1261 }
1262 cx.notify();
1263 }
1264
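    /// Reparses the rows touched by edits since the last parse, coalescing
    /// overlapping row ranges, and emits events for any slash commands or
    /// workflow steps that were added, changed, or removed.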
1265 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1266 let buffer = self.buffer.read(cx).text_snapshot();
1267 let mut row_ranges = self
1268 .edits_since_last_parse
1269 .consume()
1270 .into_iter()
1271 .map(|edit| {
1272 let start_row = buffer.offset_to_point(edit.new.start).row;
1273 let end_row = buffer.offset_to_point(edit.new.end).row + 1;
1274 start_row..end_row
1275 })
1276 .peekable();
1277
1278 let mut removed_slash_command_ranges = Vec::new();
1279 let mut updated_slash_commands = Vec::new();
1280 let mut removed_steps = Vec::new();
1281 let mut updated_steps = Vec::new();
1282 while let Some(mut row_range) = row_ranges.next() {
1283 while let Some(next_row_range) = row_ranges.peek() {
1284 if row_range.end >= next_row_range.start {
1285 row_range.end = next_row_range.end;
1286 row_ranges.next();
1287 } else {
1288 break;
1289 }
1290 }
1291
1292 let start = buffer.anchor_before(Point::new(row_range.start, 0));
1293 let end = buffer.anchor_after(Point::new(
1294 row_range.end - 1,
1295 buffer.line_len(row_range.end - 1),
1296 ));
1297
1298 self.reparse_slash_commands_in_range(
1299 start..end,
1300 &buffer,
1301 &mut updated_slash_commands,
1302 &mut removed_slash_command_ranges,
1303 cx,
1304 );
1305 self.reparse_workflow_steps_in_range(
1306 start..end,
1307 &buffer,
1308 &mut updated_steps,
1309 &mut removed_steps,
1310 cx,
1311 );
1312 }
1313
1314 if !updated_slash_commands.is_empty() || !removed_slash_command_ranges.is_empty() {
1315 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1316 removed: removed_slash_command_ranges,
1317 updated: updated_slash_commands,
1318 });
1319 }
1320
1321 if !updated_steps.is_empty() || !removed_steps.is_empty() {
1322 cx.emit(ContextEvent::WorkflowStepsUpdated {
1323 removed: removed_steps,
1324 updated: updated_steps,
1325 });
1326 }
1327 }
1328
1329 fn reparse_slash_commands_in_range(
1330 &mut self,
1331 range: Range<text::Anchor>,
1332 buffer: &BufferSnapshot,
1333 updated: &mut Vec<PendingSlashCommand>,
1334 removed: &mut Vec<Range<text::Anchor>>,
1335 cx: &AppContext,
1336 ) {
1337 let old_range = self.pending_command_indices_for_range(range.clone(), cx);
1338
1339 let mut new_commands = Vec::new();
1340 let mut lines = buffer.text_for_range(range).lines();
1341 let mut offset = lines.offset();
1342 while let Some(line) = lines.next() {
1343 if let Some(command_line) = SlashCommandLine::parse(line) {
1344 let name = &line[command_line.name.clone()];
1345 let arguments = command_line
1346 .arguments
1347 .iter()
1348 .filter_map(|argument_range| {
1349 if argument_range.is_empty() {
1350 None
1351 } else {
1352 line.get(argument_range.clone())
1353 }
1354 })
1355 .map(ToOwned::to_owned)
1356 .collect::<SmallVec<_>>();
1357 if let Some(command) = SlashCommandRegistry::global(cx).command(name) {
1358 if !command.requires_argument() || !arguments.is_empty() {
1359 let start_ix = offset + command_line.name.start - 1;
1360 let end_ix = offset
1361 + command_line
1362 .arguments
1363 .last()
1364 .map_or(command_line.name.end, |argument| argument.end);
1365 let source_range =
1366 buffer.anchor_after(start_ix)..buffer.anchor_after(end_ix);
1367 let pending_command = PendingSlashCommand {
1368 name: name.to_string(),
1369 arguments,
1370 source_range,
1371 status: PendingSlashCommandStatus::Idle,
1372 };
1373 updated.push(pending_command.clone());
1374 new_commands.push(pending_command);
1375 }
1376 }
1377 }
1378
1379 offset = lines.offset();
1380 }
1381
1382 let removed_commands = self.pending_slash_commands.splice(old_range, new_commands);
1383 removed.extend(removed_commands.map(|command| command.source_range));
1384 }
1385
1386 fn reparse_workflow_steps_in_range(
1387 &mut self,
1388 range: Range<text::Anchor>,
1389 buffer: &BufferSnapshot,
1390 updated: &mut Vec<Range<text::Anchor>>,
1391 removed: &mut Vec<Range<text::Anchor>>,
1392 cx: &mut ModelContext<Self>,
1393 ) {
1394 // Rebuild the XML tags in the edited range.
1395 let intersecting_tags_range =
1396 self.indices_intersecting_buffer_range(&self.xml_tags, range.clone(), cx);
1397 let new_tags = self.parse_xml_tags_in_range(buffer, range.clone(), cx);
1398 self.xml_tags
1399 .splice(intersecting_tags_range.clone(), new_tags);
1400
1401 // Find which steps intersect the changed range.
1402 let intersecting_steps_range =
1403 self.indices_intersecting_buffer_range(&self.workflow_steps, range.clone(), cx);
1404
1405 // Reparse all tags after the last unchanged step before the change.
1406 let mut tags_start_ix = 0;
1407 if let Some(preceding_unchanged_step) =
1408 self.workflow_steps[..intersecting_steps_range.start].last()
1409 {
1410 tags_start_ix = match self.xml_tags.binary_search_by(|tag| {
1411 tag.range
1412 .start
1413 .cmp(&preceding_unchanged_step.range.end, buffer)
1414 .then(Ordering::Less)
1415 }) {
1416 Ok(ix) | Err(ix) => ix,
1417 };
1418 }
1419
        // Rebuild the workflow steps in the range.
1421 let mut new_steps = self.parse_steps(tags_start_ix, range.end, buffer);
1422
1423 if let Some(project) = self.project() {
1424 for step in &mut new_steps {
1425 Self::resolve_workflow_step_internal(step, &project, cx);
1426 }
1427 }
1428
1429 updated.extend(new_steps.iter().map(|step| step.range.clone()));
1430 let removed_steps = self
1431 .workflow_steps
1432 .splice(intersecting_steps_range, new_steps);
1433 removed.extend(
1434 removed_steps
1435 .map(|step| step.range)
1436 .filter(|range| !updated.contains(&range)),
1437 );
1438 }
1439
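    /// Scans the given range line by line and collects workflow XML tags, but
    /// only within assistant messages, so tags typed by the user are ignored.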
1440 fn parse_xml_tags_in_range(
1441 &self,
1442 buffer: &BufferSnapshot,
1443 range: Range<text::Anchor>,
1444 cx: &AppContext,
1445 ) -> Vec<XmlTag> {
1446 let mut messages = self.messages(cx).peekable();
1447
1448 let mut tags = Vec::new();
1449 let mut lines = buffer.text_for_range(range).lines();
1450 let mut offset = lines.offset();
1451
1452 while let Some(line) = lines.next() {
1453 while let Some(message) = messages.peek() {
1454 if offset < message.offset_range.end {
1455 break;
1456 } else {
1457 messages.next();
1458 }
1459 }
1460
1461 let is_assistant_message = messages
1462 .peek()
1463 .map_or(false, |message| message.role == Role::Assistant);
1464 if is_assistant_message {
1465 for (start_ix, _) in line.match_indices('<') {
1466 let mut name_start_ix = start_ix + 1;
1467 let closing_bracket_ix = line[start_ix..].find('>').map(|i| start_ix + i);
1468 if let Some(closing_bracket_ix) = closing_bracket_ix {
1469 let end_ix = closing_bracket_ix + 1;
1470 let mut is_open_tag = true;
1471 if line[name_start_ix..closing_bracket_ix].starts_with('/') {
1472 name_start_ix += 1;
1473 is_open_tag = false;
1474 }
1475 let tag_inner = &line[name_start_ix..closing_bracket_ix];
1476 let tag_name_len = tag_inner
1477 .find(|c: char| c.is_whitespace())
1478 .unwrap_or(tag_inner.len());
1479 if let Ok(kind) = XmlTagKind::from_str(&tag_inner[..tag_name_len]) {
1480 tags.push(XmlTag {
1481 range: buffer.anchor_after(offset + start_ix)
1482 ..buffer.anchor_before(offset + end_ix),
1483 is_open_tag,
1484 kind,
1485 });
1486 };
1487 }
1488 }
1489 }
1490
1491 offset = lines.offset();
1492 }
1493 tags
1494 }
1495
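    /// Walks the parsed XML tags starting at `tags_start_ix` and reassembles
    /// them into workflow steps: a `<step>` contains `<edit>` elements, each of
    /// which may carry `<path>`, `<operation>`, `<search>`, and `<description>`
    /// children. A step that is still streaming (no closing tag yet) is kept
    /// open through the end of the buffer.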
1496 fn parse_steps(
1497 &mut self,
1498 tags_start_ix: usize,
1499 buffer_end: text::Anchor,
1500 buffer: &BufferSnapshot,
1501 ) -> Vec<WorkflowStep> {
1502 let mut new_steps = Vec::new();
1503 let mut pending_step = None;
1504 let mut edit_step_depth = 0;
1505 let mut tags = self.xml_tags[tags_start_ix..].iter().peekable();
1506 'tags: while let Some(tag) = tags.next() {
1507 if tag.range.start.cmp(&buffer_end, buffer).is_gt() && edit_step_depth == 0 {
1508 break;
1509 }
1510
1511 if tag.kind == XmlTagKind::Step && tag.is_open_tag {
1512 edit_step_depth += 1;
1513 let edit_start = tag.range.start;
1514 let mut edits = Vec::new();
1515 let mut step = WorkflowStep {
1516 range: edit_start..edit_start,
1517 leading_tags_end: tag.range.end,
1518 trailing_tag_start: None,
1519 edits: Default::default(),
1520 resolution: None,
1521 resolution_task: None,
1522 };
1523
1524 while let Some(tag) = tags.next() {
1525 step.trailing_tag_start.get_or_insert(tag.range.start);
1526
1527 if tag.kind == XmlTagKind::Step && !tag.is_open_tag {
1529 edit_step_depth -= 1;
1530 if edit_step_depth == 0 {
1531 step.range.end = tag.range.end;
1532 step.edits = edits.into();
1533 new_steps.push(step);
1534 continue 'tags;
1535 }
1536 }
1537
1538 if tag.kind == XmlTagKind::Edit && tag.is_open_tag {
1539 let mut path = None;
1540 let mut search = None;
1541 let mut operation = None;
1542 let mut description = None;
1543
1544 while let Some(tag) = tags.next() {
1545 if tag.kind == XmlTagKind::Edit && !tag.is_open_tag {
1546 edits.push(WorkflowStepEdit::new(
1547 path,
1548 operation,
1549 search,
1550 description,
1551 ));
1552 break;
1553 }
1554
1555 if tag.is_open_tag
1556 && [
1557 XmlTagKind::Path,
1558 XmlTagKind::Search,
1559 XmlTagKind::Operation,
1560 XmlTagKind::Description,
1561 ]
1562 .contains(&tag.kind)
1563 {
1564 let kind = tag.kind;
1565 let content_start = tag.range.end;
1566 if let Some(tag) = tags.peek() {
1567 if tag.kind == kind && !tag.is_open_tag {
1568 let tag = tags.next().unwrap();
1569 let content_end = tag.range.start;
1570 let mut content = buffer
1571 .text_for_range(content_start..content_end)
1572 .collect::<String>();
1573 content.truncate(content.trim_end().len());
1574 match kind {
1575 XmlTagKind::Path => path = Some(content),
1576 XmlTagKind::Operation => operation = Some(content),
1577 XmlTagKind::Search => {
1578 search = Some(content).filter(|s| !s.is_empty())
1579 }
1580 XmlTagKind::Description => {
1581 description =
1582 Some(content).filter(|s| !s.is_empty())
1583 }
1584 _ => {}
1585 }
1586 }
1587 }
1588 }
1589 }
1590 }
1591 }
1592
1593 pending_step = Some(step);
1594 }
1595 }
1596
1597 if let Some(mut pending_step) = pending_step {
1598 pending_step.range.end = text::Anchor::MAX;
1599 new_steps.push(pending_step);
1600 }
1601
1602 new_steps
1603 }
1604
1605 pub fn resolve_workflow_step(
1606 &mut self,
1607 tagged_range: Range<text::Anchor>,
1608 cx: &mut ModelContext<Self>,
1609 ) -> Option<()> {
1610 let index = self
1611 .workflow_step_index_for_range(&tagged_range, self.buffer.read(cx))
1612 .ok()?;
1613 let step = &mut self.workflow_steps[index];
1614 let project = self.project.as_ref()?;
1615 step.resolution.take();
1616 Self::resolve_workflow_step_internal(step, project, cx);
1617 None
1618 }
1619
1620 fn resolve_workflow_step_internal(
1621 step: &mut WorkflowStep,
1622 project: &Model<Project>,
1623 cx: &mut ModelContext<'_, Context>,
1624 ) {
1625 step.resolution_task = Some(cx.spawn({
1626 let range = step.range.clone();
1627 let edits = step.edits.clone();
1628 let project = project.clone();
1629 |this, mut cx| async move {
1630 let suggestion_groups =
1631 Self::compute_step_resolution(project, edits, &mut cx).await;
1632
1633 this.update(&mut cx, |this, cx| {
1634 let buffer = this.buffer.read(cx).text_snapshot();
1635 let ix = this.workflow_step_index_for_range(&range, &buffer).ok();
1636 if let Some(ix) = ix {
1637 let step = &mut this.workflow_steps[ix];
1638
1639 let resolution = suggestion_groups.map(|suggestion_groups| {
1640 let mut title = String::new();
1641 for mut chunk in buffer.text_for_range(
1642 step.leading_tags_end
1643 ..step.trailing_tag_start.unwrap_or(step.range.end),
1644 ) {
1645 if title.is_empty() {
1646 chunk = chunk.trim_start();
1647 }
1648 if let Some((prefix, _)) = chunk.split_once('\n') {
1649 title.push_str(prefix);
1650 break;
1651 } else {
1652 title.push_str(chunk);
1653 }
1654 }
1655
1656 WorkflowStepResolution {
1657 title,
1658 suggestion_groups,
1659 }
1660 });
1661
1662 step.resolution = Some(Arc::new(resolution));
1663 cx.emit(ContextEvent::WorkflowStepsUpdated {
1664 removed: vec![],
1665 updated: vec![range],
1666 })
1667 }
1668 })
1669 .ok();
1670 }
1671 }));
1672 }
1673
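    /// Resolves each edit in a workflow step to a concrete buffer suggestion,
    /// then groups suggestions whose surrounding context (five rows on either
    /// side) overlaps into per-buffer suggestion groups.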
1674 async fn compute_step_resolution(
1675 project: Model<Project>,
1676 edits: Arc<[Result<WorkflowStepEdit>]>,
1677 cx: &mut AsyncAppContext,
1678 ) -> Result<HashMap<Model<Buffer>, Vec<WorkflowSuggestionGroup>>> {
1679 let mut suggestion_tasks = Vec::new();
1680 for edit in edits.iter() {
1681 let edit = edit.as_ref().map_err(|e| anyhow!("{e}"))?;
1682 suggestion_tasks.push(edit.resolve(project.clone(), cx.clone()));
1683 }
1684
1685 // Expand the context ranges of each suggestion and group suggestions with overlapping context ranges.
1686 let suggestions = future::try_join_all(suggestion_tasks).await?;
1687
1688 let mut suggestions_by_buffer = HashMap::default();
1689 for (buffer, suggestion) in suggestions {
1690 suggestions_by_buffer
1691 .entry(buffer)
1692 .or_insert_with(Vec::new)
1693 .push(suggestion);
1694 }
1695
1696 let mut suggestion_groups_by_buffer = HashMap::default();
1697 for (buffer, mut suggestions) in suggestions_by_buffer {
1698 let mut suggestion_groups = Vec::<WorkflowSuggestionGroup>::new();
1699 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
1700 // Sort suggestions by their range so that earlier, larger ranges come first
1701 suggestions.sort_by(|a, b| a.range().cmp(&b.range(), &snapshot));
1702
1703 // Merge overlapping suggestions
1704 suggestions.dedup_by(|a, b| b.try_merge(a, &snapshot));
1705
1706 // Create context ranges for each suggestion
1707 for suggestion in suggestions {
1708 let context_range = {
1709 let suggestion_point_range = suggestion.range().to_point(&snapshot);
1710 let start_row = suggestion_point_range.start.row.saturating_sub(5);
1711 let end_row =
1712 cmp::min(suggestion_point_range.end.row + 5, snapshot.max_point().row);
1713 let start = snapshot.anchor_before(Point::new(start_row, 0));
1714 let end =
1715 snapshot.anchor_after(Point::new(end_row, snapshot.line_len(end_row)));
1716 start..end
1717 };
1718
1719 if let Some(last_group) = suggestion_groups.last_mut() {
1720 if last_group
1721 .context_range
1722 .end
1723 .cmp(&context_range.start, &snapshot)
1724 .is_ge()
1725 {
1726 // Merge with the previous group if context ranges overlap
1727 last_group.context_range.end = context_range.end;
1728 last_group.suggestions.push(suggestion);
1729 } else {
1730 // Create a new group
1731 suggestion_groups.push(WorkflowSuggestionGroup {
1732 context_range,
1733 suggestions: vec![suggestion],
1734 });
1735 }
1736 } else {
1737 // Create the first group
1738 suggestion_groups.push(WorkflowSuggestionGroup {
1739 context_range,
1740 suggestions: vec![suggestion],
1741 });
1742 }
1743 }
1744
1745 suggestion_groups_by_buffer.insert(buffer, suggestion_groups);
1746 }
1747
1748 Ok(suggestion_groups_by_buffer)
1749 }
1750
1751 pub fn pending_command_for_position(
1752 &mut self,
1753 position: language::Anchor,
1754 cx: &mut ModelContext<Self>,
1755 ) -> Option<&mut PendingSlashCommand> {
1756 let buffer = self.buffer.read(cx);
1757 match self
1758 .pending_slash_commands
1759 .binary_search_by(|probe| probe.source_range.end.cmp(&position, buffer))
1760 {
1761 Ok(ix) => Some(&mut self.pending_slash_commands[ix]),
1762 Err(ix) => {
1763 let cmd = self.pending_slash_commands.get_mut(ix)?;
1764 if position.cmp(&cmd.source_range.start, buffer).is_ge()
1765 && position.cmp(&cmd.source_range.end, buffer).is_le()
1766 {
1767 Some(cmd)
1768 } else {
1769 None
1770 }
1771 }
1772 }
1773 }
1774
1775 pub fn pending_commands_for_range(
1776 &self,
1777 range: Range<language::Anchor>,
1778 cx: &AppContext,
1779 ) -> &[PendingSlashCommand] {
1780 let range = self.pending_command_indices_for_range(range, cx);
1781 &self.pending_slash_commands[range]
1782 }
1783
1784 fn pending_command_indices_for_range(
1785 &self,
1786 range: Range<language::Anchor>,
1787 cx: &AppContext,
1788 ) -> Range<usize> {
1789 self.indices_intersecting_buffer_range(&self.pending_slash_commands, range, cx)
1790 }
1791
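    /// Binary-searches `all_annotations` (which must be sorted by range) for
    /// the half-open index range of annotations whose ranges intersect `range`.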
1792 fn indices_intersecting_buffer_range<T: ContextAnnotation>(
1793 &self,
1794 all_annotations: &[T],
1795 range: Range<language::Anchor>,
1796 cx: &AppContext,
1797 ) -> Range<usize> {
1798 let buffer = self.buffer.read(cx);
1799 let start_ix = match all_annotations
1800 .binary_search_by(|probe| probe.range().end.cmp(&range.start, &buffer))
1801 {
1802 Ok(ix) | Err(ix) => ix,
1803 };
1804 let end_ix = match all_annotations
1805 .binary_search_by(|probe| probe.range().start.cmp(&range.end, &buffer))
1806 {
1807 Ok(ix) => ix + 1,
1808 Err(ix) => ix,
1809 };
1810 start_ix..end_ix
1811 }
1812
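    /// Replaces the slash command invocation at `command_range` with the
    /// command's output once it resolves, registering the resulting output
    /// sections and broadcasting the change as an operation. On failure, the
    /// pending command is marked with an error status instead.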
1813 pub fn insert_command_output(
1814 &mut self,
1815 command_range: Range<language::Anchor>,
1816 output: Task<Result<SlashCommandOutput>>,
1817 ensure_trailing_newline: bool,
1818 expand_result: bool,
1819 cx: &mut ModelContext<Self>,
1820 ) {
1821 self.reparse(cx);
1822
1823 let insert_output_task = cx.spawn(|this, mut cx| {
1824 let command_range = command_range.clone();
1825 async move {
1826 let output = output.await;
1827 this.update(&mut cx, |this, cx| match output {
1828 Ok(mut output) => {
1829 // Ensure section ranges are valid.
1830 for section in &mut output.sections {
1831 section.range.start = section.range.start.min(output.text.len());
1832 section.range.end = section.range.end.min(output.text.len());
1833 while !output.text.is_char_boundary(section.range.start) {
1834 section.range.start -= 1;
1835 }
1836 while !output.text.is_char_boundary(section.range.end) {
1837 section.range.end += 1;
1838 }
1839 }
1840
1841 // Ensure there is a newline after the last section.
1842 if ensure_trailing_newline {
1843 let has_newline_after_last_section =
1844 output.sections.last().map_or(false, |last_section| {
1845 output.text[last_section.range.end..].ends_with('\n')
1846 });
1847 if !has_newline_after_last_section {
1848 output.text.push('\n');
1849 }
1850 }
1851
1852 let version = this.version.clone();
1853 let command_id = SlashCommandId(this.next_timestamp());
1854 let (operation, event) = this.buffer.update(cx, |buffer, cx| {
1855 let start = command_range.start.to_offset(buffer);
1856 let old_end = command_range.end.to_offset(buffer);
1857 let new_end = start + output.text.len();
1858 buffer.edit([(start..old_end, output.text)], None, cx);
1859
1860 let mut sections = output
1861 .sections
1862 .into_iter()
1863 .map(|section| SlashCommandOutputSection {
1864 range: buffer.anchor_after(start + section.range.start)
1865 ..buffer.anchor_before(start + section.range.end),
1866 icon: section.icon,
1867 label: section.label,
1868 })
1869 .collect::<Vec<_>>();
1870 sections.sort_by(|a, b| a.range.cmp(&b.range, buffer));
1871
1872 this.slash_command_output_sections
1873 .extend(sections.iter().cloned());
1874 this.slash_command_output_sections
1875 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
1876
1877 let output_range =
1878 buffer.anchor_after(start)..buffer.anchor_before(new_end);
1879 this.finished_slash_commands.insert(command_id);
1880
1881 (
1882 ContextOperation::SlashCommandFinished {
1883 id: command_id,
1884 output_range: output_range.clone(),
1885 sections: sections.clone(),
1886 version,
1887 },
1888 ContextEvent::SlashCommandFinished {
1889 output_range,
1890 sections,
1891 run_commands_in_output: output.run_commands_in_text,
1892 expand_result,
1893 },
1894 )
1895 });
1896
1897 this.push_op(operation, cx);
1898 cx.emit(event);
1899 }
1900 Err(error) => {
1901 if let Some(pending_command) =
1902 this.pending_command_for_position(command_range.start, cx)
1903 {
1904 pending_command.status =
1905 PendingSlashCommandStatus::Error(error.to_string());
1906 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1907 removed: vec![pending_command.source_range.clone()],
1908 updated: vec![pending_command.clone()],
1909 });
1910 }
1911 }
1912 })
1913 .ok();
1914 }
1915 });
1916
1917 if let Some(pending_command) = self.pending_command_for_position(command_range.start, cx) {
1918 pending_command.status = PendingSlashCommandStatus::Running {
1919 _task: insert_output_task.shared(),
1920 };
1921 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1922 removed: vec![pending_command.source_range.clone()],
1923 updated: vec![pending_command.clone()],
1924 });
1925 }
1926 }
1927
1928 pub fn insert_tool_output(
1929 &mut self,
1930 tool_use_id: Arc<str>,
1931 output: Task<Result<String>>,
1932 cx: &mut ModelContext<Self>,
1933 ) {
1934 let insert_output_task = cx.spawn(|this, mut cx| {
1935 let tool_use_id = tool_use_id.clone();
1936 async move {
1937 let output = output.await;
1938 this.update(&mut cx, |this, cx| match output {
1939 Ok(mut output) => {
1940 const NEWLINE: char = '\n';
1941
1942 if !output.ends_with(NEWLINE) {
1943 output.push(NEWLINE);
1944 }
1945
                        let anchor_range = this.buffer.update(cx, |buffer, cx| {
                            // Append the tool output at the end of the buffer, excluding the
                            // trailing newline from the reported range.
                            let start = buffer.len();
                            let end = start + output.len() - NEWLINE.len_utf8();
                            buffer.edit([(start..start, output)], None, cx);
                            buffer.anchor_after(start)..buffer.anchor_after(end)
                        });
1959
1960 cx.emit(ContextEvent::ToolFinished {
1961 tool_use_id,
1962 output_range: anchor_range,
1963 });
1964 }
1965 Err(err) => {
1966 if let Some(tool_use) = this.pending_tool_uses_by_id.get_mut(&tool_use_id) {
1967 tool_use.status = PendingToolUseStatus::Error(err.to_string());
1968 }
1969 }
1970 })
1971 .ok();
1972 }
1973 });
1974
1975 if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) {
1976 tool_use.status = PendingToolUseStatus::Running {
1977 _task: insert_output_task.shared(),
1978 };
1979 }
1980 }
1981
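    /// Recounts the remaining tokens whenever the active completion provider or model changes.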
1982 pub fn completion_provider_changed(&mut self, cx: &mut ModelContext<Self>) {
1983 self.count_remaining_tokens(cx);
1984 }
1985
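    /// Returns the id of the last message whose start anchor is still valid in the buffer.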
1986 fn get_last_valid_message_id(&self, cx: &ModelContext<Self>) -> Option<MessageId> {
1987 self.message_anchors.iter().rev().find_map(|message| {
1988 message
1989 .start
1990 .is_valid(self.buffer.read(cx))
1991 .then_some(message.id)
1992 })
1993 }
1994
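    /// Requests a streamed completion from the active language model for this context.
    ///
    /// Returns `None` if there is no active provider or model, no valid last message, or
    /// the provider is not authenticated. Otherwise it marks cache anchors, builds the
    /// request (including tool definitions when the tool-use feature flag is enabled),
    /// inserts a pending assistant message followed by an empty user message for the next
    /// reply, and streams the response into the buffer. The returned anchor is the queued
    /// user message.
    ///
    /// A minimal usage sketch, assuming a `Model<Context>` handle named `context`:
    ///
    /// ```ignore
    /// let next_user_message = context.update(cx, |context, cx| context.assist(cx));
    /// ```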
1995 pub fn assist(&mut self, cx: &mut ModelContext<Self>) -> Option<MessageAnchor> {
1996 let provider = LanguageModelRegistry::read_global(cx).active_provider()?;
1997 let model = LanguageModelRegistry::read_global(cx).active_model()?;
1998 let last_message_id = self.get_last_valid_message_id(cx)?;
1999
2000 if !provider.is_authenticated(cx) {
2001 log::info!("completion provider has no credentials");
2002 return None;
2003 }
2004 // Compute which messages to cache, including the last one.
2005 self.mark_cache_anchors(&model.cache_configuration(), false, cx);
2006
2007 let mut request = self.to_completion_request(cx);
2008
2009 if cx.has_flag::<ToolUseFeatureFlag>() {
2010 let tool_registry = ToolRegistry::global(cx);
2011 request.tools = tool_registry
2012 .tools()
2013 .into_iter()
2014 .map(|tool| LanguageModelRequestTool {
2015 name: tool.name(),
2016 description: tool.description(),
2017 input_schema: tool.input_schema(),
2018 })
2019 .collect();
2020 }
2021
2022 let assistant_message = self
2023 .insert_message_after(last_message_id, Role::Assistant, MessageStatus::Pending, cx)
2024 .unwrap();
2025
2026 // Queue up the user's next reply.
2027 let user_message = self
2028 .insert_message_after(assistant_message.id, Role::User, MessageStatus::Done, cx)
2029 .unwrap();
2030
2031 let pending_completion_id = post_inc(&mut self.completion_count);
2032
2033 let task = cx.spawn({
2034 |this, mut cx| async move {
2035 let stream = model.stream_completion(request, &cx);
2036 let assistant_message_id = assistant_message.id;
2037 let mut response_latency = None;
2038 let stream_completion = async {
2039 let request_start = Instant::now();
2040 let mut events = stream.await?;
2041
2042 while let Some(event) = events.next().await {
2043 if response_latency.is_none() {
2044 response_latency = Some(request_start.elapsed());
2045 }
2046 let event = event?;
2047
2048 this.update(&mut cx, |this, cx| {
2049 let message_ix = this
2050 .message_anchors
2051 .iter()
2052 .position(|message| message.id == assistant_message_id)?;
2053 let event_to_emit = this.buffer.update(cx, |buffer, cx| {
2054 let message_old_end_offset = this.message_anchors[message_ix + 1..]
2055 .iter()
2056 .find(|message| message.start.is_valid(buffer))
2057 .map_or(buffer.len(), |message| {
2058 message.start.to_offset(buffer).saturating_sub(1)
2059 });
2060
2061 match event {
2062 LanguageModelCompletionEvent::Stop(reason) => match reason {
2063 StopReason::ToolUse => {
2064 return Some(ContextEvent::UsePendingTools);
2065 }
2066 StopReason::EndTurn => {}
2067 StopReason::MaxTokens => {}
2068 },
2069 LanguageModelCompletionEvent::Text(chunk) => {
2070 buffer.edit(
2071 [(
2072 message_old_end_offset..message_old_end_offset,
2073 chunk,
2074 )],
2075 None,
2076 cx,
2077 );
2078 }
2079 LanguageModelCompletionEvent::ToolUse(tool_use) => {
2080 const NEWLINE: char = '\n';
2081
2082 let mut text = String::new();
2083 text.push(NEWLINE);
2084 text.push_str(
2085 &serde_json::to_string_pretty(&tool_use)
2086 .expect("failed to serialize tool use to JSON"),
2087 );
2088 text.push(NEWLINE);
2089 let text_len = text.len();
2090
2091 buffer.edit(
2092 [(
2093 message_old_end_offset..message_old_end_offset,
2094 text,
2095 )],
2096 None,
2097 cx,
2098 );
2099
2100 let start_ix = message_old_end_offset + NEWLINE.len_utf8();
2101 let end_ix =
2102 message_old_end_offset + text_len - NEWLINE.len_utf8();
2103 let source_range = buffer.anchor_after(start_ix)
2104 ..buffer.anchor_after(end_ix);
2105
2106 let tool_use_id: Arc<str> = tool_use.id.into();
2107 this.pending_tool_uses_by_id.insert(
2108 tool_use_id.clone(),
2109 PendingToolUse {
2110 id: tool_use_id,
2111 name: tool_use.name,
2112 input: tool_use.input,
2113 status: PendingToolUseStatus::Idle,
2114 source_range,
2115 },
2116 );
2117 }
2118 }
2119
2120 None
2121 });
2122
2123 cx.emit(ContextEvent::StreamedCompletion);
2124 if let Some(event) = event_to_emit {
2125 cx.emit(event);
2126 }
2127
2128 Some(())
2129 })?;
2130 smol::future::yield_now().await;
2131 }
2132 this.update(&mut cx, |this, cx| {
2133 this.pending_completions
2134 .retain(|completion| completion.id != pending_completion_id);
2135 this.summarize(false, cx);
2136 this.update_cache_status_for_completion(cx);
2137 })?;
2138
2139 anyhow::Ok(())
2140 };
2141
2142 let result = stream_completion.await;
2143
2144 this.update(&mut cx, |this, cx| {
2145 let error_message = result
2146 .err()
2147 .map(|error| error.to_string().trim().to_string());
2148
2149 if let Some(error_message) = error_message.as_ref() {
2150 cx.emit(ContextEvent::ShowAssistError(SharedString::from(
2151 error_message.clone(),
2152 )));
2153 }
2154
2155 this.update_metadata(assistant_message_id, cx, |metadata| {
2156 if let Some(error_message) = error_message.as_ref() {
2157 metadata.status =
2158 MessageStatus::Error(SharedString::from(error_message.clone()));
2159 } else {
2160 metadata.status = MessageStatus::Done;
2161 }
2162 });
2163
2164 if let Some(telemetry) = this.telemetry.as_ref() {
2165 telemetry.report_assistant_event(
2166 Some(this.id.0.clone()),
2167 AssistantKind::Panel,
2168 model.telemetry_id(),
2169 response_latency,
2170 error_message,
2171 );
2172 }
2173 })
2174 .ok();
2175 }
2176 });
2177
2178 self.pending_completions.push(PendingCompletion {
2179 id: pending_completion_id,
2180 assistant_message_id: assistant_message.id,
2181 _task: task,
2182 });
2183
2184 Some(user_message)
2185 }
2186
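    /// Builds a `LanguageModelRequest` from every message whose status is `Done`,
    /// with no tools, no stop sequences, and a temperature of 1.0.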
2187 pub fn to_completion_request(&self, cx: &AppContext) -> LanguageModelRequest {
2188 let buffer = self.buffer.read(cx);
2189 let request_messages = self
2190 .messages(cx)
2191 .filter(|message| message.status == MessageStatus::Done)
2192 .filter_map(|message| message.to_request_message(&buffer))
2193 .collect();
2194
2195 LanguageModelRequest {
2196 messages: request_messages,
2197 tools: Vec::new(),
2198 stop: Vec::new(),
2199 temperature: 1.0,
2200 }
2201 }
2202
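    /// Cancels the most recent pending completion, marking its assistant message as
    /// canceled if it was still pending. Returns `true` if a completion was canceled.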
2203 pub fn cancel_last_assist(&mut self, cx: &mut ModelContext<Self>) -> bool {
2204 if let Some(pending_completion) = self.pending_completions.pop() {
2205 self.update_metadata(pending_completion.assistant_message_id, cx, |metadata| {
2206 if metadata.status == MessageStatus::Pending {
2207 metadata.status = MessageStatus::Canceled;
2208 }
2209 });
2210 true
2211 } else {
2212 false
2213 }
2214 }
2215
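    /// Cycles the role of each of the given messages and reparses any workflow steps
    /// contained in their ranges.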
2216 pub fn cycle_message_roles(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
2217 for id in &ids {
2218 if let Some(metadata) = self.messages_metadata.get(id) {
2219 let role = metadata.role.cycle();
2220 self.update_metadata(*id, cx, |metadata| metadata.role = role);
2221 }
2222 }
2223
2224 self.message_roles_updated(ids, cx);
2225 }
2226
2227 fn message_roles_updated(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
2228 let mut ranges = Vec::new();
2229 for message in self.messages(cx) {
2230 if ids.contains(&message.id) {
2231 ranges.push(message.anchor_range.clone());
2232 }
2233 }
2234
2235 let buffer = self.buffer.read(cx).text_snapshot();
2236 let mut updated = Vec::new();
2237 let mut removed = Vec::new();
2238 for range in ranges {
2239 self.reparse_workflow_steps_in_range(range, &buffer, &mut updated, &mut removed, cx);
2240 }
2241
2242 if !updated.is_empty() || !removed.is_empty() {
2243 cx.emit(ContextEvent::WorkflowStepsUpdated { removed, updated })
2244 }
2245 }
2246
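    /// Applies `f` to the metadata of the given message, bumps its timestamp, and
    /// broadcasts the change as an `UpdateMessage` operation.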
2247 pub fn update_metadata(
2248 &mut self,
2249 id: MessageId,
2250 cx: &mut ModelContext<Self>,
2251 f: impl FnOnce(&mut MessageMetadata),
2252 ) {
2253 let version = self.version.clone();
2254 let timestamp = self.next_timestamp();
2255 if let Some(metadata) = self.messages_metadata.get_mut(&id) {
2256 f(metadata);
2257 metadata.timestamp = timestamp;
2258 let operation = ContextOperation::UpdateMessage {
2259 message_id: id,
2260 metadata: metadata.clone(),
2261 version,
2262 };
2263 self.push_op(operation, cx);
2264 cx.emit(ContextEvent::MessagesEdited);
2265 cx.notify();
2266 }
2267 }
2268
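    /// Inserts a new message immediately after the given message, separating it from the
    /// next valid message (or the end of the buffer) with a newline. Returns `None` if
    /// `message_id` is unknown.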
2269 pub fn insert_message_after(
2270 &mut self,
2271 message_id: MessageId,
2272 role: Role,
2273 status: MessageStatus,
2274 cx: &mut ModelContext<Self>,
2275 ) -> Option<MessageAnchor> {
2276 if let Some(prev_message_ix) = self
2277 .message_anchors
2278 .iter()
2279 .position(|message| message.id == message_id)
2280 {
2281 // Find the next valid message after the one we were given.
2282 let mut next_message_ix = prev_message_ix + 1;
2283 while let Some(next_message) = self.message_anchors.get(next_message_ix) {
2284 if next_message.start.is_valid(self.buffer.read(cx)) {
2285 break;
2286 }
2287 next_message_ix += 1;
2288 }
2289
2290 let start = self.buffer.update(cx, |buffer, cx| {
2291 let offset = self
2292 .message_anchors
2293 .get(next_message_ix)
2294 .map_or(buffer.len(), |message| {
2295 buffer.clip_offset(message.start.to_offset(buffer) - 1, Bias::Left)
2296 });
2297 buffer.edit([(offset..offset, "\n")], None, cx);
2298 buffer.anchor_before(offset + 1)
2299 });
2300
2301 let version = self.version.clone();
2302 let anchor = MessageAnchor {
2303 id: MessageId(self.next_timestamp()),
2304 start,
2305 };
2306 let metadata = MessageMetadata {
2307 role,
2308 status,
2309 timestamp: anchor.id.0,
2310 cache: None,
2311 };
2312 self.insert_message(anchor.clone(), metadata.clone(), cx);
2313 self.push_op(
2314 ContextOperation::InsertMessage {
2315 anchor: anchor.clone(),
2316 metadata,
2317 version,
2318 },
2319 cx,
2320 );
2321 Some(anchor)
2322 } else {
2323 None
2324 }
2325 }
2326
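    /// Registers an image with the context, caching its render data alongside its
    /// language-model representation so it can later be anchored into the buffer with
    /// `insert_image_anchor`. Returns `None` if the image data cannot be converted.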
2327 pub fn insert_image(&mut self, image: Image, cx: &mut ModelContext<Self>) -> Option<()> {
2328 if let hash_map::Entry::Vacant(entry) = self.images.entry(image.id()) {
2329 entry.insert((
2330 image.to_image_data(cx).log_err()?,
2331 LanguageModelImage::from_image(image, cx).shared(),
2332 ));
2333 }
2334
2335 Some(())
2336 }
2337
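    /// Anchors a previously registered image at a position in the buffer, keeping the
    /// image anchors sorted. Returns `false` if the image id has not been registered.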
2338 pub fn insert_image_anchor(
2339 &mut self,
2340 image_id: u64,
2341 anchor: language::Anchor,
2342 cx: &mut ModelContext<Self>,
2343 ) -> bool {
2344 cx.emit(ContextEvent::MessagesEdited);
2345
2346 let buffer = self.buffer.read(cx);
        let insertion_ix = self
            .image_anchors
            .binary_search_by(|existing_anchor| anchor.cmp(&existing_anchor.anchor, buffer))
            .unwrap_or_else(|ix| ix);
2354
2355 if let Some((render_image, image)) = self.images.get(&image_id) {
2356 self.image_anchors.insert(
2357 insertion_ix,
2358 ImageAnchor {
2359 anchor,
2360 image_id,
2361 image: image.clone(),
2362 render_image: render_image.clone(),
2363 },
2364 );
2365
2366 true
2367 } else {
2368 false
2369 }
2370 }
2371
2372 pub fn images<'a>(&'a self, _cx: &'a AppContext) -> impl 'a + Iterator<Item = ImageAnchor> {
2373 self.image_anchors.iter().cloned()
2374 }
2375
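    /// Splits the message containing `range` at the range boundaries, inserting newlines
    /// into the buffer where needed so that each new message starts on its own line.
    /// Returns the optional message covering the selection and the optional suffix
    /// message, or `(None, None)` when the range spans multiple messages.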
2376 pub fn split_message(
2377 &mut self,
2378 range: Range<usize>,
2379 cx: &mut ModelContext<Self>,
2380 ) -> (Option<MessageAnchor>, Option<MessageAnchor>) {
2381 let start_message = self.message_for_offset(range.start, cx);
2382 let end_message = self.message_for_offset(range.end, cx);
2383 if let Some((start_message, end_message)) = start_message.zip(end_message) {
2384 // Prevent splitting when range spans multiple messages.
2385 if start_message.id != end_message.id {
2386 return (None, None);
2387 }
2388
2389 let message = start_message;
2390 let role = message.role;
2391 let mut edited_buffer = false;
2392
2393 let mut suffix_start = None;
2394
            // TODO: why did this start panicking? Presumably an arithmetic underflow when
            // the message's offset range ends at zero; hence the `saturating_sub` below.
2396 if range.start > message.offset_range.start
2397 && range.end < message.offset_range.end.saturating_sub(1)
2398 {
2399 if self.buffer.read(cx).chars_at(range.end).next() == Some('\n') {
2400 suffix_start = Some(range.end + 1);
2401 } else if self.buffer.read(cx).reversed_chars_at(range.end).next() == Some('\n') {
2402 suffix_start = Some(range.end);
2403 }
2404 }
2405
2406 let version = self.version.clone();
2407 let suffix = if let Some(suffix_start) = suffix_start {
2408 MessageAnchor {
2409 id: MessageId(self.next_timestamp()),
2410 start: self.buffer.read(cx).anchor_before(suffix_start),
2411 }
2412 } else {
2413 self.buffer.update(cx, |buffer, cx| {
2414 buffer.edit([(range.end..range.end, "\n")], None, cx);
2415 });
2416 edited_buffer = true;
2417 MessageAnchor {
2418 id: MessageId(self.next_timestamp()),
2419 start: self.buffer.read(cx).anchor_before(range.end + 1),
2420 }
2421 };
2422
2423 let suffix_metadata = MessageMetadata {
2424 role,
2425 status: MessageStatus::Done,
2426 timestamp: suffix.id.0,
2427 cache: None,
2428 };
2429 self.insert_message(suffix.clone(), suffix_metadata.clone(), cx);
2430 self.push_op(
2431 ContextOperation::InsertMessage {
2432 anchor: suffix.clone(),
2433 metadata: suffix_metadata,
2434 version,
2435 },
2436 cx,
2437 );
2438
2439 let new_messages =
2440 if range.start == range.end || range.start == message.offset_range.start {
2441 (None, Some(suffix))
2442 } else {
2443 let mut prefix_end = None;
2444 if range.start > message.offset_range.start
2445 && range.end < message.offset_range.end - 1
2446 {
2447 if self.buffer.read(cx).chars_at(range.start).next() == Some('\n') {
2448 prefix_end = Some(range.start + 1);
2449 } else if self.buffer.read(cx).reversed_chars_at(range.start).next()
2450 == Some('\n')
2451 {
2452 prefix_end = Some(range.start);
2453 }
2454 }
2455
2456 let version = self.version.clone();
2457 let selection = if let Some(prefix_end) = prefix_end {
2458 MessageAnchor {
2459 id: MessageId(self.next_timestamp()),
2460 start: self.buffer.read(cx).anchor_before(prefix_end),
2461 }
2462 } else {
2463 self.buffer.update(cx, |buffer, cx| {
2464 buffer.edit([(range.start..range.start, "\n")], None, cx)
2465 });
2466 edited_buffer = true;
2467 MessageAnchor {
2468 id: MessageId(self.next_timestamp()),
2469 start: self.buffer.read(cx).anchor_before(range.end + 1),
2470 }
2471 };
2472
2473 let selection_metadata = MessageMetadata {
2474 role,
2475 status: MessageStatus::Done,
2476 timestamp: selection.id.0,
2477 cache: None,
2478 };
2479 self.insert_message(selection.clone(), selection_metadata.clone(), cx);
2480 self.push_op(
2481 ContextOperation::InsertMessage {
2482 anchor: selection.clone(),
2483 metadata: selection_metadata,
2484 version,
2485 },
2486 cx,
2487 );
2488
2489 (Some(selection), Some(suffix))
2490 };
2491
2492 if !edited_buffer {
2493 cx.emit(ContextEvent::MessagesEdited);
2494 }
2495 new_messages
2496 } else {
2497 (None, None)
2498 }
2499 }
2500
2501 fn insert_message(
2502 &mut self,
2503 new_anchor: MessageAnchor,
2504 new_metadata: MessageMetadata,
2505 cx: &mut ModelContext<Self>,
2506 ) {
2507 cx.emit(ContextEvent::MessagesEdited);
2508
2509 self.messages_metadata.insert(new_anchor.id, new_metadata);
2510
2511 let buffer = self.buffer.read(cx);
2512 let insertion_ix = self
2513 .message_anchors
2514 .iter()
2515 .position(|anchor| {
2516 let comparison = new_anchor.start.cmp(&anchor.start, buffer);
2517 comparison.is_lt() || (comparison.is_eq() && new_anchor.id > anchor.id)
2518 })
2519 .unwrap_or(self.message_anchors.len());
2520 self.message_anchors.insert(insertion_ix, new_anchor);
2521 }
2522
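    /// Asks the active model to produce a short title for the context and stores it as
    /// the summary. Runs when the context has at least two messages and no summary yet,
    /// or unconditionally when `replace_old` is true; only the first line of the model's
    /// response is kept.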
2523 pub(super) fn summarize(&mut self, replace_old: bool, cx: &mut ModelContext<Self>) {
2524 let Some(provider) = LanguageModelRegistry::read_global(cx).active_provider() else {
2525 return;
2526 };
2527 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
2528 return;
2529 };
2530
2531 if replace_old || (self.message_anchors.len() >= 2 && self.summary.is_none()) {
2532 if !provider.is_authenticated(cx) {
2533 return;
2534 }
2535
2536 let messages = self
2537 .messages(cx)
2538 .filter_map(|message| message.to_request_message(self.buffer.read(cx)))
2539 .chain(Some(LanguageModelRequestMessage {
2540 role: Role::User,
2541 content: vec![
2542 "Summarize the context into a short title without punctuation.".into(),
2543 ],
2544 cache: false,
2545 }));
2546 let request = LanguageModelRequest {
2547 messages: messages.collect(),
2548 tools: Vec::new(),
2549 stop: Vec::new(),
2550 temperature: 1.0,
2551 };
2552
2553 self.pending_summary = cx.spawn(|this, mut cx| {
2554 async move {
2555 let stream = model.stream_completion_text(request, &cx);
2556 let mut messages = stream.await?;
2557
2558 let mut replaced = !replace_old;
2559 while let Some(message) = messages.next().await {
2560 let text = message?;
2561 let mut lines = text.lines();
2562 this.update(&mut cx, |this, cx| {
2563 let version = this.version.clone();
2564 let timestamp = this.next_timestamp();
2565 let summary = this.summary.get_or_insert(ContextSummary::default());
2566 if !replaced && replace_old {
2567 summary.text.clear();
2568 replaced = true;
2569 }
2570 summary.text.extend(lines.next());
2571 summary.timestamp = timestamp;
2572 let operation = ContextOperation::UpdateSummary {
2573 summary: summary.clone(),
2574 version,
2575 };
2576 this.push_op(operation, cx);
2577 cx.emit(ContextEvent::SummaryChanged);
2578 })?;
2579
2580 // Stop if the LLM generated multiple lines.
2581 if lines.next().is_some() {
2582 break;
2583 }
2584 }
2585
2586 this.update(&mut cx, |this, cx| {
2587 let version = this.version.clone();
2588 let timestamp = this.next_timestamp();
2589 if let Some(summary) = this.summary.as_mut() {
2590 summary.done = true;
2591 summary.timestamp = timestamp;
2592 let operation = ContextOperation::UpdateSummary {
2593 summary: summary.clone(),
2594 version,
2595 };
2596 this.push_op(operation, cx);
2597 cx.emit(ContextEvent::SummaryChanged);
2598 }
2599 })?;
2600
2601 anyhow::Ok(())
2602 }
2603 .log_err()
2604 });
2605 }
2606 }
2607
2608 fn message_for_offset(&self, offset: usize, cx: &AppContext) -> Option<Message> {
2609 self.messages_for_offsets([offset], cx).pop()
2610 }
2611
2612 pub fn messages_for_offsets(
2613 &self,
2614 offsets: impl IntoIterator<Item = usize>,
2615 cx: &AppContext,
2616 ) -> Vec<Message> {
2617 let mut result = Vec::new();
2618
2619 let mut messages = self.messages(cx).peekable();
2620 let mut offsets = offsets.into_iter().peekable();
2621 let mut current_message = messages.next();
2622 while let Some(offset) = offsets.next() {
2623 // Locate the message that contains the offset.
2624 while current_message.as_ref().map_or(false, |message| {
2625 !message.offset_range.contains(&offset) && messages.peek().is_some()
2626 }) {
2627 current_message = messages.next();
2628 }
2629 let Some(message) = current_message.as_ref() else {
2630 break;
2631 };
2632
2633 // Skip offsets that are in the same message.
2634 while offsets.peek().map_or(false, |offset| {
2635 message.offset_range.contains(offset) || messages.peek().is_none()
2636 }) {
2637 offsets.next();
2638 }
2639
2640 result.push(message.clone());
2641 }
2642 result
2643 }
2644
2645 fn messages_from_anchors<'a>(
2646 &'a self,
2647 message_anchors: impl Iterator<Item = &'a MessageAnchor> + 'a,
2648 cx: &'a AppContext,
2649 ) -> impl 'a + Iterator<Item = Message> {
2650 let buffer = self.buffer.read(cx);
2651 let messages = message_anchors.enumerate();
2652 let images = self.image_anchors.iter();
2653
2654 Self::messages_from_iters(buffer, &self.messages_metadata, messages, images)
2655 }
2656
2657 pub fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Message> {
2658 self.messages_from_anchors(self.message_anchors.iter(), cx)
2659 }
2660
2661 pub fn messages_from_iters<'a>(
2662 buffer: &'a Buffer,
2663 metadata: &'a HashMap<MessageId, MessageMetadata>,
2664 messages: impl Iterator<Item = (usize, &'a MessageAnchor)> + 'a,
2665 images: impl Iterator<Item = &'a ImageAnchor> + 'a,
2666 ) -> impl 'a + Iterator<Item = Message> {
2667 let mut messages = messages.peekable();
2668 let mut images = images.peekable();
2669
2670 iter::from_fn(move || {
2671 if let Some((start_ix, message_anchor)) = messages.next() {
2672 let metadata = metadata.get(&message_anchor.id)?;
2673
2674 let message_start = message_anchor.start.to_offset(buffer);
2675 let mut message_end = None;
2676 let mut end_ix = start_ix;
2677 while let Some((_, next_message)) = messages.peek() {
2678 if next_message.start.is_valid(buffer) {
2679 message_end = Some(next_message.start);
2680 break;
2681 } else {
2682 end_ix += 1;
2683 messages.next();
2684 }
2685 }
2686 let message_end_anchor = message_end.unwrap_or(language::Anchor::MAX);
2687 let message_end = message_end_anchor.to_offset(buffer);
2688
2689 let mut image_offsets = SmallVec::new();
2690 while let Some(image_anchor) = images.peek() {
2691 if image_anchor.anchor.cmp(&message_end_anchor, buffer).is_lt() {
2692 image_offsets.push((
2693 image_anchor.anchor.to_offset(buffer),
2694 MessageImage {
2695 image_id: image_anchor.image_id,
2696 image: image_anchor.image.clone(),
2697 },
2698 ));
2699 images.next();
2700 } else {
2701 break;
2702 }
2703 }
2704
2705 return Some(Message {
2706 index_range: start_ix..end_ix,
2707 offset_range: message_start..message_end,
2708 anchor_range: message_anchor.start..message_end_anchor,
2709 id: message_anchor.id,
2710 role: metadata.role,
2711 status: metadata.status.clone(),
2712 cache: metadata.cache.clone(),
2713 image_offsets,
2714 });
2715 }
2716 None
2717 })
2718 }
2719
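    /// Serializes the context and its images to disk under `contexts_dir()`, optionally
    /// after a debounce. Nothing is written until the summary is complete; the file is
    /// named after the summary plus a numeric discriminant to avoid collisions, and a
    /// previously saved file at a different path is removed. Remote contexts (non-default
    /// replica id) are never saved.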
2720 pub fn save(
2721 &mut self,
2722 debounce: Option<Duration>,
2723 fs: Arc<dyn Fs>,
2724 cx: &mut ModelContext<Context>,
2725 ) {
2726 if self.replica_id() != ReplicaId::default() {
2727 // Prevent saving a remote context for now.
2728 return;
2729 }
2730
2731 self.pending_save = cx.spawn(|this, mut cx| async move {
2732 if let Some(debounce) = debounce {
2733 cx.background_executor().timer(debounce).await;
2734 }
2735
2736 let (old_path, summary) = this.read_with(&cx, |this, _| {
2737 let path = this.path.clone();
2738 let summary = if let Some(summary) = this.summary.as_ref() {
2739 if summary.done {
2740 Some(summary.text.clone())
2741 } else {
2742 None
2743 }
2744 } else {
2745 None
2746 };
2747 (path, summary)
2748 })?;
2749
2750 if let Some(summary) = summary {
2751 this.read_with(&cx, |this, cx| this.serialize_images(fs.clone(), cx))?
2752 .await;
2753
2754 let context = this.read_with(&cx, |this, cx| this.serialize(cx))?;
2755 let mut discriminant = 1;
2756 let mut new_path;
2757 loop {
2758 new_path = contexts_dir().join(&format!(
2759 "{} - {}.zed.json",
2760 summary.trim(),
2761 discriminant
2762 ));
2763 if fs.is_file(&new_path).await {
2764 discriminant += 1;
2765 } else {
2766 break;
2767 }
2768 }
2769
2770 fs.create_dir(contexts_dir().as_ref()).await?;
2771 fs.atomic_write(new_path.clone(), serde_json::to_string(&context).unwrap())
2772 .await?;
2773 if let Some(old_path) = old_path {
2774 if new_path != old_path {
2775 fs.remove_file(
2776 &old_path,
2777 RemoveOptions {
2778 recursive: false,
2779 ignore_if_not_exists: true,
2780 },
2781 )
2782 .await?;
2783 }
2784 }
2785
2786 this.update(&mut cx, |this, _| this.path = Some(new_path))?;
2787 }
2788
2789 Ok(())
2790 });
2791 }
2792
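    /// Writes the base64-encoded language-model representation of each image to
    /// `context_images_dir()`, skipping images that already exist on disk.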
2793 pub fn serialize_images(&self, fs: Arc<dyn Fs>, cx: &AppContext) -> Task<()> {
2794 let mut images_to_save = self
2795 .images
2796 .iter()
2797 .map(|(id, (_, llm_image))| {
2798 let fs = fs.clone();
2799 let llm_image = llm_image.clone();
2800 let id = *id;
2801 async move {
2802 if let Some(llm_image) = llm_image.await {
2803 let path: PathBuf =
2804 context_images_dir().join(&format!("{}.png.base64", id));
2805 if fs
2806 .metadata(path.as_path())
2807 .await
2808 .log_err()
2809 .flatten()
2810 .is_none()
2811 {
2812 fs.atomic_write(path, llm_image.source.to_string())
2813 .await
2814 .log_err();
2815 }
2816 }
2817 }
2818 })
2819 .collect::<FuturesUnordered<_>>();
2820 cx.background_executor().spawn(async move {
2821 if fs
2822 .create_dir(context_images_dir().as_ref())
2823 .await
2824 .log_err()
2825 .is_some()
2826 {
                while images_to_save.next().await.is_some() {}
2828 }
2829 })
2830 }
2831
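    /// Replaces the context summary with user-provided text and marks it as done.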
2832 pub(crate) fn custom_summary(&mut self, custom_summary: String, cx: &mut ModelContext<Self>) {
2833 let timestamp = self.next_timestamp();
2834 let summary = self.summary.get_or_insert(ContextSummary::default());
2835 summary.timestamp = timestamp;
2836 summary.done = true;
2837 summary.text = custom_summary;
2838 cx.emit(ContextEvent::SummaryChanged);
2839 }
2840}
2841
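/// The replicated version of a context: one vector clock for context operations and one
/// for the operations of its underlying buffer.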
2842#[derive(Debug, Default)]
2843pub struct ContextVersion {
2844 context: clock::Global,
2845 buffer: clock::Global,
2846}
2847
2848impl ContextVersion {
2849 pub fn from_proto(proto: &proto::ContextVersion) -> Self {
2850 Self {
2851 context: language::proto::deserialize_version(&proto.context_version),
2852 buffer: language::proto::deserialize_version(&proto.buffer_version),
2853 }
2854 }
2855
2856 pub fn to_proto(&self, context_id: ContextId) -> proto::ContextVersion {
2857 proto::ContextVersion {
2858 context_id: context_id.to_proto(),
2859 context_version: language::proto::serialize_version(&self.context),
2860 buffer_version: language::proto::serialize_version(&self.buffer),
2861 }
2862 }
2863}
2864
2865#[derive(Debug, Clone)]
2866pub struct PendingSlashCommand {
2867 pub name: String,
2868 pub arguments: SmallVec<[String; 3]>,
2869 pub status: PendingSlashCommandStatus,
2870 pub source_range: Range<language::Anchor>,
2871}
2872
2873#[derive(Debug, Clone)]
2874pub enum PendingSlashCommandStatus {
2875 Idle,
2876 Running { _task: Shared<Task<()>> },
2877 Error(String),
2878}
2879
2880pub(crate) struct ToolUseFeatureFlag;
2881
2882impl FeatureFlag for ToolUseFeatureFlag {
2883 const NAME: &'static str = "assistant-tool-use";
2884
2885 fn enabled_for_staff() -> bool {
2886 false
2887 }
2888}
2889
2890#[derive(Debug, Clone)]
2891pub struct PendingToolUse {
2892 pub id: Arc<str>,
2893 pub name: String,
2894 pub input: serde_json::Value,
2895 pub status: PendingToolUseStatus,
2896 pub source_range: Range<language::Anchor>,
2897}
2898
2899#[derive(Debug, Clone)]
2900pub enum PendingToolUseStatus {
2901 Idle,
2902 Running { _task: Shared<Task<()>> },
2903 Error(String),
2904}
2905
2906impl PendingToolUseStatus {
2907 pub fn is_idle(&self) -> bool {
2908 matches!(self, PendingToolUseStatus::Idle)
2909 }
2910}
2911
2912#[derive(Serialize, Deserialize)]
2913pub struct SavedMessage {
2914 pub id: MessageId,
2915 pub start: usize,
2916 pub metadata: MessageMetadata,
    // Defaulted for backwards compatibility with JSON files created before
    // August 2024, which did not include this field.
    #[serde(default)]
    pub image_offsets: Vec<(usize, u64)>,
2920}
2921
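/// The on-disk representation of a context. The current format is version 0.4.0; older
/// saved contexts are upgraded when loaded via `SavedContext::from_json`.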
2922#[derive(Serialize, Deserialize)]
2923pub struct SavedContext {
2924 pub id: Option<ContextId>,
2925 pub zed: String,
2926 pub version: String,
2927 pub text: String,
2928 pub messages: Vec<SavedMessage>,
2929 pub summary: String,
2930 pub slash_command_output_sections:
2931 Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
2932}
2933
2934impl SavedContext {
2935 pub const VERSION: &'static str = "0.4.0";
2936
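    /// Deserializes a saved context from JSON, upgrading files written in formats
    /// 0.1.0 through 0.3.0 to the current version.
    ///
    /// A minimal sketch, assuming `json` holds the contents of a saved context file:
    ///
    /// ```ignore
    /// let saved = SavedContext::from_json(&json)?;
    /// assert_eq!(saved.version, SavedContext::VERSION);
    /// ```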
2937 pub fn from_json(json: &str) -> Result<Self> {
2938 let saved_context_json = serde_json::from_str::<serde_json::Value>(json)?;
2939 match saved_context_json
2940 .get("version")
2941 .ok_or_else(|| anyhow!("version not found"))?
2942 {
2943 serde_json::Value::String(version) => match version.as_str() {
2944 SavedContext::VERSION => {
2945 Ok(serde_json::from_value::<SavedContext>(saved_context_json)?)
2946 }
2947 SavedContextV0_3_0::VERSION => {
2948 let saved_context =
2949 serde_json::from_value::<SavedContextV0_3_0>(saved_context_json)?;
2950 Ok(saved_context.upgrade())
2951 }
2952 SavedContextV0_2_0::VERSION => {
2953 let saved_context =
2954 serde_json::from_value::<SavedContextV0_2_0>(saved_context_json)?;
2955 Ok(saved_context.upgrade())
2956 }
2957 SavedContextV0_1_0::VERSION => {
2958 let saved_context =
2959 serde_json::from_value::<SavedContextV0_1_0>(saved_context_json)?;
2960 Ok(saved_context.upgrade())
2961 }
2962 _ => Err(anyhow!("unrecognized saved context version: {}", version)),
2963 },
2964 _ => Err(anyhow!("version not found on saved context")),
2965 }
2966 }
2967
2968 fn into_ops(
2969 self,
2970 buffer: &Model<Buffer>,
2971 cx: &mut ModelContext<Context>,
2972 ) -> Vec<ContextOperation> {
2973 let mut operations = Vec::new();
2974 let mut version = clock::Global::new();
2975 let mut next_timestamp = clock::Lamport::new(ReplicaId::default());
2976
2977 let mut first_message_metadata = None;
2978 for message in self.messages {
2979 if message.id == MessageId(clock::Lamport::default()) {
2980 first_message_metadata = Some(message.metadata);
2981 } else {
2982 operations.push(ContextOperation::InsertMessage {
2983 anchor: MessageAnchor {
2984 id: message.id,
2985 start: buffer.read(cx).anchor_before(message.start),
2986 },
2987 metadata: MessageMetadata {
2988 role: message.metadata.role,
2989 status: message.metadata.status,
2990 timestamp: message.metadata.timestamp,
2991 cache: None,
2992 },
2993 version: version.clone(),
2994 });
2995 version.observe(message.id.0);
2996 next_timestamp.observe(message.id.0);
2997 }
2998 }
2999
3000 if let Some(metadata) = first_message_metadata {
3001 let timestamp = next_timestamp.tick();
3002 operations.push(ContextOperation::UpdateMessage {
3003 message_id: MessageId(clock::Lamport::default()),
3004 metadata: MessageMetadata {
3005 role: metadata.role,
3006 status: metadata.status,
3007 timestamp,
3008 cache: None,
3009 },
3010 version: version.clone(),
3011 });
3012 version.observe(timestamp);
3013 }
3014
3015 let timestamp = next_timestamp.tick();
3016 operations.push(ContextOperation::SlashCommandFinished {
3017 id: SlashCommandId(timestamp),
3018 output_range: language::Anchor::MIN..language::Anchor::MAX,
3019 sections: self
3020 .slash_command_output_sections
3021 .into_iter()
3022 .map(|section| {
3023 let buffer = buffer.read(cx);
3024 SlashCommandOutputSection {
3025 range: buffer.anchor_after(section.range.start)
3026 ..buffer.anchor_before(section.range.end),
3027 icon: section.icon,
3028 label: section.label,
3029 }
3030 })
3031 .collect(),
3032 version: version.clone(),
3033 });
3034 version.observe(timestamp);
3035
3036 let timestamp = next_timestamp.tick();
3037 operations.push(ContextOperation::UpdateSummary {
3038 summary: ContextSummary {
3039 text: self.summary,
3040 done: true,
3041 timestamp,
3042 },
3043 version: version.clone(),
3044 });
3045 version.observe(timestamp);
3046
3047 operations
3048 }
3049}
3050
3051#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
3052struct SavedMessageIdPreV0_4_0(usize);
3053
3054#[derive(Serialize, Deserialize)]
3055struct SavedMessagePreV0_4_0 {
3056 id: SavedMessageIdPreV0_4_0,
3057 start: usize,
3058}
3059
3060#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
3061struct SavedMessageMetadataPreV0_4_0 {
3062 role: Role,
3063 status: MessageStatus,
3064}
3065
3066#[derive(Serialize, Deserialize)]
3067struct SavedContextV0_3_0 {
3068 id: Option<ContextId>,
3069 zed: String,
3070 version: String,
3071 text: String,
3072 messages: Vec<SavedMessagePreV0_4_0>,
3073 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3074 summary: String,
3075 slash_command_output_sections: Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
3076}
3077
3078impl SavedContextV0_3_0 {
3079 const VERSION: &'static str = "0.3.0";
3080
3081 fn upgrade(self) -> SavedContext {
3082 SavedContext {
3083 id: self.id,
3084 zed: self.zed,
3085 version: SavedContext::VERSION.into(),
3086 text: self.text,
3087 messages: self
3088 .messages
3089 .into_iter()
3090 .filter_map(|message| {
3091 let metadata = self.message_metadata.get(&message.id)?;
3092 let timestamp = clock::Lamport {
3093 replica_id: ReplicaId::default(),
3094 value: message.id.0 as u32,
3095 };
3096 Some(SavedMessage {
3097 id: MessageId(timestamp),
3098 start: message.start,
3099 metadata: MessageMetadata {
3100 role: metadata.role,
3101 status: metadata.status.clone(),
3102 timestamp,
3103 cache: None,
3104 },
3105 image_offsets: Vec::new(),
3106 })
3107 })
3108 .collect(),
3109 summary: self.summary,
3110 slash_command_output_sections: self.slash_command_output_sections,
3111 }
3112 }
3113}
3114
3115#[derive(Serialize, Deserialize)]
3116struct SavedContextV0_2_0 {
3117 id: Option<ContextId>,
3118 zed: String,
3119 version: String,
3120 text: String,
3121 messages: Vec<SavedMessagePreV0_4_0>,
3122 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3123 summary: String,
3124}
3125
3126impl SavedContextV0_2_0 {
3127 const VERSION: &'static str = "0.2.0";
3128
3129 fn upgrade(self) -> SavedContext {
3130 SavedContextV0_3_0 {
3131 id: self.id,
3132 zed: self.zed,
3133 version: SavedContextV0_3_0::VERSION.to_string(),
3134 text: self.text,
3135 messages: self.messages,
3136 message_metadata: self.message_metadata,
3137 summary: self.summary,
3138 slash_command_output_sections: Vec::new(),
3139 }
3140 .upgrade()
3141 }
3142}
3143
3144#[derive(Serialize, Deserialize)]
3145struct SavedContextV0_1_0 {
3146 id: Option<ContextId>,
3147 zed: String,
3148 version: String,
3149 text: String,
3150 messages: Vec<SavedMessagePreV0_4_0>,
3151 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3152 summary: String,
3153 api_url: Option<String>,
3154 model: OpenAiModel,
3155}
3156
3157impl SavedContextV0_1_0 {
3158 const VERSION: &'static str = "0.1.0";
3159
3160 fn upgrade(self) -> SavedContext {
3161 SavedContextV0_2_0 {
3162 id: self.id,
3163 zed: self.zed,
3164 version: SavedContextV0_2_0::VERSION.to_string(),
3165 text: self.text,
3166 messages: self.messages,
3167 message_metadata: self.message_metadata,
3168 summary: self.summary,
3169 }
3170 .upgrade()
3171 }
3172}
3173
3174#[derive(Clone)]
3175pub struct SavedContextMetadata {
3176 pub title: String,
3177 pub path: PathBuf,
3178 pub mtime: chrono::DateTime<chrono::Local>,
3179}