1#[cfg(test)]
2mod context_tests;
3
4use crate::{
5 prompts::PromptBuilder, slash_command::SlashCommandLine, MessageId, MessageStatus,
6 WorkflowStep, WorkflowStepEdit, WorkflowStepResolution, WorkflowSuggestionGroup,
7};
8use anyhow::{anyhow, Context as _, Result};
9use assistant_slash_command::{
10 SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry,
11};
12use assistant_tool::ToolRegistry;
13use client::{self, proto, telemetry::Telemetry};
14use clock::ReplicaId;
15use collections::{HashMap, HashSet};
16use feature_flags::{FeatureFlag, FeatureFlagAppExt};
17use fs::{Fs, RemoveOptions};
18use futures::{
19 future::{self, Shared},
20 FutureExt, StreamExt,
21};
22use gpui::{
23 AppContext, AsyncAppContext, Context as _, EventEmitter, Model, ModelContext, RenderImage,
24 SharedString, Subscription, Task,
25};
26
27use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset};
28use language_model::{
29 LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionEvent,
30 LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
31 LanguageModelRequestTool, LanguageModelToolResult, LanguageModelToolUse, MessageContent, Role,
32 StopReason,
33};
34use open_ai::Model as OpenAiModel;
35use paths::contexts_dir;
36use project::Project;
37use serde::{Deserialize, Serialize};
38use smallvec::SmallVec;
39use std::{
40 cmp::{self, max, Ordering},
41 fmt::Debug,
42 iter, mem,
43 ops::Range,
44 path::{Path, PathBuf},
45 str::FromStr as _,
46 sync::Arc,
47 time::{Duration, Instant},
48};
49use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase};
50use text::BufferSnapshot;
51use util::{post_inc, ResultExt, TryFutureExt};
52use uuid::Uuid;
53
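/// A stable identifier for a context, backed by a freshly generated UUID string
/// so it can be serialized and shared with collaborators.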
54#[derive(Clone, Eq, PartialEq, Hash, PartialOrd, Ord, Serialize, Deserialize)]
55pub struct ContextId(String);
56
57impl ContextId {
58 pub fn new() -> Self {
59 Self(Uuid::new_v4().to_string())
60 }
61
62 pub fn from_proto(id: String) -> Self {
63 Self(id)
64 }
65
66 pub fn to_proto(&self) -> String {
67 self.0.clone()
68 }
69}
70
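/// A replicated operation on a [`Context`]. Each non-buffer variant records the
/// vector-clock `version` at which it was created; `can_apply_op` uses that
/// version to defer applying an operation until everything it depends on has
/// been observed locally.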
71#[derive(Clone, Debug)]
72pub enum ContextOperation {
73 InsertMessage {
74 anchor: MessageAnchor,
75 metadata: MessageMetadata,
76 version: clock::Global,
77 },
78 UpdateMessage {
79 message_id: MessageId,
80 metadata: MessageMetadata,
81 version: clock::Global,
82 },
83 UpdateSummary {
84 summary: ContextSummary,
85 version: clock::Global,
86 },
87 SlashCommandFinished {
88 id: SlashCommandId,
89 output_range: Range<language::Anchor>,
90 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
91 version: clock::Global,
92 },
93 BufferOperation(language::Operation),
94}
95
96impl ContextOperation {
97 pub fn from_proto(op: proto::ContextOperation) -> Result<Self> {
98 match op.variant.context("invalid variant")? {
99 proto::context_operation::Variant::InsertMessage(insert) => {
100 let message = insert.message.context("invalid message")?;
101 let id = MessageId(language::proto::deserialize_timestamp(
102 message.id.context("invalid id")?,
103 ));
104 Ok(Self::InsertMessage {
105 anchor: MessageAnchor {
106 id,
107 start: language::proto::deserialize_anchor(
108 message.start.context("invalid anchor")?,
109 )
110 .context("invalid anchor")?,
111 },
112 metadata: MessageMetadata {
113 role: Role::from_proto(message.role),
114 status: MessageStatus::from_proto(
115 message.status.context("invalid status")?,
116 ),
117 timestamp: id.0,
118 cache: None,
119 },
120 version: language::proto::deserialize_version(&insert.version),
121 })
122 }
123 proto::context_operation::Variant::UpdateMessage(update) => Ok(Self::UpdateMessage {
124 message_id: MessageId(language::proto::deserialize_timestamp(
125 update.message_id.context("invalid message id")?,
126 )),
127 metadata: MessageMetadata {
128 role: Role::from_proto(update.role),
129 status: MessageStatus::from_proto(update.status.context("invalid status")?),
130 timestamp: language::proto::deserialize_timestamp(
131 update.timestamp.context("invalid timestamp")?,
132 ),
133 cache: None,
134 },
135 version: language::proto::deserialize_version(&update.version),
136 }),
137 proto::context_operation::Variant::UpdateSummary(update) => Ok(Self::UpdateSummary {
138 summary: ContextSummary {
139 text: update.summary,
140 done: update.done,
141 timestamp: language::proto::deserialize_timestamp(
142 update.timestamp.context("invalid timestamp")?,
143 ),
144 },
145 version: language::proto::deserialize_version(&update.version),
146 }),
147 proto::context_operation::Variant::SlashCommandFinished(finished) => {
148 Ok(Self::SlashCommandFinished {
149 id: SlashCommandId(language::proto::deserialize_timestamp(
150 finished.id.context("invalid id")?,
151 )),
152 output_range: language::proto::deserialize_anchor_range(
153 finished.output_range.context("invalid range")?,
154 )?,
155 sections: finished
156 .sections
157 .into_iter()
158 .map(|section| {
159 Ok(SlashCommandOutputSection {
160 range: language::proto::deserialize_anchor_range(
161 section.range.context("invalid range")?,
162 )?,
163 icon: section.icon_name.parse()?,
164 label: section.label.into(),
165 metadata: section
166 .metadata
167 .and_then(|metadata| serde_json::from_str(&metadata).log_err()),
168 })
169 })
170 .collect::<Result<Vec<_>>>()?,
171 version: language::proto::deserialize_version(&finished.version),
172 })
173 }
174 proto::context_operation::Variant::BufferOperation(op) => Ok(Self::BufferOperation(
175 language::proto::deserialize_operation(
176 op.operation.context("invalid buffer operation")?,
177 )?,
178 )),
179 }
180 }
181
182 pub fn to_proto(&self) -> proto::ContextOperation {
183 match self {
184 Self::InsertMessage {
185 anchor,
186 metadata,
187 version,
188 } => proto::ContextOperation {
189 variant: Some(proto::context_operation::Variant::InsertMessage(
190 proto::context_operation::InsertMessage {
191 message: Some(proto::ContextMessage {
192 id: Some(language::proto::serialize_timestamp(anchor.id.0)),
193 start: Some(language::proto::serialize_anchor(&anchor.start)),
194 role: metadata.role.to_proto() as i32,
195 status: Some(metadata.status.to_proto()),
196 }),
197 version: language::proto::serialize_version(version),
198 },
199 )),
200 },
201 Self::UpdateMessage {
202 message_id,
203 metadata,
204 version,
205 } => proto::ContextOperation {
206 variant: Some(proto::context_operation::Variant::UpdateMessage(
207 proto::context_operation::UpdateMessage {
208 message_id: Some(language::proto::serialize_timestamp(message_id.0)),
209 role: metadata.role.to_proto() as i32,
210 status: Some(metadata.status.to_proto()),
211 timestamp: Some(language::proto::serialize_timestamp(metadata.timestamp)),
212 version: language::proto::serialize_version(version),
213 },
214 )),
215 },
216 Self::UpdateSummary { summary, version } => proto::ContextOperation {
217 variant: Some(proto::context_operation::Variant::UpdateSummary(
218 proto::context_operation::UpdateSummary {
219 summary: summary.text.clone(),
220 done: summary.done,
221 timestamp: Some(language::proto::serialize_timestamp(summary.timestamp)),
222 version: language::proto::serialize_version(version),
223 },
224 )),
225 },
226 Self::SlashCommandFinished {
227 id,
228 output_range,
229 sections,
230 version,
231 } => proto::ContextOperation {
232 variant: Some(proto::context_operation::Variant::SlashCommandFinished(
233 proto::context_operation::SlashCommandFinished {
234 id: Some(language::proto::serialize_timestamp(id.0)),
235 output_range: Some(language::proto::serialize_anchor_range(
236 output_range.clone(),
237 )),
238 sections: sections
239 .iter()
240 .map(|section| {
241 let icon_name: &'static str = section.icon.into();
242 proto::SlashCommandOutputSection {
243 range: Some(language::proto::serialize_anchor_range(
244 section.range.clone(),
245 )),
246 icon_name: icon_name.to_string(),
247 label: section.label.to_string(),
248 metadata: section.metadata.as_ref().and_then(|metadata| {
249 serde_json::to_string(metadata).log_err()
250 }),
251 }
252 })
253 .collect(),
254 version: language::proto::serialize_version(version),
255 },
256 )),
257 },
258 Self::BufferOperation(operation) => proto::ContextOperation {
259 variant: Some(proto::context_operation::Variant::BufferOperation(
260 proto::context_operation::BufferOperation {
261 operation: Some(language::proto::serialize_operation(operation)),
262 },
263 )),
264 },
265 }
266 }
267
268 fn timestamp(&self) -> clock::Lamport {
269 match self {
270 Self::InsertMessage { anchor, .. } => anchor.id.0,
271 Self::UpdateMessage { metadata, .. } => metadata.timestamp,
272 Self::UpdateSummary { summary, .. } => summary.timestamp,
273 Self::SlashCommandFinished { id, .. } => id.0,
274 Self::BufferOperation(_) => {
275 panic!("reading the timestamp of a buffer operation is not supported")
276 }
277 }
278 }
279
280 /// Returns the current version of the context operation.
281 pub fn version(&self) -> &clock::Global {
282 match self {
283 Self::InsertMessage { version, .. }
284 | Self::UpdateMessage { version, .. }
285 | Self::UpdateSummary { version, .. }
286 | Self::SlashCommandFinished { version, .. } => version,
287 Self::BufferOperation(_) => {
288 panic!("reading the version of a buffer operation is not supported")
289 }
290 }
291 }
292}
293
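/// Events emitted by a [`Context`] so views and collaborators can react to
/// message edits, streamed completions, slash command and tool lifecycle
/// changes, and operations that need to be broadcast.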
294#[derive(Debug, Clone)]
295pub enum ContextEvent {
296 ShowAssistError(SharedString),
297 MessagesEdited,
298 SummaryChanged,
299 StreamedCompletion,
300 WorkflowStepsUpdated {
301 removed: Vec<Range<language::Anchor>>,
302 updated: Vec<Range<language::Anchor>>,
303 },
304 PendingSlashCommandsUpdated {
305 removed: Vec<Range<language::Anchor>>,
306 updated: Vec<PendingSlashCommand>,
307 },
308 SlashCommandFinished {
309 output_range: Range<language::Anchor>,
310 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
311 run_commands_in_output: bool,
312 expand_result: bool,
313 },
314 UsePendingTools,
315 ToolFinished {
316 tool_use_id: Arc<str>,
317 output_range: Range<language::Anchor>,
318 },
319 Operation(ContextOperation),
320}
321
322#[derive(Clone, Default, Debug)]
323pub struct ContextSummary {
324 pub text: String,
325 done: bool,
326 timestamp: clock::Lamport,
327}
328
329#[derive(Clone, Debug, Eq, PartialEq)]
330pub struct MessageAnchor {
331 pub id: MessageId,
332 pub start: language::Anchor,
333}
334
335#[derive(Clone, Debug, Eq, PartialEq)]
336pub enum CacheStatus {
337 Pending,
338 Cached,
339}
340
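/// Prompt-caching state for a message: whether it is a cache anchor, whether it
/// is the final anchor, its current cache status, and the buffer version that
/// was current when it was cached.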
341#[derive(Clone, Debug, Eq, PartialEq)]
342pub struct MessageCacheMetadata {
343 pub is_anchor: bool,
344 pub is_final_anchor: bool,
345 pub status: CacheStatus,
346 pub cached_at: clock::Global,
347}
348
349#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
350pub struct MessageMetadata {
351 pub role: Role,
352 pub status: MessageStatus,
353 pub(crate) timestamp: clock::Lamport,
354 #[serde(skip)]
355 pub cache: Option<MessageCacheMetadata>,
356}
357
358impl From<&Message> for MessageMetadata {
359 fn from(message: &Message) -> Self {
360 Self {
361 role: message.role,
362 status: message.status.clone(),
363 timestamp: message.id.0,
364 cache: message.cache.clone(),
365 }
366 }
367}
368
369impl MessageMetadata {
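    /// Returns true if this message was marked for caching and the given buffer
    /// range has not been edited since the version recorded in `cached_at`.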
370 pub fn is_cache_valid(&self, buffer: &BufferSnapshot, range: &Range<usize>) -> bool {
371 let result = match &self.cache {
372 Some(MessageCacheMetadata { cached_at, .. }) => !buffer.has_edits_since_in_range(
373 &cached_at,
374 Range {
375 start: buffer.anchor_at(range.start, Bias::Right),
376 end: buffer.anchor_at(range.end, Bias::Left),
377 },
378 ),
379 _ => false,
380 };
381 result
382 }
383}
384
385#[derive(Clone, Debug)]
386pub struct Message {
387 pub offset_range: Range<usize>,
388 pub index_range: Range<usize>,
389 pub anchor_range: Range<language::Anchor>,
390 pub id: MessageId,
391 pub role: Role,
392 pub status: MessageStatus,
393 pub cache: Option<MessageCacheMetadata>,
394}
395
396#[derive(Debug, Clone)]
397pub enum Content {
398 Image {
399 anchor: language::Anchor,
400 image_id: u64,
401 render_image: Arc<RenderImage>,
402 image: Shared<Task<Option<LanguageModelImage>>>,
403 },
404 ToolUse {
405 range: Range<language::Anchor>,
406 tool_use: LanguageModelToolUse,
407 },
408 ToolResult {
409 range: Range<language::Anchor>,
410 tool_use_id: Arc<str>,
411 },
412}
413
414impl Content {
415 fn range(&self) -> Range<language::Anchor> {
416 match self {
417 Self::Image { anchor, .. } => *anchor..*anchor,
418 Self::ToolUse { range, .. } | Self::ToolResult { range, .. } => range.clone(),
419 }
420 }
421
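    /// Orders content by buffer position, treating overlapping ranges as equal
    /// so a binary search can locate the content at a given anchor.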
422 fn cmp(&self, other: &Self, buffer: &BufferSnapshot) -> Ordering {
423 let self_range = self.range();
424 let other_range = other.range();
425 if self_range.end.cmp(&other_range.start, buffer).is_lt() {
426 Ordering::Less
427 } else if self_range.start.cmp(&other_range.end, buffer).is_gt() {
428 Ordering::Greater
429 } else {
430 Ordering::Equal
431 }
432 }
433}
434
435struct PendingCompletion {
436 id: usize,
437 assistant_message_id: MessageId,
438 _task: Task<()>,
439}
440
441#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
442pub struct SlashCommandId(clock::Lamport);
443
444#[derive(Clone, Debug)]
445pub struct XmlTag {
446 pub kind: XmlTagKind,
447 pub range: Range<text::Anchor>,
448 pub is_open_tag: bool,
449}
450
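/// The tag names (serialized as snake_case) used to delimit workflow steps and
/// the edits, paths, operations, and descriptions nested inside them.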
451#[derive(Copy, Clone, Debug, strum::EnumString, PartialEq, Eq, strum::AsRefStr)]
452#[strum(serialize_all = "snake_case")]
453pub enum XmlTagKind {
454 Step,
455 Edit,
456 Path,
457 Search,
458 Within,
459 Operation,
460 Description,
461}
462
463pub struct Context {
464 id: ContextId,
465 timestamp: clock::Lamport,
466 version: clock::Global,
467 pending_ops: Vec<ContextOperation>,
468 operations: Vec<ContextOperation>,
469 buffer: Model<Buffer>,
470 pending_slash_commands: Vec<PendingSlashCommand>,
471 edits_since_last_parse: language::Subscription,
472 finished_slash_commands: HashSet<SlashCommandId>,
473 slash_command_output_sections: Vec<SlashCommandOutputSection<language::Anchor>>,
474 pending_tool_uses_by_id: HashMap<Arc<str>, PendingToolUse>,
475 message_anchors: Vec<MessageAnchor>,
476 contents: Vec<Content>,
477 messages_metadata: HashMap<MessageId, MessageMetadata>,
478 summary: Option<ContextSummary>,
479 pending_summary: Task<Option<()>>,
480 completion_count: usize,
481 pending_completions: Vec<PendingCompletion>,
482 token_count: Option<usize>,
483 pending_token_count: Task<Option<()>>,
484 pending_save: Task<Result<()>>,
485 pending_cache_warming_task: Task<Option<()>>,
486 path: Option<PathBuf>,
487 _subscriptions: Vec<Subscription>,
488 telemetry: Option<Arc<Telemetry>>,
489 language_registry: Arc<LanguageRegistry>,
490 workflow_steps: Vec<WorkflowStep>,
491 xml_tags: Vec<XmlTag>,
492 project: Option<Model<Project>>,
493 prompt_builder: Arc<PromptBuilder>,
494}
495
496trait ContextAnnotation {
497 fn range(&self) -> &Range<language::Anchor>;
498}
499
500impl ContextAnnotation for PendingSlashCommand {
501 fn range(&self) -> &Range<language::Anchor> {
502 &self.source_range
503 }
504}
505
506impl ContextAnnotation for WorkflowStep {
507 fn range(&self) -> &Range<language::Anchor> {
508 &self.range
509 }
510}
511
512impl ContextAnnotation for XmlTag {
513 fn range(&self) -> &Range<language::Anchor> {
514 &self.range
515 }
516}
517
518impl EventEmitter<ContextEvent> for Context {}
519
520impl Context {
521 pub fn local(
522 language_registry: Arc<LanguageRegistry>,
523 project: Option<Model<Project>>,
524 telemetry: Option<Arc<Telemetry>>,
525 prompt_builder: Arc<PromptBuilder>,
526 cx: &mut ModelContext<Self>,
527 ) -> Self {
528 Self::new(
529 ContextId::new(),
530 ReplicaId::default(),
531 language::Capability::ReadWrite,
532 language_registry,
533 prompt_builder,
534 project,
535 telemetry,
536 cx,
537 )
538 }
539
540 #[allow(clippy::too_many_arguments)]
541 pub fn new(
542 id: ContextId,
543 replica_id: ReplicaId,
544 capability: language::Capability,
545 language_registry: Arc<LanguageRegistry>,
546 prompt_builder: Arc<PromptBuilder>,
547 project: Option<Model<Project>>,
548 telemetry: Option<Arc<Telemetry>>,
549 cx: &mut ModelContext<Self>,
550 ) -> Self {
551 let buffer = cx.new_model(|_cx| {
552 let buffer = Buffer::remote(
553 language::BufferId::new(1).unwrap(),
554 replica_id,
555 capability,
556 "",
557 );
558 buffer.set_language_registry(language_registry.clone());
559 buffer
560 });
561 let edits_since_last_slash_command_parse =
562 buffer.update(cx, |buffer, _| buffer.subscribe());
563 let mut this = Self {
564 id,
565 timestamp: clock::Lamport::new(replica_id),
566 version: clock::Global::new(),
567 pending_ops: Vec::new(),
568 operations: Vec::new(),
569 message_anchors: Default::default(),
570 contents: Default::default(),
571 messages_metadata: Default::default(),
572 pending_slash_commands: Vec::new(),
573 finished_slash_commands: HashSet::default(),
574 pending_tool_uses_by_id: HashMap::default(),
575 slash_command_output_sections: Vec::new(),
576 edits_since_last_parse: edits_since_last_slash_command_parse,
577 summary: None,
578 pending_summary: Task::ready(None),
579 completion_count: Default::default(),
580 pending_completions: Default::default(),
581 token_count: None,
582 pending_token_count: Task::ready(None),
583 pending_cache_warming_task: Task::ready(None),
584 _subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)],
585 pending_save: Task::ready(Ok(())),
586 path: None,
587 buffer,
588 telemetry,
589 project,
590 language_registry,
591 workflow_steps: Vec::new(),
592 xml_tags: Vec::new(),
593 prompt_builder,
594 };
595
596 let first_message_id = MessageId(clock::Lamport {
597 replica_id: 0,
598 value: 0,
599 });
600 let message = MessageAnchor {
601 id: first_message_id,
602 start: language::Anchor::MIN,
603 };
604 this.messages_metadata.insert(
605 first_message_id,
606 MessageMetadata {
607 role: Role::User,
608 status: MessageStatus::Done,
609 timestamp: first_message_id.0,
610 cache: None,
611 },
612 );
613 this.message_anchors.push(message);
614
615 this.set_language(cx);
616 this.count_remaining_tokens(cx);
617 this
618 }
619
620 pub(crate) fn serialize(&self, cx: &AppContext) -> SavedContext {
621 let buffer = self.buffer.read(cx);
622 SavedContext {
623 id: Some(self.id.clone()),
624 zed: "context".into(),
625 version: SavedContext::VERSION.into(),
626 text: buffer.text(),
627 messages: self
628 .messages(cx)
629 .map(|message| SavedMessage {
630 id: message.id,
631 start: message.offset_range.start,
632 metadata: self.messages_metadata[&message.id].clone(),
633 })
634 .collect(),
635 summary: self
636 .summary
637 .as_ref()
638 .map(|summary| summary.text.clone())
639 .unwrap_or_default(),
640 slash_command_output_sections: self
641 .slash_command_output_sections
642 .iter()
643 .filter_map(|section| {
644 if section.is_valid(buffer) {
645 let range = section.range.to_offset(buffer);
646 Some(assistant_slash_command::SlashCommandOutputSection {
647 range,
648 icon: section.icon,
649 label: section.label.clone(),
650 metadata: section.metadata.clone(),
651 })
652 } else {
653 None
654 }
655 })
656 .collect(),
657 }
658 }
659
660 #[allow(clippy::too_many_arguments)]
661 pub fn deserialize(
662 saved_context: SavedContext,
663 path: PathBuf,
664 language_registry: Arc<LanguageRegistry>,
665 prompt_builder: Arc<PromptBuilder>,
666 project: Option<Model<Project>>,
667 telemetry: Option<Arc<Telemetry>>,
668 cx: &mut ModelContext<Self>,
669 ) -> Self {
670 let id = saved_context.id.clone().unwrap_or_else(ContextId::new);
671 let mut this = Self::new(
672 id,
673 ReplicaId::default(),
674 language::Capability::ReadWrite,
675 language_registry,
676 prompt_builder,
677 project,
678 telemetry,
679 cx,
680 );
681 this.path = Some(path);
682 this.buffer.update(cx, |buffer, cx| {
683 buffer.set_text(saved_context.text.as_str(), cx)
684 });
685 let operations = saved_context.into_ops(&this.buffer, cx);
686 this.apply_ops(operations, cx);
687 this
688 }
689
690 pub fn id(&self) -> &ContextId {
691 &self.id
692 }
693
694 pub fn replica_id(&self) -> ReplicaId {
695 self.timestamp.replica_id
696 }
697
698 pub fn version(&self, cx: &AppContext) -> ContextVersion {
699 ContextVersion {
700 context: self.version.clone(),
701 buffer: self.buffer.read(cx).version(),
702 }
703 }
704
705 pub fn set_capability(
706 &mut self,
707 capability: language::Capability,
708 cx: &mut ModelContext<Self>,
709 ) {
710 self.buffer
711 .update(cx, |buffer, cx| buffer.set_capability(capability, cx));
712 }
713
714 fn next_timestamp(&mut self) -> clock::Lamport {
715 let timestamp = self.timestamp.tick();
716 self.version.observe(timestamp);
717 timestamp
718 }
719
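    /// Serializes every buffer and context operation that `since` has not yet
    /// observed, so a remote replica can catch up to this context's state.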
720 pub fn serialize_ops(
721 &self,
722 since: &ContextVersion,
723 cx: &AppContext,
724 ) -> Task<Vec<proto::ContextOperation>> {
725 let buffer_ops = self
726 .buffer
727 .read(cx)
728 .serialize_ops(Some(since.buffer.clone()), cx);
729
730 let mut context_ops = self
731 .operations
732 .iter()
733 .filter(|op| !since.context.observed(op.timestamp()))
734 .cloned()
735 .collect::<Vec<_>>();
736 context_ops.extend(self.pending_ops.iter().cloned());
737
738 cx.background_executor().spawn(async move {
739 let buffer_ops = buffer_ops.await;
740 context_ops.sort_unstable_by_key(|op| op.timestamp());
741 buffer_ops
742 .into_iter()
743 .map(|op| proto::ContextOperation {
744 variant: Some(proto::context_operation::Variant::BufferOperation(
745 proto::context_operation::BufferOperation {
746 operation: Some(op),
747 },
748 )),
749 })
750 .chain(context_ops.into_iter().map(|op| op.to_proto()))
751 .collect()
752 })
753 }
754
755 pub fn apply_ops(
756 &mut self,
757 ops: impl IntoIterator<Item = ContextOperation>,
758 cx: &mut ModelContext<Self>,
759 ) {
760 let mut buffer_ops = Vec::new();
761 for op in ops {
762 match op {
763 ContextOperation::BufferOperation(buffer_op) => buffer_ops.push(buffer_op),
764 op @ _ => self.pending_ops.push(op),
765 }
766 }
767 self.buffer
768 .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx));
769 self.flush_ops(cx);
770 }
771
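    /// Applies pending operations in timestamp order, re-queuing any operation
    /// whose dependencies have not been observed yet, and emits events for the
    /// messages and summary that changed as a result.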
772 fn flush_ops(&mut self, cx: &mut ModelContext<Context>) {
773 let mut changed_messages = HashSet::default();
774 let mut summary_changed = false;
775
776 self.pending_ops.sort_unstable_by_key(|op| op.timestamp());
777 for op in mem::take(&mut self.pending_ops) {
778 if !self.can_apply_op(&op, cx) {
779 self.pending_ops.push(op);
780 continue;
781 }
782
783 let timestamp = op.timestamp();
784 match op.clone() {
785 ContextOperation::InsertMessage {
786 anchor, metadata, ..
787 } => {
788 if self.messages_metadata.contains_key(&anchor.id) {
789 // We already applied this operation.
790 } else {
791 changed_messages.insert(anchor.id);
792 self.insert_message(anchor, metadata, cx);
793 }
794 }
795 ContextOperation::UpdateMessage {
796 message_id,
797 metadata: new_metadata,
798 ..
799 } => {
800 let metadata = self.messages_metadata.get_mut(&message_id).unwrap();
801 if new_metadata.timestamp > metadata.timestamp {
802 *metadata = new_metadata;
803 changed_messages.insert(message_id);
804 }
805 }
806 ContextOperation::UpdateSummary {
807 summary: new_summary,
808 ..
809 } => {
810 if self
811 .summary
812 .as_ref()
813 .map_or(true, |summary| new_summary.timestamp > summary.timestamp)
814 {
815 self.summary = Some(new_summary);
816 summary_changed = true;
817 }
818 }
819 ContextOperation::SlashCommandFinished {
820 id,
821 output_range,
822 sections,
823 ..
824 } => {
825 if self.finished_slash_commands.insert(id) {
826 let buffer = self.buffer.read(cx);
827 self.slash_command_output_sections
828 .extend(sections.iter().cloned());
829 self.slash_command_output_sections
830 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
831 cx.emit(ContextEvent::SlashCommandFinished {
832 output_range,
833 sections,
834 expand_result: false,
835 run_commands_in_output: false,
836 });
837 }
838 }
839 ContextOperation::BufferOperation(_) => unreachable!(),
840 }
841
842 self.version.observe(timestamp);
843 self.timestamp.observe(timestamp);
844 self.operations.push(op);
845 }
846
847 if !changed_messages.is_empty() {
848 self.message_roles_updated(changed_messages, cx);
849 cx.emit(ContextEvent::MessagesEdited);
850 cx.notify();
851 }
852
853 if summary_changed {
854 cx.emit(ContextEvent::SummaryChanged);
855 cx.notify();
856 }
857 }
858
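    /// Returns whether every version and anchor this operation depends on has
    /// already been observed locally, meaning the operation is safe to apply.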
859 fn can_apply_op(&self, op: &ContextOperation, cx: &AppContext) -> bool {
860 if !self.version.observed_all(op.version()) {
861 return false;
862 }
863
864 match op {
865 ContextOperation::InsertMessage { anchor, .. } => self
866 .buffer
867 .read(cx)
868 .version
869 .observed(anchor.start.timestamp),
870 ContextOperation::UpdateMessage { message_id, .. } => {
871 self.messages_metadata.contains_key(message_id)
872 }
873 ContextOperation::UpdateSummary { .. } => true,
874 ContextOperation::SlashCommandFinished {
875 output_range,
876 sections,
877 ..
878 } => {
879 let version = &self.buffer.read(cx).version;
880 sections
881 .iter()
882 .map(|section| &section.range)
883 .chain([output_range])
884 .all(|range| {
885 let observed_start = range.start == language::Anchor::MIN
886 || range.start == language::Anchor::MAX
887 || version.observed(range.start.timestamp);
888 let observed_end = range.end == language::Anchor::MIN
889 || range.end == language::Anchor::MAX
890 || version.observed(range.end.timestamp);
891 observed_start && observed_end
892 })
893 }
894 ContextOperation::BufferOperation(_) => {
895 panic!("buffer operations should always be applied")
896 }
897 }
898 }
899
900 fn push_op(&mut self, op: ContextOperation, cx: &mut ModelContext<Self>) {
901 self.operations.push(op.clone());
902 cx.emit(ContextEvent::Operation(op));
903 }
904
905 pub fn buffer(&self) -> &Model<Buffer> {
906 &self.buffer
907 }
908
909 pub fn language_registry(&self) -> Arc<LanguageRegistry> {
910 self.language_registry.clone()
911 }
912
913 pub fn project(&self) -> Option<Model<Project>> {
914 self.project.clone()
915 }
916
917 pub fn prompt_builder(&self) -> Arc<PromptBuilder> {
918 self.prompt_builder.clone()
919 }
920
921 pub fn path(&self) -> Option<&Path> {
922 self.path.as_deref()
923 }
924
925 pub fn summary(&self) -> Option<&ContextSummary> {
926 self.summary.as_ref()
927 }
928
929 pub(crate) fn workflow_step_containing(
930 &self,
931 offset: usize,
932 cx: &AppContext,
933 ) -> Option<&WorkflowStep> {
934 let buffer = self.buffer.read(cx);
935 let index = self
936 .workflow_steps
937 .binary_search_by(|step| {
938 let step_range = step.range.to_offset(&buffer);
939 if offset < step_range.start {
940 Ordering::Greater
941 } else if offset > step_range.end {
942 Ordering::Less
943 } else {
944 Ordering::Equal
945 }
946 })
947 .ok()?;
948 Some(&self.workflow_steps[index])
949 }
950
951 pub fn workflow_step_ranges(&self) -> impl Iterator<Item = Range<language::Anchor>> + '_ {
952 self.workflow_steps.iter().map(|step| step.range.clone())
953 }
954
955 pub(crate) fn workflow_step_for_range(
956 &self,
957 range: &Range<language::Anchor>,
958 cx: &AppContext,
959 ) -> Option<&WorkflowStep> {
960 let buffer = self.buffer.read(cx);
961 let index = self.workflow_step_index_for_range(range, buffer).ok()?;
962 Some(&self.workflow_steps[index])
963 }
964
965 fn workflow_step_index_for_range(
966 &self,
967 tagged_range: &Range<text::Anchor>,
968 buffer: &text::BufferSnapshot,
969 ) -> Result<usize, usize> {
970 self.workflow_steps
971 .binary_search_by(|probe| probe.range.cmp(&tagged_range, buffer))
972 }
973
974 pub fn pending_slash_commands(&self) -> &[PendingSlashCommand] {
975 &self.pending_slash_commands
976 }
977
978 pub fn slash_command_output_sections(&self) -> &[SlashCommandOutputSection<language::Anchor>] {
979 &self.slash_command_output_sections
980 }
981
982 pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> {
983 self.pending_tool_uses_by_id.values().collect()
984 }
985
986 pub fn get_tool_use_by_id(&self, id: &Arc<str>) -> Option<&PendingToolUse> {
987 self.pending_tool_uses_by_id.get(id)
988 }
989
990 fn set_language(&mut self, cx: &mut ModelContext<Self>) {
991 let markdown = self.language_registry.language_for_name("Markdown");
992 cx.spawn(|this, mut cx| async move {
993 let markdown = markdown.await?;
994 this.update(&mut cx, |this, cx| {
995 this.buffer
996 .update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx));
997 })
998 })
999 .detach_and_log_err(cx);
1000 }
1001
1002 fn handle_buffer_event(
1003 &mut self,
1004 _: Model<Buffer>,
1005 event: &language::BufferEvent,
1006 cx: &mut ModelContext<Self>,
1007 ) {
1008 match event {
1009 language::BufferEvent::Operation {
1010 operation,
1011 is_local: true,
1012 } => cx.emit(ContextEvent::Operation(ContextOperation::BufferOperation(
1013 operation.clone(),
1014 ))),
1015 language::BufferEvent::Edited => {
1016 self.count_remaining_tokens(cx);
1017 self.reparse(cx);
1018 // Use `inclusive = true` to invalidate a step when an edit occurs
1019 // at the start/end of a parsed step.
1020 cx.emit(ContextEvent::MessagesEdited);
1021 }
1022 _ => {}
1023 }
1024 }
1025
1026 pub(crate) fn token_count(&self) -> Option<usize> {
1027 self.token_count
1028 }
1029
1030 pub(crate) fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
1031 let request = self.to_completion_request(cx);
1032 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
1033 return;
1034 };
1035 self.pending_token_count = cx.spawn(|this, mut cx| {
1036 async move {
1037 cx.background_executor()
1038 .timer(Duration::from_millis(200))
1039 .await;
1040
1041 let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
1042 this.update(&mut cx, |this, cx| {
1043 this.token_count = Some(token_count);
1044 this.start_cache_warming(&model, cx);
1045 cx.notify()
1046 })
1047 }
1048 .log_err()
1049 });
1050 }
1051
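    /// Selects which user messages should serve as prompt-cache anchors,
    /// preferring the longest ones and reserving one anchor for the inline
    /// assistant, then updates each message's cache metadata accordingly.
    /// Returns whether a newly chosen anchor still needs to be cached.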
1052 pub fn mark_cache_anchors(
1053 &mut self,
1054 cache_configuration: &Option<LanguageModelCacheConfiguration>,
1055 speculative: bool,
1056 cx: &mut ModelContext<Self>,
1057 ) -> bool {
1058 let cache_configuration =
1059 cache_configuration
1060 .as_ref()
1061 .unwrap_or(&LanguageModelCacheConfiguration {
1062 max_cache_anchors: 0,
1063 should_speculate: false,
1064 min_total_token: 0,
1065 });
1066
1067 let messages: Vec<Message> = self.messages(cx).collect();
1068
1069 let mut sorted_messages = messages.clone();
1070 if speculative {
1071 // Avoid caching the last message if this is a speculative cache fetch as
1072 // it's likely to change.
1073 sorted_messages.pop();
1074 }
1075 sorted_messages.retain(|m| m.role == Role::User);
1076 sorted_messages.sort_by(|a, b| b.offset_range.len().cmp(&a.offset_range.len()));
1077
1078 let cache_anchors = if self.token_count.unwrap_or(0) < cache_configuration.min_total_token {
1079 // If we haven't hit the minimum threshold to enable caching, don't cache anything.
1080 0
1081 } else {
1082 // Save 1 anchor for the inline assistant to use.
1083 max(cache_configuration.max_cache_anchors, 1) - 1
1084 };
1085 sorted_messages.truncate(cache_anchors);
1086
1087 let anchors: HashSet<MessageId> = sorted_messages
1088 .into_iter()
1089 .map(|message| message.id)
1090 .collect();
1091
1092 let buffer = self.buffer.read(cx).snapshot();
1093 let invalidated_caches: HashSet<MessageId> = messages
1094 .iter()
1095 .scan(false, |encountered_invalid, message| {
1096 let message_id = message.id;
1097 let is_invalid = self
1098 .messages_metadata
1099 .get(&message_id)
1100 .map_or(true, |metadata| {
1101 !metadata.is_cache_valid(&buffer, &message.offset_range)
1102 || *encountered_invalid
1103 });
1104 *encountered_invalid |= is_invalid;
1105 Some(if is_invalid { Some(message_id) } else { None })
1106 })
1107 .flatten()
1108 .collect();
1109
1110 let last_anchor = messages.iter().rev().find_map(|message| {
1111 if anchors.contains(&message.id) {
1112 Some(message.id)
1113 } else {
1114 None
1115 }
1116 });
1117
1118 let mut new_anchor_needs_caching = false;
1119 let current_version = &buffer.version;
1120 // If we have no anchors, mark all messages as not being cached.
1121 let mut hit_last_anchor = last_anchor.is_none();
1122
1123 for message in messages.iter() {
1124 if hit_last_anchor {
1125 self.update_metadata(message.id, cx, |metadata| metadata.cache = None);
1126 continue;
1127 }
1128
1129 if let Some(last_anchor) = last_anchor {
1130 if message.id == last_anchor {
1131 hit_last_anchor = true;
1132 }
1133 }
1134
1135 new_anchor_needs_caching = new_anchor_needs_caching
1136 || (invalidated_caches.contains(&message.id) && anchors.contains(&message.id));
1137
1138 self.update_metadata(message.id, cx, |metadata| {
1139 let cache_status = if invalidated_caches.contains(&message.id) {
1140 CacheStatus::Pending
1141 } else {
1142 metadata
1143 .cache
1144 .as_ref()
1145 .map_or(CacheStatus::Pending, |cm| cm.status.clone())
1146 };
1147 metadata.cache = Some(MessageCacheMetadata {
1148 is_anchor: anchors.contains(&message.id),
1149 is_final_anchor: hit_last_anchor,
1150 status: cache_status,
1151 cached_at: current_version.clone(),
1152 });
1153 });
1154 }
1155 new_anchor_needs_caching
1156 }
1157
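    /// When the model's cache configuration allows speculation, issues a tiny
    /// throwaway completion request so the provider caches the current prompt
    /// prefix before the user sends their next message.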
1158 fn start_cache_warming(&mut self, model: &Arc<dyn LanguageModel>, cx: &mut ModelContext<Self>) {
1159 let cache_configuration = model.cache_configuration();
1160
1161 if !self.mark_cache_anchors(&cache_configuration, true, cx) {
1162 return;
1163 }
1164 if !self.pending_completions.is_empty() {
1165 return;
1166 }
1167 if let Some(cache_configuration) = cache_configuration {
1168 if !cache_configuration.should_speculate {
1169 return;
1170 }
1171 }
1172
1173 let request = {
1174 let mut req = self.to_completion_request(cx);
1175 // Skip the last message because it's likely to change and
1176 // therefore would be a waste to cache.
1177 req.messages.pop();
1178 req.messages.push(LanguageModelRequestMessage {
1179 role: Role::User,
1180 content: vec!["Respond only with OK, nothing else.".into()],
1181 cache: false,
1182 });
1183 req
1184 };
1185
1186 let model = Arc::clone(model);
1187 self.pending_cache_warming_task = cx.spawn(|this, mut cx| {
1188 async move {
1189 match model.stream_completion(request, &cx).await {
1190 Ok(mut stream) => {
1191 stream.next().await;
1192 log::info!("Cache warming completed successfully");
1193 }
1194 Err(e) => {
1195 log::warn!("Cache warming failed: {}", e);
1196 }
1197 };
1198 this.update(&mut cx, |this, cx| {
1199 this.update_cache_status_for_completion(cx);
1200 })
1201 .ok();
1202 anyhow::Ok(())
1203 }
1204 .log_err()
1205 });
1206 }
1207
1208 pub fn update_cache_status_for_completion(&mut self, cx: &mut ModelContext<Self>) {
1209 let cached_message_ids: Vec<MessageId> = self
1210 .messages_metadata
1211 .iter()
1212 .filter_map(|(message_id, metadata)| {
1213 metadata.cache.as_ref().and_then(|cache| {
1214 if cache.status == CacheStatus::Pending {
1215 Some(*message_id)
1216 } else {
1217 None
1218 }
1219 })
1220 })
1221 .collect();
1222
1223 for message_id in cached_message_ids {
1224 self.update_metadata(message_id, cx, |metadata| {
1225 if let Some(cache) = &mut metadata.cache {
1226 cache.status = CacheStatus::Cached;
1227 }
1228 });
1229 }
1230 cx.notify();
1231 }
1232
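    /// Consumes the buffer edits made since the last parse, coalesces them into
    /// contiguous row ranges, and reparses the slash commands and workflow
    /// steps they touch, emitting update events for anything added or removed.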
1233 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1234 let buffer = self.buffer.read(cx).text_snapshot();
1235 let mut row_ranges = self
1236 .edits_since_last_parse
1237 .consume()
1238 .into_iter()
1239 .map(|edit| {
1240 let start_row = buffer.offset_to_point(edit.new.start).row;
1241 let end_row = buffer.offset_to_point(edit.new.end).row + 1;
1242 start_row..end_row
1243 })
1244 .peekable();
1245
1246 let mut removed_slash_command_ranges = Vec::new();
1247 let mut updated_slash_commands = Vec::new();
1248 let mut removed_steps = Vec::new();
1249 let mut updated_steps = Vec::new();
1250 while let Some(mut row_range) = row_ranges.next() {
1251 while let Some(next_row_range) = row_ranges.peek() {
1252 if row_range.end >= next_row_range.start {
1253 row_range.end = next_row_range.end;
1254 row_ranges.next();
1255 } else {
1256 break;
1257 }
1258 }
1259
1260 let start = buffer.anchor_before(Point::new(row_range.start, 0));
1261 let end = buffer.anchor_after(Point::new(
1262 row_range.end - 1,
1263 buffer.line_len(row_range.end - 1),
1264 ));
1265
1266 self.reparse_slash_commands_in_range(
1267 start..end,
1268 &buffer,
1269 &mut updated_slash_commands,
1270 &mut removed_slash_command_ranges,
1271 cx,
1272 );
1273 self.reparse_workflow_steps_in_range(
1274 start..end,
1275 &buffer,
1276 &mut updated_steps,
1277 &mut removed_steps,
1278 cx,
1279 );
1280 }
1281
1282 if !updated_slash_commands.is_empty() || !removed_slash_command_ranges.is_empty() {
1283 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1284 removed: removed_slash_command_ranges,
1285 updated: updated_slash_commands,
1286 });
1287 }
1288
1289 if !updated_steps.is_empty() || !removed_steps.is_empty() {
1290 cx.emit(ContextEvent::WorkflowStepsUpdated {
1291 removed: removed_steps,
1292 updated: updated_steps,
1293 });
1294 }
1295 }
1296
1297 fn reparse_slash_commands_in_range(
1298 &mut self,
1299 range: Range<text::Anchor>,
1300 buffer: &BufferSnapshot,
1301 updated: &mut Vec<PendingSlashCommand>,
1302 removed: &mut Vec<Range<text::Anchor>>,
1303 cx: &AppContext,
1304 ) {
1305 let old_range = self.pending_command_indices_for_range(range.clone(), cx);
1306
1307 let mut new_commands = Vec::new();
1308 let mut lines = buffer.text_for_range(range).lines();
1309 let mut offset = lines.offset();
1310 while let Some(line) = lines.next() {
1311 if let Some(command_line) = SlashCommandLine::parse(line) {
1312 let name = &line[command_line.name.clone()];
1313 let arguments = command_line
1314 .arguments
1315 .iter()
1316 .filter_map(|argument_range| {
1317 if argument_range.is_empty() {
1318 None
1319 } else {
1320 line.get(argument_range.clone())
1321 }
1322 })
1323 .map(ToOwned::to_owned)
1324 .collect::<SmallVec<_>>();
1325 if let Some(command) = SlashCommandRegistry::global(cx).command(name) {
1326 if !command.requires_argument() || !arguments.is_empty() {
1327 let start_ix = offset + command_line.name.start - 1;
1328 let end_ix = offset
1329 + command_line
1330 .arguments
1331 .last()
1332 .map_or(command_line.name.end, |argument| argument.end);
1333 let source_range =
1334 buffer.anchor_after(start_ix)..buffer.anchor_after(end_ix);
1335 let pending_command = PendingSlashCommand {
1336 name: name.to_string(),
1337 arguments,
1338 source_range,
1339 status: PendingSlashCommandStatus::Idle,
1340 };
1341 updated.push(pending_command.clone());
1342 new_commands.push(pending_command);
1343 }
1344 }
1345 }
1346
1347 offset = lines.offset();
1348 }
1349
1350 let removed_commands = self.pending_slash_commands.splice(old_range, new_commands);
1351 removed.extend(removed_commands.map(|command| command.source_range));
1352 }
1353
1354 fn reparse_workflow_steps_in_range(
1355 &mut self,
1356 range: Range<text::Anchor>,
1357 buffer: &BufferSnapshot,
1358 updated: &mut Vec<Range<text::Anchor>>,
1359 removed: &mut Vec<Range<text::Anchor>>,
1360 cx: &mut ModelContext<Self>,
1361 ) {
1362 // Rebuild the XML tags in the edited range.
1363 let intersecting_tags_range =
1364 self.indices_intersecting_buffer_range(&self.xml_tags, range.clone(), cx);
1365 let new_tags = self.parse_xml_tags_in_range(buffer, range.clone(), cx);
1366 self.xml_tags
1367 .splice(intersecting_tags_range.clone(), new_tags);
1368
1369 // Find which steps intersect the changed range.
1370 let intersecting_steps_range =
1371 self.indices_intersecting_buffer_range(&self.workflow_steps, range.clone(), cx);
1372
1373 // Reparse all tags after the last unchanged step before the change.
1374 let mut tags_start_ix = 0;
1375 if let Some(preceding_unchanged_step) =
1376 self.workflow_steps[..intersecting_steps_range.start].last()
1377 {
1378 tags_start_ix = match self.xml_tags.binary_search_by(|tag| {
1379 tag.range
1380 .start
1381 .cmp(&preceding_unchanged_step.range.end, buffer)
1382 .then(Ordering::Less)
1383 }) {
1384 Ok(ix) | Err(ix) => ix,
1385 };
1386 }
1387
1388 // Rebuild the workflow steps in the range.
1389 let mut new_steps = self.parse_steps(tags_start_ix, range.end, buffer);
1390
1391 if let Some(project) = self.project() {
1392 for step in &mut new_steps {
1393 Self::resolve_workflow_step_internal(step, &project, cx);
1394 }
1395 }
1396
1397 updated.extend(new_steps.iter().map(|step| step.range.clone()));
1398 let removed_steps = self
1399 .workflow_steps
1400 .splice(intersecting_steps_range, new_steps);
1401 removed.extend(
1402 removed_steps
1403 .map(|step| step.range)
1404 .filter(|range| !updated.contains(&range)),
1405 );
1406 }
1407
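    /// Scans assistant messages within `range` for `<tag>` and `</tag>` markers
    /// whose names match an [`XmlTagKind`], returning them in buffer order.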
1408 fn parse_xml_tags_in_range(
1409 &self,
1410 buffer: &BufferSnapshot,
1411 range: Range<text::Anchor>,
1412 cx: &AppContext,
1413 ) -> Vec<XmlTag> {
1414 let mut messages = self.messages(cx).peekable();
1415
1416 let mut tags = Vec::new();
1417 let mut lines = buffer.text_for_range(range).lines();
1418 let mut offset = lines.offset();
1419
1420 while let Some(line) = lines.next() {
1421 while let Some(message) = messages.peek() {
1422 if offset < message.offset_range.end {
1423 break;
1424 } else {
1425 messages.next();
1426 }
1427 }
1428
1429 let is_assistant_message = messages
1430 .peek()
1431 .map_or(false, |message| message.role == Role::Assistant);
1432 if is_assistant_message {
1433 for (start_ix, _) in line.match_indices('<') {
1434 let mut name_start_ix = start_ix + 1;
1435 let closing_bracket_ix = line[start_ix..].find('>').map(|i| start_ix + i);
1436 if let Some(closing_bracket_ix) = closing_bracket_ix {
1437 let end_ix = closing_bracket_ix + 1;
1438 let mut is_open_tag = true;
1439 if line[name_start_ix..closing_bracket_ix].starts_with('/') {
1440 name_start_ix += 1;
1441 is_open_tag = false;
1442 }
1443 let tag_inner = &line[name_start_ix..closing_bracket_ix];
1444 let tag_name_len = tag_inner
1445 .find(|c: char| c.is_whitespace())
1446 .unwrap_or(tag_inner.len());
1447 if let Ok(kind) = XmlTagKind::from_str(&tag_inner[..tag_name_len]) {
1448 tags.push(XmlTag {
1449 range: buffer.anchor_after(offset + start_ix)
1450 ..buffer.anchor_before(offset + end_ix),
1451 is_open_tag,
1452 kind,
1453 });
1454 };
1455 }
1456 }
1457 }
1458
1459 offset = lines.offset();
1460 }
1461 tags
1462 }
1463
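    /// Rebuilds workflow steps from the parsed XML tags, pairing each opening
    /// `<step>` tag with its closing tag and collecting the nested `<edit>`
    /// blocks (path, operation, search, description). A step whose closing tag
    /// has not streamed in yet is extended to the end of the buffer.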
1464 fn parse_steps(
1465 &mut self,
1466 tags_start_ix: usize,
1467 buffer_end: text::Anchor,
1468 buffer: &BufferSnapshot,
1469 ) -> Vec<WorkflowStep> {
1470 let mut new_steps = Vec::new();
1471 let mut pending_step = None;
1472 let mut edit_step_depth = 0;
1473 let mut tags = self.xml_tags[tags_start_ix..].iter().peekable();
1474 'tags: while let Some(tag) = tags.next() {
1475 if tag.range.start.cmp(&buffer_end, buffer).is_gt() && edit_step_depth == 0 {
1476 break;
1477 }
1478
1479 if tag.kind == XmlTagKind::Step && tag.is_open_tag {
1480 edit_step_depth += 1;
1481 let edit_start = tag.range.start;
1482 let mut edits = Vec::new();
1483 let mut step = WorkflowStep {
1484 range: edit_start..edit_start,
1485 leading_tags_end: tag.range.end,
1486 trailing_tag_start: None,
1487 edits: Default::default(),
1488 resolution: None,
1489 resolution_task: None,
1490 };
1491
1492 while let Some(tag) = tags.next() {
1493 step.trailing_tag_start.get_or_insert(tag.range.start);
1494
1495 if tag.kind == XmlTagKind::Step && !tag.is_open_tag {
1496 // step.trailing_tag_start = Some(tag.range.start);
1497 edit_step_depth -= 1;
1498 if edit_step_depth == 0 {
1499 step.range.end = tag.range.end;
1500 step.edits = edits.into();
1501 new_steps.push(step);
1502 continue 'tags;
1503 }
1504 }
1505
1506 if tag.kind == XmlTagKind::Edit && tag.is_open_tag {
1507 let mut path = None;
1508 let mut search = None;
1509 let mut operation = None;
1510 let mut description = None;
1511
1512 while let Some(tag) = tags.next() {
1513 if tag.kind == XmlTagKind::Edit && !tag.is_open_tag {
1514 edits.push(WorkflowStepEdit::new(
1515 path,
1516 operation,
1517 search,
1518 description,
1519 ));
1520 break;
1521 }
1522
1523 if tag.is_open_tag
1524 && [
1525 XmlTagKind::Path,
1526 XmlTagKind::Search,
1527 XmlTagKind::Operation,
1528 XmlTagKind::Description,
1529 ]
1530 .contains(&tag.kind)
1531 {
1532 let kind = tag.kind;
1533 let content_start = tag.range.end;
1534 if let Some(tag) = tags.peek() {
1535 if tag.kind == kind && !tag.is_open_tag {
1536 let tag = tags.next().unwrap();
1537 let content_end = tag.range.start;
1538 let mut content = buffer
1539 .text_for_range(content_start..content_end)
1540 .collect::<String>();
1541 content.truncate(content.trim_end().len());
1542 match kind {
1543 XmlTagKind::Path => path = Some(content),
1544 XmlTagKind::Operation => operation = Some(content),
1545 XmlTagKind::Search => {
1546 search = Some(content).filter(|s| !s.is_empty())
1547 }
1548 XmlTagKind::Description => {
1549 description =
1550 Some(content).filter(|s| !s.is_empty())
1551 }
1552 _ => {}
1553 }
1554 }
1555 }
1556 }
1557 }
1558 }
1559 }
1560
1561 pending_step = Some(step);
1562 }
1563 }
1564
1565 if let Some(mut pending_step) = pending_step {
1566 pending_step.range.end = text::Anchor::MAX;
1567 new_steps.push(pending_step);
1568 }
1569
1570 new_steps
1571 }
1572
1573 pub fn resolve_workflow_step(
1574 &mut self,
1575 tagged_range: Range<text::Anchor>,
1576 cx: &mut ModelContext<Self>,
1577 ) -> Option<()> {
1578 let index = self
1579 .workflow_step_index_for_range(&tagged_range, self.buffer.read(cx))
1580 .ok()?;
1581 let step = &mut self.workflow_steps[index];
1582 let project = self.project.as_ref()?;
1583 step.resolution.take();
1584 Self::resolve_workflow_step_internal(step, project, cx);
1585 None
1586 }
1587
1588 fn resolve_workflow_step_internal(
1589 step: &mut WorkflowStep,
1590 project: &Model<Project>,
1591 cx: &mut ModelContext<'_, Context>,
1592 ) {
1593 step.resolution_task = Some(cx.spawn({
1594 let range = step.range.clone();
1595 let edits = step.edits.clone();
1596 let project = project.clone();
1597 |this, mut cx| async move {
1598 let suggestion_groups =
1599 Self::compute_step_resolution(project, edits, &mut cx).await;
1600
1601 this.update(&mut cx, |this, cx| {
1602 let buffer = this.buffer.read(cx).text_snapshot();
1603 let ix = this.workflow_step_index_for_range(&range, &buffer).ok();
1604 if let Some(ix) = ix {
1605 let step = &mut this.workflow_steps[ix];
1606
1607 let resolution = suggestion_groups.map(|suggestion_groups| {
1608 let mut title = String::new();
1609 for mut chunk in buffer.text_for_range(
1610 step.leading_tags_end
1611 ..step.trailing_tag_start.unwrap_or(step.range.end),
1612 ) {
1613 if title.is_empty() {
1614 chunk = chunk.trim_start();
1615 }
1616 if let Some((prefix, _)) = chunk.split_once('\n') {
1617 title.push_str(prefix);
1618 break;
1619 } else {
1620 title.push_str(chunk);
1621 }
1622 }
1623
1624 WorkflowStepResolution {
1625 title,
1626 suggestion_groups,
1627 }
1628 });
1629
1630 step.resolution = Some(Arc::new(resolution));
1631 cx.emit(ContextEvent::WorkflowStepsUpdated {
1632 removed: vec![],
1633 updated: vec![range],
1634 })
1635 }
1636 })
1637 .ok();
1638 }
1639 }));
1640 }
1641
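    /// Resolves each of a step's edits against the project, then groups the
    /// resulting suggestions per buffer, merging suggestions whose surrounding
    /// context ranges overlap.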
1642 async fn compute_step_resolution(
1643 project: Model<Project>,
1644 edits: Arc<[Result<WorkflowStepEdit>]>,
1645 cx: &mut AsyncAppContext,
1646 ) -> Result<HashMap<Model<Buffer>, Vec<WorkflowSuggestionGroup>>> {
1647 let mut suggestion_tasks = Vec::new();
1648 for edit in edits.iter() {
1649 let edit = edit.as_ref().map_err(|e| anyhow!("{e}"))?;
1650 suggestion_tasks.push(edit.resolve(project.clone(), cx.clone()));
1651 }
1652
1653 // Expand the context ranges of each suggestion and group suggestions with overlapping context ranges.
1654 let suggestions = future::try_join_all(suggestion_tasks).await?;
1655
1656 let mut suggestions_by_buffer = HashMap::default();
1657 for (buffer, suggestion) in suggestions {
1658 suggestions_by_buffer
1659 .entry(buffer)
1660 .or_insert_with(Vec::new)
1661 .push(suggestion);
1662 }
1663
1664 let mut suggestion_groups_by_buffer = HashMap::default();
1665 for (buffer, mut suggestions) in suggestions_by_buffer {
1666 let mut suggestion_groups = Vec::<WorkflowSuggestionGroup>::new();
1667 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
1668 // Sort suggestions by their range so that earlier, larger ranges come first
1669 suggestions.sort_by(|a, b| a.range().cmp(&b.range(), &snapshot));
1670
1671 // Merge overlapping suggestions
1672 suggestions.dedup_by(|a, b| b.try_merge(a, &snapshot));
1673
1674 // Create context ranges for each suggestion
1675 for suggestion in suggestions {
1676 let context_range = {
1677 let suggestion_point_range = suggestion.range().to_point(&snapshot);
1678 let start_row = suggestion_point_range.start.row.saturating_sub(5);
1679 let end_row =
1680 cmp::min(suggestion_point_range.end.row + 5, snapshot.max_point().row);
1681 let start = snapshot.anchor_before(Point::new(start_row, 0));
1682 let end =
1683 snapshot.anchor_after(Point::new(end_row, snapshot.line_len(end_row)));
1684 start..end
1685 };
1686
1687 if let Some(last_group) = suggestion_groups.last_mut() {
1688 if last_group
1689 .context_range
1690 .end
1691 .cmp(&context_range.start, &snapshot)
1692 .is_ge()
1693 {
1694 // Merge with the previous group if context ranges overlap
1695 last_group.context_range.end = context_range.end;
1696 last_group.suggestions.push(suggestion);
1697 } else {
1698 // Create a new group
1699 suggestion_groups.push(WorkflowSuggestionGroup {
1700 context_range,
1701 suggestions: vec![suggestion],
1702 });
1703 }
1704 } else {
1705 // Create the first group
1706 suggestion_groups.push(WorkflowSuggestionGroup {
1707 context_range,
1708 suggestions: vec![suggestion],
1709 });
1710 }
1711 }
1712
1713 suggestion_groups_by_buffer.insert(buffer, suggestion_groups);
1714 }
1715
1716 Ok(suggestion_groups_by_buffer)
1717 }
1718
1719 pub fn pending_command_for_position(
1720 &mut self,
1721 position: language::Anchor,
1722 cx: &mut ModelContext<Self>,
1723 ) -> Option<&mut PendingSlashCommand> {
1724 let buffer = self.buffer.read(cx);
1725 match self
1726 .pending_slash_commands
1727 .binary_search_by(|probe| probe.source_range.end.cmp(&position, buffer))
1728 {
1729 Ok(ix) => Some(&mut self.pending_slash_commands[ix]),
1730 Err(ix) => {
1731 let cmd = self.pending_slash_commands.get_mut(ix)?;
1732 if position.cmp(&cmd.source_range.start, buffer).is_ge()
1733 && position.cmp(&cmd.source_range.end, buffer).is_le()
1734 {
1735 Some(cmd)
1736 } else {
1737 None
1738 }
1739 }
1740 }
1741 }
1742
1743 pub fn pending_commands_for_range(
1744 &self,
1745 range: Range<language::Anchor>,
1746 cx: &AppContext,
1747 ) -> &[PendingSlashCommand] {
1748 let range = self.pending_command_indices_for_range(range, cx);
1749 &self.pending_slash_commands[range]
1750 }
1751
1752 fn pending_command_indices_for_range(
1753 &self,
1754 range: Range<language::Anchor>,
1755 cx: &AppContext,
1756 ) -> Range<usize> {
1757 self.indices_intersecting_buffer_range(&self.pending_slash_commands, range, cx)
1758 }
1759
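    /// Binary searches `all_annotations` (assumed to be ordered by position in
    /// the buffer) for the index range whose annotations intersect `range`.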
1760 fn indices_intersecting_buffer_range<T: ContextAnnotation>(
1761 &self,
1762 all_annotations: &[T],
1763 range: Range<language::Anchor>,
1764 cx: &AppContext,
1765 ) -> Range<usize> {
1766 let buffer = self.buffer.read(cx);
1767 let start_ix = match all_annotations
1768 .binary_search_by(|probe| probe.range().end.cmp(&range.start, &buffer))
1769 {
1770 Ok(ix) | Err(ix) => ix,
1771 };
1772 let end_ix = match all_annotations
1773 .binary_search_by(|probe| probe.range().start.cmp(&range.end, &buffer))
1774 {
1775 Ok(ix) => ix + 1,
1776 Err(ix) => ix,
1777 };
1778 start_ix..end_ix
1779 }
1780
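    /// Replaces the slash command's source range with its output once the
    /// output task completes, recording the resulting sections and emitting a
    /// finished event, or marks the pending command as errored if it fails.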
1781 pub fn insert_command_output(
1782 &mut self,
1783 command_range: Range<language::Anchor>,
1784 output: Task<Result<SlashCommandOutput>>,
1785 ensure_trailing_newline: bool,
1786 expand_result: bool,
1787 cx: &mut ModelContext<Self>,
1788 ) {
1789 self.reparse(cx);
1790
1791 let insert_output_task = cx.spawn(|this, mut cx| {
1792 let command_range = command_range.clone();
1793 async move {
1794 let output = output.await;
1795 this.update(&mut cx, |this, cx| match output {
1796 Ok(mut output) => {
1797 // Ensure section ranges are valid.
1798 for section in &mut output.sections {
1799 section.range.start = section.range.start.min(output.text.len());
1800 section.range.end = section.range.end.min(output.text.len());
1801 while !output.text.is_char_boundary(section.range.start) {
1802 section.range.start -= 1;
1803 }
1804 while !output.text.is_char_boundary(section.range.end) {
1805 section.range.end += 1;
1806 }
1807 }
1808
1809 // Ensure there is a newline after the last section.
1810 if ensure_trailing_newline {
1811 let has_newline_after_last_section =
1812 output.sections.last().map_or(false, |last_section| {
1813 output.text[last_section.range.end..].ends_with('\n')
1814 });
1815 if !has_newline_after_last_section {
1816 output.text.push('\n');
1817 }
1818 }
1819
1820 let version = this.version.clone();
1821 let command_id = SlashCommandId(this.next_timestamp());
1822 let (operation, event) = this.buffer.update(cx, |buffer, cx| {
1823 let start = command_range.start.to_offset(buffer);
1824 let old_end = command_range.end.to_offset(buffer);
1825 let new_end = start + output.text.len();
1826 buffer.edit([(start..old_end, output.text)], None, cx);
1827
1828 let mut sections = output
1829 .sections
1830 .into_iter()
1831 .map(|section| SlashCommandOutputSection {
1832 range: buffer.anchor_after(start + section.range.start)
1833 ..buffer.anchor_before(start + section.range.end),
1834 icon: section.icon,
1835 label: section.label,
1836 metadata: section.metadata,
1837 })
1838 .collect::<Vec<_>>();
1839 sections.sort_by(|a, b| a.range.cmp(&b.range, buffer));
1840
1841 this.slash_command_output_sections
1842 .extend(sections.iter().cloned());
1843 this.slash_command_output_sections
1844 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
1845
1846 let output_range =
1847 buffer.anchor_after(start)..buffer.anchor_before(new_end);
1848 this.finished_slash_commands.insert(command_id);
1849
1850 (
1851 ContextOperation::SlashCommandFinished {
1852 id: command_id,
1853 output_range: output_range.clone(),
1854 sections: sections.clone(),
1855 version,
1856 },
1857 ContextEvent::SlashCommandFinished {
1858 output_range,
1859 sections,
1860 run_commands_in_output: output.run_commands_in_text,
1861 expand_result,
1862 },
1863 )
1864 });
1865
1866 this.push_op(operation, cx);
1867 cx.emit(event);
1868 }
1869 Err(error) => {
1870 if let Some(pending_command) =
1871 this.pending_command_for_position(command_range.start, cx)
1872 {
1873 pending_command.status =
1874 PendingSlashCommandStatus::Error(error.to_string());
1875 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1876 removed: vec![pending_command.source_range.clone()],
1877 updated: vec![pending_command.clone()],
1878 });
1879 }
1880 }
1881 })
1882 .ok();
1883 }
1884 });
1885
1886 if let Some(pending_command) = self.pending_command_for_position(command_range.start, cx) {
1887 pending_command.status = PendingSlashCommandStatus::Running {
1888 _task: insert_output_task.shared(),
1889 };
1890 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1891 removed: vec![pending_command.source_range.clone()],
1892 updated: vec![pending_command.clone()],
1893 });
1894 }
1895 }
1896
1897 pub fn insert_tool_output(
1898 &mut self,
1899 tool_use_id: Arc<str>,
1900 output: Task<Result<String>>,
1901 cx: &mut ModelContext<Self>,
1902 ) {
1903 let insert_output_task = cx.spawn(|this, mut cx| {
1904 let tool_use_id = tool_use_id.clone();
1905 async move {
1906 let output = output.await;
1907 this.update(&mut cx, |this, cx| match output {
1908 Ok(mut output) => {
1909 const NEWLINE: char = '\n';
1910
1911 if !output.ends_with(NEWLINE) {
1912 output.push(NEWLINE);
1913 }
1914
1915 let anchor_range = this.buffer.update(cx, |buffer, cx| {
1916 let insert_start = buffer.len().to_offset(buffer);
1917 let insert_end = insert_start;
1918
1919 let start = insert_start;
1920 let end = start + output.len() - NEWLINE.len_utf8();
1921
1922 buffer.edit([(insert_start..insert_end, output)], None, cx);
1923
1924 let output_range = buffer.anchor_after(start)..buffer.anchor_after(end);
1925
1926 output_range
1927 });
1928
1929 this.insert_content(
1930 Content::ToolResult {
1931 range: anchor_range.clone(),
1932 tool_use_id: tool_use_id.clone(),
1933 },
1934 cx,
1935 );
1936
1937 cx.emit(ContextEvent::ToolFinished {
1938 tool_use_id,
1939 output_range: anchor_range,
1940 });
1941 }
1942 Err(err) => {
1943 if let Some(tool_use) = this.pending_tool_uses_by_id.get_mut(&tool_use_id) {
1944 tool_use.status = PendingToolUseStatus::Error(err.to_string());
1945 }
1946 }
1947 })
1948 .ok();
1949 }
1950 });
1951
1952 if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) {
1953 tool_use.status = PendingToolUseStatus::Running {
1954 _task: insert_output_task.shared(),
1955 };
1956 }
1957 }
1958
1959 pub fn completion_provider_changed(&mut self, cx: &mut ModelContext<Self>) {
1960 self.count_remaining_tokens(cx);
1961 }
1962
1963 fn get_last_valid_message_id(&self, cx: &ModelContext<Self>) -> Option<MessageId> {
1964 self.message_anchors.iter().rev().find_map(|message| {
1965 message
1966 .start
1967 .is_valid(self.buffer.read(cx))
1968 .then_some(message.id)
1969 })
1970 }
1971
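    /// Starts a new assistant turn: marks cache anchors, builds a completion
    /// request from the current messages (including registered tools when the
    /// tool-use feature flag is enabled), inserts a pending assistant message
    /// followed by an empty user message, and spawns a task that streams the
    /// model's response into the buffer. Returns the anchor of the new user
    /// message, or `None` if no authenticated provider and model are configured
    /// or there is no valid message to respond to.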
1972 pub fn assist(&mut self, cx: &mut ModelContext<Self>) -> Option<MessageAnchor> {
1973 let model_registry = LanguageModelRegistry::read_global(cx);
1974 let provider = model_registry.active_provider()?;
1975 let model = model_registry.active_model()?;
1976 let last_message_id = self.get_last_valid_message_id(cx)?;
1977
1978 if !provider.is_authenticated(cx) {
1979 log::info!("completion provider has no credentials");
1980 return None;
1981 }
1982 // Compute which messages to cache, including the last one.
1983 self.mark_cache_anchors(&model.cache_configuration(), false, cx);
1984
1985 let mut request = self.to_completion_request(cx);
1986
1987 if cx.has_flag::<ToolUseFeatureFlag>() {
1988 let tool_registry = ToolRegistry::global(cx);
1989 request.tools = tool_registry
1990 .tools()
1991 .into_iter()
1992 .map(|tool| LanguageModelRequestTool {
1993 name: tool.name(),
1994 description: tool.description(),
1995 input_schema: tool.input_schema(),
1996 })
1997 .collect();
1998 }
1999
2000 let assistant_message = self
2001 .insert_message_after(last_message_id, Role::Assistant, MessageStatus::Pending, cx)
2002 .unwrap();
2003
2004 // Queue up the user's next reply.
2005 let user_message = self
2006 .insert_message_after(assistant_message.id, Role::User, MessageStatus::Done, cx)
2007 .unwrap();
2008
2009 let pending_completion_id = post_inc(&mut self.completion_count);
2010
2011 let task = cx.spawn({
2012 |this, mut cx| async move {
2013 let stream = model.stream_completion(request, &cx);
2014 let assistant_message_id = assistant_message.id;
2015 let mut response_latency = None;
2016 let stream_completion = async {
2017 let request_start = Instant::now();
2018 let mut events = stream.await?;
2019 let mut stop_reason = StopReason::EndTurn;
2020
2021 while let Some(event) = events.next().await {
2022 if response_latency.is_none() {
2023 response_latency = Some(request_start.elapsed());
2024 }
2025 let event = event?;
2026
2027 this.update(&mut cx, |this, cx| {
2028 let message_ix = this
2029 .message_anchors
2030 .iter()
2031 .position(|message| message.id == assistant_message_id)?;
2032 this.buffer.update(cx, |buffer, cx| {
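                                // Stream into the end of the assistant message: the offset
                                // just before the next valid message anchor, or the end of
                                // the buffer if this is the last message.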
2033 let message_old_end_offset = this.message_anchors[message_ix + 1..]
2034 .iter()
2035 .find(|message| message.start.is_valid(buffer))
2036 .map_or(buffer.len(), |message| {
2037 message.start.to_offset(buffer).saturating_sub(1)
2038 });
2039
2040 match event {
2041 LanguageModelCompletionEvent::Stop(reason) => {
2042 stop_reason = reason;
2043 }
2044 LanguageModelCompletionEvent::Text(chunk) => {
2045 buffer.edit(
2046 [(
2047 message_old_end_offset..message_old_end_offset,
2048 chunk,
2049 )],
2050 None,
2051 cx,
2052 );
2053 }
2054 LanguageModelCompletionEvent::ToolUse(tool_use) => {
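                                        // Render the tool use as pretty-printed JSON on its own
                                        // lines and record the range (excluding the surrounding
                                        // newlines) as a pending tool use.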
2055 const NEWLINE: char = '\n';
2056
2057 let mut text = String::new();
2058 text.push(NEWLINE);
2059 text.push_str(
2060 &serde_json::to_string_pretty(&tool_use)
2061 .expect("failed to serialize tool use to JSON"),
2062 );
2063 text.push(NEWLINE);
2064 let text_len = text.len();
2065
2066 buffer.edit(
2067 [(
2068 message_old_end_offset..message_old_end_offset,
2069 text,
2070 )],
2071 None,
2072 cx,
2073 );
2074
2075 let start_ix = message_old_end_offset + NEWLINE.len_utf8();
2076 let end_ix =
2077 message_old_end_offset + text_len - NEWLINE.len_utf8();
2078 let source_range = buffer.anchor_after(start_ix)
2079 ..buffer.anchor_after(end_ix);
2080
2081 let tool_use_id: Arc<str> = tool_use.id.into();
2082 this.pending_tool_uses_by_id.insert(
2083 tool_use_id.clone(),
2084 PendingToolUse {
2085 id: tool_use_id,
2086 name: tool_use.name,
2087 input: tool_use.input,
2088 status: PendingToolUseStatus::Idle,
2089 source_range,
2090 },
2091 );
2092 }
2093 }
2094 });
2095
2096 cx.emit(ContextEvent::StreamedCompletion);
2097
2098 Some(())
2099 })?;
2100 smol::future::yield_now().await;
2101 }
2102 this.update(&mut cx, |this, cx| {
2103 this.pending_completions
2104 .retain(|completion| completion.id != pending_completion_id);
2105 this.summarize(false, cx);
2106 this.update_cache_status_for_completion(cx);
2107 })?;
2108
2109 anyhow::Ok(stop_reason)
2110 };
2111
2112 let result = stream_completion.await;
2113
2114 this.update(&mut cx, |this, cx| {
2115 let error_message = result
2116 .as_ref()
2117 .err()
2118 .map(|error| error.to_string().trim().to_string());
2119
2120 if let Some(error_message) = error_message.as_ref() {
2121 cx.emit(ContextEvent::ShowAssistError(SharedString::from(
2122 error_message.clone(),
2123 )));
2124 }
2125
2126 this.update_metadata(assistant_message_id, cx, |metadata| {
2127 if let Some(error_message) = error_message.as_ref() {
2128 metadata.status =
2129 MessageStatus::Error(SharedString::from(error_message.clone()));
2130 } else {
2131 metadata.status = MessageStatus::Done;
2132 }
2133 });
2134
2135 if let Some(telemetry) = this.telemetry.as_ref() {
2136 let language_name = this
2137 .buffer
2138 .read(cx)
2139 .language()
2140 .map(|language| language.name());
2141 telemetry.report_assistant_event(AssistantEvent {
2142 conversation_id: Some(this.id.0.clone()),
2143 kind: AssistantKind::Panel,
2144 phase: AssistantPhase::Response,
2145 model: model.telemetry_id(),
2146 model_provider: model.provider_id().to_string(),
2147 response_latency,
2148 error_message,
2149 language_name,
2150 });
2151 }
2152
2153 if let Ok(stop_reason) = result {
2154 match stop_reason {
2155 StopReason::ToolUse => {
2156 cx.emit(ContextEvent::UsePendingTools);
2157 }
2158 StopReason::EndTurn => {}
2159 StopReason::MaxTokens => {}
2160 }
2161 }
2162 })
2163 .ok();
2164 }
2165 });
2166
2167 self.pending_completions.push(PendingCompletion {
2168 id: pending_completion_id,
2169 assistant_message_id: assistant_message.id,
2170 _task: task,
2171 });
2172
2173 Some(user_message)
2174 }
2175
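    /// Builds a `LanguageModelRequest` from the context, emitting one request
    /// message per completed (`MessageStatus::Done`) message and interleaving
    /// its text with any image, tool-use, and tool-result content that falls
    /// within the message's range.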
2176 pub fn to_completion_request(&self, cx: &AppContext) -> LanguageModelRequest {
2177 let buffer = self.buffer.read(cx);
2178
2179 let mut contents = self.contents(cx).peekable();
2180
2181 fn collect_text_content(buffer: &Buffer, range: Range<usize>) -> Option<String> {
2182 let text: String = buffer.text_for_range(range.clone()).collect();
2183 if text.trim().is_empty() {
2184 None
2185 } else {
2186 Some(text)
2187 }
2188 }
2189
2190 let mut completion_request = LanguageModelRequest {
2191 messages: Vec::new(),
2192 tools: Vec::new(),
2193 stop: Vec::new(),
2194 temperature: None,
2195 };
2196 for message in self.messages(cx) {
2197 if message.status != MessageStatus::Done {
2198 continue;
2199 }
2200
2201 let mut offset = message.offset_range.start;
2202 let mut request_message = LanguageModelRequestMessage {
2203 role: message.role,
2204 content: Vec::new(),
2205 cache: message
2206 .cache
2207 .as_ref()
2208 .map_or(false, |cache| cache.is_anchor),
2209 };
2210
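            // Interleave any content regions (images, tool uses, tool results) that
            // end before this message does with the surrounding text.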
2211 while let Some(content) = contents.peek() {
2212 if content
2213 .range()
2214 .end
2215 .cmp(&message.anchor_range.end, buffer)
2216 .is_lt()
2217 {
2218 let content = contents.next().unwrap();
2219 let range = content.range().to_offset(buffer);
2220 request_message.content.extend(
2221 collect_text_content(buffer, offset..range.start).map(MessageContent::Text),
2222 );
2223
2224 match content {
2225 Content::Image { image, .. } => {
2226 if let Some(image) = image.clone().now_or_never().flatten() {
2227 request_message
2228 .content
2229 .push(language_model::MessageContent::Image(image));
2230 }
2231 }
2232 Content::ToolUse { tool_use, .. } => {
2233 request_message
2234 .content
2235 .push(language_model::MessageContent::ToolUse(tool_use.clone()));
2236 }
2237 Content::ToolResult { tool_use_id, .. } => {
2238 request_message.content.push(
2239 language_model::MessageContent::ToolResult(
2240 LanguageModelToolResult {
2241 tool_use_id: tool_use_id.to_string(),
2242 is_error: false,
2243 content: collect_text_content(buffer, range.clone())
2244 .unwrap_or_default(),
2245 },
2246 ),
2247 );
2248 }
2249 }
2250
2251 offset = range.end;
2252 } else {
2253 break;
2254 }
2255 }
2256
2257 request_message.content.extend(
2258 collect_text_content(buffer, offset..message.offset_range.end)
2259 .map(MessageContent::Text),
2260 );
2261
2262 completion_request.messages.push(request_message);
2263 }
2264
2265 completion_request
2266 }
2267
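    /// Cancels the most recent pending completion, if any, marking its assistant
    /// message as canceled. Returns whether a completion was canceled.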
2268 pub fn cancel_last_assist(&mut self, cx: &mut ModelContext<Self>) -> bool {
2269 if let Some(pending_completion) = self.pending_completions.pop() {
2270 self.update_metadata(pending_completion.assistant_message_id, cx, |metadata| {
2271 if metadata.status == MessageStatus::Pending {
2272 metadata.status = MessageStatus::Canceled;
2273 }
2274 });
2275 true
2276 } else {
2277 false
2278 }
2279 }
2280
2281 pub fn cycle_message_roles(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
2282 for id in &ids {
2283 if let Some(metadata) = self.messages_metadata.get(id) {
2284 let role = metadata.role.cycle();
2285 self.update_metadata(*id, cx, |metadata| metadata.role = role);
2286 }
2287 }
2288
2289 self.message_roles_updated(ids, cx);
2290 }
2291
2292 fn message_roles_updated(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
2293 let mut ranges = Vec::new();
2294 for message in self.messages(cx) {
2295 if ids.contains(&message.id) {
2296 ranges.push(message.anchor_range.clone());
2297 }
2298 }
2299
2300 let buffer = self.buffer.read(cx).text_snapshot();
2301 let mut updated = Vec::new();
2302 let mut removed = Vec::new();
2303 for range in ranges {
2304 self.reparse_workflow_steps_in_range(range, &buffer, &mut updated, &mut removed, cx);
2305 }
2306
2307 if !updated.is_empty() || !removed.is_empty() {
2308 cx.emit(ContextEvent::WorkflowStepsUpdated { removed, updated })
2309 }
2310 }
2311
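    /// Applies `f` to the metadata of the given message, bumps its timestamp,
    /// and broadcasts the change as an `UpdateMessage` operation.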
2312 pub fn update_metadata(
2313 &mut self,
2314 id: MessageId,
2315 cx: &mut ModelContext<Self>,
2316 f: impl FnOnce(&mut MessageMetadata),
2317 ) {
2318 let version = self.version.clone();
2319 let timestamp = self.next_timestamp();
2320 if let Some(metadata) = self.messages_metadata.get_mut(&id) {
2321 f(metadata);
2322 metadata.timestamp = timestamp;
2323 let operation = ContextOperation::UpdateMessage {
2324 message_id: id,
2325 metadata: metadata.clone(),
2326 version,
2327 };
2328 self.push_op(operation, cx);
2329 cx.emit(ContextEvent::MessagesEdited);
2330 cx.notify();
2331 }
2332 }
2333
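    /// Inserts a new empty message with the given role and status on a fresh
    /// line after the message identified by `message_id`. Returns `None` if
    /// `message_id` does not refer to a known message.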
2334 pub fn insert_message_after(
2335 &mut self,
2336 message_id: MessageId,
2337 role: Role,
2338 status: MessageStatus,
2339 cx: &mut ModelContext<Self>,
2340 ) -> Option<MessageAnchor> {
2341 if let Some(prev_message_ix) = self
2342 .message_anchors
2343 .iter()
2344 .position(|message| message.id == message_id)
2345 {
2346 // Find the next valid message after the one we were given.
2347 let mut next_message_ix = prev_message_ix + 1;
2348 while let Some(next_message) = self.message_anchors.get(next_message_ix) {
2349 if next_message.start.is_valid(self.buffer.read(cx)) {
2350 break;
2351 }
2352 next_message_ix += 1;
2353 }
2354
2355 let start = self.buffer.update(cx, |buffer, cx| {
2356 let offset = self
2357 .message_anchors
2358 .get(next_message_ix)
2359 .map_or(buffer.len(), |message| {
2360 buffer.clip_offset(message.start.to_offset(buffer) - 1, Bias::Left)
2361 });
2362 buffer.edit([(offset..offset, "\n")], None, cx);
2363 buffer.anchor_before(offset + 1)
2364 });
2365
2366 let version = self.version.clone();
2367 let anchor = MessageAnchor {
2368 id: MessageId(self.next_timestamp()),
2369 start,
2370 };
2371 let metadata = MessageMetadata {
2372 role,
2373 status,
2374 timestamp: anchor.id.0,
2375 cache: None,
2376 };
2377 self.insert_message(anchor.clone(), metadata.clone(), cx);
2378 self.push_op(
2379 ContextOperation::InsertMessage {
2380 anchor: anchor.clone(),
2381 metadata,
2382 version,
2383 },
2384 cx,
2385 );
2386 Some(anchor)
2387 } else {
2388 None
2389 }
2390 }
2391
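    /// Inserts a content region (for example an image, tool use, or tool result),
    /// keeping `self.contents` sorted by buffer position. A region already
    /// occupying the same position is replaced.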
2392 pub fn insert_content(&mut self, content: Content, cx: &mut ModelContext<Self>) {
2393 let buffer = self.buffer.read(cx);
2394 let insertion_ix = match self
2395 .contents
2396 .binary_search_by(|probe| probe.cmp(&content, buffer))
2397 {
2398 Ok(ix) => {
2399 self.contents.remove(ix);
2400 ix
2401 }
2402 Err(ix) => ix,
2403 };
2404 self.contents.insert(insertion_ix, content);
2405 cx.emit(ContextEvent::MessagesEdited);
2406 }
2407
2408 pub fn contents<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Content> {
2409 let buffer = self.buffer.read(cx);
2410 self.contents
2411 .iter()
2412 .filter(|content| {
2413 let range = content.range();
2414 range.start.is_valid(buffer) && range.end.is_valid(buffer)
2415 })
2416 .cloned()
2417 }
2418
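    /// Splits the message containing `range` so that the selected portion and
    /// the remainder become their own messages. Returns the anchors of the newly
    /// created selection and suffix messages (the selection is `None` when the
    /// range is empty or begins at the message start); both are `None` when the
    /// range spans multiple messages.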
2419 pub fn split_message(
2420 &mut self,
2421 range: Range<usize>,
2422 cx: &mut ModelContext<Self>,
2423 ) -> (Option<MessageAnchor>, Option<MessageAnchor>) {
2424 let start_message = self.message_for_offset(range.start, cx);
2425 let end_message = self.message_for_offset(range.end, cx);
2426 if let Some((start_message, end_message)) = start_message.zip(end_message) {
2427 // Prevent splitting when range spans multiple messages.
2428 if start_message.id != end_message.id {
2429 return (None, None);
2430 }
2431
2432 let message = start_message;
2433 let role = message.role;
2434 let mut edited_buffer = false;
2435
2436 let mut suffix_start = None;
2437
2438 // TODO: why did this start panicking?
2439 if range.start > message.offset_range.start
2440 && range.end < message.offset_range.end.saturating_sub(1)
2441 {
2442 if self.buffer.read(cx).chars_at(range.end).next() == Some('\n') {
2443 suffix_start = Some(range.end + 1);
2444 } else if self.buffer.read(cx).reversed_chars_at(range.end).next() == Some('\n') {
2445 suffix_start = Some(range.end);
2446 }
2447 }
2448
2449 let version = self.version.clone();
2450 let suffix = if let Some(suffix_start) = suffix_start {
2451 MessageAnchor {
2452 id: MessageId(self.next_timestamp()),
2453 start: self.buffer.read(cx).anchor_before(suffix_start),
2454 }
2455 } else {
2456 self.buffer.update(cx, |buffer, cx| {
2457 buffer.edit([(range.end..range.end, "\n")], None, cx);
2458 });
2459 edited_buffer = true;
2460 MessageAnchor {
2461 id: MessageId(self.next_timestamp()),
2462 start: self.buffer.read(cx).anchor_before(range.end + 1),
2463 }
2464 };
2465
2466 let suffix_metadata = MessageMetadata {
2467 role,
2468 status: MessageStatus::Done,
2469 timestamp: suffix.id.0,
2470 cache: None,
2471 };
2472 self.insert_message(suffix.clone(), suffix_metadata.clone(), cx);
2473 self.push_op(
2474 ContextOperation::InsertMessage {
2475 anchor: suffix.clone(),
2476 metadata: suffix_metadata,
2477 version,
2478 },
2479 cx,
2480 );
2481
2482 let new_messages =
2483 if range.start == range.end || range.start == message.offset_range.start {
2484 (None, Some(suffix))
2485 } else {
2486 let mut prefix_end = None;
2487 if range.start > message.offset_range.start
                        && range.end < message.offset_range.end.saturating_sub(1)
2489 {
2490 if self.buffer.read(cx).chars_at(range.start).next() == Some('\n') {
2491 prefix_end = Some(range.start + 1);
2492 } else if self.buffer.read(cx).reversed_chars_at(range.start).next()
2493 == Some('\n')
2494 {
2495 prefix_end = Some(range.start);
2496 }
2497 }
2498
2499 let version = self.version.clone();
2500 let selection = if let Some(prefix_end) = prefix_end {
2501 MessageAnchor {
2502 id: MessageId(self.next_timestamp()),
2503 start: self.buffer.read(cx).anchor_before(prefix_end),
2504 }
2505 } else {
2506 self.buffer.update(cx, |buffer, cx| {
2507 buffer.edit([(range.start..range.start, "\n")], None, cx)
2508 });
2509 edited_buffer = true;
2510 MessageAnchor {
2511 id: MessageId(self.next_timestamp()),
                            start: self.buffer.read(cx).anchor_before(range.start + 1),
2513 }
2514 };
2515
2516 let selection_metadata = MessageMetadata {
2517 role,
2518 status: MessageStatus::Done,
2519 timestamp: selection.id.0,
2520 cache: None,
2521 };
2522 self.insert_message(selection.clone(), selection_metadata.clone(), cx);
2523 self.push_op(
2524 ContextOperation::InsertMessage {
2525 anchor: selection.clone(),
2526 metadata: selection_metadata,
2527 version,
2528 },
2529 cx,
2530 );
2531
2532 (Some(selection), Some(suffix))
2533 };
2534
2535 if !edited_buffer {
2536 cx.emit(ContextEvent::MessagesEdited);
2537 }
2538 new_messages
2539 } else {
2540 (None, None)
2541 }
2542 }
2543
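    /// Registers a message anchor and its metadata, keeping `message_anchors`
    /// ordered by buffer position (ties broken by message id).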
2544 fn insert_message(
2545 &mut self,
2546 new_anchor: MessageAnchor,
2547 new_metadata: MessageMetadata,
2548 cx: &mut ModelContext<Self>,
2549 ) {
2550 cx.emit(ContextEvent::MessagesEdited);
2551
2552 self.messages_metadata.insert(new_anchor.id, new_metadata);
2553
2554 let buffer = self.buffer.read(cx);
2555 let insertion_ix = self
2556 .message_anchors
2557 .iter()
2558 .position(|anchor| {
2559 let comparison = new_anchor.start.cmp(&anchor.start, buffer);
2560 comparison.is_lt() || (comparison.is_eq() && new_anchor.id > anchor.id)
2561 })
2562 .unwrap_or(self.message_anchors.len());
2563 self.message_anchors.insert(insertion_ix, new_anchor);
2564 }
2565
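    /// Asks the active model for a one-line title and streams it into
    /// `self.summary`, broadcasting each update as an `UpdateSummary` operation.
    /// Runs only when the provider is authenticated and either `replace_old` is
    /// set or the context has at least two messages and no summary yet.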
2566 pub(super) fn summarize(&mut self, replace_old: bool, cx: &mut ModelContext<Self>) {
2567 let Some(provider) = LanguageModelRegistry::read_global(cx).active_provider() else {
2568 return;
2569 };
2570 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
2571 return;
2572 };
2573
2574 if replace_old || (self.message_anchors.len() >= 2 && self.summary.is_none()) {
2575 if !provider.is_authenticated(cx) {
2576 return;
2577 }
2578
2579 let mut request = self.to_completion_request(cx);
2580 request.messages.push(LanguageModelRequestMessage {
2581 role: Role::User,
2582 content: vec![
2583 "Summarize the context into a short title without punctuation.".into(),
2584 ],
2585 cache: false,
2586 });
2587
2588 self.pending_summary = cx.spawn(|this, mut cx| {
2589 async move {
2590 let stream = model.stream_completion_text(request, &cx);
2591 let mut messages = stream.await?;
2592
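                    // When replacing an existing summary, clear it on the first
                    // streamed chunk and append from then on.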
2593 let mut replaced = !replace_old;
2594 while let Some(message) = messages.next().await {
2595 let text = message?;
2596 let mut lines = text.lines();
2597 this.update(&mut cx, |this, cx| {
2598 let version = this.version.clone();
2599 let timestamp = this.next_timestamp();
2600 let summary = this.summary.get_or_insert(ContextSummary::default());
2601 if !replaced && replace_old {
2602 summary.text.clear();
2603 replaced = true;
2604 }
2605 summary.text.extend(lines.next());
2606 summary.timestamp = timestamp;
2607 let operation = ContextOperation::UpdateSummary {
2608 summary: summary.clone(),
2609 version,
2610 };
2611 this.push_op(operation, cx);
2612 cx.emit(ContextEvent::SummaryChanged);
2613 })?;
2614
2615 // Stop if the LLM generated multiple lines.
2616 if lines.next().is_some() {
2617 break;
2618 }
2619 }
2620
2621 this.update(&mut cx, |this, cx| {
2622 let version = this.version.clone();
2623 let timestamp = this.next_timestamp();
2624 if let Some(summary) = this.summary.as_mut() {
2625 summary.done = true;
2626 summary.timestamp = timestamp;
2627 let operation = ContextOperation::UpdateSummary {
2628 summary: summary.clone(),
2629 version,
2630 };
2631 this.push_op(operation, cx);
2632 cx.emit(ContextEvent::SummaryChanged);
2633 }
2634 })?;
2635
2636 anyhow::Ok(())
2637 }
2638 .log_err()
2639 });
2640 }
2641 }
2642
2643 fn message_for_offset(&self, offset: usize, cx: &AppContext) -> Option<Message> {
2644 self.messages_for_offsets([offset], cx).pop()
2645 }
2646
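    /// Returns the message containing each of the given offsets, which are
    /// assumed to be in ascending buffer order. Offsets that fall in the same
    /// message produce a single entry.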
2647 pub fn messages_for_offsets(
2648 &self,
2649 offsets: impl IntoIterator<Item = usize>,
2650 cx: &AppContext,
2651 ) -> Vec<Message> {
2652 let mut result = Vec::new();
2653
2654 let mut messages = self.messages(cx).peekable();
2655 let mut offsets = offsets.into_iter().peekable();
2656 let mut current_message = messages.next();
2657 while let Some(offset) = offsets.next() {
2658 // Locate the message that contains the offset.
2659 while current_message.as_ref().map_or(false, |message| {
2660 !message.offset_range.contains(&offset) && messages.peek().is_some()
2661 }) {
2662 current_message = messages.next();
2663 }
2664 let Some(message) = current_message.as_ref() else {
2665 break;
2666 };
2667
2668 // Skip offsets that are in the same message.
2669 while offsets.peek().map_or(false, |offset| {
2670 message.offset_range.contains(offset) || messages.peek().is_none()
2671 }) {
2672 offsets.next();
2673 }
2674
2675 result.push(message.clone());
2676 }
2677 result
2678 }
2679
2680 fn messages_from_anchors<'a>(
2681 &'a self,
2682 message_anchors: impl Iterator<Item = &'a MessageAnchor> + 'a,
2683 cx: &'a AppContext,
2684 ) -> impl 'a + Iterator<Item = Message> {
2685 let buffer = self.buffer.read(cx);
2686
2687 Self::messages_from_iters(buffer, &self.messages_metadata, message_anchors.enumerate())
2688 }
2689
2690 pub fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Message> {
2691 self.messages_from_anchors(self.message_anchors.iter(), cx)
2692 }
2693
2694 pub fn messages_from_iters<'a>(
2695 buffer: &'a Buffer,
2696 metadata: &'a HashMap<MessageId, MessageMetadata>,
2697 messages: impl Iterator<Item = (usize, &'a MessageAnchor)> + 'a,
2698 ) -> impl 'a + Iterator<Item = Message> {
2699 let mut messages = messages.peekable();
2700
2701 iter::from_fn(move || {
2702 if let Some((start_ix, message_anchor)) = messages.next() {
2703 let metadata = metadata.get(&message_anchor.id)?;
2704
2705 let message_start = message_anchor.start.to_offset(buffer);
2706 let mut message_end = None;
2707 let mut end_ix = start_ix;
2708 while let Some((_, next_message)) = messages.peek() {
2709 if next_message.start.is_valid(buffer) {
2710 message_end = Some(next_message.start);
2711 break;
2712 } else {
2713 end_ix += 1;
2714 messages.next();
2715 }
2716 }
2717 let message_end_anchor = message_end.unwrap_or(language::Anchor::MAX);
2718 let message_end = message_end_anchor.to_offset(buffer);
2719
2720 return Some(Message {
2721 index_range: start_ix..end_ix,
2722 offset_range: message_start..message_end,
2723 anchor_range: message_anchor.start..message_end_anchor,
2724 id: message_anchor.id,
2725 role: metadata.role,
2726 status: metadata.status.clone(),
2727 cache: metadata.cache.clone(),
2728 });
2729 }
2730 None
2731 })
2732 }
2733
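    /// Serializes the context into `contexts_dir()` once its summary is done,
    /// deriving the file name from the summary plus a numeric discriminant to
    /// avoid collisions, and removing the previously saved file when the path
    /// changes. Remote contexts are never saved. The write is debounced when
    /// `debounce` is provided.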
2734 pub fn save(
2735 &mut self,
2736 debounce: Option<Duration>,
2737 fs: Arc<dyn Fs>,
2738 cx: &mut ModelContext<Context>,
2739 ) {
2740 if self.replica_id() != ReplicaId::default() {
2741 // Prevent saving a remote context for now.
2742 return;
2743 }
2744
2745 self.pending_save = cx.spawn(|this, mut cx| async move {
2746 if let Some(debounce) = debounce {
2747 cx.background_executor().timer(debounce).await;
2748 }
2749
2750 let (old_path, summary) = this.read_with(&cx, |this, _| {
2751 let path = this.path.clone();
2752 let summary = if let Some(summary) = this.summary.as_ref() {
2753 if summary.done {
2754 Some(summary.text.clone())
2755 } else {
2756 None
2757 }
2758 } else {
2759 None
2760 };
2761 (path, summary)
2762 })?;
2763
2764 if let Some(summary) = summary {
2765 let context = this.read_with(&cx, |this, cx| this.serialize(cx))?;
2766 let mut discriminant = 1;
2767 let mut new_path;
2768 loop {
2769 new_path = contexts_dir().join(&format!(
2770 "{} - {}.zed.json",
2771 summary.trim(),
2772 discriminant
2773 ));
2774 if fs.is_file(&new_path).await {
2775 discriminant += 1;
2776 } else {
2777 break;
2778 }
2779 }
2780
2781 fs.create_dir(contexts_dir().as_ref()).await?;
2782 fs.atomic_write(new_path.clone(), serde_json::to_string(&context).unwrap())
2783 .await?;
2784 if let Some(old_path) = old_path {
2785 if new_path != old_path {
2786 fs.remove_file(
2787 &old_path,
2788 RemoveOptions {
2789 recursive: false,
2790 ignore_if_not_exists: true,
2791 },
2792 )
2793 .await?;
2794 }
2795 }
2796
2797 this.update(&mut cx, |this, _| this.path = Some(new_path))?;
2798 }
2799
2800 Ok(())
2801 });
2802 }
2803
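    /// Replaces the summary with a user-provided title and marks it as done.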
2804 pub(crate) fn custom_summary(&mut self, custom_summary: String, cx: &mut ModelContext<Self>) {
2805 let timestamp = self.next_timestamp();
2806 let summary = self.summary.get_or_insert(ContextSummary::default());
2807 summary.timestamp = timestamp;
2808 summary.done = true;
2809 summary.text = custom_summary;
2810 cx.emit(ContextEvent::SummaryChanged);
2811 }
2812}
2813
2814#[derive(Debug, Default)]
2815pub struct ContextVersion {
2816 context: clock::Global,
2817 buffer: clock::Global,
2818}
2819
2820impl ContextVersion {
2821 pub fn from_proto(proto: &proto::ContextVersion) -> Self {
2822 Self {
2823 context: language::proto::deserialize_version(&proto.context_version),
2824 buffer: language::proto::deserialize_version(&proto.buffer_version),
2825 }
2826 }
2827
2828 pub fn to_proto(&self, context_id: ContextId) -> proto::ContextVersion {
2829 proto::ContextVersion {
2830 context_id: context_id.to_proto(),
2831 context_version: language::proto::serialize_version(&self.context),
2832 buffer_version: language::proto::serialize_version(&self.buffer),
2833 }
2834 }
2835}
2836
2837#[derive(Debug, Clone)]
2838pub struct PendingSlashCommand {
2839 pub name: String,
2840 pub arguments: SmallVec<[String; 3]>,
2841 pub status: PendingSlashCommandStatus,
2842 pub source_range: Range<language::Anchor>,
2843}
2844
2845#[derive(Debug, Clone)]
2846pub enum PendingSlashCommandStatus {
2847 Idle,
2848 Running { _task: Shared<Task<()>> },
2849 Error(String),
2850}
2851
2852pub(crate) struct ToolUseFeatureFlag;
2853
2854impl FeatureFlag for ToolUseFeatureFlag {
2855 const NAME: &'static str = "assistant-tool-use";
2856
2857 fn enabled_for_staff() -> bool {
2858 false
2859 }
2860}
2861
2862#[derive(Debug, Clone)]
2863pub struct PendingToolUse {
2864 pub id: Arc<str>,
2865 pub name: String,
2866 pub input: serde_json::Value,
2867 pub status: PendingToolUseStatus,
2868 pub source_range: Range<language::Anchor>,
2869}
2870
2871#[derive(Debug, Clone)]
2872pub enum PendingToolUseStatus {
2873 Idle,
2874 Running { _task: Shared<Task<()>> },
2875 Error(String),
2876}
2877
2878impl PendingToolUseStatus {
2879 pub fn is_idle(&self) -> bool {
2880 matches!(self, PendingToolUseStatus::Idle)
2881 }
2882}
2883
2884#[derive(Serialize, Deserialize)]
2885pub struct SavedMessage {
2886 pub id: MessageId,
2887 pub start: usize,
2888 pub metadata: MessageMetadata,
2889}
2890
2891#[derive(Serialize, Deserialize)]
2892pub struct SavedContext {
2893 pub id: Option<ContextId>,
2894 pub zed: String,
2895 pub version: String,
2896 pub text: String,
2897 pub messages: Vec<SavedMessage>,
2898 pub summary: String,
2899 pub slash_command_output_sections:
2900 Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
2901}
2902
2903impl SavedContext {
2904 pub const VERSION: &'static str = "0.4.0";
2905
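    /// Deserializes a saved context from JSON, transparently upgrading the
    /// 0.1.0, 0.2.0, and 0.3.0 on-disk formats to the current version.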
2906 pub fn from_json(json: &str) -> Result<Self> {
2907 let saved_context_json = serde_json::from_str::<serde_json::Value>(json)?;
2908 match saved_context_json
2909 .get("version")
2910 .ok_or_else(|| anyhow!("version not found"))?
2911 {
2912 serde_json::Value::String(version) => match version.as_str() {
2913 SavedContext::VERSION => {
2914 Ok(serde_json::from_value::<SavedContext>(saved_context_json)?)
2915 }
2916 SavedContextV0_3_0::VERSION => {
2917 let saved_context =
2918 serde_json::from_value::<SavedContextV0_3_0>(saved_context_json)?;
2919 Ok(saved_context.upgrade())
2920 }
2921 SavedContextV0_2_0::VERSION => {
2922 let saved_context =
2923 serde_json::from_value::<SavedContextV0_2_0>(saved_context_json)?;
2924 Ok(saved_context.upgrade())
2925 }
2926 SavedContextV0_1_0::VERSION => {
2927 let saved_context =
2928 serde_json::from_value::<SavedContextV0_1_0>(saved_context_json)?;
2929 Ok(saved_context.upgrade())
2930 }
2931 _ => Err(anyhow!("unrecognized saved context version: {}", version)),
2932 },
2933 _ => Err(anyhow!("version not found on saved context")),
2934 }
2935 }
2936
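    /// Replays the saved context as a sequence of `ContextOperation`s: an
    /// `InsertMessage` per non-initial message, an `UpdateMessage` for the
    /// initial message's metadata, a single `SlashCommandFinished` carrying all
    /// saved output sections, and a final `UpdateSummary`.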
2937 fn into_ops(
2938 self,
2939 buffer: &Model<Buffer>,
2940 cx: &mut ModelContext<Context>,
2941 ) -> Vec<ContextOperation> {
2942 let mut operations = Vec::new();
2943 let mut version = clock::Global::new();
2944 let mut next_timestamp = clock::Lamport::new(ReplicaId::default());
2945
2946 let mut first_message_metadata = None;
2947 for message in self.messages {
2948 if message.id == MessageId(clock::Lamport::default()) {
2949 first_message_metadata = Some(message.metadata);
2950 } else {
2951 operations.push(ContextOperation::InsertMessage {
2952 anchor: MessageAnchor {
2953 id: message.id,
2954 start: buffer.read(cx).anchor_before(message.start),
2955 },
2956 metadata: MessageMetadata {
2957 role: message.metadata.role,
2958 status: message.metadata.status,
2959 timestamp: message.metadata.timestamp,
2960 cache: None,
2961 },
2962 version: version.clone(),
2963 });
2964 version.observe(message.id.0);
2965 next_timestamp.observe(message.id.0);
2966 }
2967 }
2968
2969 if let Some(metadata) = first_message_metadata {
2970 let timestamp = next_timestamp.tick();
2971 operations.push(ContextOperation::UpdateMessage {
2972 message_id: MessageId(clock::Lamport::default()),
2973 metadata: MessageMetadata {
2974 role: metadata.role,
2975 status: metadata.status,
2976 timestamp,
2977 cache: None,
2978 },
2979 version: version.clone(),
2980 });
2981 version.observe(timestamp);
2982 }
2983
2984 let timestamp = next_timestamp.tick();
2985 operations.push(ContextOperation::SlashCommandFinished {
2986 id: SlashCommandId(timestamp),
2987 output_range: language::Anchor::MIN..language::Anchor::MAX,
2988 sections: self
2989 .slash_command_output_sections
2990 .into_iter()
2991 .map(|section| {
2992 let buffer = buffer.read(cx);
2993 SlashCommandOutputSection {
2994 range: buffer.anchor_after(section.range.start)
2995 ..buffer.anchor_before(section.range.end),
2996 icon: section.icon,
2997 label: section.label,
2998 metadata: section.metadata,
2999 }
3000 })
3001 .collect(),
3002 version: version.clone(),
3003 });
3004 version.observe(timestamp);
3005
3006 let timestamp = next_timestamp.tick();
3007 operations.push(ContextOperation::UpdateSummary {
3008 summary: ContextSummary {
3009 text: self.summary,
3010 done: true,
3011 timestamp,
3012 },
3013 version: version.clone(),
3014 });
3015 version.observe(timestamp);
3016
3017 operations
3018 }
3019}
3020
3021#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
3022struct SavedMessageIdPreV0_4_0(usize);
3023
3024#[derive(Serialize, Deserialize)]
3025struct SavedMessagePreV0_4_0 {
3026 id: SavedMessageIdPreV0_4_0,
3027 start: usize,
3028}
3029
3030#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
3031struct SavedMessageMetadataPreV0_4_0 {
3032 role: Role,
3033 status: MessageStatus,
3034}
3035
3036#[derive(Serialize, Deserialize)]
3037struct SavedContextV0_3_0 {
3038 id: Option<ContextId>,
3039 zed: String,
3040 version: String,
3041 text: String,
3042 messages: Vec<SavedMessagePreV0_4_0>,
3043 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3044 summary: String,
3045 slash_command_output_sections: Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
3046}
3047
3048impl SavedContextV0_3_0 {
3049 const VERSION: &'static str = "0.3.0";
3050
3051 fn upgrade(self) -> SavedContext {
3052 SavedContext {
3053 id: self.id,
3054 zed: self.zed,
3055 version: SavedContext::VERSION.into(),
3056 text: self.text,
3057 messages: self
3058 .messages
3059 .into_iter()
3060 .filter_map(|message| {
3061 let metadata = self.message_metadata.get(&message.id)?;
3062 let timestamp = clock::Lamport {
3063 replica_id: ReplicaId::default(),
3064 value: message.id.0 as u32,
3065 };
3066 Some(SavedMessage {
3067 id: MessageId(timestamp),
3068 start: message.start,
3069 metadata: MessageMetadata {
3070 role: metadata.role,
3071 status: metadata.status.clone(),
3072 timestamp,
3073 cache: None,
3074 },
3075 })
3076 })
3077 .collect(),
3078 summary: self.summary,
3079 slash_command_output_sections: self.slash_command_output_sections,
3080 }
3081 }
3082}
3083
3084#[derive(Serialize, Deserialize)]
3085struct SavedContextV0_2_0 {
3086 id: Option<ContextId>,
3087 zed: String,
3088 version: String,
3089 text: String,
3090 messages: Vec<SavedMessagePreV0_4_0>,
3091 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3092 summary: String,
3093}
3094
3095impl SavedContextV0_2_0 {
3096 const VERSION: &'static str = "0.2.0";
3097
3098 fn upgrade(self) -> SavedContext {
3099 SavedContextV0_3_0 {
3100 id: self.id,
3101 zed: self.zed,
3102 version: SavedContextV0_3_0::VERSION.to_string(),
3103 text: self.text,
3104 messages: self.messages,
3105 message_metadata: self.message_metadata,
3106 summary: self.summary,
3107 slash_command_output_sections: Vec::new(),
3108 }
3109 .upgrade()
3110 }
3111}
3112
3113#[derive(Serialize, Deserialize)]
3114struct SavedContextV0_1_0 {
3115 id: Option<ContextId>,
3116 zed: String,
3117 version: String,
3118 text: String,
3119 messages: Vec<SavedMessagePreV0_4_0>,
3120 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3121 summary: String,
3122 api_url: Option<String>,
3123 model: OpenAiModel,
3124}
3125
3126impl SavedContextV0_1_0 {
3127 const VERSION: &'static str = "0.1.0";
3128
3129 fn upgrade(self) -> SavedContext {
3130 SavedContextV0_2_0 {
3131 id: self.id,
3132 zed: self.zed,
3133 version: SavedContextV0_2_0::VERSION.to_string(),
3134 text: self.text,
3135 messages: self.messages,
3136 message_metadata: self.message_metadata,
3137 summary: self.summary,
3138 }
3139 .upgrade()
3140 }
3141}
3142
3143#[derive(Clone)]
3144pub struct SavedContextMetadata {
3145 pub title: String,
3146 pub path: PathBuf,
3147 pub mtime: chrono::DateTime<chrono::Local>,
3148}