1#[cfg(test)]
2mod context_tests;
3
4use crate::{
5 prompts::PromptBuilder, slash_command::SlashCommandLine, MessageId, MessageStatus,
6 WorkflowStep, WorkflowStepEdit, WorkflowStepResolution, WorkflowSuggestionGroup,
7};
8use anyhow::{anyhow, Context as _, Result};
9use assistant_slash_command::{
10 SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry,
11};
12use assistant_tool::ToolRegistry;
13use client::{self, proto, telemetry::Telemetry};
14use clock::ReplicaId;
15use collections::{HashMap, HashSet};
16use feature_flags::{FeatureFlag, FeatureFlagAppExt};
17use fs::{Fs, RemoveOptions};
18use futures::{
19 future::{self, Shared},
20 stream::FuturesUnordered,
21 FutureExt, StreamExt,
22};
23use gpui::{
24 AppContext, AsyncAppContext, Context as _, EventEmitter, Image, Model, ModelContext,
25 RenderImage, SharedString, Subscription, Task,
26};
27
28use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset};
29use language_model::{
30 LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionEvent,
31 LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
32 LanguageModelRequestTool, MessageContent, Role, StopReason,
33};
34use open_ai::Model as OpenAiModel;
35use paths::{context_images_dir, contexts_dir};
36use project::Project;
37use serde::{Deserialize, Serialize};
38use smallvec::SmallVec;
39use std::{
40 cmp::{self, max, Ordering},
41 collections::hash_map,
42 fmt::Debug,
43 iter, mem,
44 ops::Range,
45 path::{Path, PathBuf},
46 str::FromStr as _,
47 sync::Arc,
48 time::{Duration, Instant},
49};
50use telemetry_events::AssistantKind;
51use text::BufferSnapshot;
52use util::{post_inc, ResultExt, TryFutureExt};
53use uuid::Uuid;
54
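/// A globally unique identifier for a context, backed by a randomly generated UUID string.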
55#[derive(Clone, Eq, PartialEq, Hash, PartialOrd, Ord, Serialize, Deserialize)]
56pub struct ContextId(String);
57
58impl ContextId {
59 pub fn new() -> Self {
60 Self(Uuid::new_v4().to_string())
61 }
62
63 pub fn from_proto(id: String) -> Self {
64 Self(id)
65 }
66
67 pub fn to_proto(&self) -> String {
68 self.0.clone()
69 }
70}
71
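/// An operation applied to a context's messages, summary, or slash command output.
/// Each variant (other than raw buffer operations) records the vector-clock version
/// it was generated at, so collaborators can apply operations in a causally
/// consistent order.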
72#[derive(Clone, Debug)]
73pub enum ContextOperation {
74 InsertMessage {
75 anchor: MessageAnchor,
76 metadata: MessageMetadata,
77 version: clock::Global,
78 },
79 UpdateMessage {
80 message_id: MessageId,
81 metadata: MessageMetadata,
82 version: clock::Global,
83 },
84 UpdateSummary {
85 summary: ContextSummary,
86 version: clock::Global,
87 },
88 SlashCommandFinished {
89 id: SlashCommandId,
90 output_range: Range<language::Anchor>,
91 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
92 version: clock::Global,
93 },
94 BufferOperation(language::Operation),
95}
96
97impl ContextOperation {
98 pub fn from_proto(op: proto::ContextOperation) -> Result<Self> {
99 match op.variant.context("invalid variant")? {
100 proto::context_operation::Variant::InsertMessage(insert) => {
101 let message = insert.message.context("invalid message")?;
102 let id = MessageId(language::proto::deserialize_timestamp(
103 message.id.context("invalid id")?,
104 ));
105 Ok(Self::InsertMessage {
106 anchor: MessageAnchor {
107 id,
108 start: language::proto::deserialize_anchor(
109 message.start.context("invalid anchor")?,
110 )
111 .context("invalid anchor")?,
112 },
113 metadata: MessageMetadata {
114 role: Role::from_proto(message.role),
115 status: MessageStatus::from_proto(
116 message.status.context("invalid status")?,
117 ),
118 timestamp: id.0,
119 cache: None,
120 },
121 version: language::proto::deserialize_version(&insert.version),
122 })
123 }
124 proto::context_operation::Variant::UpdateMessage(update) => Ok(Self::UpdateMessage {
125 message_id: MessageId(language::proto::deserialize_timestamp(
126 update.message_id.context("invalid message id")?,
127 )),
128 metadata: MessageMetadata {
129 role: Role::from_proto(update.role),
130 status: MessageStatus::from_proto(update.status.context("invalid status")?),
131 timestamp: language::proto::deserialize_timestamp(
132 update.timestamp.context("invalid timestamp")?,
133 ),
134 cache: None,
135 },
136 version: language::proto::deserialize_version(&update.version),
137 }),
138 proto::context_operation::Variant::UpdateSummary(update) => Ok(Self::UpdateSummary {
139 summary: ContextSummary {
140 text: update.summary,
141 done: update.done,
142 timestamp: language::proto::deserialize_timestamp(
143 update.timestamp.context("invalid timestamp")?,
144 ),
145 },
146 version: language::proto::deserialize_version(&update.version),
147 }),
148 proto::context_operation::Variant::SlashCommandFinished(finished) => {
149 Ok(Self::SlashCommandFinished {
150 id: SlashCommandId(language::proto::deserialize_timestamp(
151 finished.id.context("invalid id")?,
152 )),
153 output_range: language::proto::deserialize_anchor_range(
154 finished.output_range.context("invalid range")?,
155 )?,
156 sections: finished
157 .sections
158 .into_iter()
159 .map(|section| {
160 Ok(SlashCommandOutputSection {
161 range: language::proto::deserialize_anchor_range(
162 section.range.context("invalid range")?,
163 )?,
164 icon: section.icon_name.parse()?,
165 label: section.label.into(),
166 })
167 })
168 .collect::<Result<Vec<_>>>()?,
169 version: language::proto::deserialize_version(&finished.version),
170 })
171 }
172 proto::context_operation::Variant::BufferOperation(op) => Ok(Self::BufferOperation(
173 language::proto::deserialize_operation(
174 op.operation.context("invalid buffer operation")?,
175 )?,
176 )),
177 }
178 }
179
180 pub fn to_proto(&self) -> proto::ContextOperation {
181 match self {
182 Self::InsertMessage {
183 anchor,
184 metadata,
185 version,
186 } => proto::ContextOperation {
187 variant: Some(proto::context_operation::Variant::InsertMessage(
188 proto::context_operation::InsertMessage {
189 message: Some(proto::ContextMessage {
190 id: Some(language::proto::serialize_timestamp(anchor.id.0)),
191 start: Some(language::proto::serialize_anchor(&anchor.start)),
192 role: metadata.role.to_proto() as i32,
193 status: Some(metadata.status.to_proto()),
194 }),
195 version: language::proto::serialize_version(version),
196 },
197 )),
198 },
199 Self::UpdateMessage {
200 message_id,
201 metadata,
202 version,
203 } => proto::ContextOperation {
204 variant: Some(proto::context_operation::Variant::UpdateMessage(
205 proto::context_operation::UpdateMessage {
206 message_id: Some(language::proto::serialize_timestamp(message_id.0)),
207 role: metadata.role.to_proto() as i32,
208 status: Some(metadata.status.to_proto()),
209 timestamp: Some(language::proto::serialize_timestamp(metadata.timestamp)),
210 version: language::proto::serialize_version(version),
211 },
212 )),
213 },
214 Self::UpdateSummary { summary, version } => proto::ContextOperation {
215 variant: Some(proto::context_operation::Variant::UpdateSummary(
216 proto::context_operation::UpdateSummary {
217 summary: summary.text.clone(),
218 done: summary.done,
219 timestamp: Some(language::proto::serialize_timestamp(summary.timestamp)),
220 version: language::proto::serialize_version(version),
221 },
222 )),
223 },
224 Self::SlashCommandFinished {
225 id,
226 output_range,
227 sections,
228 version,
229 } => proto::ContextOperation {
230 variant: Some(proto::context_operation::Variant::SlashCommandFinished(
231 proto::context_operation::SlashCommandFinished {
232 id: Some(language::proto::serialize_timestamp(id.0)),
233 output_range: Some(language::proto::serialize_anchor_range(
234 output_range.clone(),
235 )),
236 sections: sections
237 .iter()
238 .map(|section| {
239 let icon_name: &'static str = section.icon.into();
240 proto::SlashCommandOutputSection {
241 range: Some(language::proto::serialize_anchor_range(
242 section.range.clone(),
243 )),
244 icon_name: icon_name.to_string(),
245 label: section.label.to_string(),
246 }
247 })
248 .collect(),
249 version: language::proto::serialize_version(version),
250 },
251 )),
252 },
253 Self::BufferOperation(operation) => proto::ContextOperation {
254 variant: Some(proto::context_operation::Variant::BufferOperation(
255 proto::context_operation::BufferOperation {
256 operation: Some(language::proto::serialize_operation(operation)),
257 },
258 )),
259 },
260 }
261 }
262
263 fn timestamp(&self) -> clock::Lamport {
264 match self {
265 Self::InsertMessage { anchor, .. } => anchor.id.0,
266 Self::UpdateMessage { metadata, .. } => metadata.timestamp,
267 Self::UpdateSummary { summary, .. } => summary.timestamp,
268 Self::SlashCommandFinished { id, .. } => id.0,
269 Self::BufferOperation(_) => {
270 panic!("reading the timestamp of a buffer operation is not supported")
271 }
272 }
273 }
274
275 /// Returns the version vector associated with this operation. Panics for buffer operations.
276 pub fn version(&self) -> &clock::Global {
277 match self {
278 Self::InsertMessage { version, .. }
279 | Self::UpdateMessage { version, .. }
280 | Self::UpdateSummary { version, .. }
281 | Self::SlashCommandFinished { version, .. } => version,
282 Self::BufferOperation(_) => {
283 panic!("reading the version of a buffer operation is not supported")
284 }
285 }
286 }
287}
288
289#[derive(Debug, Clone)]
290pub enum ContextEvent {
291 ShowAssistError(SharedString),
292 MessagesEdited,
293 SummaryChanged,
294 StreamedCompletion,
295 WorkflowStepsUpdated {
296 removed: Vec<Range<language::Anchor>>,
297 updated: Vec<Range<language::Anchor>>,
298 },
299 PendingSlashCommandsUpdated {
300 removed: Vec<Range<language::Anchor>>,
301 updated: Vec<PendingSlashCommand>,
302 },
303 SlashCommandFinished {
304 output_range: Range<language::Anchor>,
305 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
306 run_commands_in_output: bool,
307 expand_result: bool,
308 },
309 UsePendingTools,
310 Operation(ContextOperation),
311}
312
313#[derive(Clone, Default, Debug)]
314pub struct ContextSummary {
315 pub text: String,
316 done: bool,
317 timestamp: clock::Lamport,
318}
319
320#[derive(Clone, Debug, Eq, PartialEq)]
321pub struct MessageAnchor {
322 pub id: MessageId,
323 pub start: language::Anchor,
324}
325
326#[derive(Clone, Debug, Eq, PartialEq)]
327pub enum CacheStatus {
328 Pending,
329 Cached,
330}
331
332#[derive(Clone, Debug, Eq, PartialEq)]
333pub struct MessageCacheMetadata {
334 pub is_anchor: bool,
335 pub is_final_anchor: bool,
336 pub status: CacheStatus,
337 pub cached_at: clock::Global,
338}
339
340#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
341pub struct MessageMetadata {
342 pub role: Role,
343 pub status: MessageStatus,
344 pub(crate) timestamp: clock::Lamport,
345 #[serde(skip)]
346 pub cache: Option<MessageCacheMetadata>,
347}
348
349impl From<&Message> for MessageMetadata {
350 fn from(message: &Message) -> Self {
351 Self {
352 role: message.role,
353 status: message.status.clone(),
354 timestamp: message.id.0,
355 cache: message.cache.clone(),
356 }
357 }
358}
359
360impl MessageMetadata {
361 pub fn is_cache_valid(&self, buffer: &BufferSnapshot, range: &Range<usize>) -> bool {
362 match &self.cache {
363 Some(MessageCacheMetadata { cached_at, .. }) => !buffer.has_edits_since_in_range(
364 cached_at,
365 Range {
366 start: buffer.anchor_at(range.start, Bias::Right),
367 end: buffer.anchor_at(range.end, Bias::Left),
368 },
369 ),
370 _ => false,
371 }
373 }
374}
375
376#[derive(Clone, Debug)]
377pub struct MessageImage {
378 image_id: u64,
379 image: Shared<Task<Option<LanguageModelImage>>>,
380}
381
382impl PartialEq for MessageImage {
383 fn eq(&self, other: &Self) -> bool {
384 self.image_id == other.image_id
385 }
386}
387
388impl Eq for MessageImage {}
389
390#[derive(Clone, Debug)]
391pub struct Message {
392 pub image_offsets: SmallVec<[(usize, MessageImage); 1]>,
393 pub offset_range: Range<usize>,
394 pub index_range: Range<usize>,
395 pub anchor_range: Range<language::Anchor>,
396 pub id: MessageId,
397 pub role: Role,
398 pub status: MessageStatus,
399 pub cache: Option<MessageCacheMetadata>,
400}
401
402impl Message {
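/// Converts this message into a request message for the language model,
/// interleaving text content with any images anchored inside the message.
/// Returns `None` if the message has no non-empty content.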
403 fn to_request_message(&self, buffer: &Buffer) -> Option<LanguageModelRequestMessage> {
404 let mut content = Vec::new();
405
406 let mut range_start = self.offset_range.start;
407 for (image_offset, message_image) in self.image_offsets.iter() {
408 if *image_offset != range_start {
409 if let Some(text) = Self::collect_text_content(buffer, range_start..*image_offset) {
410 content.push(text);
411 }
412 }
413
414 if let Some(image) = message_image.image.clone().now_or_never().flatten() {
415 content.push(language_model::MessageContent::Image(image));
416 }
417
418 range_start = *image_offset;
419 }
420
421 if range_start != self.offset_range.end {
422 if let Some(text) =
423 Self::collect_text_content(buffer, range_start..self.offset_range.end)
424 {
425 content.push(text);
426 }
427 }
428
429 if content.is_empty() {
430 return None;
431 }
432
433 Some(LanguageModelRequestMessage {
434 role: self.role,
435 content,
436 cache: self.cache.as_ref().map_or(false, |cache| cache.is_anchor),
437 })
438 }
439
440 fn collect_text_content(buffer: &Buffer, range: Range<usize>) -> Option<MessageContent> {
441 let text: String = buffer.text_for_range(range.clone()).collect();
442 if text.trim().is_empty() {
443 None
444 } else {
445 Some(MessageContent::Text(text))
446 }
447 }
448}
449
450#[derive(Clone, Debug)]
451pub struct ImageAnchor {
452 pub anchor: language::Anchor,
453 pub image_id: u64,
454 pub render_image: Arc<RenderImage>,
455 pub image: Shared<Task<Option<LanguageModelImage>>>,
456}
457
458struct PendingCompletion {
459 id: usize,
460 assistant_message_id: MessageId,
461 _task: Task<()>,
462}
463
464#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
465pub struct SlashCommandId(clock::Lamport);
466
467#[derive(Clone, Debug)]
468pub struct XmlTag {
469 pub kind: XmlTagKind,
470 pub range: Range<text::Anchor>,
471 pub is_open_tag: bool,
472}
473
474#[derive(Copy, Clone, Debug, strum::EnumString, PartialEq, Eq, strum::AsRefStr)]
475#[strum(serialize_all = "snake_case")]
476pub enum XmlTagKind {
477 Step,
478 Edit,
479 Path,
480 Search,
481 Within,
482 Operation,
483 Description,
484}
485
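/// A single assistant conversation: a Markdown buffer plus the message boundaries,
/// slash commands, workflow steps, and replication state layered on top of it.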
486pub struct Context {
487 id: ContextId,
488 timestamp: clock::Lamport,
489 version: clock::Global,
490 pending_ops: Vec<ContextOperation>,
491 operations: Vec<ContextOperation>,
492 buffer: Model<Buffer>,
493 pending_slash_commands: Vec<PendingSlashCommand>,
494 edits_since_last_parse: language::Subscription,
495 finished_slash_commands: HashSet<SlashCommandId>,
496 slash_command_output_sections: Vec<SlashCommandOutputSection<language::Anchor>>,
497 pending_tool_uses_by_id: HashMap<Arc<str>, PendingToolUse>,
498 message_anchors: Vec<MessageAnchor>,
499 images: HashMap<u64, (Arc<RenderImage>, Shared<Task<Option<LanguageModelImage>>>)>,
500 image_anchors: Vec<ImageAnchor>,
501 messages_metadata: HashMap<MessageId, MessageMetadata>,
502 summary: Option<ContextSummary>,
503 pending_summary: Task<Option<()>>,
504 completion_count: usize,
505 pending_completions: Vec<PendingCompletion>,
506 token_count: Option<usize>,
507 pending_token_count: Task<Option<()>>,
508 pending_save: Task<Result<()>>,
509 pending_cache_warming_task: Task<Option<()>>,
510 path: Option<PathBuf>,
511 _subscriptions: Vec<Subscription>,
512 telemetry: Option<Arc<Telemetry>>,
513 language_registry: Arc<LanguageRegistry>,
514 workflow_steps: Vec<WorkflowStep>,
515 xml_tags: Vec<XmlTag>,
516 project: Option<Model<Project>>,
517 prompt_builder: Arc<PromptBuilder>,
518}
519
520trait ContextAnnotation {
521 fn range(&self) -> &Range<language::Anchor>;
522}
523
524impl ContextAnnotation for PendingSlashCommand {
525 fn range(&self) -> &Range<language::Anchor> {
526 &self.source_range
527 }
528}
529
530impl ContextAnnotation for WorkflowStep {
531 fn range(&self) -> &Range<language::Anchor> {
532 &self.range
533 }
534}
535
536impl ContextAnnotation for XmlTag {
537 fn range(&self) -> &Range<language::Anchor> {
538 &self.range
539 }
540}
541
542impl EventEmitter<ContextEvent> for Context {}
543
544impl Context {
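/// Creates a new context that lives only on this machine and is not shared
/// with collaborators.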
545 pub fn local(
546 language_registry: Arc<LanguageRegistry>,
547 project: Option<Model<Project>>,
548 telemetry: Option<Arc<Telemetry>>,
549 prompt_builder: Arc<PromptBuilder>,
550 cx: &mut ModelContext<Self>,
551 ) -> Self {
552 Self::new(
553 ContextId::new(),
554 ReplicaId::default(),
555 language::Capability::ReadWrite,
556 language_registry,
557 prompt_builder,
558 project,
559 telemetry,
560 cx,
561 )
562 }
563
564 #[allow(clippy::too_many_arguments)]
565 pub fn new(
566 id: ContextId,
567 replica_id: ReplicaId,
568 capability: language::Capability,
569 language_registry: Arc<LanguageRegistry>,
570 prompt_builder: Arc<PromptBuilder>,
571 project: Option<Model<Project>>,
572 telemetry: Option<Arc<Telemetry>>,
573 cx: &mut ModelContext<Self>,
574 ) -> Self {
575 let buffer = cx.new_model(|_cx| {
576 let mut buffer = Buffer::remote(
577 language::BufferId::new(1).unwrap(),
578 replica_id,
579 capability,
580 "",
581 );
582 buffer.set_language_registry(language_registry.clone());
583 buffer
584 });
585 let edits_since_last_slash_command_parse =
586 buffer.update(cx, |buffer, _| buffer.subscribe());
587 let mut this = Self {
588 id,
589 timestamp: clock::Lamport::new(replica_id),
590 version: clock::Global::new(),
591 pending_ops: Vec::new(),
592 operations: Vec::new(),
593 message_anchors: Default::default(),
594 image_anchors: Default::default(),
595 images: Default::default(),
596 messages_metadata: Default::default(),
597 pending_slash_commands: Vec::new(),
598 finished_slash_commands: HashSet::default(),
599 pending_tool_uses_by_id: HashMap::default(),
600 slash_command_output_sections: Vec::new(),
601 edits_since_last_parse: edits_since_last_slash_command_parse,
602 summary: None,
603 pending_summary: Task::ready(None),
604 completion_count: Default::default(),
605 pending_completions: Default::default(),
606 token_count: None,
607 pending_token_count: Task::ready(None),
608 pending_cache_warming_task: Task::ready(None),
609 _subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)],
610 pending_save: Task::ready(Ok(())),
611 path: None,
612 buffer,
613 telemetry,
614 project,
615 language_registry,
616 workflow_steps: Vec::new(),
617 xml_tags: Vec::new(),
618 prompt_builder,
619 };
620
621 let first_message_id = MessageId(clock::Lamport {
622 replica_id: 0,
623 value: 0,
624 });
625 let message = MessageAnchor {
626 id: first_message_id,
627 start: language::Anchor::MIN,
628 };
629 this.messages_metadata.insert(
630 first_message_id,
631 MessageMetadata {
632 role: Role::User,
633 status: MessageStatus::Done,
634 timestamp: first_message_id.0,
635 cache: None,
636 },
637 );
638 this.message_anchors.push(message);
639
640 this.set_language(cx);
641 this.count_remaining_tokens(cx);
642 this
643 }
644
645 pub(crate) fn serialize(&self, cx: &AppContext) -> SavedContext {
646 let buffer = self.buffer.read(cx);
647 SavedContext {
648 id: Some(self.id.clone()),
649 zed: "context".into(),
650 version: SavedContext::VERSION.into(),
651 text: buffer.text(),
652 messages: self
653 .messages(cx)
654 .map(|message| SavedMessage {
655 id: message.id,
656 start: message.offset_range.start,
657 metadata: self.messages_metadata[&message.id].clone(),
658 image_offsets: message
659 .image_offsets
660 .iter()
661 .map(|image_offset| (image_offset.0, image_offset.1.image_id))
662 .collect(),
663 })
664 .collect(),
665 summary: self
666 .summary
667 .as_ref()
668 .map(|summary| summary.text.clone())
669 .unwrap_or_default(),
670 slash_command_output_sections: self
671 .slash_command_output_sections
672 .iter()
673 .filter_map(|section| {
674 let range = section.range.to_offset(buffer);
675 if section.range.start.is_valid(buffer) && !range.is_empty() {
676 Some(assistant_slash_command::SlashCommandOutputSection {
677 range,
678 icon: section.icon,
679 label: section.label.clone(),
680 })
681 } else {
682 None
683 }
684 })
685 .collect(),
686 }
687 }
688
689 #[allow(clippy::too_many_arguments)]
690 pub fn deserialize(
691 saved_context: SavedContext,
692 path: PathBuf,
693 language_registry: Arc<LanguageRegistry>,
694 prompt_builder: Arc<PromptBuilder>,
695 project: Option<Model<Project>>,
696 telemetry: Option<Arc<Telemetry>>,
697 cx: &mut ModelContext<Self>,
698 ) -> Self {
699 let id = saved_context.id.clone().unwrap_or_else(ContextId::new);
700 let mut this = Self::new(
701 id,
702 ReplicaId::default(),
703 language::Capability::ReadWrite,
704 language_registry,
705 prompt_builder,
706 project,
707 telemetry,
708 cx,
709 );
710 this.path = Some(path);
711 this.buffer.update(cx, |buffer, cx| {
712 buffer.set_text(saved_context.text.as_str(), cx)
713 });
714 let operations = saved_context.into_ops(&this.buffer, cx);
715 this.apply_ops(operations, cx).unwrap();
716 this
717 }
718
719 pub fn id(&self) -> &ContextId {
720 &self.id
721 }
722
723 pub fn replica_id(&self) -> ReplicaId {
724 self.timestamp.replica_id
725 }
726
727 pub fn version(&self, cx: &AppContext) -> ContextVersion {
728 ContextVersion {
729 context: self.version.clone(),
730 buffer: self.buffer.read(cx).version(),
731 }
732 }
733
734 pub fn set_capability(
735 &mut self,
736 capability: language::Capability,
737 cx: &mut ModelContext<Self>,
738 ) {
739 self.buffer
740 .update(cx, |buffer, cx| buffer.set_capability(capability, cx));
741 }
742
743 fn next_timestamp(&mut self) -> clock::Lamport {
744 let timestamp = self.timestamp.tick();
745 self.version.observe(timestamp);
746 timestamp
747 }
748
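/// Serializes the buffer and context operations that `since` has not yet observed
/// (plus any still-pending context operations), for sending to collaborators.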
749 pub fn serialize_ops(
750 &self,
751 since: &ContextVersion,
752 cx: &AppContext,
753 ) -> Task<Vec<proto::ContextOperation>> {
754 let buffer_ops = self
755 .buffer
756 .read(cx)
757 .serialize_ops(Some(since.buffer.clone()), cx);
758
759 let mut context_ops = self
760 .operations
761 .iter()
762 .filter(|op| !since.context.observed(op.timestamp()))
763 .cloned()
764 .collect::<Vec<_>>();
765 context_ops.extend(self.pending_ops.iter().cloned());
766
767 cx.background_executor().spawn(async move {
768 let buffer_ops = buffer_ops.await;
769 context_ops.sort_unstable_by_key(|op| op.timestamp());
770 buffer_ops
771 .into_iter()
772 .map(|op| proto::ContextOperation {
773 variant: Some(proto::context_operation::Variant::BufferOperation(
774 proto::context_operation::BufferOperation {
775 operation: Some(op),
776 },
777 )),
778 })
779 .chain(context_ops.into_iter().map(|op| op.to_proto()))
780 .collect()
781 })
782 }
783
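/// Applies remote operations: buffer operations are forwarded to the underlying
/// buffer immediately, while context operations are queued and flushed once
/// their dependencies have been observed.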
784 pub fn apply_ops(
785 &mut self,
786 ops: impl IntoIterator<Item = ContextOperation>,
787 cx: &mut ModelContext<Self>,
788 ) -> Result<()> {
789 let mut buffer_ops = Vec::new();
790 for op in ops {
791 match op {
792 ContextOperation::BufferOperation(buffer_op) => buffer_ops.push(buffer_op),
793 op => self.pending_ops.push(op),
794 }
795 }
796 self.buffer
797 .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx))?;
798 self.flush_ops(cx);
799
800 Ok(())
801 }
802
803 fn flush_ops(&mut self, cx: &mut ModelContext<Context>) {
804 let mut changed_messages = HashSet::default();
805 let mut summary_changed = false;
806
807 self.pending_ops.sort_unstable_by_key(|op| op.timestamp());
808 for op in mem::take(&mut self.pending_ops) {
809 if !self.can_apply_op(&op, cx) {
810 self.pending_ops.push(op);
811 continue;
812 }
813
814 let timestamp = op.timestamp();
815 match op.clone() {
816 ContextOperation::InsertMessage {
817 anchor, metadata, ..
818 } => {
819 if self.messages_metadata.contains_key(&anchor.id) {
820 // We already applied this operation.
821 } else {
822 changed_messages.insert(anchor.id);
823 self.insert_message(anchor, metadata, cx);
824 }
825 }
826 ContextOperation::UpdateMessage {
827 message_id,
828 metadata: new_metadata,
829 ..
830 } => {
831 let metadata = self.messages_metadata.get_mut(&message_id).unwrap();
832 if new_metadata.timestamp > metadata.timestamp {
833 *metadata = new_metadata;
834 changed_messages.insert(message_id);
835 }
836 }
837 ContextOperation::UpdateSummary {
838 summary: new_summary,
839 ..
840 } => {
841 if self
842 .summary
843 .as_ref()
844 .map_or(true, |summary| new_summary.timestamp > summary.timestamp)
845 {
846 self.summary = Some(new_summary);
847 summary_changed = true;
848 }
849 }
850 ContextOperation::SlashCommandFinished {
851 id,
852 output_range,
853 sections,
854 ..
855 } => {
856 if self.finished_slash_commands.insert(id) {
857 let buffer = self.buffer.read(cx);
858 self.slash_command_output_sections
859 .extend(sections.iter().cloned());
860 self.slash_command_output_sections
861 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
862 cx.emit(ContextEvent::SlashCommandFinished {
863 output_range,
864 sections,
865 expand_result: false,
866 run_commands_in_output: false,
867 });
868 }
869 }
870 ContextOperation::BufferOperation(_) => unreachable!(),
871 }
872
873 self.version.observe(timestamp);
874 self.timestamp.observe(timestamp);
875 self.operations.push(op);
876 }
877
878 if !changed_messages.is_empty() {
879 self.message_roles_updated(changed_messages, cx);
880 cx.emit(ContextEvent::MessagesEdited);
881 cx.notify();
882 }
883
884 if summary_changed {
885 cx.emit(ContextEvent::SummaryChanged);
886 cx.notify();
887 }
888 }
889
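/// Returns whether this operation's dependencies (its version vector and any
/// messages or anchors it references) have been observed locally, i.e. whether
/// it can be applied now.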
890 fn can_apply_op(&self, op: &ContextOperation, cx: &AppContext) -> bool {
891 if !self.version.observed_all(op.version()) {
892 return false;
893 }
894
895 match op {
896 ContextOperation::InsertMessage { anchor, .. } => self
897 .buffer
898 .read(cx)
899 .version
900 .observed(anchor.start.timestamp),
901 ContextOperation::UpdateMessage { message_id, .. } => {
902 self.messages_metadata.contains_key(message_id)
903 }
904 ContextOperation::UpdateSummary { .. } => true,
905 ContextOperation::SlashCommandFinished {
906 output_range,
907 sections,
908 ..
909 } => {
910 let version = &self.buffer.read(cx).version;
911 sections
912 .iter()
913 .map(|section| &section.range)
914 .chain([output_range])
915 .all(|range| {
916 let observed_start = range.start == language::Anchor::MIN
917 || range.start == language::Anchor::MAX
918 || version.observed(range.start.timestamp);
919 let observed_end = range.end == language::Anchor::MIN
920 || range.end == language::Anchor::MAX
921 || version.observed(range.end.timestamp);
922 observed_start && observed_end
923 })
924 }
925 ContextOperation::BufferOperation(_) => {
926 panic!("buffer operations should always be applied")
927 }
928 }
929 }
930
931 fn push_op(&mut self, op: ContextOperation, cx: &mut ModelContext<Self>) {
932 self.operations.push(op.clone());
933 cx.emit(ContextEvent::Operation(op));
934 }
935
936 pub fn buffer(&self) -> &Model<Buffer> {
937 &self.buffer
938 }
939
940 pub fn language_registry(&self) -> Arc<LanguageRegistry> {
941 self.language_registry.clone()
942 }
943
944 pub fn project(&self) -> Option<Model<Project>> {
945 self.project.clone()
946 }
947
948 pub fn prompt_builder(&self) -> Arc<PromptBuilder> {
949 self.prompt_builder.clone()
950 }
951
952 pub fn path(&self) -> Option<&Path> {
953 self.path.as_deref()
954 }
955
956 pub fn summary(&self) -> Option<&ContextSummary> {
957 self.summary.as_ref()
958 }
959
960 pub(crate) fn workflow_step_containing(
961 &self,
962 offset: usize,
963 cx: &AppContext,
964 ) -> Option<&WorkflowStep> {
965 let buffer = self.buffer.read(cx);
966 let index = self
967 .workflow_steps
968 .binary_search_by(|step| {
969 let step_range = step.range.to_offset(&buffer);
970 if offset < step_range.start {
971 Ordering::Greater
972 } else if offset > step_range.end {
973 Ordering::Less
974 } else {
975 Ordering::Equal
976 }
977 })
978 .ok()?;
979 Some(&self.workflow_steps[index])
980 }
981
982 pub fn workflow_step_ranges(&self) -> impl Iterator<Item = Range<language::Anchor>> + '_ {
983 self.workflow_steps.iter().map(|step| step.range.clone())
984 }
985
986 pub(crate) fn workflow_step_for_range(
987 &self,
988 range: &Range<language::Anchor>,
989 cx: &AppContext,
990 ) -> Option<&WorkflowStep> {
991 let buffer = self.buffer.read(cx);
992 let index = self.workflow_step_index_for_range(range, buffer).ok()?;
993 Some(&self.workflow_steps[index])
994 }
995
996 fn workflow_step_index_for_range(
997 &self,
998 tagged_range: &Range<text::Anchor>,
999 buffer: &text::BufferSnapshot,
1000 ) -> Result<usize, usize> {
1001 self.workflow_steps
1002 .binary_search_by(|probe| probe.range.cmp(&tagged_range, buffer))
1003 }
1004
1005 pub fn pending_slash_commands(&self) -> &[PendingSlashCommand] {
1006 &self.pending_slash_commands
1007 }
1008
1009 pub fn slash_command_output_sections(&self) -> &[SlashCommandOutputSection<language::Anchor>] {
1010 &self.slash_command_output_sections
1011 }
1012
1013 pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> {
1014 self.pending_tool_uses_by_id.values().collect()
1015 }
1016
1017 pub fn get_tool_use_by_id(&self, id: &Arc<str>) -> Option<&PendingToolUse> {
1018 self.pending_tool_uses_by_id.get(id)
1019 }
1020
1021 fn set_language(&mut self, cx: &mut ModelContext<Self>) {
1022 let markdown = self.language_registry.language_for_name("Markdown");
1023 cx.spawn(|this, mut cx| async move {
1024 let markdown = markdown.await?;
1025 this.update(&mut cx, |this, cx| {
1026 this.buffer
1027 .update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx));
1028 })
1029 })
1030 .detach_and_log_err(cx);
1031 }
1032
1033 fn handle_buffer_event(
1034 &mut self,
1035 _: Model<Buffer>,
1036 event: &language::Event,
1037 cx: &mut ModelContext<Self>,
1038 ) {
1039 match event {
1040 language::Event::Operation(operation) => cx.emit(ContextEvent::Operation(
1041 ContextOperation::BufferOperation(operation.clone()),
1042 )),
1043 language::Event::Edited => {
1044 self.count_remaining_tokens(cx);
1045 self.reparse(cx);
1046 // Use `inclusive = true` to invalidate a step when an edit occurs
1047 // at the start/end of a parsed step.
1048 cx.emit(ContextEvent::MessagesEdited);
1049 }
1050 _ => {}
1051 }
1052 }
1053
1054 pub(crate) fn token_count(&self) -> Option<usize> {
1055 self.token_count
1056 }
1057
1058 pub(crate) fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
1059 let request = self.to_completion_request(cx);
1060 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
1061 return;
1062 };
1063 self.pending_token_count = cx.spawn(|this, mut cx| {
1064 async move {
1065 cx.background_executor()
1066 .timer(Duration::from_millis(200))
1067 .await;
1068
1069 let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
1070 this.update(&mut cx, |this, cx| {
1071 this.token_count = Some(token_count);
1072 this.start_cache_warming(&model, cx);
1073 cx.notify()
1074 })
1075 }
1076 .log_err()
1077 });
1078 }
1079
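/// Chooses which user messages should act as prompt-cache anchors (preferring the
/// largest messages and reserving one anchor for the inline assistant), invalidates
/// cache entries for messages that have been edited, and returns whether a newly
/// chosen anchor still needs to be cached.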
1080 pub fn mark_cache_anchors(
1081 &mut self,
1082 cache_configuration: &Option<LanguageModelCacheConfiguration>,
1083 speculative: bool,
1084 cx: &mut ModelContext<Self>,
1085 ) -> bool {
1086 let cache_configuration =
1087 cache_configuration
1088 .as_ref()
1089 .unwrap_or(&LanguageModelCacheConfiguration {
1090 max_cache_anchors: 0,
1091 should_speculate: false,
1092 min_total_token: 0,
1093 });
1094
1095 let messages: Vec<Message> = self.messages(cx).collect();
1096
1097 let mut sorted_messages = messages.clone();
1098 if speculative {
1099 // Avoid caching the last message if this is a speculative cache fetch as
1100 // it's likely to change.
1101 sorted_messages.pop();
1102 }
1103 sorted_messages.retain(|m| m.role == Role::User);
1104 sorted_messages.sort_by(|a, b| b.offset_range.len().cmp(&a.offset_range.len()));
1105
1106 let cache_anchors = if self.token_count.unwrap_or(0) < cache_configuration.min_total_token {
1107 // If we haven't hit the minimum threshold to enable caching, don't cache anything.
1108 0
1109 } else {
1110 // Save 1 anchor for the inline assistant to use.
1111 max(cache_configuration.max_cache_anchors, 1) - 1
1112 };
1113 sorted_messages.truncate(cache_anchors);
1114
1115 let anchors: HashSet<MessageId> = sorted_messages
1116 .into_iter()
1117 .map(|message| message.id)
1118 .collect();
1119
1120 let buffer = self.buffer.read(cx).snapshot();
1121 let invalidated_caches: HashSet<MessageId> = messages
1122 .iter()
1123 .scan(false, |encountered_invalid, message| {
1124 let message_id = message.id;
1125 let is_invalid = self
1126 .messages_metadata
1127 .get(&message_id)
1128 .map_or(true, |metadata| {
1129 !metadata.is_cache_valid(&buffer, &message.offset_range)
1130 || *encountered_invalid
1131 });
1132 *encountered_invalid |= is_invalid;
1133 Some(if is_invalid { Some(message_id) } else { None })
1134 })
1135 .flatten()
1136 .collect();
1137
1138 let last_anchor = messages.iter().rev().find_map(|message| {
1139 if anchors.contains(&message.id) {
1140 Some(message.id)
1141 } else {
1142 None
1143 }
1144 });
1145
1146 let mut new_anchor_needs_caching = false;
1147 let current_version = &buffer.version;
1148 // If we have no anchors, mark all messages as not being cached.
1149 let mut hit_last_anchor = last_anchor.is_none();
1150
1151 for message in messages.iter() {
1152 if hit_last_anchor {
1153 self.update_metadata(message.id, cx, |metadata| metadata.cache = None);
1154 continue;
1155 }
1156
1157 if let Some(last_anchor) = last_anchor {
1158 if message.id == last_anchor {
1159 hit_last_anchor = true;
1160 }
1161 }
1162
1163 new_anchor_needs_caching = new_anchor_needs_caching
1164 || (invalidated_caches.contains(&message.id) && anchors.contains(&message.id));
1165
1166 self.update_metadata(message.id, cx, |metadata| {
1167 let cache_status = if invalidated_caches.contains(&message.id) {
1168 CacheStatus::Pending
1169 } else {
1170 metadata
1171 .cache
1172 .as_ref()
1173 .map_or(CacheStatus::Pending, |cm| cm.status.clone())
1174 };
1175 metadata.cache = Some(MessageCacheMetadata {
1176 is_anchor: anchors.contains(&message.id),
1177 is_final_anchor: hit_last_anchor,
1178 status: cache_status,
1179 cached_at: current_version.clone(),
1180 });
1181 });
1182 }
1183 new_anchor_needs_caching
1184 }
1185
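/// Speculatively warms the provider's prompt cache by sending a minimal completion
/// request containing the cacheable prefix of the conversation.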
1186 fn start_cache_warming(&mut self, model: &Arc<dyn LanguageModel>, cx: &mut ModelContext<Self>) {
1187 let cache_configuration = model.cache_configuration();
1188
1189 if !self.mark_cache_anchors(&cache_configuration, true, cx) {
1190 return;
1191 }
1192 if !self.pending_completions.is_empty() {
1193 return;
1194 }
1195 if let Some(cache_configuration) = cache_configuration {
1196 if !cache_configuration.should_speculate {
1197 return;
1198 }
1199 }
1200
1201 let request = {
1202 let mut req = self.to_completion_request(cx);
1203 // Skip the last message because it's likely to change and
1204 // therefore would be a waste to cache.
1205 req.messages.pop();
1206 req.messages.push(LanguageModelRequestMessage {
1207 role: Role::User,
1208 content: vec!["Respond only with OK, nothing else.".into()],
1209 cache: false,
1210 });
1211 req
1212 };
1213
1214 let model = Arc::clone(model);
1215 self.pending_cache_warming_task = cx.spawn(|this, mut cx| {
1216 async move {
1217 match model.stream_completion(request, &cx).await {
1218 Ok(mut stream) => {
1219 stream.next().await;
1220 log::info!("Cache warming completed successfully");
1221 }
1222 Err(e) => {
1223 log::warn!("Cache warming failed: {}", e);
1224 }
1225 };
1226 this.update(&mut cx, |this, cx| {
1227 this.update_cache_status_for_completion(cx);
1228 })
1229 .ok();
1230 anyhow::Ok(())
1231 }
1232 .log_err()
1233 });
1234 }
1235
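/// Transitions every message whose cache status is `Pending` to `Cached`, once a
/// completion or cache-warming request has finished.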
1236 pub fn update_cache_status_for_completion(&mut self, cx: &mut ModelContext<Self>) {
1237 let cached_message_ids: Vec<MessageId> = self
1238 .messages_metadata
1239 .iter()
1240 .filter_map(|(message_id, metadata)| {
1241 metadata.cache.as_ref().and_then(|cache| {
1242 if cache.status == CacheStatus::Pending {
1243 Some(*message_id)
1244 } else {
1245 None
1246 }
1247 })
1248 })
1249 .collect();
1250
1251 for message_id in cached_message_ids {
1252 self.update_metadata(message_id, cx, |metadata| {
1253 if let Some(cache) = &mut metadata.cache {
1254 cache.status = CacheStatus::Cached;
1255 }
1256 });
1257 }
1258 cx.notify();
1259 }
1260
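/// Re-parses the rows edited since the last parse, updating pending slash commands
/// and workflow steps and emitting events for anything that changed.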
1261 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1262 let buffer = self.buffer.read(cx).text_snapshot();
1263 let mut row_ranges = self
1264 .edits_since_last_parse
1265 .consume()
1266 .into_iter()
1267 .map(|edit| {
1268 let start_row = buffer.offset_to_point(edit.new.start).row;
1269 let end_row = buffer.offset_to_point(edit.new.end).row + 1;
1270 start_row..end_row
1271 })
1272 .peekable();
1273
1274 let mut removed_slash_command_ranges = Vec::new();
1275 let mut updated_slash_commands = Vec::new();
1276 let mut removed_steps = Vec::new();
1277 let mut updated_steps = Vec::new();
1278 while let Some(mut row_range) = row_ranges.next() {
1279 while let Some(next_row_range) = row_ranges.peek() {
1280 if row_range.end >= next_row_range.start {
1281 row_range.end = next_row_range.end;
1282 row_ranges.next();
1283 } else {
1284 break;
1285 }
1286 }
1287
1288 let start = buffer.anchor_before(Point::new(row_range.start, 0));
1289 let end = buffer.anchor_after(Point::new(
1290 row_range.end - 1,
1291 buffer.line_len(row_range.end - 1),
1292 ));
1293
1294 self.reparse_slash_commands_in_range(
1295 start..end,
1296 &buffer,
1297 &mut updated_slash_commands,
1298 &mut removed_slash_command_ranges,
1299 cx,
1300 );
1301 self.reparse_workflow_steps_in_range(
1302 start..end,
1303 &buffer,
1304 &mut updated_steps,
1305 &mut removed_steps,
1306 cx,
1307 );
1308 }
1309
1310 if !updated_slash_commands.is_empty() || !removed_slash_command_ranges.is_empty() {
1311 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1312 removed: removed_slash_command_ranges,
1313 updated: updated_slash_commands,
1314 });
1315 }
1316
1317 if !updated_steps.is_empty() || !removed_steps.is_empty() {
1318 cx.emit(ContextEvent::WorkflowStepsUpdated {
1319 removed: removed_steps,
1320 updated: updated_steps,
1321 });
1322 }
1323 }
1324
1325 fn reparse_slash_commands_in_range(
1326 &mut self,
1327 range: Range<text::Anchor>,
1328 buffer: &BufferSnapshot,
1329 updated: &mut Vec<PendingSlashCommand>,
1330 removed: &mut Vec<Range<text::Anchor>>,
1331 cx: &AppContext,
1332 ) {
1333 let old_range = self.pending_command_indices_for_range(range.clone(), cx);
1334
1335 let mut new_commands = Vec::new();
1336 let mut lines = buffer.text_for_range(range).lines();
1337 let mut offset = lines.offset();
1338 while let Some(line) = lines.next() {
1339 if let Some(command_line) = SlashCommandLine::parse(line) {
1340 let name = &line[command_line.name.clone()];
1341 let arguments = command_line
1342 .arguments
1343 .iter()
1344 .filter_map(|argument_range| {
1345 if argument_range.is_empty() {
1346 None
1347 } else {
1348 line.get(argument_range.clone())
1349 }
1350 })
1351 .map(ToOwned::to_owned)
1352 .collect::<SmallVec<_>>();
1353 if let Some(command) = SlashCommandRegistry::global(cx).command(name) {
1354 if !command.requires_argument() || !arguments.is_empty() {
1355 let start_ix = offset + command_line.name.start - 1;
1356 let end_ix = offset
1357 + command_line
1358 .arguments
1359 .last()
1360 .map_or(command_line.name.end, |argument| argument.end);
1361 let source_range =
1362 buffer.anchor_after(start_ix)..buffer.anchor_after(end_ix);
1363 let pending_command = PendingSlashCommand {
1364 name: name.to_string(),
1365 arguments,
1366 source_range,
1367 status: PendingSlashCommandStatus::Idle,
1368 };
1369 updated.push(pending_command.clone());
1370 new_commands.push(pending_command);
1371 }
1372 }
1373 }
1374
1375 offset = lines.offset();
1376 }
1377
1378 let removed_commands = self.pending_slash_commands.splice(old_range, new_commands);
1379 removed.extend(removed_commands.map(|command| command.source_range));
1380 }
1381
1382 fn reparse_workflow_steps_in_range(
1383 &mut self,
1384 range: Range<text::Anchor>,
1385 buffer: &BufferSnapshot,
1386 updated: &mut Vec<Range<text::Anchor>>,
1387 removed: &mut Vec<Range<text::Anchor>>,
1388 cx: &mut ModelContext<Self>,
1389 ) {
1390 // Rebuild the XML tags in the edited range.
1391 let intersecting_tags_range =
1392 self.indices_intersecting_buffer_range(&self.xml_tags, range.clone(), cx);
1393 let new_tags = self.parse_xml_tags_in_range(buffer, range.clone(), cx);
1394 self.xml_tags
1395 .splice(intersecting_tags_range.clone(), new_tags);
1396
1397 // Find which steps intersect the changed range.
1398 let intersecting_steps_range =
1399 self.indices_intersecting_buffer_range(&self.workflow_steps, range.clone(), cx);
1400
1401 // Reparse all tags after the last unchanged step before the change.
1402 let mut tags_start_ix = 0;
1403 if let Some(preceding_unchanged_step) =
1404 self.workflow_steps[..intersecting_steps_range.start].last()
1405 {
1406 tags_start_ix = match self.xml_tags.binary_search_by(|tag| {
1407 tag.range
1408 .start
1409 .cmp(&preceding_unchanged_step.range.end, buffer)
1410 .then(Ordering::Less)
1411 }) {
1412 Ok(ix) | Err(ix) => ix,
1413 };
1414 }
1415
1416 // Rebuild the edit suggestions in the range.
1417 let mut new_steps = self.parse_steps(tags_start_ix, range.end, buffer);
1418
1419 if let Some(project) = self.project() {
1420 for step in &mut new_steps {
1421 Self::resolve_workflow_step_internal(step, &project, cx);
1422 }
1423 }
1424
1425 updated.extend(new_steps.iter().map(|step| step.range.clone()));
1426 let removed_steps = self
1427 .workflow_steps
1428 .splice(intersecting_steps_range, new_steps);
1429 removed.extend(
1430 removed_steps
1431 .map(|step| step.range)
1432 .filter(|range| !updated.contains(&range)),
1433 );
1434 }
1435
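/// Scans assistant messages within `range` for workflow XML tags (such as `<step>`
/// and `<edit>`) and returns them in buffer order.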
1436 fn parse_xml_tags_in_range(
1437 &self,
1438 buffer: &BufferSnapshot,
1439 range: Range<text::Anchor>,
1440 cx: &AppContext,
1441 ) -> Vec<XmlTag> {
1442 let mut messages = self.messages(cx).peekable();
1443
1444 let mut tags = Vec::new();
1445 let mut lines = buffer.text_for_range(range).lines();
1446 let mut offset = lines.offset();
1447
1448 while let Some(line) = lines.next() {
1449 while let Some(message) = messages.peek() {
1450 if offset < message.offset_range.end {
1451 break;
1452 } else {
1453 messages.next();
1454 }
1455 }
1456
1457 let is_assistant_message = messages
1458 .peek()
1459 .map_or(false, |message| message.role == Role::Assistant);
1460 if is_assistant_message {
1461 for (start_ix, _) in line.match_indices('<') {
1462 let mut name_start_ix = start_ix + 1;
1463 let closing_bracket_ix = line[start_ix..].find('>').map(|i| start_ix + i);
1464 if let Some(closing_bracket_ix) = closing_bracket_ix {
1465 let end_ix = closing_bracket_ix + 1;
1466 let mut is_open_tag = true;
1467 if line[name_start_ix..closing_bracket_ix].starts_with('/') {
1468 name_start_ix += 1;
1469 is_open_tag = false;
1470 }
1471 let tag_inner = &line[name_start_ix..closing_bracket_ix];
1472 let tag_name_len = tag_inner
1473 .find(|c: char| c.is_whitespace())
1474 .unwrap_or(tag_inner.len());
1475 if let Ok(kind) = XmlTagKind::from_str(&tag_inner[..tag_name_len]) {
1476 tags.push(XmlTag {
1477 range: buffer.anchor_after(offset + start_ix)
1478 ..buffer.anchor_before(offset + end_ix),
1479 is_open_tag,
1480 kind,
1481 });
1482 };
1483 }
1484 }
1485 }
1486
1487 offset = lines.offset();
1488 }
1489 tags
1490 }
1491
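/// Builds workflow steps from the parsed XML tags starting at `tags_start_ix`,
/// collecting the edits described by nested `<edit>` tags. A step that is still
/// streaming (i.e. has no closing tag yet) is extended to the end of the buffer.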
1492 fn parse_steps(
1493 &mut self,
1494 tags_start_ix: usize,
1495 buffer_end: text::Anchor,
1496 buffer: &BufferSnapshot,
1497 ) -> Vec<WorkflowStep> {
1498 let mut new_steps = Vec::new();
1499 let mut pending_step = None;
1500 let mut edit_step_depth = 0;
1501 let mut tags = self.xml_tags[tags_start_ix..].iter().peekable();
1502 'tags: while let Some(tag) = tags.next() {
1503 if tag.range.start.cmp(&buffer_end, buffer).is_gt() && edit_step_depth == 0 {
1504 break;
1505 }
1506
1507 if tag.kind == XmlTagKind::Step && tag.is_open_tag {
1508 edit_step_depth += 1;
1509 let edit_start = tag.range.start;
1510 let mut edits = Vec::new();
1511 let mut step = WorkflowStep {
1512 range: edit_start..edit_start,
1513 leading_tags_end: tag.range.end,
1514 trailing_tag_start: None,
1515 edits: Default::default(),
1516 resolution: None,
1517 resolution_task: None,
1518 };
1519
1520 while let Some(tag) = tags.next() {
1521 step.trailing_tag_start.get_or_insert(tag.range.start);
1522
1523 if tag.kind == XmlTagKind::Step && !tag.is_open_tag {
1524 // step.trailing_tag_start = Some(tag.range.start);
1525 edit_step_depth -= 1;
1526 if edit_step_depth == 0 {
1527 step.range.end = tag.range.end;
1528 step.edits = edits.into();
1529 new_steps.push(step);
1530 continue 'tags;
1531 }
1532 }
1533
1534 if tag.kind == XmlTagKind::Edit && tag.is_open_tag {
1535 let mut path = None;
1536 let mut search = None;
1537 let mut operation = None;
1538 let mut description = None;
1539
1540 while let Some(tag) = tags.next() {
1541 if tag.kind == XmlTagKind::Edit && !tag.is_open_tag {
1542 edits.push(WorkflowStepEdit::new(
1543 path,
1544 operation,
1545 search,
1546 description,
1547 ));
1548 break;
1549 }
1550
1551 if tag.is_open_tag
1552 && [
1553 XmlTagKind::Path,
1554 XmlTagKind::Search,
1555 XmlTagKind::Operation,
1556 XmlTagKind::Description,
1557 ]
1558 .contains(&tag.kind)
1559 {
1560 let kind = tag.kind;
1561 let content_start = tag.range.end;
1562 if let Some(tag) = tags.peek() {
1563 if tag.kind == kind && !tag.is_open_tag {
1564 let tag = tags.next().unwrap();
1565 let content_end = tag.range.start;
1566 let mut content = buffer
1567 .text_for_range(content_start..content_end)
1568 .collect::<String>();
1569 content.truncate(content.trim_end().len());
1570 match kind {
1571 XmlTagKind::Path => path = Some(content),
1572 XmlTagKind::Operation => operation = Some(content),
1573 XmlTagKind::Search => {
1574 search = Some(content).filter(|s| !s.is_empty())
1575 }
1576 XmlTagKind::Description => {
1577 description =
1578 Some(content).filter(|s| !s.is_empty())
1579 }
1580 _ => {}
1581 }
1582 }
1583 }
1584 }
1585 }
1586 }
1587 }
1588
1589 pending_step = Some(step);
1590 }
1591 }
1592
1593 if let Some(mut pending_step) = pending_step {
1594 pending_step.range.end = text::Anchor::MAX;
1595 new_steps.push(pending_step);
1596 }
1597
1598 new_steps
1599 }
1600
1601 pub fn resolve_workflow_step(
1602 &mut self,
1603 tagged_range: Range<text::Anchor>,
1604 cx: &mut ModelContext<Self>,
1605 ) -> Option<()> {
1606 let index = self
1607 .workflow_step_index_for_range(&tagged_range, self.buffer.read(cx))
1608 .ok()?;
1609 let step = &mut self.workflow_steps[index];
1610 let project = self.project.as_ref()?;
1611 step.resolution.take();
1612 Self::resolve_workflow_step_internal(step, project, cx);
1613 None
1614 }
1615
1616 fn resolve_workflow_step_internal(
1617 step: &mut WorkflowStep,
1618 project: &Model<Project>,
1619 cx: &mut ModelContext<'_, Context>,
1620 ) {
1621 step.resolution_task = Some(cx.spawn({
1622 let range = step.range.clone();
1623 let edits = step.edits.clone();
1624 let project = project.clone();
1625 |this, mut cx| async move {
1626 let suggestion_groups =
1627 Self::compute_step_resolution(project, edits, &mut cx).await;
1628
1629 this.update(&mut cx, |this, cx| {
1630 let buffer = this.buffer.read(cx).text_snapshot();
1631 let ix = this.workflow_step_index_for_range(&range, &buffer).ok();
1632 if let Some(ix) = ix {
1633 let step = &mut this.workflow_steps[ix];
1634
1635 let resolution = suggestion_groups.map(|suggestion_groups| {
1636 let mut title = String::new();
1637 for mut chunk in buffer.text_for_range(
1638 step.leading_tags_end
1639 ..step.trailing_tag_start.unwrap_or(step.range.end),
1640 ) {
1641 if title.is_empty() {
1642 chunk = chunk.trim_start();
1643 }
1644 if let Some((prefix, _)) = chunk.split_once('\n') {
1645 title.push_str(prefix);
1646 break;
1647 } else {
1648 title.push_str(chunk);
1649 }
1650 }
1651
1652 WorkflowStepResolution {
1653 title,
1654 suggestion_groups,
1655 }
1656 });
1657
1658 step.resolution = Some(Arc::new(resolution));
1659 cx.emit(ContextEvent::WorkflowStepsUpdated {
1660 removed: vec![],
1661 updated: vec![range],
1662 })
1663 }
1664 })
1665 .ok();
1666 }
1667 }));
1668 }
1669
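/// Resolves a step's edits against the project, then groups the resulting
/// suggestions per buffer, merging suggestions whose surrounding context ranges
/// overlap.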
1670 async fn compute_step_resolution(
1671 project: Model<Project>,
1672 edits: Arc<[Result<WorkflowStepEdit>]>,
1673 cx: &mut AsyncAppContext,
1674 ) -> Result<HashMap<Model<Buffer>, Vec<WorkflowSuggestionGroup>>> {
1675 let mut suggestion_tasks = Vec::new();
1676 for edit in edits.iter() {
1677 let edit = edit.as_ref().map_err(|e| anyhow!("{e}"))?;
1678 suggestion_tasks.push(edit.resolve(project.clone(), cx.clone()));
1679 }
1680
1681 // Expand the context ranges of each suggestion and group suggestions with overlapping context ranges.
1682 let suggestions = future::try_join_all(suggestion_tasks).await?;
1683
1684 let mut suggestions_by_buffer = HashMap::default();
1685 for (buffer, suggestion) in suggestions {
1686 suggestions_by_buffer
1687 .entry(buffer)
1688 .or_insert_with(Vec::new)
1689 .push(suggestion);
1690 }
1691
1692 let mut suggestion_groups_by_buffer = HashMap::default();
1693 for (buffer, mut suggestions) in suggestions_by_buffer {
1694 let mut suggestion_groups = Vec::<WorkflowSuggestionGroup>::new();
1695 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
1696 // Sort suggestions by their range so that earlier, larger ranges come first
1697 suggestions.sort_by(|a, b| a.range().cmp(&b.range(), &snapshot));
1698
1699 // Merge overlapping suggestions
1700 suggestions.dedup_by(|a, b| b.try_merge(a, &snapshot));
1701
1702 // Create context ranges for each suggestion
1703 for suggestion in suggestions {
1704 let context_range = {
1705 let suggestion_point_range = suggestion.range().to_point(&snapshot);
1706 let start_row = suggestion_point_range.start.row.saturating_sub(5);
1707 let end_row =
1708 cmp::min(suggestion_point_range.end.row + 5, snapshot.max_point().row);
1709 let start = snapshot.anchor_before(Point::new(start_row, 0));
1710 let end =
1711 snapshot.anchor_after(Point::new(end_row, snapshot.line_len(end_row)));
1712 start..end
1713 };
1714
1715 if let Some(last_group) = suggestion_groups.last_mut() {
1716 if last_group
1717 .context_range
1718 .end
1719 .cmp(&context_range.start, &snapshot)
1720 .is_ge()
1721 {
1722 // Merge with the previous group if context ranges overlap
1723 last_group.context_range.end = context_range.end;
1724 last_group.suggestions.push(suggestion);
1725 } else {
1726 // Create a new group
1727 suggestion_groups.push(WorkflowSuggestionGroup {
1728 context_range,
1729 suggestions: vec![suggestion],
1730 });
1731 }
1732 } else {
1733 // Create the first group
1734 suggestion_groups.push(WorkflowSuggestionGroup {
1735 context_range,
1736 suggestions: vec![suggestion],
1737 });
1738 }
1739 }
1740
1741 suggestion_groups_by_buffer.insert(buffer, suggestion_groups);
1742 }
1743
1744 Ok(suggestion_groups_by_buffer)
1745 }
1746
1747 pub fn pending_command_for_position(
1748 &mut self,
1749 position: language::Anchor,
1750 cx: &mut ModelContext<Self>,
1751 ) -> Option<&mut PendingSlashCommand> {
1752 let buffer = self.buffer.read(cx);
1753 match self
1754 .pending_slash_commands
1755 .binary_search_by(|probe| probe.source_range.end.cmp(&position, buffer))
1756 {
1757 Ok(ix) => Some(&mut self.pending_slash_commands[ix]),
1758 Err(ix) => {
1759 let cmd = self.pending_slash_commands.get_mut(ix)?;
1760 if position.cmp(&cmd.source_range.start, buffer).is_ge()
1761 && position.cmp(&cmd.source_range.end, buffer).is_le()
1762 {
1763 Some(cmd)
1764 } else {
1765 None
1766 }
1767 }
1768 }
1769 }
1770
1771 pub fn pending_commands_for_range(
1772 &self,
1773 range: Range<language::Anchor>,
1774 cx: &AppContext,
1775 ) -> &[PendingSlashCommand] {
1776 let range = self.pending_command_indices_for_range(range, cx);
1777 &self.pending_slash_commands[range]
1778 }
1779
1780 fn pending_command_indices_for_range(
1781 &self,
1782 range: Range<language::Anchor>,
1783 cx: &AppContext,
1784 ) -> Range<usize> {
1785 self.indices_intersecting_buffer_range(&self.pending_slash_commands, range, cx)
1786 }
1787
1788 fn indices_intersecting_buffer_range<T: ContextAnnotation>(
1789 &self,
1790 all_annotations: &[T],
1791 range: Range<language::Anchor>,
1792 cx: &AppContext,
1793 ) -> Range<usize> {
1794 let buffer = self.buffer.read(cx);
1795 let start_ix = match all_annotations
1796 .binary_search_by(|probe| probe.range().end.cmp(&range.start, &buffer))
1797 {
1798 Ok(ix) | Err(ix) => ix,
1799 };
1800 let end_ix = match all_annotations
1801 .binary_search_by(|probe| probe.range().start.cmp(&range.end, &buffer))
1802 {
1803 Ok(ix) => ix + 1,
1804 Err(ix) => ix,
1805 };
1806 start_ix..end_ix
1807 }
1808
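/// Replaces the slash command invocation at `command_range` with the command's
/// output once it resolves, recording the finished command as an operation and
/// emitting the corresponding event. On failure, the pending command's status is
/// set to an error instead.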
1809 pub fn insert_command_output(
1810 &mut self,
1811 command_range: Range<language::Anchor>,
1812 output: Task<Result<SlashCommandOutput>>,
1813 ensure_trailing_newline: bool,
1814 expand_result: bool,
1815 cx: &mut ModelContext<Self>,
1816 ) {
1817 self.reparse(cx);
1818
1819 let insert_output_task = cx.spawn(|this, mut cx| {
1820 let command_range = command_range.clone();
1821 async move {
1822 let output = output.await;
1823 this.update(&mut cx, |this, cx| match output {
1824 Ok(mut output) => {
1825 // Ensure section ranges are valid.
1826 for section in &mut output.sections {
1827 section.range.start = section.range.start.min(output.text.len());
1828 section.range.end = section.range.end.min(output.text.len());
1829 while !output.text.is_char_boundary(section.range.start) {
1830 section.range.start -= 1;
1831 }
1832 while !output.text.is_char_boundary(section.range.end) {
1833 section.range.end += 1;
1834 }
1835 }
1836
1837 // Ensure there is a newline after the last section.
1838 if ensure_trailing_newline {
1839 let has_newline_after_last_section =
1840 output.sections.last().map_or(false, |last_section| {
1841 output.text[last_section.range.end..].ends_with('\n')
1842 });
1843 if !has_newline_after_last_section {
1844 output.text.push('\n');
1845 }
1846 }
1847
1848 let version = this.version.clone();
1849 let command_id = SlashCommandId(this.next_timestamp());
1850 let (operation, event) = this.buffer.update(cx, |buffer, cx| {
1851 let start = command_range.start.to_offset(buffer);
1852 let old_end = command_range.end.to_offset(buffer);
1853 let new_end = start + output.text.len();
1854 buffer.edit([(start..old_end, output.text)], None, cx);
1855
1856 let mut sections = output
1857 .sections
1858 .into_iter()
1859 .map(|section| SlashCommandOutputSection {
1860 range: buffer.anchor_after(start + section.range.start)
1861 ..buffer.anchor_before(start + section.range.end),
1862 icon: section.icon,
1863 label: section.label,
1864 })
1865 .collect::<Vec<_>>();
1866 sections.sort_by(|a, b| a.range.cmp(&b.range, buffer));
1867
1868 this.slash_command_output_sections
1869 .extend(sections.iter().cloned());
1870 this.slash_command_output_sections
1871 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
1872
1873 let output_range =
1874 buffer.anchor_after(start)..buffer.anchor_before(new_end);
1875 this.finished_slash_commands.insert(command_id);
1876
1877 (
1878 ContextOperation::SlashCommandFinished {
1879 id: command_id,
1880 output_range: output_range.clone(),
1881 sections: sections.clone(),
1882 version,
1883 },
1884 ContextEvent::SlashCommandFinished {
1885 output_range,
1886 sections,
1887 run_commands_in_output: output.run_commands_in_text,
1888 expand_result,
1889 },
1890 )
1891 });
1892
1893 this.push_op(operation, cx);
1894 cx.emit(event);
1895 }
1896 Err(error) => {
1897 if let Some(pending_command) =
1898 this.pending_command_for_position(command_range.start, cx)
1899 {
1900 pending_command.status =
1901 PendingSlashCommandStatus::Error(error.to_string());
1902 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1903 removed: vec![pending_command.source_range.clone()],
1904 updated: vec![pending_command.clone()],
1905 });
1906 }
1907 }
1908 })
1909 .ok();
1910 }
1911 });
1912
1913 if let Some(pending_command) = self.pending_command_for_position(command_range.start, cx) {
1914 pending_command.status = PendingSlashCommandStatus::Running {
1915 _task: insert_output_task.shared(),
1916 };
1917 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1918 removed: vec![pending_command.source_range.clone()],
1919 updated: vec![pending_command.clone()],
1920 });
1921 }
1922 }
1923
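/// Appends the output of a tool invocation to the end of the buffer when the task
/// resolves, or marks the pending tool use as errored on failure.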
1924 pub fn insert_tool_output(
1925 &mut self,
1926 tool_id: Arc<str>,
1927 output: Task<Result<String>>,
1928 cx: &mut ModelContext<Self>,
1929 ) {
1930 let insert_output_task = cx.spawn(|this, mut cx| {
1931 let tool_id = tool_id.clone();
1932 async move {
1933 let output = output.await;
1934 this.update(&mut cx, |this, cx| match output {
1935 Ok(mut output) => {
1936 if !output.ends_with('\n') {
1937 output.push('\n');
1938 }
1939
1940 this.buffer.update(cx, |buffer, cx| {
1941 let buffer_end = buffer.len().to_offset(buffer);
1942
1943 buffer.edit([(buffer_end..buffer_end, output)], None, cx);
1944 });
1945 }
1946 Err(err) => {
1947 if let Some(tool_use) = this.pending_tool_uses_by_id.get_mut(&tool_id) {
1948 tool_use.status = PendingToolUseStatus::Error(err.to_string());
1949 }
1950 }
1951 })
1952 .ok();
1953 }
1954 });
1955
1956 if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_id) {
1957 tool_use.status = PendingToolUseStatus::Running {
1958 _task: insert_output_task.shared(),
1959 };
1960 }
1961 }
1962
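    /// Recomputes the remaining token count whenever the active completion provider
    /// or model changes.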
1963 pub fn completion_provider_changed(&mut self, cx: &mut ModelContext<Self>) {
1964 self.count_remaining_tokens(cx);
1965 }
1966
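    /// Returns the id of the last message whose start anchor is still valid in the buffer.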
1967 fn get_last_valid_message_id(&self, cx: &ModelContext<Self>) -> Option<MessageId> {
1968 self.message_anchors.iter().rev().find_map(|message| {
1969 message
1970 .start
1971 .is_valid(self.buffer.read(cx))
1972 .then_some(message.id)
1973 })
1974 }
1975
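    /// Requests a streamed assistant completion for the current conversation.
    ///
    /// Marks cache anchors, appends a pending assistant message followed by an empty
    /// user message for the next reply, and streams the model's response (text and
    /// tool uses) into the buffer. Returns `None` if no provider or model is active,
    /// the provider is not authenticated, or there is no valid last message; otherwise
    /// returns the anchor of the queued user message.
    ///
    /// A sketch of a call site (hypothetical handles, not taken from this file):
    ///
    /// ```ignore
    /// context.update(cx, |context, cx| {
    ///     if let Some(next_user_message) = context.assist(cx) {
    ///         // The assistant's reply is now streaming into the context buffer.
    ///     }
    /// });
    /// ```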
1976 pub fn assist(&mut self, cx: &mut ModelContext<Self>) -> Option<MessageAnchor> {
1977 let provider = LanguageModelRegistry::read_global(cx).active_provider()?;
1978 let model = LanguageModelRegistry::read_global(cx).active_model()?;
1979 let last_message_id = self.get_last_valid_message_id(cx)?;
1980
1981 if !provider.is_authenticated(cx) {
1982 log::info!("completion provider has no credentials");
1983 return None;
1984 }
1985 // Compute which messages to cache, including the last one.
1986 self.mark_cache_anchors(&model.cache_configuration(), false, cx);
1987
1988 let mut request = self.to_completion_request(cx);
1989
1990 if cx.has_flag::<ToolUseFeatureFlag>() {
1991 let tool_registry = ToolRegistry::global(cx);
1992 request.tools = tool_registry
1993 .tools()
1994 .into_iter()
1995 .map(|tool| LanguageModelRequestTool {
1996 name: tool.name(),
1997 description: tool.description(),
1998 input_schema: tool.input_schema(),
1999 })
2000 .collect();
2001 }
2002
2003 let assistant_message = self
2004 .insert_message_after(last_message_id, Role::Assistant, MessageStatus::Pending, cx)
2005 .unwrap();
2006
2007 // Queue up the user's next reply.
2008 let user_message = self
2009 .insert_message_after(assistant_message.id, Role::User, MessageStatus::Done, cx)
2010 .unwrap();
2011
2012 let pending_completion_id = post_inc(&mut self.completion_count);
2013
2014 let task = cx.spawn({
2015 |this, mut cx| async move {
2016 let stream = model.stream_completion(request, &cx);
2017 let assistant_message_id = assistant_message.id;
2018 let mut response_latency = None;
2019 let stream_completion = async {
2020 let request_start = Instant::now();
2021 let mut events = stream.await?;
2022
2023 while let Some(event) = events.next().await {
2024 if response_latency.is_none() {
2025 response_latency = Some(request_start.elapsed());
2026 }
2027 let event = event?;
2028
2029 this.update(&mut cx, |this, cx| {
2030 let message_ix = this
2031 .message_anchors
2032 .iter()
2033 .position(|message| message.id == assistant_message_id)?;
2034 let event_to_emit = this.buffer.update(cx, |buffer, cx| {
2035 let message_old_end_offset = this.message_anchors[message_ix + 1..]
2036 .iter()
2037 .find(|message| message.start.is_valid(buffer))
2038 .map_or(buffer.len(), |message| {
2039 message.start.to_offset(buffer).saturating_sub(1)
2040 });
2041
2042 match event {
2043 LanguageModelCompletionEvent::Stop(reason) => match reason {
2044 StopReason::ToolUse => {
2045 return Some(ContextEvent::UsePendingTools);
2046 }
2047 StopReason::EndTurn => {}
2048 StopReason::MaxTokens => {}
2049 },
2050 LanguageModelCompletionEvent::Text(chunk) => {
2051 buffer.edit(
2052 [(
2053 message_old_end_offset..message_old_end_offset,
2054 chunk,
2055 )],
2056 None,
2057 cx,
2058 );
2059 }
2060 LanguageModelCompletionEvent::ToolUse(tool_use) => {
2061 const NEWLINE: char = '\n';
2062
2063 let mut text = String::new();
2064 text.push(NEWLINE);
2065 text.push_str(
2066 &serde_json::to_string_pretty(&tool_use)
2067 .expect("failed to serialize tool use to JSON"),
2068 );
2069 text.push(NEWLINE);
2070 let text_len = text.len();
2071
2072 buffer.edit(
2073 [(
2074 message_old_end_offset..message_old_end_offset,
2075 text,
2076 )],
2077 None,
2078 cx,
2079 );
2080
2081 let start_ix = message_old_end_offset + NEWLINE.len_utf8();
2082 let end_ix =
2083 message_old_end_offset + text_len - NEWLINE.len_utf8();
2084 let source_range = buffer.anchor_after(start_ix)
2085 ..buffer.anchor_after(end_ix);
2086
2087 let tool_use_id: Arc<str> = tool_use.id.into();
2088 this.pending_tool_uses_by_id.insert(
2089 tool_use_id.clone(),
2090 PendingToolUse {
2091 id: tool_use_id,
2092 name: tool_use.name,
2093 input: tool_use.input,
2094 status: PendingToolUseStatus::Idle,
2095 source_range,
2096 },
2097 );
2098 }
2099 }
2100
2101 None
2102 });
2103
2104 cx.emit(ContextEvent::StreamedCompletion);
2105 if let Some(event) = event_to_emit {
2106 cx.emit(event);
2107 }
2108
2109 Some(())
2110 })?;
2111 smol::future::yield_now().await;
2112 }
2113 this.update(&mut cx, |this, cx| {
2114 this.pending_completions
2115 .retain(|completion| completion.id != pending_completion_id);
2116 this.summarize(false, cx);
2117 this.update_cache_status_for_completion(cx);
2118 })?;
2119
2120 anyhow::Ok(())
2121 };
2122
2123 let result = stream_completion.await;
2124
2125 this.update(&mut cx, |this, cx| {
2126 let error_message = result
2127 .err()
2128 .map(|error| error.to_string().trim().to_string());
2129
2130 if let Some(error_message) = error_message.as_ref() {
2131 cx.emit(ContextEvent::ShowAssistError(SharedString::from(
2132 error_message.clone(),
2133 )));
2134 }
2135
2136 this.update_metadata(assistant_message_id, cx, |metadata| {
2137 if let Some(error_message) = error_message.as_ref() {
2138 metadata.status =
2139 MessageStatus::Error(SharedString::from(error_message.clone()));
2140 } else {
2141 metadata.status = MessageStatus::Done;
2142 }
2143 });
2144
2145 if let Some(telemetry) = this.telemetry.as_ref() {
2146 telemetry.report_assistant_event(
2147 Some(this.id.0.clone()),
2148 AssistantKind::Panel,
2149 model.telemetry_id(),
2150 response_latency,
2151 error_message,
2152 );
2153 }
2154 })
2155 .ok();
2156 }
2157 });
2158
2159 self.pending_completions.push(PendingCompletion {
2160 id: pending_completion_id,
2161 assistant_message_id: assistant_message.id,
2162 _task: task,
2163 });
2164
2165 Some(user_message)
2166 }
2167
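    /// Builds a `LanguageModelRequest` from every message whose status is `Done`,
    /// with no tools or stop sequences and a temperature of 1.0.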
2168 pub fn to_completion_request(&self, cx: &AppContext) -> LanguageModelRequest {
2169 let buffer = self.buffer.read(cx);
2170 let request_messages = self
2171 .messages(cx)
2172 .filter(|message| message.status == MessageStatus::Done)
2173 .filter_map(|message| message.to_request_message(&buffer))
2174 .collect();
2175
2176 LanguageModelRequest {
2177 messages: request_messages,
2178 tools: Vec::new(),
2179 stop: Vec::new(),
2180 temperature: 1.0,
2181 }
2182 }
2183
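    /// Cancels the most recent pending completion, marking its assistant message as
    /// canceled if it was still pending. Returns `false` if nothing was in flight.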
2184 pub fn cancel_last_assist(&mut self, cx: &mut ModelContext<Self>) -> bool {
2185 if let Some(pending_completion) = self.pending_completions.pop() {
2186 self.update_metadata(pending_completion.assistant_message_id, cx, |metadata| {
2187 if metadata.status == MessageStatus::Pending {
2188 metadata.status = MessageStatus::Canceled;
2189 }
2190 });
2191 true
2192 } else {
2193 false
2194 }
2195 }
2196
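    /// Cycles the role of each of the given messages and reparses workflow steps in
    /// the affected ranges.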
2197 pub fn cycle_message_roles(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
2198 for id in &ids {
2199 if let Some(metadata) = self.messages_metadata.get(id) {
2200 let role = metadata.role.cycle();
2201 self.update_metadata(*id, cx, |metadata| metadata.role = role);
2202 }
2203 }
2204
2205 self.message_roles_updated(ids, cx);
2206 }
2207
2208 fn message_roles_updated(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
2209 let mut ranges = Vec::new();
2210 for message in self.messages(cx) {
2211 if ids.contains(&message.id) {
2212 ranges.push(message.anchor_range.clone());
2213 }
2214 }
2215
2216 let buffer = self.buffer.read(cx).text_snapshot();
2217 let mut updated = Vec::new();
2218 let mut removed = Vec::new();
2219 for range in ranges {
2220 self.reparse_workflow_steps_in_range(range, &buffer, &mut updated, &mut removed, cx);
2221 }
2222
2223 if !updated.is_empty() || !removed.is_empty() {
2224 cx.emit(ContextEvent::WorkflowStepsUpdated { removed, updated })
2225 }
2226 }
2227
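    /// Applies `f` to the metadata of the given message, then records and broadcasts
    /// the change as an `UpdateMessage` operation.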
2228 pub fn update_metadata(
2229 &mut self,
2230 id: MessageId,
2231 cx: &mut ModelContext<Self>,
2232 f: impl FnOnce(&mut MessageMetadata),
2233 ) {
2234 let version = self.version.clone();
2235 let timestamp = self.next_timestamp();
2236 if let Some(metadata) = self.messages_metadata.get_mut(&id) {
2237 f(metadata);
2238 metadata.timestamp = timestamp;
2239 let operation = ContextOperation::UpdateMessage {
2240 message_id: id,
2241 metadata: metadata.clone(),
2242 version,
2243 };
2244 self.push_op(operation, cx);
2245 cx.emit(ContextEvent::MessagesEdited);
2246 cx.notify();
2247 }
2248 }
2249
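    /// Inserts a new message with the given role and status immediately after
    /// `message_id`, returning its anchor, or `None` if `message_id` is unknown.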
2250 pub fn insert_message_after(
2251 &mut self,
2252 message_id: MessageId,
2253 role: Role,
2254 status: MessageStatus,
2255 cx: &mut ModelContext<Self>,
2256 ) -> Option<MessageAnchor> {
2257 if let Some(prev_message_ix) = self
2258 .message_anchors
2259 .iter()
2260 .position(|message| message.id == message_id)
2261 {
2262 // Find the next valid message after the one we were given.
2263 let mut next_message_ix = prev_message_ix + 1;
2264 while let Some(next_message) = self.message_anchors.get(next_message_ix) {
2265 if next_message.start.is_valid(self.buffer.read(cx)) {
2266 break;
2267 }
2268 next_message_ix += 1;
2269 }
2270
2271 let start = self.buffer.update(cx, |buffer, cx| {
2272 let offset = self
2273 .message_anchors
2274 .get(next_message_ix)
2275 .map_or(buffer.len(), |message| {
2276 buffer.clip_offset(message.start.to_offset(buffer) - 1, Bias::Left)
2277 });
2278 buffer.edit([(offset..offset, "\n")], None, cx);
2279 buffer.anchor_before(offset + 1)
2280 });
2281
2282 let version = self.version.clone();
2283 let anchor = MessageAnchor {
2284 id: MessageId(self.next_timestamp()),
2285 start,
2286 };
2287 let metadata = MessageMetadata {
2288 role,
2289 status,
2290 timestamp: anchor.id.0,
2291 cache: None,
2292 };
2293 self.insert_message(anchor.clone(), metadata.clone(), cx);
2294 self.push_op(
2295 ContextOperation::InsertMessage {
2296 anchor: anchor.clone(),
2297 metadata,
2298 version,
2299 },
2300 cx,
2301 );
2302 Some(anchor)
2303 } else {
2304 None
2305 }
2306 }
2307
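    /// Caches the image's render data and its language-model encoding (as a shared
    /// future), keyed by image id, so it can later be anchored into the context.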
2308 pub fn insert_image(&mut self, image: Image, cx: &mut ModelContext<Self>) -> Option<()> {
2309 if let hash_map::Entry::Vacant(entry) = self.images.entry(image.id()) {
2310 entry.insert((
2311 image.to_image_data(cx).log_err()?,
2312 LanguageModelImage::from_image(image, cx).shared(),
2313 ));
2314 }
2315
2316 Some(())
2317 }
2318
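    /// Anchors a previously inserted image at the given buffer position, keeping the
    /// image anchors sorted. Returns `false` if the image id is unknown.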
2319 pub fn insert_image_anchor(
2320 &mut self,
2321 image_id: u64,
2322 anchor: language::Anchor,
2323 cx: &mut ModelContext<Self>,
2324 ) -> bool {
2325 cx.emit(ContextEvent::MessagesEdited);
2326
2327 let buffer = self.buffer.read(cx);
2328 let insertion_ix = match self
2329 .image_anchors
2330 .binary_search_by(|existing_anchor| anchor.cmp(&existing_anchor.anchor, buffer))
2331 {
2332 Ok(ix) => ix,
2333 Err(ix) => ix,
2334 };
2335
2336 if let Some((render_image, image)) = self.images.get(&image_id) {
2337 self.image_anchors.insert(
2338 insertion_ix,
2339 ImageAnchor {
2340 anchor,
2341 image_id,
2342 image: image.clone(),
2343 render_image: render_image.clone(),
2344 },
2345 );
2346
2347 true
2348 } else {
2349 false
2350 }
2351 }
2352
2353 pub fn images<'a>(&'a self, _cx: &'a AppContext) -> impl 'a + Iterator<Item = ImageAnchor> {
2354 self.image_anchors.iter().cloned()
2355 }
2356
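    /// Splits the message containing `range` at the range boundaries, returning the
    /// newly created message anchors, or `(None, None)` if the range spans multiple
    /// messages.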
2357 pub fn split_message(
2358 &mut self,
2359 range: Range<usize>,
2360 cx: &mut ModelContext<Self>,
2361 ) -> (Option<MessageAnchor>, Option<MessageAnchor>) {
2362 let start_message = self.message_for_offset(range.start, cx);
2363 let end_message = self.message_for_offset(range.end, cx);
2364 if let Some((start_message, end_message)) = start_message.zip(end_message) {
2365 // Prevent splitting when range spans multiple messages.
2366 if start_message.id != end_message.id {
2367 return (None, None);
2368 }
2369
2370 let message = start_message;
2371 let role = message.role;
2372 let mut edited_buffer = false;
2373
2374 let mut suffix_start = None;
2375
2376 // TODO: why did this start panicking?
2377 if range.start > message.offset_range.start
2378 && range.end < message.offset_range.end.saturating_sub(1)
2379 {
2380 if self.buffer.read(cx).chars_at(range.end).next() == Some('\n') {
2381 suffix_start = Some(range.end + 1);
2382 } else if self.buffer.read(cx).reversed_chars_at(range.end).next() == Some('\n') {
2383 suffix_start = Some(range.end);
2384 }
2385 }
2386
2387 let version = self.version.clone();
2388 let suffix = if let Some(suffix_start) = suffix_start {
2389 MessageAnchor {
2390 id: MessageId(self.next_timestamp()),
2391 start: self.buffer.read(cx).anchor_before(suffix_start),
2392 }
2393 } else {
2394 self.buffer.update(cx, |buffer, cx| {
2395 buffer.edit([(range.end..range.end, "\n")], None, cx);
2396 });
2397 edited_buffer = true;
2398 MessageAnchor {
2399 id: MessageId(self.next_timestamp()),
2400 start: self.buffer.read(cx).anchor_before(range.end + 1),
2401 }
2402 };
2403
2404 let suffix_metadata = MessageMetadata {
2405 role,
2406 status: MessageStatus::Done,
2407 timestamp: suffix.id.0,
2408 cache: None,
2409 };
2410 self.insert_message(suffix.clone(), suffix_metadata.clone(), cx);
2411 self.push_op(
2412 ContextOperation::InsertMessage {
2413 anchor: suffix.clone(),
2414 metadata: suffix_metadata,
2415 version,
2416 },
2417 cx,
2418 );
2419
2420 let new_messages =
2421 if range.start == range.end || range.start == message.offset_range.start {
2422 (None, Some(suffix))
2423 } else {
2424 let mut prefix_end = None;
2425 if range.start > message.offset_range.start
2426 && range.end < message.offset_range.end - 1
2427 {
2428 if self.buffer.read(cx).chars_at(range.start).next() == Some('\n') {
2429 prefix_end = Some(range.start + 1);
2430 } else if self.buffer.read(cx).reversed_chars_at(range.start).next()
2431 == Some('\n')
2432 {
2433 prefix_end = Some(range.start);
2434 }
2435 }
2436
2437 let version = self.version.clone();
2438 let selection = if let Some(prefix_end) = prefix_end {
2439 MessageAnchor {
2440 id: MessageId(self.next_timestamp()),
2441 start: self.buffer.read(cx).anchor_before(prefix_end),
2442 }
2443 } else {
2444 self.buffer.update(cx, |buffer, cx| {
2445 buffer.edit([(range.start..range.start, "\n")], None, cx)
2446 });
2447 edited_buffer = true;
2448 MessageAnchor {
2449 id: MessageId(self.next_timestamp()),
2450 start: self.buffer.read(cx).anchor_before(range.end + 1),
2451 }
2452 };
2453
2454 let selection_metadata = MessageMetadata {
2455 role,
2456 status: MessageStatus::Done,
2457 timestamp: selection.id.0,
2458 cache: None,
2459 };
2460 self.insert_message(selection.clone(), selection_metadata.clone(), cx);
2461 self.push_op(
2462 ContextOperation::InsertMessage {
2463 anchor: selection.clone(),
2464 metadata: selection_metadata,
2465 version,
2466 },
2467 cx,
2468 );
2469
2470 (Some(selection), Some(suffix))
2471 };
2472
2473 if !edited_buffer {
2474 cx.emit(ContextEvent::MessagesEdited);
2475 }
2476 new_messages
2477 } else {
2478 (None, None)
2479 }
2480 }
2481
2482 fn insert_message(
2483 &mut self,
2484 new_anchor: MessageAnchor,
2485 new_metadata: MessageMetadata,
2486 cx: &mut ModelContext<Self>,
2487 ) {
2488 cx.emit(ContextEvent::MessagesEdited);
2489
2490 self.messages_metadata.insert(new_anchor.id, new_metadata);
2491
2492 let buffer = self.buffer.read(cx);
2493 let insertion_ix = self
2494 .message_anchors
2495 .iter()
2496 .position(|anchor| {
2497 let comparison = new_anchor.start.cmp(&anchor.start, buffer);
2498 comparison.is_lt() || (comparison.is_eq() && new_anchor.id > anchor.id)
2499 })
2500 .unwrap_or(self.message_anchors.len());
2501 self.message_anchors.insert(insertion_ix, new_anchor);
2502 }
2503
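    /// Asks the active model for a short single-line title summarizing the
    /// conversation and streams it into `self.summary`. When `replace_old` is false,
    /// this only runs once at least two messages exist and no summary has been set.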
2504 pub(super) fn summarize(&mut self, replace_old: bool, cx: &mut ModelContext<Self>) {
2505 let Some(provider) = LanguageModelRegistry::read_global(cx).active_provider() else {
2506 return;
2507 };
2508 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
2509 return;
2510 };
2511
2512 if replace_old || (self.message_anchors.len() >= 2 && self.summary.is_none()) {
2513 if !provider.is_authenticated(cx) {
2514 return;
2515 }
2516
2517 let messages = self
2518 .messages(cx)
2519 .filter_map(|message| message.to_request_message(self.buffer.read(cx)))
2520 .chain(Some(LanguageModelRequestMessage {
2521 role: Role::User,
2522 content: vec![
2523 "Summarize the context into a short title without punctuation.".into(),
2524 ],
2525 cache: false,
2526 }));
2527 let request = LanguageModelRequest {
2528 messages: messages.collect(),
2529 tools: Vec::new(),
2530 stop: Vec::new(),
2531 temperature: 1.0,
2532 };
2533
2534 self.pending_summary = cx.spawn(|this, mut cx| {
2535 async move {
2536 let stream = model.stream_completion_text(request, &cx);
2537 let mut messages = stream.await?;
2538
2539 let mut replaced = !replace_old;
2540 while let Some(message) = messages.next().await {
2541 let text = message?;
2542 let mut lines = text.lines();
2543 this.update(&mut cx, |this, cx| {
2544 let version = this.version.clone();
2545 let timestamp = this.next_timestamp();
2546 let summary = this.summary.get_or_insert(ContextSummary::default());
2547 if !replaced && replace_old {
2548 summary.text.clear();
2549 replaced = true;
2550 }
2551 summary.text.extend(lines.next());
2552 summary.timestamp = timestamp;
2553 let operation = ContextOperation::UpdateSummary {
2554 summary: summary.clone(),
2555 version,
2556 };
2557 this.push_op(operation, cx);
2558 cx.emit(ContextEvent::SummaryChanged);
2559 })?;
2560
2561 // Stop if the LLM generated multiple lines.
2562 if lines.next().is_some() {
2563 break;
2564 }
2565 }
2566
2567 this.update(&mut cx, |this, cx| {
2568 let version = this.version.clone();
2569 let timestamp = this.next_timestamp();
2570 if let Some(summary) = this.summary.as_mut() {
2571 summary.done = true;
2572 summary.timestamp = timestamp;
2573 let operation = ContextOperation::UpdateSummary {
2574 summary: summary.clone(),
2575 version,
2576 };
2577 this.push_op(operation, cx);
2578 cx.emit(ContextEvent::SummaryChanged);
2579 }
2580 })?;
2581
2582 anyhow::Ok(())
2583 }
2584 .log_err()
2585 });
2586 }
2587 }
2588
2589 fn message_for_offset(&self, offset: usize, cx: &AppContext) -> Option<Message> {
2590 self.messages_for_offsets([offset], cx).pop()
2591 }
2592
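    /// Returns the messages containing the given buffer offsets, in order, with at
    /// most one entry per message.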
2593 pub fn messages_for_offsets(
2594 &self,
2595 offsets: impl IntoIterator<Item = usize>,
2596 cx: &AppContext,
2597 ) -> Vec<Message> {
2598 let mut result = Vec::new();
2599
2600 let mut messages = self.messages(cx).peekable();
2601 let mut offsets = offsets.into_iter().peekable();
2602 let mut current_message = messages.next();
2603 while let Some(offset) = offsets.next() {
2604 // Locate the message that contains the offset.
2605 while current_message.as_ref().map_or(false, |message| {
2606 !message.offset_range.contains(&offset) && messages.peek().is_some()
2607 }) {
2608 current_message = messages.next();
2609 }
2610 let Some(message) = current_message.as_ref() else {
2611 break;
2612 };
2613
2614 // Skip offsets that are in the same message.
2615 while offsets.peek().map_or(false, |offset| {
2616 message.offset_range.contains(offset) || messages.peek().is_none()
2617 }) {
2618 offsets.next();
2619 }
2620
2621 result.push(message.clone());
2622 }
2623 result
2624 }
2625
2626 fn messages_from_anchors<'a>(
2627 &'a self,
2628 message_anchors: impl Iterator<Item = &'a MessageAnchor> + 'a,
2629 cx: &'a AppContext,
2630 ) -> impl 'a + Iterator<Item = Message> {
2631 let buffer = self.buffer.read(cx);
2632 let messages = message_anchors.enumerate();
2633 let images = self.image_anchors.iter();
2634
2635 Self::messages_from_iters(buffer, &self.messages_metadata, messages, images)
2636 }
2637
2638 pub fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Message> {
2639 self.messages_from_anchors(self.message_anchors.iter(), cx)
2640 }
2641
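    /// Assembles `Message` values from an iterator of message anchors, attaching the
    /// image anchors that fall within each message and resolving offsets against
    /// `buffer`.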
2642 pub fn messages_from_iters<'a>(
2643 buffer: &'a Buffer,
2644 metadata: &'a HashMap<MessageId, MessageMetadata>,
2645 messages: impl Iterator<Item = (usize, &'a MessageAnchor)> + 'a,
2646 images: impl Iterator<Item = &'a ImageAnchor> + 'a,
2647 ) -> impl 'a + Iterator<Item = Message> {
2648 let mut messages = messages.peekable();
2649 let mut images = images.peekable();
2650
2651 iter::from_fn(move || {
2652 if let Some((start_ix, message_anchor)) = messages.next() {
2653 let metadata = metadata.get(&message_anchor.id)?;
2654
2655 let message_start = message_anchor.start.to_offset(buffer);
2656 let mut message_end = None;
2657 let mut end_ix = start_ix;
2658 while let Some((_, next_message)) = messages.peek() {
2659 if next_message.start.is_valid(buffer) {
2660 message_end = Some(next_message.start);
2661 break;
2662 } else {
2663 end_ix += 1;
2664 messages.next();
2665 }
2666 }
2667 let message_end_anchor = message_end.unwrap_or(language::Anchor::MAX);
2668 let message_end = message_end_anchor.to_offset(buffer);
2669
2670 let mut image_offsets = SmallVec::new();
2671 while let Some(image_anchor) = images.peek() {
2672 if image_anchor.anchor.cmp(&message_end_anchor, buffer).is_lt() {
2673 image_offsets.push((
2674 image_anchor.anchor.to_offset(buffer),
2675 MessageImage {
2676 image_id: image_anchor.image_id,
2677 image: image_anchor.image.clone(),
2678 },
2679 ));
2680 images.next();
2681 } else {
2682 break;
2683 }
2684 }
2685
2686 return Some(Message {
2687 index_range: start_ix..end_ix,
2688 offset_range: message_start..message_end,
2689 anchor_range: message_anchor.start..message_end_anchor,
2690 id: message_anchor.id,
2691 role: metadata.role,
2692 status: metadata.status.clone(),
2693 cache: metadata.cache.clone(),
2694 image_offsets,
2695 });
2696 }
2697 None
2698 })
2699 }
2700
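    /// Persists the context as JSON under `contexts_dir()`, named after its summary,
    /// after an optional debounce. Contexts with a non-default replica id (i.e.
    /// remote contexts) are not saved.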
2701 pub fn save(
2702 &mut self,
2703 debounce: Option<Duration>,
2704 fs: Arc<dyn Fs>,
2705 cx: &mut ModelContext<Context>,
2706 ) {
2707 if self.replica_id() != ReplicaId::default() {
2708 // Prevent saving a remote context for now.
2709 return;
2710 }
2711
2712 self.pending_save = cx.spawn(|this, mut cx| async move {
2713 if let Some(debounce) = debounce {
2714 cx.background_executor().timer(debounce).await;
2715 }
2716
2717 let (old_path, summary) = this.read_with(&cx, |this, _| {
2718 let path = this.path.clone();
2719 let summary = if let Some(summary) = this.summary.as_ref() {
2720 if summary.done {
2721 Some(summary.text.clone())
2722 } else {
2723 None
2724 }
2725 } else {
2726 None
2727 };
2728 (path, summary)
2729 })?;
2730
2731 if let Some(summary) = summary {
2732 this.read_with(&cx, |this, cx| this.serialize_images(fs.clone(), cx))?
2733 .await;
2734
2735 let context = this.read_with(&cx, |this, cx| this.serialize(cx))?;
2736 let mut discriminant = 1;
2737 let mut new_path;
2738 loop {
2739 new_path = contexts_dir().join(&format!(
2740 "{} - {}.zed.json",
2741 summary.trim(),
2742 discriminant
2743 ));
2744 if fs.is_file(&new_path).await {
2745 discriminant += 1;
2746 } else {
2747 break;
2748 }
2749 }
2750
2751 fs.create_dir(contexts_dir().as_ref()).await?;
2752 fs.atomic_write(new_path.clone(), serde_json::to_string(&context).unwrap())
2753 .await?;
2754 if let Some(old_path) = old_path {
2755 if new_path != old_path {
2756 fs.remove_file(
2757 &old_path,
2758 RemoveOptions {
2759 recursive: false,
2760 ignore_if_not_exists: true,
2761 },
2762 )
2763 .await?;
2764 }
2765 }
2766
2767 this.update(&mut cx, |this, _| this.path = Some(new_path))?;
2768 }
2769
2770 Ok(())
2771 });
2772 }
2773
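    /// Writes each image's base64 encoding to `context_images_dir()` on the
    /// background executor, skipping files that already exist.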
2774 pub fn serialize_images(&self, fs: Arc<dyn Fs>, cx: &AppContext) -> Task<()> {
2775 let mut images_to_save = self
2776 .images
2777 .iter()
2778 .map(|(id, (_, llm_image))| {
2779 let fs = fs.clone();
2780 let llm_image = llm_image.clone();
2781 let id = *id;
2782 async move {
2783 if let Some(llm_image) = llm_image.await {
2784 let path: PathBuf =
2785 context_images_dir().join(&format!("{}.png.base64", id));
2786 if fs
2787 .metadata(path.as_path())
2788 .await
2789 .log_err()
2790 .flatten()
2791 .is_none()
2792 {
2793 fs.atomic_write(path, llm_image.source.to_string())
2794 .await
2795 .log_err();
2796 }
2797 }
2798 }
2799 })
2800 .collect::<FuturesUnordered<_>>();
2801 cx.background_executor().spawn(async move {
2802 if fs
2803 .create_dir(context_images_dir().as_ref())
2804 .await
2805 .log_err()
2806 .is_some()
2807 {
2808                while images_to_save.next().await.is_some() {}
2809 }
2810 })
2811 }
2812
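    /// Overrides the summary with user-provided text and marks it as done.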
2813 pub(crate) fn custom_summary(&mut self, custom_summary: String, cx: &mut ModelContext<Self>) {
2814 let timestamp = self.next_timestamp();
2815 let summary = self.summary.get_or_insert(ContextSummary::default());
2816 summary.timestamp = timestamp;
2817 summary.done = true;
2818 summary.text = custom_summary;
2819 cx.emit(ContextEvent::SummaryChanged);
2820 }
2821}
2822
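/// Pairs a context's operation version with the version of its underlying buffer,
/// as exchanged over the protocol.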
2823#[derive(Debug, Default)]
2824pub struct ContextVersion {
2825 context: clock::Global,
2826 buffer: clock::Global,
2827}
2828
2829impl ContextVersion {
2830 pub fn from_proto(proto: &proto::ContextVersion) -> Self {
2831 Self {
2832 context: language::proto::deserialize_version(&proto.context_version),
2833 buffer: language::proto::deserialize_version(&proto.buffer_version),
2834 }
2835 }
2836
2837 pub fn to_proto(&self, context_id: ContextId) -> proto::ContextVersion {
2838 proto::ContextVersion {
2839 context_id: context_id.to_proto(),
2840 context_version: language::proto::serialize_version(&self.context),
2841 buffer_version: language::proto::serialize_version(&self.buffer),
2842 }
2843 }
2844}
2845
2846#[derive(Debug, Clone)]
2847pub struct PendingSlashCommand {
2848 pub name: String,
2849 pub arguments: SmallVec<[String; 3]>,
2850 pub status: PendingSlashCommandStatus,
2851 pub source_range: Range<language::Anchor>,
2852}
2853
2854#[derive(Debug, Clone)]
2855pub enum PendingSlashCommandStatus {
2856 Idle,
2857 Running { _task: Shared<Task<()>> },
2858 Error(String),
2859}
2860
2861pub(crate) struct ToolUseFeatureFlag;
2862
2863impl FeatureFlag for ToolUseFeatureFlag {
2864 const NAME: &'static str = "assistant-tool-use";
2865
2866 fn enabled_for_staff() -> bool {
2867 false
2868 }
2869}
2870
2871#[derive(Debug, Clone)]
2872pub struct PendingToolUse {
2873 pub id: Arc<str>,
2874 pub name: String,
2875 pub input: serde_json::Value,
2876 pub status: PendingToolUseStatus,
2877 pub source_range: Range<language::Anchor>,
2878}
2879
2880#[derive(Debug, Clone)]
2881pub enum PendingToolUseStatus {
2882 Idle,
2883 Running { _task: Shared<Task<()>> },
2884 Error(String),
2885}
2886
2887impl PendingToolUseStatus {
2888 pub fn is_idle(&self) -> bool {
2889 matches!(self, PendingToolUseStatus::Idle)
2890 }
2891}
2892
2893#[derive(Serialize, Deserialize)]
2894pub struct SavedMessage {
2895 pub id: MessageId,
2896 pub start: usize,
2897 pub metadata: MessageMetadata,
2898 #[serde(default)]
2899    // Defaulted for backwards compatibility: JSON files created before August 2024 lack this field.
2900 pub image_offsets: Vec<(usize, u64)>,
2901}
2902
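/// On-disk representation of a context, currently serialized as version 0.4.0 JSON.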
2903#[derive(Serialize, Deserialize)]
2904pub struct SavedContext {
2905 pub id: Option<ContextId>,
2906 pub zed: String,
2907 pub version: String,
2908 pub text: String,
2909 pub messages: Vec<SavedMessage>,
2910 pub summary: String,
2911 pub slash_command_output_sections:
2912 Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
2913}
2914
2915impl SavedContext {
2916 pub const VERSION: &'static str = "0.4.0";
2917
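    /// Deserializes a saved context from JSON, transparently upgrading the 0.1.0,
    /// 0.2.0, and 0.3.0 formats to the current version.
    ///
    /// A minimal sketch of loading a saved context from disk (hypothetical `path`):
    ///
    /// ```ignore
    /// let json = std::fs::read_to_string(&path)?;
    /// let saved = SavedContext::from_json(&json)?;
    /// ```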
2918 pub fn from_json(json: &str) -> Result<Self> {
2919 let saved_context_json = serde_json::from_str::<serde_json::Value>(json)?;
2920 match saved_context_json
2921 .get("version")
2922 .ok_or_else(|| anyhow!("version not found"))?
2923 {
2924 serde_json::Value::String(version) => match version.as_str() {
2925 SavedContext::VERSION => {
2926 Ok(serde_json::from_value::<SavedContext>(saved_context_json)?)
2927 }
2928 SavedContextV0_3_0::VERSION => {
2929 let saved_context =
2930 serde_json::from_value::<SavedContextV0_3_0>(saved_context_json)?;
2931 Ok(saved_context.upgrade())
2932 }
2933 SavedContextV0_2_0::VERSION => {
2934 let saved_context =
2935 serde_json::from_value::<SavedContextV0_2_0>(saved_context_json)?;
2936 Ok(saved_context.upgrade())
2937 }
2938 SavedContextV0_1_0::VERSION => {
2939 let saved_context =
2940 serde_json::from_value::<SavedContextV0_1_0>(saved_context_json)?;
2941 Ok(saved_context.upgrade())
2942 }
2943 _ => Err(anyhow!("unrecognized saved context version: {}", version)),
2944 },
2945 _ => Err(anyhow!("version not found on saved context")),
2946 }
2947 }
2948
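    /// Converts the saved context into the sequence of `ContextOperation`s needed to
    /// reconstruct it on top of `buffer`.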
2949 fn into_ops(
2950 self,
2951 buffer: &Model<Buffer>,
2952 cx: &mut ModelContext<Context>,
2953 ) -> Vec<ContextOperation> {
2954 let mut operations = Vec::new();
2955 let mut version = clock::Global::new();
2956 let mut next_timestamp = clock::Lamport::new(ReplicaId::default());
2957
2958 let mut first_message_metadata = None;
2959 for message in self.messages {
2960 if message.id == MessageId(clock::Lamport::default()) {
2961 first_message_metadata = Some(message.metadata);
2962 } else {
2963 operations.push(ContextOperation::InsertMessage {
2964 anchor: MessageAnchor {
2965 id: message.id,
2966 start: buffer.read(cx).anchor_before(message.start),
2967 },
2968 metadata: MessageMetadata {
2969 role: message.metadata.role,
2970 status: message.metadata.status,
2971 timestamp: message.metadata.timestamp,
2972 cache: None,
2973 },
2974 version: version.clone(),
2975 });
2976 version.observe(message.id.0);
2977 next_timestamp.observe(message.id.0);
2978 }
2979 }
2980
2981 if let Some(metadata) = first_message_metadata {
2982 let timestamp = next_timestamp.tick();
2983 operations.push(ContextOperation::UpdateMessage {
2984 message_id: MessageId(clock::Lamport::default()),
2985 metadata: MessageMetadata {
2986 role: metadata.role,
2987 status: metadata.status,
2988 timestamp,
2989 cache: None,
2990 },
2991 version: version.clone(),
2992 });
2993 version.observe(timestamp);
2994 }
2995
2996 let timestamp = next_timestamp.tick();
2997 operations.push(ContextOperation::SlashCommandFinished {
2998 id: SlashCommandId(timestamp),
2999 output_range: language::Anchor::MIN..language::Anchor::MAX,
3000 sections: self
3001 .slash_command_output_sections
3002 .into_iter()
3003 .map(|section| {
3004 let buffer = buffer.read(cx);
3005 SlashCommandOutputSection {
3006 range: buffer.anchor_after(section.range.start)
3007 ..buffer.anchor_before(section.range.end),
3008 icon: section.icon,
3009 label: section.label,
3010 }
3011 })
3012 .collect(),
3013 version: version.clone(),
3014 });
3015 version.observe(timestamp);
3016
3017 let timestamp = next_timestamp.tick();
3018 operations.push(ContextOperation::UpdateSummary {
3019 summary: ContextSummary {
3020 text: self.summary,
3021 done: true,
3022 timestamp,
3023 },
3024 version: version.clone(),
3025 });
3026 version.observe(timestamp);
3027
3028 operations
3029 }
3030}
3031
3032#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
3033struct SavedMessageIdPreV0_4_0(usize);
3034
3035#[derive(Serialize, Deserialize)]
3036struct SavedMessagePreV0_4_0 {
3037 id: SavedMessageIdPreV0_4_0,
3038 start: usize,
3039}
3040
3041#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
3042struct SavedMessageMetadataPreV0_4_0 {
3043 role: Role,
3044 status: MessageStatus,
3045}
3046
3047#[derive(Serialize, Deserialize)]
3048struct SavedContextV0_3_0 {
3049 id: Option<ContextId>,
3050 zed: String,
3051 version: String,
3052 text: String,
3053 messages: Vec<SavedMessagePreV0_4_0>,
3054 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3055 summary: String,
3056 slash_command_output_sections: Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
3057}
3058
3059impl SavedContextV0_3_0 {
3060 const VERSION: &'static str = "0.3.0";
3061
3062 fn upgrade(self) -> SavedContext {
3063 SavedContext {
3064 id: self.id,
3065 zed: self.zed,
3066 version: SavedContext::VERSION.into(),
3067 text: self.text,
3068 messages: self
3069 .messages
3070 .into_iter()
3071 .filter_map(|message| {
3072 let metadata = self.message_metadata.get(&message.id)?;
3073 let timestamp = clock::Lamport {
3074 replica_id: ReplicaId::default(),
3075 value: message.id.0 as u32,
3076 };
3077 Some(SavedMessage {
3078 id: MessageId(timestamp),
3079 start: message.start,
3080 metadata: MessageMetadata {
3081 role: metadata.role,
3082 status: metadata.status.clone(),
3083 timestamp,
3084 cache: None,
3085 },
3086 image_offsets: Vec::new(),
3087 })
3088 })
3089 .collect(),
3090 summary: self.summary,
3091 slash_command_output_sections: self.slash_command_output_sections,
3092 }
3093 }
3094}
3095
3096#[derive(Serialize, Deserialize)]
3097struct SavedContextV0_2_0 {
3098 id: Option<ContextId>,
3099 zed: String,
3100 version: String,
3101 text: String,
3102 messages: Vec<SavedMessagePreV0_4_0>,
3103 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3104 summary: String,
3105}
3106
3107impl SavedContextV0_2_0 {
3108 const VERSION: &'static str = "0.2.0";
3109
3110 fn upgrade(self) -> SavedContext {
3111 SavedContextV0_3_0 {
3112 id: self.id,
3113 zed: self.zed,
3114 version: SavedContextV0_3_0::VERSION.to_string(),
3115 text: self.text,
3116 messages: self.messages,
3117 message_metadata: self.message_metadata,
3118 summary: self.summary,
3119 slash_command_output_sections: Vec::new(),
3120 }
3121 .upgrade()
3122 }
3123}
3124
3125#[derive(Serialize, Deserialize)]
3126struct SavedContextV0_1_0 {
3127 id: Option<ContextId>,
3128 zed: String,
3129 version: String,
3130 text: String,
3131 messages: Vec<SavedMessagePreV0_4_0>,
3132 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3133 summary: String,
3134 api_url: Option<String>,
3135 model: OpenAiModel,
3136}
3137
3138impl SavedContextV0_1_0 {
3139 const VERSION: &'static str = "0.1.0";
3140
3141 fn upgrade(self) -> SavedContext {
3142 SavedContextV0_2_0 {
3143 id: self.id,
3144 zed: self.zed,
3145 version: SavedContextV0_2_0::VERSION.to_string(),
3146 text: self.text,
3147 messages: self.messages,
3148 message_metadata: self.message_metadata,
3149 summary: self.summary,
3150 }
3151 .upgrade()
3152 }
3153}
3154
3155#[derive(Clone)]
3156pub struct SavedContextMetadata {
3157 pub title: String,
3158 pub path: PathBuf,
3159 pub mtime: chrono::DateTime<chrono::Local>,
3160}