1#[cfg(test)]
2mod context_tests;
3
4use crate::{
5 prompts::PromptBuilder, slash_command::SlashCommandLine, MessageId, MessageStatus,
6 WorkflowStep, WorkflowStepEdit, WorkflowStepResolution, WorkflowSuggestionGroup,
7};
8use anyhow::{anyhow, Context as _, Result};
9use assistant_slash_command::{
10 SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry,
11};
12use assistant_tool::ToolRegistry;
13use client::{self, proto, telemetry::Telemetry};
14use clock::ReplicaId;
15use collections::{HashMap, HashSet};
16use feature_flags::{FeatureFlag, FeatureFlagAppExt};
17use fs::{Fs, RemoveOptions};
18use futures::{
19 future::{self, Shared},
20 FutureExt, StreamExt,
21};
22use gpui::{
23 AppContext, AsyncAppContext, Context as _, EventEmitter, Model, ModelContext, RenderImage,
24 SharedString, Subscription, Task,
25};
26
27use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset};
28use language_model::{
29 provider::cloud::{MaxMonthlySpendReachedError, PaymentRequiredError},
30 LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionEvent,
31 LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
32 LanguageModelRequestTool, LanguageModelToolResult, LanguageModelToolUse, MessageContent, Role,
33 StopReason,
34};
35use open_ai::Model as OpenAiModel;
36use paths::contexts_dir;
37use project::Project;
38use serde::{Deserialize, Serialize};
39use smallvec::SmallVec;
40use std::{
41 cmp::{self, max, Ordering},
42 fmt::Debug,
43 iter, mem,
44 ops::Range,
45 path::{Path, PathBuf},
46 str::FromStr as _,
47 sync::Arc,
48 time::{Duration, Instant},
49};
50use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase};
51use text::BufferSnapshot;
52use util::{post_inc, ResultExt, TryFutureExt};
53use uuid::Uuid;
54
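/// A globally unique identifier for a context, backed by a freshly generated
/// UUID string.
///
/// Illustrative round trip through the proto representation (hypothetical
/// usage, not taken from a call site in this file):
///
/// ```ignore
/// let id = ContextId::new();
/// let wire = id.to_proto();
/// assert!(ContextId::from_proto(wire) == id);
/// ```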
55#[derive(Clone, Eq, PartialEq, Hash, PartialOrd, Ord, Serialize, Deserialize)]
56pub struct ContextId(String);
57
58impl ContextId {
59 pub fn new() -> Self {
60 Self(Uuid::new_v4().to_string())
61 }
62
63 pub fn from_proto(id: String) -> Self {
64 Self(id)
65 }
66
67 pub fn to_proto(&self) -> String {
68 self.0.clone()
69 }
70}
71
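/// A replicated operation on a [`Context`]. Each non-buffer variant records the
/// vector-clock `version` that was current when it was generated, so a replica
/// can defer applying it until that version has been observed locally (see
/// `can_apply_op`). Plain text edits are wrapped as `BufferOperation`s and
/// delegated to the underlying buffer.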
72#[derive(Clone, Debug)]
73pub enum ContextOperation {
74 InsertMessage {
75 anchor: MessageAnchor,
76 metadata: MessageMetadata,
77 version: clock::Global,
78 },
79 UpdateMessage {
80 message_id: MessageId,
81 metadata: MessageMetadata,
82 version: clock::Global,
83 },
84 UpdateSummary {
85 summary: ContextSummary,
86 version: clock::Global,
87 },
88 SlashCommandFinished {
89 id: SlashCommandId,
90 output_range: Range<language::Anchor>,
91 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
92 version: clock::Global,
93 },
94 BufferOperation(language::Operation),
95}
96
97impl ContextOperation {
98 pub fn from_proto(op: proto::ContextOperation) -> Result<Self> {
99 match op.variant.context("invalid variant")? {
100 proto::context_operation::Variant::InsertMessage(insert) => {
101 let message = insert.message.context("invalid message")?;
102 let id = MessageId(language::proto::deserialize_timestamp(
103 message.id.context("invalid id")?,
104 ));
105 Ok(Self::InsertMessage {
106 anchor: MessageAnchor {
107 id,
108 start: language::proto::deserialize_anchor(
109 message.start.context("invalid anchor")?,
110 )
111 .context("invalid anchor")?,
112 },
113 metadata: MessageMetadata {
114 role: Role::from_proto(message.role),
115 status: MessageStatus::from_proto(
116 message.status.context("invalid status")?,
117 ),
118 timestamp: id.0,
119 cache: None,
120 },
121 version: language::proto::deserialize_version(&insert.version),
122 })
123 }
124 proto::context_operation::Variant::UpdateMessage(update) => Ok(Self::UpdateMessage {
125 message_id: MessageId(language::proto::deserialize_timestamp(
126 update.message_id.context("invalid message id")?,
127 )),
128 metadata: MessageMetadata {
129 role: Role::from_proto(update.role),
130 status: MessageStatus::from_proto(update.status.context("invalid status")?),
131 timestamp: language::proto::deserialize_timestamp(
132 update.timestamp.context("invalid timestamp")?,
133 ),
134 cache: None,
135 },
136 version: language::proto::deserialize_version(&update.version),
137 }),
138 proto::context_operation::Variant::UpdateSummary(update) => Ok(Self::UpdateSummary {
139 summary: ContextSummary {
140 text: update.summary,
141 done: update.done,
142 timestamp: language::proto::deserialize_timestamp(
143 update.timestamp.context("invalid timestamp")?,
144 ),
145 },
146 version: language::proto::deserialize_version(&update.version),
147 }),
148 proto::context_operation::Variant::SlashCommandFinished(finished) => {
149 Ok(Self::SlashCommandFinished {
150 id: SlashCommandId(language::proto::deserialize_timestamp(
151 finished.id.context("invalid id")?,
152 )),
153 output_range: language::proto::deserialize_anchor_range(
154 finished.output_range.context("invalid range")?,
155 )?,
156 sections: finished
157 .sections
158 .into_iter()
159 .map(|section| {
160 Ok(SlashCommandOutputSection {
161 range: language::proto::deserialize_anchor_range(
162 section.range.context("invalid range")?,
163 )?,
164 icon: section.icon_name.parse()?,
165 label: section.label.into(),
166 metadata: section
167 .metadata
168 .and_then(|metadata| serde_json::from_str(&metadata).log_err()),
169 })
170 })
171 .collect::<Result<Vec<_>>>()?,
172 version: language::proto::deserialize_version(&finished.version),
173 })
174 }
175 proto::context_operation::Variant::BufferOperation(op) => Ok(Self::BufferOperation(
176 language::proto::deserialize_operation(
177 op.operation.context("invalid buffer operation")?,
178 )?,
179 )),
180 }
181 }
182
183 pub fn to_proto(&self) -> proto::ContextOperation {
184 match self {
185 Self::InsertMessage {
186 anchor,
187 metadata,
188 version,
189 } => proto::ContextOperation {
190 variant: Some(proto::context_operation::Variant::InsertMessage(
191 proto::context_operation::InsertMessage {
192 message: Some(proto::ContextMessage {
193 id: Some(language::proto::serialize_timestamp(anchor.id.0)),
194 start: Some(language::proto::serialize_anchor(&anchor.start)),
195 role: metadata.role.to_proto() as i32,
196 status: Some(metadata.status.to_proto()),
197 }),
198 version: language::proto::serialize_version(version),
199 },
200 )),
201 },
202 Self::UpdateMessage {
203 message_id,
204 metadata,
205 version,
206 } => proto::ContextOperation {
207 variant: Some(proto::context_operation::Variant::UpdateMessage(
208 proto::context_operation::UpdateMessage {
209 message_id: Some(language::proto::serialize_timestamp(message_id.0)),
210 role: metadata.role.to_proto() as i32,
211 status: Some(metadata.status.to_proto()),
212 timestamp: Some(language::proto::serialize_timestamp(metadata.timestamp)),
213 version: language::proto::serialize_version(version),
214 },
215 )),
216 },
217 Self::UpdateSummary { summary, version } => proto::ContextOperation {
218 variant: Some(proto::context_operation::Variant::UpdateSummary(
219 proto::context_operation::UpdateSummary {
220 summary: summary.text.clone(),
221 done: summary.done,
222 timestamp: Some(language::proto::serialize_timestamp(summary.timestamp)),
223 version: language::proto::serialize_version(version),
224 },
225 )),
226 },
227 Self::SlashCommandFinished {
228 id,
229 output_range,
230 sections,
231 version,
232 } => proto::ContextOperation {
233 variant: Some(proto::context_operation::Variant::SlashCommandFinished(
234 proto::context_operation::SlashCommandFinished {
235 id: Some(language::proto::serialize_timestamp(id.0)),
236 output_range: Some(language::proto::serialize_anchor_range(
237 output_range.clone(),
238 )),
239 sections: sections
240 .iter()
241 .map(|section| {
242 let icon_name: &'static str = section.icon.into();
243 proto::SlashCommandOutputSection {
244 range: Some(language::proto::serialize_anchor_range(
245 section.range.clone(),
246 )),
247 icon_name: icon_name.to_string(),
248 label: section.label.to_string(),
249 metadata: section.metadata.as_ref().and_then(|metadata| {
250 serde_json::to_string(metadata).log_err()
251 }),
252 }
253 })
254 .collect(),
255 version: language::proto::serialize_version(version),
256 },
257 )),
258 },
259 Self::BufferOperation(operation) => proto::ContextOperation {
260 variant: Some(proto::context_operation::Variant::BufferOperation(
261 proto::context_operation::BufferOperation {
262 operation: Some(language::proto::serialize_operation(operation)),
263 },
264 )),
265 },
266 }
267 }
268
269 fn timestamp(&self) -> clock::Lamport {
270 match self {
271 Self::InsertMessage { anchor, .. } => anchor.id.0,
272 Self::UpdateMessage { metadata, .. } => metadata.timestamp,
273 Self::UpdateSummary { summary, .. } => summary.timestamp,
274 Self::SlashCommandFinished { id, .. } => id.0,
275 Self::BufferOperation(_) => {
276 panic!("reading the timestamp of a buffer operation is not supported")
277 }
278 }
279 }
280
    /// Returns the version of the context at the time this operation was generated.
282 pub fn version(&self) -> &clock::Global {
283 match self {
284 Self::InsertMessage { version, .. }
285 | Self::UpdateMessage { version, .. }
286 | Self::UpdateSummary { version, .. }
287 | Self::SlashCommandFinished { version, .. } => version,
288 Self::BufferOperation(_) => {
289 panic!("reading the version of a buffer operation is not supported")
290 }
291 }
292 }
293}
294
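/// Events emitted by a [`Context`]. `Operation` carries a [`ContextOperation`]
/// intended for replication to other collaborators; the remaining variants
/// drive local UI updates such as error banners, summary changes, slash command
/// progress, and tool use.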
295#[derive(Debug, Clone)]
296pub enum ContextEvent {
297 ShowAssistError(SharedString),
298 ShowPaymentRequiredError,
299 ShowMaxMonthlySpendReachedError,
300 MessagesEdited,
301 SummaryChanged,
302 StreamedCompletion,
303 WorkflowStepsUpdated {
304 removed: Vec<Range<language::Anchor>>,
305 updated: Vec<Range<language::Anchor>>,
306 },
307 PendingSlashCommandsUpdated {
308 removed: Vec<Range<language::Anchor>>,
309 updated: Vec<PendingSlashCommand>,
310 },
311 SlashCommandFinished {
312 output_range: Range<language::Anchor>,
313 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
314 run_commands_in_output: bool,
315 expand_result: bool,
316 },
317 UsePendingTools,
318 ToolFinished {
319 tool_use_id: Arc<str>,
320 output_range: Range<language::Anchor>,
321 },
322 Operation(ContextOperation),
323}
324
325#[derive(Clone, Default, Debug)]
326pub struct ContextSummary {
327 pub text: String,
328 done: bool,
329 timestamp: clock::Lamport,
330}
331
332#[derive(Clone, Debug, Eq, PartialEq)]
333pub struct MessageAnchor {
334 pub id: MessageId,
335 pub start: language::Anchor,
336}
337
338#[derive(Clone, Debug, Eq, PartialEq)]
339pub enum CacheStatus {
340 Pending,
341 Cached,
342}
343
344#[derive(Clone, Debug, Eq, PartialEq)]
345pub struct MessageCacheMetadata {
346 pub is_anchor: bool,
347 pub is_final_anchor: bool,
348 pub status: CacheStatus,
349 pub cached_at: clock::Global,
350}
351
352#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
353pub struct MessageMetadata {
354 pub role: Role,
355 pub status: MessageStatus,
356 pub(crate) timestamp: clock::Lamport,
357 #[serde(skip)]
358 pub cache: Option<MessageCacheMetadata>,
359}
360
361impl From<&Message> for MessageMetadata {
362 fn from(message: &Message) -> Self {
363 Self {
364 role: message.role,
365 status: message.status.clone(),
366 timestamp: message.id.0,
367 cache: message.cache.clone(),
368 }
369 }
370}
371
372impl MessageMetadata {
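    /// Reports whether the cached prefix for this message is still usable: the
    /// message must have cache metadata, and the buffer must contain no edits
    /// since `cached_at` within the message's byte range.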
373 pub fn is_cache_valid(&self, buffer: &BufferSnapshot, range: &Range<usize>) -> bool {
374 let result = match &self.cache {
375 Some(MessageCacheMetadata { cached_at, .. }) => !buffer.has_edits_since_in_range(
376 &cached_at,
377 Range {
378 start: buffer.anchor_at(range.start, Bias::Right),
379 end: buffer.anchor_at(range.end, Bias::Left),
380 },
381 ),
382 _ => false,
383 };
384 result
385 }
386}
387
388#[derive(Clone, Debug)]
389pub struct Message {
390 pub offset_range: Range<usize>,
391 pub index_range: Range<usize>,
392 pub anchor_range: Range<language::Anchor>,
393 pub id: MessageId,
394 pub role: Role,
395 pub status: MessageStatus,
396 pub cache: Option<MessageCacheMetadata>,
397}
398
399#[derive(Debug, Clone)]
400pub enum Content {
401 Image {
402 anchor: language::Anchor,
403 image_id: u64,
404 render_image: Arc<RenderImage>,
405 image: Shared<Task<Option<LanguageModelImage>>>,
406 },
407 ToolUse {
408 range: Range<language::Anchor>,
409 tool_use: LanguageModelToolUse,
410 },
411 ToolResult {
412 range: Range<language::Anchor>,
413 tool_use_id: Arc<str>,
414 },
415}
416
417impl Content {
418 fn range(&self) -> Range<language::Anchor> {
419 match self {
420 Self::Image { anchor, .. } => *anchor..*anchor,
421 Self::ToolUse { range, .. } | Self::ToolResult { range, .. } => range.clone(),
422 }
423 }
424
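    /// Orders two pieces of content by their position in the buffer, treating
    /// overlapping ranges as equal, which makes the ordering suitable for
    /// binary-searching a position-sorted list of content.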
425 fn cmp(&self, other: &Self, buffer: &BufferSnapshot) -> Ordering {
426 let self_range = self.range();
427 let other_range = other.range();
428 if self_range.end.cmp(&other_range.start, buffer).is_lt() {
429 Ordering::Less
430 } else if self_range.start.cmp(&other_range.end, buffer).is_gt() {
431 Ordering::Greater
432 } else {
433 Ordering::Equal
434 }
435 }
436}
437
438struct PendingCompletion {
439 id: usize,
440 assistant_message_id: MessageId,
441 _task: Task<()>,
442}
443
444#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
445pub struct SlashCommandId(clock::Lamport);
446
447#[derive(Clone, Debug)]
448pub struct XmlTag {
449 pub kind: XmlTagKind,
450 pub range: Range<text::Anchor>,
451 pub is_open_tag: bool,
452}
453
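/// The tag names the assistant can emit while describing a workflow step, e.g.
/// `<step>`, `<edit>`, `<path>`. `strum` derives the parser from the snake_case
/// spelling, so (illustratively) `XmlTagKind::from_str("step")` yields
/// `XmlTagKind::Step`.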
454#[derive(Copy, Clone, Debug, strum::EnumString, PartialEq, Eq, strum::AsRefStr)]
455#[strum(serialize_all = "snake_case")]
456pub enum XmlTagKind {
457 Step,
458 Edit,
459 Path,
460 Search,
461 Within,
462 Operation,
463 Description,
464}
465
466pub struct Context {
467 id: ContextId,
468 timestamp: clock::Lamport,
469 version: clock::Global,
470 pending_ops: Vec<ContextOperation>,
471 operations: Vec<ContextOperation>,
472 buffer: Model<Buffer>,
473 pending_slash_commands: Vec<PendingSlashCommand>,
474 edits_since_last_parse: language::Subscription,
475 finished_slash_commands: HashSet<SlashCommandId>,
476 slash_command_output_sections: Vec<SlashCommandOutputSection<language::Anchor>>,
477 pending_tool_uses_by_id: HashMap<Arc<str>, PendingToolUse>,
478 message_anchors: Vec<MessageAnchor>,
479 contents: Vec<Content>,
480 messages_metadata: HashMap<MessageId, MessageMetadata>,
481 summary: Option<ContextSummary>,
482 pending_summary: Task<Option<()>>,
483 completion_count: usize,
484 pending_completions: Vec<PendingCompletion>,
485 token_count: Option<usize>,
486 pending_token_count: Task<Option<()>>,
487 pending_save: Task<Result<()>>,
488 pending_cache_warming_task: Task<Option<()>>,
489 path: Option<PathBuf>,
490 _subscriptions: Vec<Subscription>,
491 telemetry: Option<Arc<Telemetry>>,
492 language_registry: Arc<LanguageRegistry>,
493 workflow_steps: Vec<WorkflowStep>,
494 xml_tags: Vec<XmlTag>,
495 project: Option<Model<Project>>,
496 prompt_builder: Arc<PromptBuilder>,
497}
498
499trait ContextAnnotation {
500 fn range(&self) -> &Range<language::Anchor>;
501}
502
503impl ContextAnnotation for PendingSlashCommand {
504 fn range(&self) -> &Range<language::Anchor> {
505 &self.source_range
506 }
507}
508
509impl ContextAnnotation for WorkflowStep {
510 fn range(&self) -> &Range<language::Anchor> {
511 &self.range
512 }
513}
514
515impl ContextAnnotation for XmlTag {
516 fn range(&self) -> &Range<language::Anchor> {
517 &self.range
518 }
519}
520
521impl EventEmitter<ContextEvent> for Context {}
522
523impl Context {
524 pub fn local(
525 language_registry: Arc<LanguageRegistry>,
526 project: Option<Model<Project>>,
527 telemetry: Option<Arc<Telemetry>>,
528 prompt_builder: Arc<PromptBuilder>,
529 cx: &mut ModelContext<Self>,
530 ) -> Self {
531 Self::new(
532 ContextId::new(),
533 ReplicaId::default(),
534 language::Capability::ReadWrite,
535 language_registry,
536 prompt_builder,
537 project,
538 telemetry,
539 cx,
540 )
541 }
542
543 #[allow(clippy::too_many_arguments)]
544 pub fn new(
545 id: ContextId,
546 replica_id: ReplicaId,
547 capability: language::Capability,
548 language_registry: Arc<LanguageRegistry>,
549 prompt_builder: Arc<PromptBuilder>,
550 project: Option<Model<Project>>,
551 telemetry: Option<Arc<Telemetry>>,
552 cx: &mut ModelContext<Self>,
553 ) -> Self {
554 let buffer = cx.new_model(|_cx| {
555 let buffer = Buffer::remote(
556 language::BufferId::new(1).unwrap(),
557 replica_id,
558 capability,
559 "",
560 );
561 buffer.set_language_registry(language_registry.clone());
562 buffer
563 });
564 let edits_since_last_slash_command_parse =
565 buffer.update(cx, |buffer, _| buffer.subscribe());
566 let mut this = Self {
567 id,
568 timestamp: clock::Lamport::new(replica_id),
569 version: clock::Global::new(),
570 pending_ops: Vec::new(),
571 operations: Vec::new(),
572 message_anchors: Default::default(),
573 contents: Default::default(),
574 messages_metadata: Default::default(),
575 pending_slash_commands: Vec::new(),
576 finished_slash_commands: HashSet::default(),
577 pending_tool_uses_by_id: HashMap::default(),
578 slash_command_output_sections: Vec::new(),
579 edits_since_last_parse: edits_since_last_slash_command_parse,
580 summary: None,
581 pending_summary: Task::ready(None),
582 completion_count: Default::default(),
583 pending_completions: Default::default(),
584 token_count: None,
585 pending_token_count: Task::ready(None),
586 pending_cache_warming_task: Task::ready(None),
587 _subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)],
588 pending_save: Task::ready(Ok(())),
589 path: None,
590 buffer,
591 telemetry,
592 project,
593 language_registry,
594 workflow_steps: Vec::new(),
595 xml_tags: Vec::new(),
596 prompt_builder,
597 };
598
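        // Every context starts with an empty user message anchored at the very
        // beginning of the buffer. It uses the zero Lamport timestamp so that
        // all replicas agree on its identity.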
599 let first_message_id = MessageId(clock::Lamport {
600 replica_id: 0,
601 value: 0,
602 });
603 let message = MessageAnchor {
604 id: first_message_id,
605 start: language::Anchor::MIN,
606 };
607 this.messages_metadata.insert(
608 first_message_id,
609 MessageMetadata {
610 role: Role::User,
611 status: MessageStatus::Done,
612 timestamp: first_message_id.0,
613 cache: None,
614 },
615 );
616 this.message_anchors.push(message);
617
618 this.set_language(cx);
619 this.count_remaining_tokens(cx);
620 this
621 }
622
623 pub(crate) fn serialize(&self, cx: &AppContext) -> SavedContext {
624 let buffer = self.buffer.read(cx);
625 SavedContext {
626 id: Some(self.id.clone()),
627 zed: "context".into(),
628 version: SavedContext::VERSION.into(),
629 text: buffer.text(),
630 messages: self
631 .messages(cx)
632 .map(|message| SavedMessage {
633 id: message.id,
634 start: message.offset_range.start,
635 metadata: self.messages_metadata[&message.id].clone(),
636 })
637 .collect(),
638 summary: self
639 .summary
640 .as_ref()
641 .map(|summary| summary.text.clone())
642 .unwrap_or_default(),
643 slash_command_output_sections: self
644 .slash_command_output_sections
645 .iter()
646 .filter_map(|section| {
647 if section.is_valid(buffer) {
648 let range = section.range.to_offset(buffer);
649 Some(assistant_slash_command::SlashCommandOutputSection {
650 range,
651 icon: section.icon,
652 label: section.label.clone(),
653 metadata: section.metadata.clone(),
654 })
655 } else {
656 None
657 }
658 })
659 .collect(),
660 }
661 }
662
663 #[allow(clippy::too_many_arguments)]
664 pub fn deserialize(
665 saved_context: SavedContext,
666 path: PathBuf,
667 language_registry: Arc<LanguageRegistry>,
668 prompt_builder: Arc<PromptBuilder>,
669 project: Option<Model<Project>>,
670 telemetry: Option<Arc<Telemetry>>,
671 cx: &mut ModelContext<Self>,
672 ) -> Self {
673 let id = saved_context.id.clone().unwrap_or_else(ContextId::new);
674 let mut this = Self::new(
675 id,
676 ReplicaId::default(),
677 language::Capability::ReadWrite,
678 language_registry,
679 prompt_builder,
680 project,
681 telemetry,
682 cx,
683 );
684 this.path = Some(path);
685 this.buffer.update(cx, |buffer, cx| {
686 buffer.set_text(saved_context.text.as_str(), cx)
687 });
688 let operations = saved_context.into_ops(&this.buffer, cx);
689 this.apply_ops(operations, cx);
690 this
691 }
692
693 pub fn id(&self) -> &ContextId {
694 &self.id
695 }
696
697 pub fn replica_id(&self) -> ReplicaId {
698 self.timestamp.replica_id
699 }
700
701 pub fn version(&self, cx: &AppContext) -> ContextVersion {
702 ContextVersion {
703 context: self.version.clone(),
704 buffer: self.buffer.read(cx).version(),
705 }
706 }
707
708 pub fn set_capability(
709 &mut self,
710 capability: language::Capability,
711 cx: &mut ModelContext<Self>,
712 ) {
713 self.buffer
714 .update(cx, |buffer, cx| buffer.set_capability(capability, cx));
715 }
716
717 fn next_timestamp(&mut self) -> clock::Lamport {
718 let timestamp = self.timestamp.tick();
719 self.version.observe(timestamp);
720 timestamp
721 }
722
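    /// Serializes every operation that `since` has not yet observed, including
    /// operations that are still pending locally. Buffer operations are
    /// serialized by the underlying buffer and emitted first, followed by the
    /// context-level operations sorted by Lamport timestamp.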
723 pub fn serialize_ops(
724 &self,
725 since: &ContextVersion,
726 cx: &AppContext,
727 ) -> Task<Vec<proto::ContextOperation>> {
728 let buffer_ops = self
729 .buffer
730 .read(cx)
731 .serialize_ops(Some(since.buffer.clone()), cx);
732
733 let mut context_ops = self
734 .operations
735 .iter()
736 .filter(|op| !since.context.observed(op.timestamp()))
737 .cloned()
738 .collect::<Vec<_>>();
739 context_ops.extend(self.pending_ops.iter().cloned());
740
741 cx.background_executor().spawn(async move {
742 let buffer_ops = buffer_ops.await;
743 context_ops.sort_unstable_by_key(|op| op.timestamp());
744 buffer_ops
745 .into_iter()
746 .map(|op| proto::ContextOperation {
747 variant: Some(proto::context_operation::Variant::BufferOperation(
748 proto::context_operation::BufferOperation {
749 operation: Some(op),
750 },
751 )),
752 })
753 .chain(context_ops.into_iter().map(|op| op.to_proto()))
754 .collect()
755 })
756 }
757
758 pub fn apply_ops(
759 &mut self,
760 ops: impl IntoIterator<Item = ContextOperation>,
761 cx: &mut ModelContext<Self>,
762 ) {
763 let mut buffer_ops = Vec::new();
764 for op in ops {
765 match op {
766 ContextOperation::BufferOperation(buffer_op) => buffer_ops.push(buffer_op),
                op => self.pending_ops.push(op),
768 }
769 }
770 self.buffer
771 .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx));
772 self.flush_ops(cx);
773 }
774
775 fn flush_ops(&mut self, cx: &mut ModelContext<Context>) {
776 let mut changed_messages = HashSet::default();
777 let mut summary_changed = false;
778
779 self.pending_ops.sort_unstable_by_key(|op| op.timestamp());
780 for op in mem::take(&mut self.pending_ops) {
781 if !self.can_apply_op(&op, cx) {
782 self.pending_ops.push(op);
783 continue;
784 }
785
786 let timestamp = op.timestamp();
787 match op.clone() {
788 ContextOperation::InsertMessage {
789 anchor, metadata, ..
790 } => {
791 if self.messages_metadata.contains_key(&anchor.id) {
792 // We already applied this operation.
793 } else {
794 changed_messages.insert(anchor.id);
795 self.insert_message(anchor, metadata, cx);
796 }
797 }
798 ContextOperation::UpdateMessage {
799 message_id,
800 metadata: new_metadata,
801 ..
802 } => {
803 let metadata = self.messages_metadata.get_mut(&message_id).unwrap();
804 if new_metadata.timestamp > metadata.timestamp {
805 *metadata = new_metadata;
806 changed_messages.insert(message_id);
807 }
808 }
809 ContextOperation::UpdateSummary {
810 summary: new_summary,
811 ..
812 } => {
813 if self
814 .summary
815 .as_ref()
816 .map_or(true, |summary| new_summary.timestamp > summary.timestamp)
817 {
818 self.summary = Some(new_summary);
819 summary_changed = true;
820 }
821 }
822 ContextOperation::SlashCommandFinished {
823 id,
824 output_range,
825 sections,
826 ..
827 } => {
828 if self.finished_slash_commands.insert(id) {
829 let buffer = self.buffer.read(cx);
830 self.slash_command_output_sections
831 .extend(sections.iter().cloned());
832 self.slash_command_output_sections
833 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
834 cx.emit(ContextEvent::SlashCommandFinished {
835 output_range,
836 sections,
837 expand_result: false,
838 run_commands_in_output: false,
839 });
840 }
841 }
842 ContextOperation::BufferOperation(_) => unreachable!(),
843 }
844
845 self.version.observe(timestamp);
846 self.timestamp.observe(timestamp);
847 self.operations.push(op);
848 }
849
850 if !changed_messages.is_empty() {
851 self.message_roles_updated(changed_messages, cx);
852 cx.emit(ContextEvent::MessagesEdited);
853 cx.notify();
854 }
855
856 if summary_changed {
857 cx.emit(ContextEvent::SummaryChanged);
858 cx.notify();
859 }
860 }
861
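    /// An operation is only applicable once everything it depends on has been
    /// observed: its version vector must be covered by ours, and any buffer
    /// anchors or message ids it references must already be known to this
    /// replica.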
862 fn can_apply_op(&self, op: &ContextOperation, cx: &AppContext) -> bool {
863 if !self.version.observed_all(op.version()) {
864 return false;
865 }
866
867 match op {
868 ContextOperation::InsertMessage { anchor, .. } => self
869 .buffer
870 .read(cx)
871 .version
872 .observed(anchor.start.timestamp),
873 ContextOperation::UpdateMessage { message_id, .. } => {
874 self.messages_metadata.contains_key(message_id)
875 }
876 ContextOperation::UpdateSummary { .. } => true,
877 ContextOperation::SlashCommandFinished {
878 output_range,
879 sections,
880 ..
881 } => {
882 let version = &self.buffer.read(cx).version;
883 sections
884 .iter()
                    .map(|section| &section.range)
886 .chain([output_range])
887 .all(|range| {
888 let observed_start = range.start == language::Anchor::MIN
889 || range.start == language::Anchor::MAX
890 || version.observed(range.start.timestamp);
891 let observed_end = range.end == language::Anchor::MIN
892 || range.end == language::Anchor::MAX
893 || version.observed(range.end.timestamp);
894 observed_start && observed_end
895 })
896 }
897 ContextOperation::BufferOperation(_) => {
898 panic!("buffer operations should always be applied")
899 }
900 }
901 }
902
903 fn push_op(&mut self, op: ContextOperation, cx: &mut ModelContext<Self>) {
904 self.operations.push(op.clone());
905 cx.emit(ContextEvent::Operation(op));
906 }
907
908 pub fn buffer(&self) -> &Model<Buffer> {
909 &self.buffer
910 }
911
912 pub fn language_registry(&self) -> Arc<LanguageRegistry> {
913 self.language_registry.clone()
914 }
915
916 pub fn project(&self) -> Option<Model<Project>> {
917 self.project.clone()
918 }
919
920 pub fn prompt_builder(&self) -> Arc<PromptBuilder> {
921 self.prompt_builder.clone()
922 }
923
924 pub fn path(&self) -> Option<&Path> {
925 self.path.as_deref()
926 }
927
928 pub fn summary(&self) -> Option<&ContextSummary> {
929 self.summary.as_ref()
930 }
931
932 pub(crate) fn workflow_step_containing(
933 &self,
934 offset: usize,
935 cx: &AppContext,
936 ) -> Option<&WorkflowStep> {
937 let buffer = self.buffer.read(cx);
938 let index = self
939 .workflow_steps
940 .binary_search_by(|step| {
941 let step_range = step.range.to_offset(&buffer);
942 if offset < step_range.start {
943 Ordering::Greater
944 } else if offset > step_range.end {
945 Ordering::Less
946 } else {
947 Ordering::Equal
948 }
949 })
950 .ok()?;
951 Some(&self.workflow_steps[index])
952 }
953
954 pub fn workflow_step_ranges(&self) -> impl Iterator<Item = Range<language::Anchor>> + '_ {
955 self.workflow_steps.iter().map(|step| step.range.clone())
956 }
957
958 pub(crate) fn workflow_step_for_range(
959 &self,
960 range: &Range<language::Anchor>,
961 cx: &AppContext,
962 ) -> Option<&WorkflowStep> {
963 let buffer = self.buffer.read(cx);
964 let index = self.workflow_step_index_for_range(range, buffer).ok()?;
965 Some(&self.workflow_steps[index])
966 }
967
968 fn workflow_step_index_for_range(
969 &self,
970 tagged_range: &Range<text::Anchor>,
971 buffer: &text::BufferSnapshot,
972 ) -> Result<usize, usize> {
973 self.workflow_steps
974 .binary_search_by(|probe| probe.range.cmp(&tagged_range, buffer))
975 }
976
977 pub fn pending_slash_commands(&self) -> &[PendingSlashCommand] {
978 &self.pending_slash_commands
979 }
980
981 pub fn slash_command_output_sections(&self) -> &[SlashCommandOutputSection<language::Anchor>] {
982 &self.slash_command_output_sections
983 }
984
985 pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> {
986 self.pending_tool_uses_by_id.values().collect()
987 }
988
989 pub fn get_tool_use_by_id(&self, id: &Arc<str>) -> Option<&PendingToolUse> {
990 self.pending_tool_uses_by_id.get(id)
991 }
992
993 fn set_language(&mut self, cx: &mut ModelContext<Self>) {
994 let markdown = self.language_registry.language_for_name("Markdown");
995 cx.spawn(|this, mut cx| async move {
996 let markdown = markdown.await?;
997 this.update(&mut cx, |this, cx| {
998 this.buffer
999 .update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx));
1000 })
1001 })
1002 .detach_and_log_err(cx);
1003 }
1004
1005 fn handle_buffer_event(
1006 &mut self,
1007 _: Model<Buffer>,
1008 event: &language::BufferEvent,
1009 cx: &mut ModelContext<Self>,
1010 ) {
1011 match event {
1012 language::BufferEvent::Operation {
1013 operation,
1014 is_local: true,
1015 } => cx.emit(ContextEvent::Operation(ContextOperation::BufferOperation(
1016 operation.clone(),
1017 ))),
1018 language::BufferEvent::Edited => {
1019 self.count_remaining_tokens(cx);
1020 self.reparse(cx);
1021 // Use `inclusive = true` to invalidate a step when an edit occurs
1022 // at the start/end of a parsed step.
1023 cx.emit(ContextEvent::MessagesEdited);
1024 }
1025 _ => {}
1026 }
1027 }
1028
1029 pub(crate) fn token_count(&self) -> Option<usize> {
1030 self.token_count
1031 }
1032
1033 pub(crate) fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
1034 let request = self.to_completion_request(cx);
1035 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
1036 return;
1037 };
1038 self.pending_token_count = cx.spawn(|this, mut cx| {
1039 async move {
1040 cx.background_executor()
1041 .timer(Duration::from_millis(200))
1042 .await;
1043
1044 let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
1045 this.update(&mut cx, |this, cx| {
1046 this.token_count = Some(token_count);
1047 this.start_cache_warming(&model, cx);
1048 cx.notify()
1049 })
1050 }
1051 .log_err()
1052 });
1053 }
1054
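    /// Chooses which messages should act as prompt-cache anchors and updates
    /// each message's cache metadata accordingly. The longest user messages are
    /// preferred (keeping one anchor slot free for the inline assistant), the
    /// final message is skipped when `speculative` is set, and a message edited
    /// since it was cached invalidates itself and every message after it.
    /// Returns whether a newly chosen anchor still needs to be cached.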
1055 pub fn mark_cache_anchors(
1056 &mut self,
1057 cache_configuration: &Option<LanguageModelCacheConfiguration>,
1058 speculative: bool,
1059 cx: &mut ModelContext<Self>,
1060 ) -> bool {
1061 let cache_configuration =
1062 cache_configuration
1063 .as_ref()
1064 .unwrap_or(&LanguageModelCacheConfiguration {
1065 max_cache_anchors: 0,
1066 should_speculate: false,
1067 min_total_token: 0,
1068 });
1069
1070 let messages: Vec<Message> = self.messages(cx).collect();
1071
1072 let mut sorted_messages = messages.clone();
1073 if speculative {
1074 // Avoid caching the last message if this is a speculative cache fetch as
1075 // it's likely to change.
1076 sorted_messages.pop();
1077 }
1078 sorted_messages.retain(|m| m.role == Role::User);
1079 sorted_messages.sort_by(|a, b| b.offset_range.len().cmp(&a.offset_range.len()));
1080
1081 let cache_anchors = if self.token_count.unwrap_or(0) < cache_configuration.min_total_token {
            // If we haven't hit the minimum threshold to enable caching, don't cache anything.
1083 0
1084 } else {
1085 // Save 1 anchor for the inline assistant to use.
1086 max(cache_configuration.max_cache_anchors, 1) - 1
1087 };
1088 sorted_messages.truncate(cache_anchors);
1089
1090 let anchors: HashSet<MessageId> = sorted_messages
1091 .into_iter()
1092 .map(|message| message.id)
1093 .collect();
1094
1095 let buffer = self.buffer.read(cx).snapshot();
1096 let invalidated_caches: HashSet<MessageId> = messages
1097 .iter()
1098 .scan(false, |encountered_invalid, message| {
1099 let message_id = message.id;
1100 let is_invalid = self
1101 .messages_metadata
1102 .get(&message_id)
1103 .map_or(true, |metadata| {
1104 !metadata.is_cache_valid(&buffer, &message.offset_range)
1105 || *encountered_invalid
1106 });
1107 *encountered_invalid |= is_invalid;
1108 Some(if is_invalid { Some(message_id) } else { None })
1109 })
1110 .flatten()
1111 .collect();
1112
1113 let last_anchor = messages.iter().rev().find_map(|message| {
1114 if anchors.contains(&message.id) {
1115 Some(message.id)
1116 } else {
1117 None
1118 }
1119 });
1120
1121 let mut new_anchor_needs_caching = false;
1122 let current_version = &buffer.version;
1123 // If we have no anchors, mark all messages as not being cached.
1124 let mut hit_last_anchor = last_anchor.is_none();
1125
1126 for message in messages.iter() {
1127 if hit_last_anchor {
1128 self.update_metadata(message.id, cx, |metadata| metadata.cache = None);
1129 continue;
1130 }
1131
1132 if let Some(last_anchor) = last_anchor {
1133 if message.id == last_anchor {
1134 hit_last_anchor = true;
1135 }
1136 }
1137
1138 new_anchor_needs_caching = new_anchor_needs_caching
1139 || (invalidated_caches.contains(&message.id) && anchors.contains(&message.id));
1140
1141 self.update_metadata(message.id, cx, |metadata| {
1142 let cache_status = if invalidated_caches.contains(&message.id) {
1143 CacheStatus::Pending
1144 } else {
1145 metadata
1146 .cache
1147 .as_ref()
1148 .map_or(CacheStatus::Pending, |cm| cm.status.clone())
1149 };
1150 metadata.cache = Some(MessageCacheMetadata {
1151 is_anchor: anchors.contains(&message.id),
1152 is_final_anchor: hit_last_anchor,
1153 status: cache_status,
1154 cached_at: current_version.clone(),
1155 });
1156 });
1157 }
1158 new_anchor_needs_caching
1159 }
1160
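    /// Speculatively warms the prompt cache by issuing a throwaway request
    /// ("Respond only with OK") when a new cache anchor appeared, the model's
    /// cache configuration opts into speculation, and no real completion is
    /// currently pending.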
1161 fn start_cache_warming(&mut self, model: &Arc<dyn LanguageModel>, cx: &mut ModelContext<Self>) {
1162 let cache_configuration = model.cache_configuration();
1163
1164 if !self.mark_cache_anchors(&cache_configuration, true, cx) {
1165 return;
1166 }
1167 if !self.pending_completions.is_empty() {
1168 return;
1169 }
1170 if let Some(cache_configuration) = cache_configuration {
1171 if !cache_configuration.should_speculate {
1172 return;
1173 }
1174 }
1175
1176 let request = {
1177 let mut req = self.to_completion_request(cx);
1178 // Skip the last message because it's likely to change and
1179 // therefore would be a waste to cache.
1180 req.messages.pop();
1181 req.messages.push(LanguageModelRequestMessage {
1182 role: Role::User,
1183 content: vec!["Respond only with OK, nothing else.".into()],
1184 cache: false,
1185 });
1186 req
1187 };
1188
1189 let model = Arc::clone(model);
1190 self.pending_cache_warming_task = cx.spawn(|this, mut cx| {
1191 async move {
1192 match model.stream_completion(request, &cx).await {
1193 Ok(mut stream) => {
1194 stream.next().await;
1195 log::info!("Cache warming completed successfully");
1196 }
1197 Err(e) => {
1198 log::warn!("Cache warming failed: {}", e);
1199 }
1200 };
1201 this.update(&mut cx, |this, cx| {
1202 this.update_cache_status_for_completion(cx);
1203 })
1204 .ok();
1205 anyhow::Ok(())
1206 }
1207 .log_err()
1208 });
1209 }
1210
1211 pub fn update_cache_status_for_completion(&mut self, cx: &mut ModelContext<Self>) {
1212 let cached_message_ids: Vec<MessageId> = self
1213 .messages_metadata
1214 .iter()
1215 .filter_map(|(message_id, metadata)| {
1216 metadata.cache.as_ref().and_then(|cache| {
1217 if cache.status == CacheStatus::Pending {
1218 Some(*message_id)
1219 } else {
1220 None
1221 }
1222 })
1223 })
1224 .collect();
1225
1226 for message_id in cached_message_ids {
1227 self.update_metadata(message_id, cx, |metadata| {
1228 if let Some(cache) = &mut metadata.cache {
1229 cache.status = CacheStatus::Cached;
1230 }
1231 });
1232 }
1233 cx.notify();
1234 }
1235
1236 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1237 let buffer = self.buffer.read(cx).text_snapshot();
1238 let mut row_ranges = self
1239 .edits_since_last_parse
1240 .consume()
1241 .into_iter()
1242 .map(|edit| {
1243 let start_row = buffer.offset_to_point(edit.new.start).row;
1244 let end_row = buffer.offset_to_point(edit.new.end).row + 1;
1245 start_row..end_row
1246 })
1247 .peekable();
1248
1249 let mut removed_slash_command_ranges = Vec::new();
1250 let mut updated_slash_commands = Vec::new();
1251 let mut removed_steps = Vec::new();
1252 let mut updated_steps = Vec::new();
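        // Coalesce overlapping or adjacent edited row ranges so that each
        // region of the buffer is reparsed only once.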
1253 while let Some(mut row_range) = row_ranges.next() {
1254 while let Some(next_row_range) = row_ranges.peek() {
1255 if row_range.end >= next_row_range.start {
1256 row_range.end = next_row_range.end;
1257 row_ranges.next();
1258 } else {
1259 break;
1260 }
1261 }
1262
1263 let start = buffer.anchor_before(Point::new(row_range.start, 0));
1264 let end = buffer.anchor_after(Point::new(
1265 row_range.end - 1,
1266 buffer.line_len(row_range.end - 1),
1267 ));
1268
1269 self.reparse_slash_commands_in_range(
1270 start..end,
1271 &buffer,
1272 &mut updated_slash_commands,
1273 &mut removed_slash_command_ranges,
1274 cx,
1275 );
1276 self.reparse_workflow_steps_in_range(
1277 start..end,
1278 &buffer,
1279 &mut updated_steps,
1280 &mut removed_steps,
1281 cx,
1282 );
1283 }
1284
1285 if !updated_slash_commands.is_empty() || !removed_slash_command_ranges.is_empty() {
1286 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1287 removed: removed_slash_command_ranges,
1288 updated: updated_slash_commands,
1289 });
1290 }
1291
1292 if !updated_steps.is_empty() || !removed_steps.is_empty() {
1293 cx.emit(ContextEvent::WorkflowStepsUpdated {
1294 removed: removed_steps,
1295 updated: updated_steps,
1296 });
1297 }
1298 }
1299
1300 fn reparse_slash_commands_in_range(
1301 &mut self,
1302 range: Range<text::Anchor>,
1303 buffer: &BufferSnapshot,
1304 updated: &mut Vec<PendingSlashCommand>,
1305 removed: &mut Vec<Range<text::Anchor>>,
1306 cx: &AppContext,
1307 ) {
1308 let old_range = self.pending_command_indices_for_range(range.clone(), cx);
1309
1310 let mut new_commands = Vec::new();
1311 let mut lines = buffer.text_for_range(range).lines();
1312 let mut offset = lines.offset();
1313 while let Some(line) = lines.next() {
1314 if let Some(command_line) = SlashCommandLine::parse(line) {
1315 let name = &line[command_line.name.clone()];
1316 let arguments = command_line
1317 .arguments
1318 .iter()
1319 .filter_map(|argument_range| {
1320 if argument_range.is_empty() {
1321 None
1322 } else {
1323 line.get(argument_range.clone())
1324 }
1325 })
1326 .map(ToOwned::to_owned)
1327 .collect::<SmallVec<_>>();
1328 if let Some(command) = SlashCommandRegistry::global(cx).command(name) {
1329 if !command.requires_argument() || !arguments.is_empty() {
1330 let start_ix = offset + command_line.name.start - 1;
1331 let end_ix = offset
1332 + command_line
1333 .arguments
1334 .last()
1335 .map_or(command_line.name.end, |argument| argument.end);
1336 let source_range =
1337 buffer.anchor_after(start_ix)..buffer.anchor_after(end_ix);
1338 let pending_command = PendingSlashCommand {
1339 name: name.to_string(),
1340 arguments,
1341 source_range,
1342 status: PendingSlashCommandStatus::Idle,
1343 };
1344 updated.push(pending_command.clone());
1345 new_commands.push(pending_command);
1346 }
1347 }
1348 }
1349
1350 offset = lines.offset();
1351 }
1352
1353 let removed_commands = self.pending_slash_commands.splice(old_range, new_commands);
1354 removed.extend(removed_commands.map(|command| command.source_range));
1355 }
1356
1357 fn reparse_workflow_steps_in_range(
1358 &mut self,
1359 range: Range<text::Anchor>,
1360 buffer: &BufferSnapshot,
1361 updated: &mut Vec<Range<text::Anchor>>,
1362 removed: &mut Vec<Range<text::Anchor>>,
1363 cx: &mut ModelContext<Self>,
1364 ) {
1365 // Rebuild the XML tags in the edited range.
1366 let intersecting_tags_range =
1367 self.indices_intersecting_buffer_range(&self.xml_tags, range.clone(), cx);
1368 let new_tags = self.parse_xml_tags_in_range(buffer, range.clone(), cx);
1369 self.xml_tags
1370 .splice(intersecting_tags_range.clone(), new_tags);
1371
1372 // Find which steps intersect the changed range.
1373 let intersecting_steps_range =
1374 self.indices_intersecting_buffer_range(&self.workflow_steps, range.clone(), cx);
1375
1376 // Reparse all tags after the last unchanged step before the change.
1377 let mut tags_start_ix = 0;
1378 if let Some(preceding_unchanged_step) =
1379 self.workflow_steps[..intersecting_steps_range.start].last()
1380 {
1381 tags_start_ix = match self.xml_tags.binary_search_by(|tag| {
1382 tag.range
1383 .start
1384 .cmp(&preceding_unchanged_step.range.end, buffer)
1385 .then(Ordering::Less)
1386 }) {
1387 Ok(ix) | Err(ix) => ix,
1388 };
1389 }
1390
1391 // Rebuild the edit suggestions in the range.
1392 let mut new_steps = self.parse_steps(tags_start_ix, range.end, buffer);
1393
1394 if let Some(project) = self.project() {
1395 for step in &mut new_steps {
1396 Self::resolve_workflow_step_internal(step, &project, cx);
1397 }
1398 }
1399
1400 updated.extend(new_steps.iter().map(|step| step.range.clone()));
1401 let removed_steps = self
1402 .workflow_steps
1403 .splice(intersecting_steps_range, new_steps);
1404 removed.extend(
1405 removed_steps
1406 .map(|step| step.range)
1407 .filter(|range| !updated.contains(&range)),
1408 );
1409 }
1410
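    /// Scans the given range line by line for `<tag>` and `</tag>` markers,
    /// considering only text that belongs to assistant messages; tags typed by
    /// the user are ignored. Tags whose names don't parse into an
    /// [`XmlTagKind`] are skipped.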
1411 fn parse_xml_tags_in_range(
1412 &self,
1413 buffer: &BufferSnapshot,
1414 range: Range<text::Anchor>,
1415 cx: &AppContext,
1416 ) -> Vec<XmlTag> {
1417 let mut messages = self.messages(cx).peekable();
1418
1419 let mut tags = Vec::new();
1420 let mut lines = buffer.text_for_range(range).lines();
1421 let mut offset = lines.offset();
1422
1423 while let Some(line) = lines.next() {
1424 while let Some(message) = messages.peek() {
1425 if offset < message.offset_range.end {
1426 break;
1427 } else {
1428 messages.next();
1429 }
1430 }
1431
1432 let is_assistant_message = messages
1433 .peek()
1434 .map_or(false, |message| message.role == Role::Assistant);
1435 if is_assistant_message {
1436 for (start_ix, _) in line.match_indices('<') {
1437 let mut name_start_ix = start_ix + 1;
1438 let closing_bracket_ix = line[start_ix..].find('>').map(|i| start_ix + i);
1439 if let Some(closing_bracket_ix) = closing_bracket_ix {
1440 let end_ix = closing_bracket_ix + 1;
1441 let mut is_open_tag = true;
1442 if line[name_start_ix..closing_bracket_ix].starts_with('/') {
1443 name_start_ix += 1;
1444 is_open_tag = false;
1445 }
1446 let tag_inner = &line[name_start_ix..closing_bracket_ix];
1447 let tag_name_len = tag_inner
1448 .find(|c: char| c.is_whitespace())
1449 .unwrap_or(tag_inner.len());
1450 if let Ok(kind) = XmlTagKind::from_str(&tag_inner[..tag_name_len]) {
1451 tags.push(XmlTag {
1452 range: buffer.anchor_after(offset + start_ix)
1453 ..buffer.anchor_before(offset + end_ix),
1454 is_open_tag,
1455 kind,
1456 });
1457 };
1458 }
1459 }
1460 }
1461
1462 offset = lines.offset();
1463 }
1464 tags
1465 }
1466
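    /// Walks the parsed XML tags starting at `tags_start_ix` and groups them
    /// into workflow steps: `<step>` opens a step, nested `<edit>` blocks
    /// collect their `<path>`, `<search>`, `<operation>`, and `<description>`
    /// values, and `</step>` closes the step. A step that is still open when
    /// the tags run out is pushed with its end pinned to `text::Anchor::MAX`.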
1467 fn parse_steps(
1468 &mut self,
1469 tags_start_ix: usize,
1470 buffer_end: text::Anchor,
1471 buffer: &BufferSnapshot,
1472 ) -> Vec<WorkflowStep> {
1473 let mut new_steps = Vec::new();
1474 let mut pending_step = None;
1475 let mut edit_step_depth = 0;
1476 let mut tags = self.xml_tags[tags_start_ix..].iter().peekable();
1477 'tags: while let Some(tag) = tags.next() {
1478 if tag.range.start.cmp(&buffer_end, buffer).is_gt() && edit_step_depth == 0 {
1479 break;
1480 }
1481
1482 if tag.kind == XmlTagKind::Step && tag.is_open_tag {
1483 edit_step_depth += 1;
1484 let edit_start = tag.range.start;
1485 let mut edits = Vec::new();
1486 let mut step = WorkflowStep {
1487 range: edit_start..edit_start,
1488 leading_tags_end: tag.range.end,
1489 trailing_tag_start: None,
1490 edits: Default::default(),
1491 resolution: None,
1492 resolution_task: None,
1493 };
1494
1495 while let Some(tag) = tags.next() {
1496 step.trailing_tag_start.get_or_insert(tag.range.start);
1497
1498 if tag.kind == XmlTagKind::Step && !tag.is_open_tag {
1499 // step.trailing_tag_start = Some(tag.range.start);
1500 edit_step_depth -= 1;
1501 if edit_step_depth == 0 {
1502 step.range.end = tag.range.end;
1503 step.edits = edits.into();
1504 new_steps.push(step);
1505 continue 'tags;
1506 }
1507 }
1508
1509 if tag.kind == XmlTagKind::Edit && tag.is_open_tag {
1510 let mut path = None;
1511 let mut search = None;
1512 let mut operation = None;
1513 let mut description = None;
1514
1515 while let Some(tag) = tags.next() {
1516 if tag.kind == XmlTagKind::Edit && !tag.is_open_tag {
1517 edits.push(WorkflowStepEdit::new(
1518 path,
1519 operation,
1520 search,
1521 description,
1522 ));
1523 break;
1524 }
1525
1526 if tag.is_open_tag
1527 && [
1528 XmlTagKind::Path,
1529 XmlTagKind::Search,
1530 XmlTagKind::Operation,
1531 XmlTagKind::Description,
1532 ]
1533 .contains(&tag.kind)
1534 {
1535 let kind = tag.kind;
1536 let content_start = tag.range.end;
1537 if let Some(tag) = tags.peek() {
1538 if tag.kind == kind && !tag.is_open_tag {
1539 let tag = tags.next().unwrap();
1540 let content_end = tag.range.start;
1541 let mut content = buffer
1542 .text_for_range(content_start..content_end)
1543 .collect::<String>();
1544 content.truncate(content.trim_end().len());
1545 match kind {
1546 XmlTagKind::Path => path = Some(content),
1547 XmlTagKind::Operation => operation = Some(content),
1548 XmlTagKind::Search => {
1549 search = Some(content).filter(|s| !s.is_empty())
1550 }
1551 XmlTagKind::Description => {
1552 description =
1553 Some(content).filter(|s| !s.is_empty())
1554 }
1555 _ => {}
1556 }
1557 }
1558 }
1559 }
1560 }
1561 }
1562 }
1563
1564 pending_step = Some(step);
1565 }
1566 }
1567
1568 if let Some(mut pending_step) = pending_step {
1569 pending_step.range.end = text::Anchor::MAX;
1570 new_steps.push(pending_step);
1571 }
1572
1573 new_steps
1574 }
1575
1576 pub fn resolve_workflow_step(
1577 &mut self,
1578 tagged_range: Range<text::Anchor>,
1579 cx: &mut ModelContext<Self>,
1580 ) -> Option<()> {
1581 let index = self
1582 .workflow_step_index_for_range(&tagged_range, self.buffer.read(cx))
1583 .ok()?;
1584 let step = &mut self.workflow_steps[index];
1585 let project = self.project.as_ref()?;
1586 step.resolution.take();
1587 Self::resolve_workflow_step_internal(step, project, cx);
1588 None
1589 }
1590
1591 fn resolve_workflow_step_internal(
1592 step: &mut WorkflowStep,
1593 project: &Model<Project>,
1594 cx: &mut ModelContext<'_, Context>,
1595 ) {
1596 step.resolution_task = Some(cx.spawn({
1597 let range = step.range.clone();
1598 let edits = step.edits.clone();
1599 let project = project.clone();
1600 |this, mut cx| async move {
1601 let suggestion_groups =
1602 Self::compute_step_resolution(project, edits, &mut cx).await;
1603
1604 this.update(&mut cx, |this, cx| {
1605 let buffer = this.buffer.read(cx).text_snapshot();
1606 let ix = this.workflow_step_index_for_range(&range, &buffer).ok();
1607 if let Some(ix) = ix {
1608 let step = &mut this.workflow_steps[ix];
1609
1610 let resolution = suggestion_groups.map(|suggestion_groups| {
1611 let mut title = String::new();
1612 for mut chunk in buffer.text_for_range(
1613 step.leading_tags_end
1614 ..step.trailing_tag_start.unwrap_or(step.range.end),
1615 ) {
1616 if title.is_empty() {
1617 chunk = chunk.trim_start();
1618 }
1619 if let Some((prefix, _)) = chunk.split_once('\n') {
1620 title.push_str(prefix);
1621 break;
1622 } else {
1623 title.push_str(chunk);
1624 }
1625 }
1626
1627 WorkflowStepResolution {
1628 title,
1629 suggestion_groups,
1630 }
1631 });
1632
1633 step.resolution = Some(Arc::new(resolution));
1634 cx.emit(ContextEvent::WorkflowStepsUpdated {
1635 removed: vec![],
1636 updated: vec![range],
1637 })
1638 }
1639 })
1640 .ok();
1641 }
1642 }));
1643 }
1644
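    /// Resolves each edit of a step against the project, then groups the
    /// resulting suggestions per buffer, merging suggestions whose surrounding
    /// context (five rows above and below) overlaps into a single
    /// [`WorkflowSuggestionGroup`].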
1645 async fn compute_step_resolution(
1646 project: Model<Project>,
1647 edits: Arc<[Result<WorkflowStepEdit>]>,
1648 cx: &mut AsyncAppContext,
1649 ) -> Result<HashMap<Model<Buffer>, Vec<WorkflowSuggestionGroup>>> {
1650 let mut suggestion_tasks = Vec::new();
1651 for edit in edits.iter() {
1652 let edit = edit.as_ref().map_err(|e| anyhow!("{e}"))?;
1653 suggestion_tasks.push(edit.resolve(project.clone(), cx.clone()));
1654 }
1655
1656 // Expand the context ranges of each suggestion and group suggestions with overlapping context ranges.
1657 let suggestions = future::try_join_all(suggestion_tasks).await?;
1658
1659 let mut suggestions_by_buffer = HashMap::default();
1660 for (buffer, suggestion) in suggestions {
1661 suggestions_by_buffer
1662 .entry(buffer)
1663 .or_insert_with(Vec::new)
1664 .push(suggestion);
1665 }
1666
1667 let mut suggestion_groups_by_buffer = HashMap::default();
1668 for (buffer, mut suggestions) in suggestions_by_buffer {
1669 let mut suggestion_groups = Vec::<WorkflowSuggestionGroup>::new();
1670 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
1671 // Sort suggestions by their range so that earlier, larger ranges come first
1672 suggestions.sort_by(|a, b| a.range().cmp(&b.range(), &snapshot));
1673
1674 // Merge overlapping suggestions
1675 suggestions.dedup_by(|a, b| b.try_merge(a, &snapshot));
1676
1677 // Create context ranges for each suggestion
1678 for suggestion in suggestions {
1679 let context_range = {
1680 let suggestion_point_range = suggestion.range().to_point(&snapshot);
1681 let start_row = suggestion_point_range.start.row.saturating_sub(5);
1682 let end_row =
1683 cmp::min(suggestion_point_range.end.row + 5, snapshot.max_point().row);
1684 let start = snapshot.anchor_before(Point::new(start_row, 0));
1685 let end =
1686 snapshot.anchor_after(Point::new(end_row, snapshot.line_len(end_row)));
1687 start..end
1688 };
1689
1690 if let Some(last_group) = suggestion_groups.last_mut() {
1691 if last_group
1692 .context_range
1693 .end
1694 .cmp(&context_range.start, &snapshot)
1695 .is_ge()
1696 {
1697 // Merge with the previous group if context ranges overlap
1698 last_group.context_range.end = context_range.end;
1699 last_group.suggestions.push(suggestion);
1700 } else {
1701 // Create a new group
1702 suggestion_groups.push(WorkflowSuggestionGroup {
1703 context_range,
1704 suggestions: vec![suggestion],
1705 });
1706 }
1707 } else {
1708 // Create the first group
1709 suggestion_groups.push(WorkflowSuggestionGroup {
1710 context_range,
1711 suggestions: vec![suggestion],
1712 });
1713 }
1714 }
1715
1716 suggestion_groups_by_buffer.insert(buffer, suggestion_groups);
1717 }
1718
1719 Ok(suggestion_groups_by_buffer)
1720 }
1721
1722 pub fn pending_command_for_position(
1723 &mut self,
1724 position: language::Anchor,
1725 cx: &mut ModelContext<Self>,
1726 ) -> Option<&mut PendingSlashCommand> {
1727 let buffer = self.buffer.read(cx);
1728 match self
1729 .pending_slash_commands
1730 .binary_search_by(|probe| probe.source_range.end.cmp(&position, buffer))
1731 {
1732 Ok(ix) => Some(&mut self.pending_slash_commands[ix]),
1733 Err(ix) => {
1734 let cmd = self.pending_slash_commands.get_mut(ix)?;
1735 if position.cmp(&cmd.source_range.start, buffer).is_ge()
1736 && position.cmp(&cmd.source_range.end, buffer).is_le()
1737 {
1738 Some(cmd)
1739 } else {
1740 None
1741 }
1742 }
1743 }
1744 }
1745
1746 pub fn pending_commands_for_range(
1747 &self,
1748 range: Range<language::Anchor>,
1749 cx: &AppContext,
1750 ) -> &[PendingSlashCommand] {
1751 let range = self.pending_command_indices_for_range(range, cx);
1752 &self.pending_slash_commands[range]
1753 }
1754
1755 fn pending_command_indices_for_range(
1756 &self,
1757 range: Range<language::Anchor>,
1758 cx: &AppContext,
1759 ) -> Range<usize> {
1760 self.indices_intersecting_buffer_range(&self.pending_slash_commands, range, cx)
1761 }
1762
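    /// Returns the index range of annotations whose ranges intersect `range`,
    /// assuming `all_annotations` is sorted by buffer position. Both bounds are
    /// located with a binary search.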
1763 fn indices_intersecting_buffer_range<T: ContextAnnotation>(
1764 &self,
1765 all_annotations: &[T],
1766 range: Range<language::Anchor>,
1767 cx: &AppContext,
1768 ) -> Range<usize> {
1769 let buffer = self.buffer.read(cx);
1770 let start_ix = match all_annotations
1771 .binary_search_by(|probe| probe.range().end.cmp(&range.start, &buffer))
1772 {
1773 Ok(ix) | Err(ix) => ix,
1774 };
1775 let end_ix = match all_annotations
1776 .binary_search_by(|probe| probe.range().start.cmp(&range.end, &buffer))
1777 {
1778 Ok(ix) => ix + 1,
1779 Err(ix) => ix,
1780 };
1781 start_ix..end_ix
1782 }
1783
1784 pub fn insert_command_output(
1785 &mut self,
1786 command_range: Range<language::Anchor>,
1787 output: Task<Result<SlashCommandOutput>>,
1788 ensure_trailing_newline: bool,
1789 expand_result: bool,
1790 cx: &mut ModelContext<Self>,
1791 ) {
1792 self.reparse(cx);
1793
1794 let insert_output_task = cx.spawn(|this, mut cx| {
1795 let command_range = command_range.clone();
1796 async move {
1797 let output = output.await;
1798 this.update(&mut cx, |this, cx| match output {
1799 Ok(mut output) => {
1800 // Ensure section ranges are valid.
1801 for section in &mut output.sections {
1802 section.range.start = section.range.start.min(output.text.len());
1803 section.range.end = section.range.end.min(output.text.len());
1804 while !output.text.is_char_boundary(section.range.start) {
1805 section.range.start -= 1;
1806 }
1807 while !output.text.is_char_boundary(section.range.end) {
1808 section.range.end += 1;
1809 }
1810 }
1811
1812 // Ensure there is a newline after the last section.
1813 if ensure_trailing_newline {
1814 let has_newline_after_last_section =
1815 output.sections.last().map_or(false, |last_section| {
1816 output.text[last_section.range.end..].ends_with('\n')
1817 });
1818 if !has_newline_after_last_section {
1819 output.text.push('\n');
1820 }
1821 }
1822
1823 let version = this.version.clone();
1824 let command_id = SlashCommandId(this.next_timestamp());
1825 let (operation, event) = this.buffer.update(cx, |buffer, cx| {
1826 let start = command_range.start.to_offset(buffer);
1827 let old_end = command_range.end.to_offset(buffer);
1828 let new_end = start + output.text.len();
1829 buffer.edit([(start..old_end, output.text)], None, cx);
1830
1831 let mut sections = output
1832 .sections
1833 .into_iter()
1834 .map(|section| SlashCommandOutputSection {
1835 range: buffer.anchor_after(start + section.range.start)
1836 ..buffer.anchor_before(start + section.range.end),
1837 icon: section.icon,
1838 label: section.label,
1839 metadata: section.metadata,
1840 })
1841 .collect::<Vec<_>>();
1842 sections.sort_by(|a, b| a.range.cmp(&b.range, buffer));
1843
1844 this.slash_command_output_sections
1845 .extend(sections.iter().cloned());
1846 this.slash_command_output_sections
1847 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
1848
1849 let output_range =
1850 buffer.anchor_after(start)..buffer.anchor_before(new_end);
1851 this.finished_slash_commands.insert(command_id);
1852
1853 (
1854 ContextOperation::SlashCommandFinished {
1855 id: command_id,
1856 output_range: output_range.clone(),
1857 sections: sections.clone(),
1858 version,
1859 },
1860 ContextEvent::SlashCommandFinished {
1861 output_range,
1862 sections,
1863 run_commands_in_output: output.run_commands_in_text,
1864 expand_result,
1865 },
1866 )
1867 });
1868
1869 this.push_op(operation, cx);
1870 cx.emit(event);
1871 }
1872 Err(error) => {
1873 if let Some(pending_command) =
1874 this.pending_command_for_position(command_range.start, cx)
1875 {
1876 pending_command.status =
1877 PendingSlashCommandStatus::Error(error.to_string());
1878 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1879 removed: vec![pending_command.source_range.clone()],
1880 updated: vec![pending_command.clone()],
1881 });
1882 }
1883 }
1884 })
1885 .ok();
1886 }
1887 });
1888
1889 if let Some(pending_command) = self.pending_command_for_position(command_range.start, cx) {
1890 pending_command.status = PendingSlashCommandStatus::Running {
1891 _task: insert_output_task.shared(),
1892 };
1893 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1894 removed: vec![pending_command.source_range.clone()],
1895 updated: vec![pending_command.clone()],
1896 });
1897 }
1898 }
1899
1900 pub fn insert_tool_output(
1901 &mut self,
1902 tool_use_id: Arc<str>,
1903 output: Task<Result<String>>,
1904 cx: &mut ModelContext<Self>,
1905 ) {
1906 let insert_output_task = cx.spawn(|this, mut cx| {
1907 let tool_use_id = tool_use_id.clone();
1908 async move {
1909 let output = output.await;
1910 this.update(&mut cx, |this, cx| match output {
1911 Ok(mut output) => {
1912 const NEWLINE: char = '\n';
1913
1914 if !output.ends_with(NEWLINE) {
1915 output.push(NEWLINE);
1916 }
1917
1918 let anchor_range = this.buffer.update(cx, |buffer, cx| {
1919 let insert_start = buffer.len().to_offset(buffer);
1920 let insert_end = insert_start;
1921
1922 let start = insert_start;
1923 let end = start + output.len() - NEWLINE.len_utf8();
1924
1925 buffer.edit([(insert_start..insert_end, output)], None, cx);
1926
1927 let output_range = buffer.anchor_after(start)..buffer.anchor_after(end);
1928
1929 output_range
1930 });
1931
1932 this.insert_content(
1933 Content::ToolResult {
1934 range: anchor_range.clone(),
1935 tool_use_id: tool_use_id.clone(),
1936 },
1937 cx,
1938 );
1939
1940 cx.emit(ContextEvent::ToolFinished {
1941 tool_use_id,
1942 output_range: anchor_range,
1943 });
1944 }
1945 Err(err) => {
1946 if let Some(tool_use) = this.pending_tool_uses_by_id.get_mut(&tool_use_id) {
1947 tool_use.status = PendingToolUseStatus::Error(err.to_string());
1948 }
1949 }
1950 })
1951 .ok();
1952 }
1953 });
1954
1955 if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) {
1956 tool_use.status = PendingToolUseStatus::Running {
1957 _task: insert_output_task.shared(),
1958 };
1959 }
1960 }
1961
1962 pub fn completion_provider_changed(&mut self, cx: &mut ModelContext<Self>) {
1963 self.count_remaining_tokens(cx);
1964 }
1965
1966 fn get_last_valid_message_id(&self, cx: &ModelContext<Self>) -> Option<MessageId> {
1967 self.message_anchors.iter().rev().find_map(|message| {
1968 message
1969 .start
1970 .is_valid(self.buffer.read(cx))
1971 .then_some(message.id)
1972 })
1973 }
1974
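    /// Requests a streamed completion from the active language model.
    ///
    /// Marks cache anchors, appends a pending assistant message followed by an
    /// empty user message for the next reply, and returns the anchor of that
    /// user message. Returns `None` when no provider or model is active, the
    /// provider is unauthenticated, or there is no valid last message.
    ///
    /// A minimal call-site sketch (assumes a `Model<Context>` handle named
    /// `context`, which is not defined in this file):
    ///
    /// ```ignore
    /// let queued_user_message = context.update(cx, |context, cx| context.assist(cx));
    /// ```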
1975 pub fn assist(&mut self, cx: &mut ModelContext<Self>) -> Option<MessageAnchor> {
1976 let model_registry = LanguageModelRegistry::read_global(cx);
1977 let provider = model_registry.active_provider()?;
1978 let model = model_registry.active_model()?;
1979 let last_message_id = self.get_last_valid_message_id(cx)?;
1980
1981 if !provider.is_authenticated(cx) {
1982 log::info!("completion provider has no credentials");
1983 return None;
1984 }
1985 // Compute which messages to cache, including the last one.
1986 self.mark_cache_anchors(&model.cache_configuration(), false, cx);
1987
1988 let mut request = self.to_completion_request(cx);
1989
1990 if cx.has_flag::<ToolUseFeatureFlag>() {
1991 let tool_registry = ToolRegistry::global(cx);
1992 request.tools = tool_registry
1993 .tools()
1994 .into_iter()
1995 .map(|tool| LanguageModelRequestTool {
1996 name: tool.name(),
1997 description: tool.description(),
1998 input_schema: tool.input_schema(),
1999 })
2000 .collect();
2001 }
2002
2003 let assistant_message = self
2004 .insert_message_after(last_message_id, Role::Assistant, MessageStatus::Pending, cx)
2005 .unwrap();
2006
2007 // Queue up the user's next reply.
2008 let user_message = self
2009 .insert_message_after(assistant_message.id, Role::User, MessageStatus::Done, cx)
2010 .unwrap();
2011
2012 let pending_completion_id = post_inc(&mut self.completion_count);
2013
2014 let task = cx.spawn({
2015 |this, mut cx| async move {
2016 let stream = model.stream_completion(request, &cx);
2017 let assistant_message_id = assistant_message.id;
2018 let mut response_latency = None;
2019 let stream_completion = async {
2020 let request_start = Instant::now();
2021 let mut events = stream.await?;
2022 let mut stop_reason = StopReason::EndTurn;
2023
2024 while let Some(event) = events.next().await {
2025 if response_latency.is_none() {
2026 response_latency = Some(request_start.elapsed());
2027 }
2028 let event = event?;
2029
2030 this.update(&mut cx, |this, cx| {
2031 let message_ix = this
2032 .message_anchors
2033 .iter()
2034 .position(|message| message.id == assistant_message_id)?;
2035 this.buffer.update(cx, |buffer, cx| {
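                        // The assistant message currently ends just before the next valid
                        // message anchor, or at the end of the buffer if none follows.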
2036 let message_old_end_offset = this.message_anchors[message_ix + 1..]
2037 .iter()
2038 .find(|message| message.start.is_valid(buffer))
2039 .map_or(buffer.len(), |message| {
2040 message.start.to_offset(buffer).saturating_sub(1)
2041 });
2042
2043 match event {
2044 LanguageModelCompletionEvent::Stop(reason) => {
2045 stop_reason = reason;
2046 }
2047 LanguageModelCompletionEvent::Text(chunk) => {
2048 buffer.edit(
2049 [(
2050 message_old_end_offset..message_old_end_offset,
2051 chunk,
2052 )],
2053 None,
2054 cx,
2055 );
2056 }
2057 LanguageModelCompletionEvent::ToolUse(tool_use) => {
2058 const NEWLINE: char = '\n';
2059
2060 let mut text = String::new();
2061 text.push(NEWLINE);
2062 text.push_str(
2063 &serde_json::to_string_pretty(&tool_use)
2064 .expect("failed to serialize tool use to JSON"),
2065 );
2066 text.push(NEWLINE);
2067 let text_len = text.len();
2068
2069 buffer.edit(
2070 [(
2071 message_old_end_offset..message_old_end_offset,
2072 text,
2073 )],
2074 None,
2075 cx,
2076 );
2077
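                            // Anchor the source range around the serialized tool-use JSON,
                            // excluding the newlines inserted on either side.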
2078 let start_ix = message_old_end_offset + NEWLINE.len_utf8();
2079 let end_ix =
2080 message_old_end_offset + text_len - NEWLINE.len_utf8();
2081 let source_range = buffer.anchor_after(start_ix)
2082 ..buffer.anchor_after(end_ix);
2083
2084 let tool_use_id: Arc<str> = tool_use.id.into();
2085 this.pending_tool_uses_by_id.insert(
2086 tool_use_id.clone(),
2087 PendingToolUse {
2088 id: tool_use_id,
2089 name: tool_use.name,
2090 input: tool_use.input,
2091 status: PendingToolUseStatus::Idle,
2092 source_range,
2093 },
2094 );
2095 }
2096 }
2097 });
2098
2099 cx.emit(ContextEvent::StreamedCompletion);
2100
2101 Some(())
2102 })?;
2103 smol::future::yield_now().await;
2104 }
2105 this.update(&mut cx, |this, cx| {
2106 this.pending_completions
2107 .retain(|completion| completion.id != pending_completion_id);
2108 this.summarize(false, cx);
2109 this.update_cache_status_for_completion(cx);
2110 })?;
2111
2112 anyhow::Ok(stop_reason)
2113 };
2114
2115 let result = stream_completion.await;
2116
2117 this.update(&mut cx, |this, cx| {
2118 let error_message = if let Some(error) = result.as_ref().err() {
2119 if error.is::<PaymentRequiredError>() {
2120 cx.emit(ContextEvent::ShowPaymentRequiredError);
2121 this.update_metadata(assistant_message_id, cx, |metadata| {
2122 metadata.status = MessageStatus::Canceled;
2123 });
2124 Some(error.to_string())
2125 } else if error.is::<MaxMonthlySpendReachedError>() {
2126 cx.emit(ContextEvent::ShowMaxMonthlySpendReachedError);
2127 this.update_metadata(assistant_message_id, cx, |metadata| {
2128 metadata.status = MessageStatus::Canceled;
2129 });
2130 Some(error.to_string())
2131 } else {
2132 let error_message = error.to_string().trim().to_string();
2133 cx.emit(ContextEvent::ShowAssistError(SharedString::from(
2134 error_message.clone(),
2135 )));
2136 this.update_metadata(assistant_message_id, cx, |metadata| {
2137 metadata.status =
2138 MessageStatus::Error(SharedString::from(error_message.clone()));
2139 });
2140 Some(error_message)
2141 }
2142 } else {
2143 this.update_metadata(assistant_message_id, cx, |metadata| {
2144 metadata.status = MessageStatus::Done;
2145 });
2146 None
2147 };
2148
2149 if let Some(telemetry) = this.telemetry.as_ref() {
2150 let language_name = this
2151 .buffer
2152 .read(cx)
2153 .language()
2154 .map(|language| language.name());
2155 telemetry.report_assistant_event(AssistantEvent {
2156 conversation_id: Some(this.id.0.clone()),
2157 kind: AssistantKind::Panel,
2158 phase: AssistantPhase::Response,
2159 model: model.telemetry_id(),
2160 model_provider: model.provider_id().to_string(),
2161 response_latency,
2162 error_message,
2163 language_name: language_name.map(|name| name.to_proto()),
2164 });
2165 }
2166
2167 if let Ok(stop_reason) = result {
2168 match stop_reason {
2169 StopReason::ToolUse => {
2170 cx.emit(ContextEvent::UsePendingTools);
2171 }
2172 StopReason::EndTurn => {}
2173 StopReason::MaxTokens => {}
2174 }
2175 }
2176 })
2177 .ok();
2178 }
2179 });
2180
2181 self.pending_completions.push(PendingCompletion {
2182 id: pending_completion_id,
2183 assistant_message_id: assistant_message.id,
2184 _task: task,
2185 });
2186
2187 Some(user_message)
2188 }
2189
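    /// Builds a `LanguageModelRequest` from every message whose status is
    /// `Done`, interleaving each message's text with any images, tool uses,
    /// and tool results recorded in `self.contents` that fall inside it.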
2190 pub fn to_completion_request(&self, cx: &AppContext) -> LanguageModelRequest {
2191 let buffer = self.buffer.read(cx);
2192
2193 let mut contents = self.contents(cx).peekable();
2194
2195 fn collect_text_content(buffer: &Buffer, range: Range<usize>) -> Option<String> {
2196 let text: String = buffer.text_for_range(range.clone()).collect();
2197 if text.trim().is_empty() {
2198 None
2199 } else {
2200 Some(text)
2201 }
2202 }
2203
2204 let mut completion_request = LanguageModelRequest {
2205 messages: Vec::new(),
2206 tools: Vec::new(),
2207 stop: Vec::new(),
2208 temperature: None,
2209 };
2210 for message in self.messages(cx) {
2211 if message.status != MessageStatus::Done {
2212 continue;
2213 }
2214
2215 let mut offset = message.offset_range.start;
2216 let mut request_message = LanguageModelRequestMessage {
2217 role: message.role,
2218 content: Vec::new(),
2219 cache: message
2220 .cache
2221 .as_ref()
2222 .map_or(false, |cache| cache.is_anchor),
2223 };
2224
2225 while let Some(content) = contents.peek() {
2226 if content
2227 .range()
2228 .end
2229 .cmp(&message.anchor_range.end, buffer)
2230 .is_lt()
2231 {
2232 let content = contents.next().unwrap();
2233 let range = content.range().to_offset(buffer);
2234 request_message.content.extend(
2235 collect_text_content(buffer, offset..range.start).map(MessageContent::Text),
2236 );
2237
2238 match content {
2239 Content::Image { image, .. } => {
2240 if let Some(image) = image.clone().now_or_never().flatten() {
2241 request_message
2242 .content
2243 .push(language_model::MessageContent::Image(image));
2244 }
2245 }
2246 Content::ToolUse { tool_use, .. } => {
2247 request_message
2248 .content
2249 .push(language_model::MessageContent::ToolUse(tool_use.clone()));
2250 }
2251 Content::ToolResult { tool_use_id, .. } => {
2252 request_message.content.push(
2253 language_model::MessageContent::ToolResult(
2254 LanguageModelToolResult {
2255 tool_use_id: tool_use_id.to_string(),
2256 is_error: false,
2257 content: collect_text_content(buffer, range.clone())
2258 .unwrap_or_default(),
2259 },
2260 ),
2261 );
2262 }
2263 }
2264
2265 offset = range.end;
2266 } else {
2267 break;
2268 }
2269 }
2270
2271 request_message.content.extend(
2272 collect_text_content(buffer, offset..message.offset_range.end)
2273 .map(MessageContent::Text),
2274 );
2275
2276 completion_request.messages.push(request_message);
2277 }
2278
2279 completion_request
2280 }
2281
2282 pub fn cancel_last_assist(&mut self, cx: &mut ModelContext<Self>) -> bool {
2283 if let Some(pending_completion) = self.pending_completions.pop() {
2284 self.update_metadata(pending_completion.assistant_message_id, cx, |metadata| {
2285 if metadata.status == MessageStatus::Pending {
2286 metadata.status = MessageStatus::Canceled;
2287 }
2288 });
2289 true
2290 } else {
2291 false
2292 }
2293 }
2294
2295 pub fn cycle_message_roles(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
2296 for id in &ids {
2297 if let Some(metadata) = self.messages_metadata.get(id) {
2298 let role = metadata.role.cycle();
2299 self.update_metadata(*id, cx, |metadata| metadata.role = role);
2300 }
2301 }
2302
2303 self.message_roles_updated(ids, cx);
2304 }
2305
2306 fn message_roles_updated(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
2307 let mut ranges = Vec::new();
2308 for message in self.messages(cx) {
2309 if ids.contains(&message.id) {
2310 ranges.push(message.anchor_range.clone());
2311 }
2312 }
2313
2314 let buffer = self.buffer.read(cx).text_snapshot();
2315 let mut updated = Vec::new();
2316 let mut removed = Vec::new();
2317 for range in ranges {
2318 self.reparse_workflow_steps_in_range(range, &buffer, &mut updated, &mut removed, cx);
2319 }
2320
2321 if !updated.is_empty() || !removed.is_empty() {
2322 cx.emit(ContextEvent::WorkflowStepsUpdated { removed, updated })
2323 }
2324 }
2325
2326 pub fn update_metadata(
2327 &mut self,
2328 id: MessageId,
2329 cx: &mut ModelContext<Self>,
2330 f: impl FnOnce(&mut MessageMetadata),
2331 ) {
2332 let version = self.version.clone();
2333 let timestamp = self.next_timestamp();
2334 if let Some(metadata) = self.messages_metadata.get_mut(&id) {
2335 f(metadata);
2336 metadata.timestamp = timestamp;
2337 let operation = ContextOperation::UpdateMessage {
2338 message_id: id,
2339 metadata: metadata.clone(),
2340 version,
2341 };
2342 self.push_op(operation, cx);
2343 cx.emit(ContextEvent::MessagesEdited);
2344 cx.notify();
2345 }
2346 }
2347
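    /// Inserts a new message with the given role and status after `message_id`.
    ///
    /// A newline separator is added just before the next valid message (or at
    /// the end of the buffer) and the new message's anchor is returned.
    /// Returns `None` if `message_id` is not found.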
2348 pub fn insert_message_after(
2349 &mut self,
2350 message_id: MessageId,
2351 role: Role,
2352 status: MessageStatus,
2353 cx: &mut ModelContext<Self>,
2354 ) -> Option<MessageAnchor> {
2355 if let Some(prev_message_ix) = self
2356 .message_anchors
2357 .iter()
2358 .position(|message| message.id == message_id)
2359 {
2360 // Find the next valid message after the one we were given.
2361 let mut next_message_ix = prev_message_ix + 1;
2362 while let Some(next_message) = self.message_anchors.get(next_message_ix) {
2363 if next_message.start.is_valid(self.buffer.read(cx)) {
2364 break;
2365 }
2366 next_message_ix += 1;
2367 }
2368
2369 let start = self.buffer.update(cx, |buffer, cx| {
2370 let offset = self
2371 .message_anchors
2372 .get(next_message_ix)
2373 .map_or(buffer.len(), |message| {
2374 buffer.clip_offset(message.start.to_offset(buffer) - 1, Bias::Left)
2375 });
2376 buffer.edit([(offset..offset, "\n")], None, cx);
2377 buffer.anchor_before(offset + 1)
2378 });
2379
2380 let version = self.version.clone();
2381 let anchor = MessageAnchor {
2382 id: MessageId(self.next_timestamp()),
2383 start,
2384 };
2385 let metadata = MessageMetadata {
2386 role,
2387 status,
2388 timestamp: anchor.id.0,
2389 cache: None,
2390 };
2391 self.insert_message(anchor.clone(), metadata.clone(), cx);
2392 self.push_op(
2393 ContextOperation::InsertMessage {
2394 anchor: anchor.clone(),
2395 metadata,
2396 version,
2397 },
2398 cx,
2399 );
2400 Some(anchor)
2401 } else {
2402 None
2403 }
2404 }
2405
2406 pub fn insert_content(&mut self, content: Content, cx: &mut ModelContext<Self>) {
2407 let buffer = self.buffer.read(cx);
2408 let insertion_ix = match self
2409 .contents
2410 .binary_search_by(|probe| probe.cmp(&content, buffer))
2411 {
2412 Ok(ix) => {
2413 self.contents.remove(ix);
2414 ix
2415 }
2416 Err(ix) => ix,
2417 };
2418 self.contents.insert(insertion_ix, content);
2419 cx.emit(ContextEvent::MessagesEdited);
2420 }
2421
2422 pub fn contents<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Content> {
2423 let buffer = self.buffer.read(cx);
2424 self.contents
2425 .iter()
2426 .filter(|content| {
2427 let range = content.range();
2428 range.start.is_valid(buffer) && range.end.is_valid(buffer)
2429 })
2430 .cloned()
2431 }
2432
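    /// Splits the message containing `range` at the range boundaries,
    /// inserting newlines where none exist, and returns the newly created
    /// message anchors: the message starting at the range (if one is needed)
    /// and the suffix that follows it. Returns `(None, None)` when the range
    /// spans multiple messages or falls outside any message.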
2433 pub fn split_message(
2434 &mut self,
2435 range: Range<usize>,
2436 cx: &mut ModelContext<Self>,
2437 ) -> (Option<MessageAnchor>, Option<MessageAnchor>) {
2438 let start_message = self.message_for_offset(range.start, cx);
2439 let end_message = self.message_for_offset(range.end, cx);
2440 if let Some((start_message, end_message)) = start_message.zip(end_message) {
2441 // Prevent splitting when range spans multiple messages.
2442 if start_message.id != end_message.id {
2443 return (None, None);
2444 }
2445
2446 let message = start_message;
2447 let role = message.role;
2448 let mut edited_buffer = false;
2449
2450 let mut suffix_start = None;
2451
2452 // TODO: why did this start panicking?
2453 if range.start > message.offset_range.start
2454 && range.end < message.offset_range.end.saturating_sub(1)
2455 {
2456 if self.buffer.read(cx).chars_at(range.end).next() == Some('\n') {
2457 suffix_start = Some(range.end + 1);
2458 } else if self.buffer.read(cx).reversed_chars_at(range.end).next() == Some('\n') {
2459 suffix_start = Some(range.end);
2460 }
2461 }
2462
2463 let version = self.version.clone();
2464 let suffix = if let Some(suffix_start) = suffix_start {
2465 MessageAnchor {
2466 id: MessageId(self.next_timestamp()),
2467 start: self.buffer.read(cx).anchor_before(suffix_start),
2468 }
2469 } else {
2470 self.buffer.update(cx, |buffer, cx| {
2471 buffer.edit([(range.end..range.end, "\n")], None, cx);
2472 });
2473 edited_buffer = true;
2474 MessageAnchor {
2475 id: MessageId(self.next_timestamp()),
2476 start: self.buffer.read(cx).anchor_before(range.end + 1),
2477 }
2478 };
2479
2480 let suffix_metadata = MessageMetadata {
2481 role,
2482 status: MessageStatus::Done,
2483 timestamp: suffix.id.0,
2484 cache: None,
2485 };
2486 self.insert_message(suffix.clone(), suffix_metadata.clone(), cx);
2487 self.push_op(
2488 ContextOperation::InsertMessage {
2489 anchor: suffix.clone(),
2490 metadata: suffix_metadata,
2491 version,
2492 },
2493 cx,
2494 );
2495
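            // An empty range, or one starting at the very beginning of the message,
            // only needs the suffix; no separate message is created for the selection.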
2496 let new_messages =
2497 if range.start == range.end || range.start == message.offset_range.start {
2498 (None, Some(suffix))
2499 } else {
2500 let mut prefix_end = None;
2501 if range.start > message.offset_range.start
2502 && range.end < message.offset_range.end.saturating_sub(1)
2503 {
2504 if self.buffer.read(cx).chars_at(range.start).next() == Some('\n') {
2505 prefix_end = Some(range.start + 1);
2506 } else if self.buffer.read(cx).reversed_chars_at(range.start).next()
2507 == Some('\n')
2508 {
2509 prefix_end = Some(range.start);
2510 }
2511 }
2512
2513 let version = self.version.clone();
2514 let selection = if let Some(prefix_end) = prefix_end {
2515 MessageAnchor {
2516 id: MessageId(self.next_timestamp()),
2517 start: self.buffer.read(cx).anchor_before(prefix_end),
2518 }
2519 } else {
2520 self.buffer.update(cx, |buffer, cx| {
2521 buffer.edit([(range.start..range.start, "\n")], None, cx)
2522 });
2523 edited_buffer = true;
2524 MessageAnchor {
2525 id: MessageId(self.next_timestamp()),
2526 start: self.buffer.read(cx).anchor_before(range.start + 1),
2527 }
2528 };
2529
2530 let selection_metadata = MessageMetadata {
2531 role,
2532 status: MessageStatus::Done,
2533 timestamp: selection.id.0,
2534 cache: None,
2535 };
2536 self.insert_message(selection.clone(), selection_metadata.clone(), cx);
2537 self.push_op(
2538 ContextOperation::InsertMessage {
2539 anchor: selection.clone(),
2540 metadata: selection_metadata,
2541 version,
2542 },
2543 cx,
2544 );
2545
2546 (Some(selection), Some(suffix))
2547 };
2548
2549 if !edited_buffer {
2550 cx.emit(ContextEvent::MessagesEdited);
2551 }
2552 new_messages
2553 } else {
2554 (None, None)
2555 }
2556 }
2557
2558 fn insert_message(
2559 &mut self,
2560 new_anchor: MessageAnchor,
2561 new_metadata: MessageMetadata,
2562 cx: &mut ModelContext<Self>,
2563 ) {
2564 cx.emit(ContextEvent::MessagesEdited);
2565
2566 self.messages_metadata.insert(new_anchor.id, new_metadata);
2567
2568 let buffer = self.buffer.read(cx);
2569 let insertion_ix = self
2570 .message_anchors
2571 .iter()
2572 .position(|anchor| {
2573 let comparison = new_anchor.start.cmp(&anchor.start, buffer);
2574 comparison.is_lt() || (comparison.is_eq() && new_anchor.id > anchor.id)
2575 })
2576 .unwrap_or(self.message_anchors.len());
2577 self.message_anchors.insert(insertion_ix, new_anchor);
2578 }
2579
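    /// Generates a short title for the context by streaming a summary request
    /// to the active model. Runs when `replace_old` is true, or when the
    /// context has at least two messages and no summary yet.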
2580 pub(super) fn summarize(&mut self, replace_old: bool, cx: &mut ModelContext<Self>) {
2581 let Some(provider) = LanguageModelRegistry::read_global(cx).active_provider() else {
2582 return;
2583 };
2584 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
2585 return;
2586 };
2587
2588 if replace_old || (self.message_anchors.len() >= 2 && self.summary.is_none()) {
2589 if !provider.is_authenticated(cx) {
2590 return;
2591 }
2592
2593 let mut request = self.to_completion_request(cx);
2594 request.messages.push(LanguageModelRequestMessage {
2595 role: Role::User,
2596 content: vec![
2597 "Summarize the context into a short title without punctuation.".into(),
2598 ],
2599 cache: false,
2600 });
2601
2602 self.pending_summary = cx.spawn(|this, mut cx| {
2603 async move {
2604 let stream = model.stream_completion_text(request, &cx);
2605 let mut messages = stream.await?;
2606
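                    // When regenerating, clear the previous summary text on the
                    // first streamed chunk only.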
2607 let mut replaced = !replace_old;
2608 while let Some(message) = messages.next().await {
2609 let text = message?;
2610 let mut lines = text.lines();
2611 this.update(&mut cx, |this, cx| {
2612 let version = this.version.clone();
2613 let timestamp = this.next_timestamp();
2614 let summary = this.summary.get_or_insert(ContextSummary::default());
2615 if !replaced && replace_old {
2616 summary.text.clear();
2617 replaced = true;
2618 }
2619 summary.text.extend(lines.next());
2620 summary.timestamp = timestamp;
2621 let operation = ContextOperation::UpdateSummary {
2622 summary: summary.clone(),
2623 version,
2624 };
2625 this.push_op(operation, cx);
2626 cx.emit(ContextEvent::SummaryChanged);
2627 })?;
2628
2629 // Stop if the LLM generated multiple lines.
2630 if lines.next().is_some() {
2631 break;
2632 }
2633 }
2634
2635 this.update(&mut cx, |this, cx| {
2636 let version = this.version.clone();
2637 let timestamp = this.next_timestamp();
2638 if let Some(summary) = this.summary.as_mut() {
2639 summary.done = true;
2640 summary.timestamp = timestamp;
2641 let operation = ContextOperation::UpdateSummary {
2642 summary: summary.clone(),
2643 version,
2644 };
2645 this.push_op(operation, cx);
2646 cx.emit(ContextEvent::SummaryChanged);
2647 }
2648 })?;
2649
2650 anyhow::Ok(())
2651 }
2652 .log_err()
2653 });
2654 }
2655 }
2656
2657 fn message_for_offset(&self, offset: usize, cx: &AppContext) -> Option<Message> {
2658 self.messages_for_offsets([offset], cx).pop()
2659 }
2660
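    /// Returns the messages containing the given offsets, which are expected
    /// in ascending order; offsets that fall inside the same message produce
    /// a single entry.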
2661 pub fn messages_for_offsets(
2662 &self,
2663 offsets: impl IntoIterator<Item = usize>,
2664 cx: &AppContext,
2665 ) -> Vec<Message> {
2666 let mut result = Vec::new();
2667
2668 let mut messages = self.messages(cx).peekable();
2669 let mut offsets = offsets.into_iter().peekable();
2670 let mut current_message = messages.next();
2671 while let Some(offset) = offsets.next() {
2672 // Locate the message that contains the offset.
2673 while current_message.as_ref().map_or(false, |message| {
2674 !message.offset_range.contains(&offset) && messages.peek().is_some()
2675 }) {
2676 current_message = messages.next();
2677 }
2678 let Some(message) = current_message.as_ref() else {
2679 break;
2680 };
2681
2682 // Skip offsets that are in the same message.
2683 while offsets.peek().map_or(false, |offset| {
2684 message.offset_range.contains(offset) || messages.peek().is_none()
2685 }) {
2686 offsets.next();
2687 }
2688
2689 result.push(message.clone());
2690 }
2691 result
2692 }
2693
2694 fn messages_from_anchors<'a>(
2695 &'a self,
2696 message_anchors: impl Iterator<Item = &'a MessageAnchor> + 'a,
2697 cx: &'a AppContext,
2698 ) -> impl 'a + Iterator<Item = Message> {
2699 let buffer = self.buffer.read(cx);
2700
2701 Self::messages_from_iters(buffer, &self.messages_metadata, message_anchors.enumerate())
2702 }
2703
2704 pub fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Message> {
2705 self.messages_from_anchors(self.message_anchors.iter(), cx)
2706 }
2707
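    /// Converts `(index, anchor)` pairs into `Message` values, merging runs of
    /// anchors whose start positions are no longer valid into the preceding
    /// message's index range.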
2708 pub fn messages_from_iters<'a>(
2709 buffer: &'a Buffer,
2710 metadata: &'a HashMap<MessageId, MessageMetadata>,
2711 messages: impl Iterator<Item = (usize, &'a MessageAnchor)> + 'a,
2712 ) -> impl 'a + Iterator<Item = Message> {
2713 let mut messages = messages.peekable();
2714
2715 iter::from_fn(move || {
2716 if let Some((start_ix, message_anchor)) = messages.next() {
2717 let metadata = metadata.get(&message_anchor.id)?;
2718
2719 let message_start = message_anchor.start.to_offset(buffer);
2720 let mut message_end = None;
2721 let mut end_ix = start_ix;
2722 while let Some((_, next_message)) = messages.peek() {
2723 if next_message.start.is_valid(buffer) {
2724 message_end = Some(next_message.start);
2725 break;
2726 } else {
2727 end_ix += 1;
2728 messages.next();
2729 }
2730 }
2731 let message_end_anchor = message_end.unwrap_or(language::Anchor::MAX);
2732 let message_end = message_end_anchor.to_offset(buffer);
2733
2734 return Some(Message {
2735 index_range: start_ix..end_ix,
2736 offset_range: message_start..message_end,
2737 anchor_range: message_anchor.start..message_end_anchor,
2738 id: message_anchor.id,
2739 role: metadata.role,
2740 status: metadata.status.clone(),
2741 cache: metadata.cache.clone(),
2742 });
2743 }
2744 None
2745 })
2746 }
2747
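    /// Saves the context to `contexts_dir()` after an optional debounce.
    ///
    /// Remote replicas are skipped, and nothing is written until the summary
    /// is complete. The file name is derived from the summary plus a numeric
    /// discriminant, and the previously saved file is removed when the path
    /// changes.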
2748 pub fn save(
2749 &mut self,
2750 debounce: Option<Duration>,
2751 fs: Arc<dyn Fs>,
2752 cx: &mut ModelContext<Context>,
2753 ) {
2754 if self.replica_id() != ReplicaId::default() {
2755 // Prevent saving a remote context for now.
2756 return;
2757 }
2758
2759 self.pending_save = cx.spawn(|this, mut cx| async move {
2760 if let Some(debounce) = debounce {
2761 cx.background_executor().timer(debounce).await;
2762 }
2763
2764 let (old_path, summary) = this.read_with(&cx, |this, _| {
2765 let path = this.path.clone();
2766 let summary = if let Some(summary) = this.summary.as_ref() {
2767 if summary.done {
2768 Some(summary.text.clone())
2769 } else {
2770 None
2771 }
2772 } else {
2773 None
2774 };
2775 (path, summary)
2776 })?;
2777
2778 if let Some(summary) = summary {
2779 let context = this.read_with(&cx, |this, cx| this.serialize(cx))?;
2780 let mut discriminant = 1;
2781 let mut new_path;
2782 loop {
2783 new_path = contexts_dir().join(&format!(
2784 "{} - {}.zed.json",
2785 summary.trim(),
2786 discriminant
2787 ));
2788 if fs.is_file(&new_path).await {
2789 discriminant += 1;
2790 } else {
2791 break;
2792 }
2793 }
2794
2795 fs.create_dir(contexts_dir().as_ref()).await?;
2796 fs.atomic_write(new_path.clone(), serde_json::to_string(&context).unwrap())
2797 .await?;
2798 if let Some(old_path) = old_path {
2799 if new_path != old_path {
2800 fs.remove_file(
2801 &old_path,
2802 RemoveOptions {
2803 recursive: false,
2804 ignore_if_not_exists: true,
2805 },
2806 )
2807 .await?;
2808 }
2809 }
2810
2811 this.update(&mut cx, |this, _| this.path = Some(new_path))?;
2812 }
2813
2814 Ok(())
2815 });
2816 }
2817
2818 pub(crate) fn custom_summary(&mut self, custom_summary: String, cx: &mut ModelContext<Self>) {
2819 let timestamp = self.next_timestamp();
2820 let summary = self.summary.get_or_insert(ContextSummary::default());
2821 summary.timestamp = timestamp;
2822 summary.done = true;
2823 summary.text = custom_summary;
2824 cx.emit(ContextEvent::SummaryChanged);
2825 }
2826}
2827
2828#[derive(Debug, Default)]
2829pub struct ContextVersion {
2830 context: clock::Global,
2831 buffer: clock::Global,
2832}
2833
2834impl ContextVersion {
2835 pub fn from_proto(proto: &proto::ContextVersion) -> Self {
2836 Self {
2837 context: language::proto::deserialize_version(&proto.context_version),
2838 buffer: language::proto::deserialize_version(&proto.buffer_version),
2839 }
2840 }
2841
2842 pub fn to_proto(&self, context_id: ContextId) -> proto::ContextVersion {
2843 proto::ContextVersion {
2844 context_id: context_id.to_proto(),
2845 context_version: language::proto::serialize_version(&self.context),
2846 buffer_version: language::proto::serialize_version(&self.buffer),
2847 }
2848 }
2849}
2850
2851#[derive(Debug, Clone)]
2852pub struct PendingSlashCommand {
2853 pub name: String,
2854 pub arguments: SmallVec<[String; 3]>,
2855 pub status: PendingSlashCommandStatus,
2856 pub source_range: Range<language::Anchor>,
2857}
2858
2859#[derive(Debug, Clone)]
2860pub enum PendingSlashCommandStatus {
2861 Idle,
2862 Running { _task: Shared<Task<()>> },
2863 Error(String),
2864}
2865
2866pub(crate) struct ToolUseFeatureFlag;
2867
2868impl FeatureFlag for ToolUseFeatureFlag {
2869 const NAME: &'static str = "assistant-tool-use";
2870
2871 fn enabled_for_staff() -> bool {
2872 false
2873 }
2874}
2875
2876#[derive(Debug, Clone)]
2877pub struct PendingToolUse {
2878 pub id: Arc<str>,
2879 pub name: String,
2880 pub input: serde_json::Value,
2881 pub status: PendingToolUseStatus,
2882 pub source_range: Range<language::Anchor>,
2883}
2884
2885#[derive(Debug, Clone)]
2886pub enum PendingToolUseStatus {
2887 Idle,
2888 Running { _task: Shared<Task<()>> },
2889 Error(String),
2890}
2891
2892impl PendingToolUseStatus {
2893 pub fn is_idle(&self) -> bool {
2894 matches!(self, PendingToolUseStatus::Idle)
2895 }
2896}
2897
2898#[derive(Serialize, Deserialize)]
2899pub struct SavedMessage {
2900 pub id: MessageId,
2901 pub start: usize,
2902 pub metadata: MessageMetadata,
2903}
2904
2905#[derive(Serialize, Deserialize)]
2906pub struct SavedContext {
2907 pub id: Option<ContextId>,
2908 pub zed: String,
2909 pub version: String,
2910 pub text: String,
2911 pub messages: Vec<SavedMessage>,
2912 pub summary: String,
2913 pub slash_command_output_sections:
2914 Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
2915}
2916
2917impl SavedContext {
2918 pub const VERSION: &'static str = "0.4.0";
2919
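    /// Deserializes a saved context, upgrading older on-disk formats
    /// (0.1.0 through 0.3.0) to the current version.
    ///
    /// A minimal loading sketch (the path handling is hypothetical, not part
    /// of this module):
    ///
    /// ```ignore
    /// let json = std::fs::read_to_string(&context_path)?;
    /// let saved_context = SavedContext::from_json(&json)?;
    /// ```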
2920 pub fn from_json(json: &str) -> Result<Self> {
2921 let saved_context_json = serde_json::from_str::<serde_json::Value>(json)?;
2922 match saved_context_json
2923 .get("version")
2924 .ok_or_else(|| anyhow!("version not found"))?
2925 {
2926 serde_json::Value::String(version) => match version.as_str() {
2927 SavedContext::VERSION => {
2928 Ok(serde_json::from_value::<SavedContext>(saved_context_json)?)
2929 }
2930 SavedContextV0_3_0::VERSION => {
2931 let saved_context =
2932 serde_json::from_value::<SavedContextV0_3_0>(saved_context_json)?;
2933 Ok(saved_context.upgrade())
2934 }
2935 SavedContextV0_2_0::VERSION => {
2936 let saved_context =
2937 serde_json::from_value::<SavedContextV0_2_0>(saved_context_json)?;
2938 Ok(saved_context.upgrade())
2939 }
2940 SavedContextV0_1_0::VERSION => {
2941 let saved_context =
2942 serde_json::from_value::<SavedContextV0_1_0>(saved_context_json)?;
2943 Ok(saved_context.upgrade())
2944 }
2945 _ => Err(anyhow!("unrecognized saved context version: {}", version)),
2946 },
2947 _ => Err(anyhow!("version is not a string on saved context")),
2948 }
2949 }
2950
2951 fn into_ops(
2952 self,
2953 buffer: &Model<Buffer>,
2954 cx: &mut ModelContext<Context>,
2955 ) -> Vec<ContextOperation> {
2956 let mut operations = Vec::new();
2957 let mut version = clock::Global::new();
2958 let mut next_timestamp = clock::Lamport::new(ReplicaId::default());
2959
2960 let mut first_message_metadata = None;
2961 for message in self.messages {
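            // The first message (with the default Lamport id) is not re-inserted;
            // its metadata is replayed below as an `UpdateMessage` operation instead.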
2962 if message.id == MessageId(clock::Lamport::default()) {
2963 first_message_metadata = Some(message.metadata);
2964 } else {
2965 operations.push(ContextOperation::InsertMessage {
2966 anchor: MessageAnchor {
2967 id: message.id,
2968 start: buffer.read(cx).anchor_before(message.start),
2969 },
2970 metadata: MessageMetadata {
2971 role: message.metadata.role,
2972 status: message.metadata.status,
2973 timestamp: message.metadata.timestamp,
2974 cache: None,
2975 },
2976 version: version.clone(),
2977 });
2978 version.observe(message.id.0);
2979 next_timestamp.observe(message.id.0);
2980 }
2981 }
2982
2983 if let Some(metadata) = first_message_metadata {
2984 let timestamp = next_timestamp.tick();
2985 operations.push(ContextOperation::UpdateMessage {
2986 message_id: MessageId(clock::Lamport::default()),
2987 metadata: MessageMetadata {
2988 role: metadata.role,
2989 status: metadata.status,
2990 timestamp,
2991 cache: None,
2992 },
2993 version: version.clone(),
2994 });
2995 version.observe(timestamp);
2996 }
2997
2998 let timestamp = next_timestamp.tick();
2999 operations.push(ContextOperation::SlashCommandFinished {
3000 id: SlashCommandId(timestamp),
3001 output_range: language::Anchor::MIN..language::Anchor::MAX,
3002 sections: self
3003 .slash_command_output_sections
3004 .into_iter()
3005 .map(|section| {
3006 let buffer = buffer.read(cx);
3007 SlashCommandOutputSection {
3008 range: buffer.anchor_after(section.range.start)
3009 ..buffer.anchor_before(section.range.end),
3010 icon: section.icon,
3011 label: section.label,
3012 metadata: section.metadata,
3013 }
3014 })
3015 .collect(),
3016 version: version.clone(),
3017 });
3018 version.observe(timestamp);
3019
3020 let timestamp = next_timestamp.tick();
3021 operations.push(ContextOperation::UpdateSummary {
3022 summary: ContextSummary {
3023 text: self.summary,
3024 done: true,
3025 timestamp,
3026 },
3027 version: version.clone(),
3028 });
3029 version.observe(timestamp);
3030
3031 operations
3032 }
3033}
3034
3035#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
3036struct SavedMessageIdPreV0_4_0(usize);
3037
3038#[derive(Serialize, Deserialize)]
3039struct SavedMessagePreV0_4_0 {
3040 id: SavedMessageIdPreV0_4_0,
3041 start: usize,
3042}
3043
3044#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
3045struct SavedMessageMetadataPreV0_4_0 {
3046 role: Role,
3047 status: MessageStatus,
3048}
3049
3050#[derive(Serialize, Deserialize)]
3051struct SavedContextV0_3_0 {
3052 id: Option<ContextId>,
3053 zed: String,
3054 version: String,
3055 text: String,
3056 messages: Vec<SavedMessagePreV0_4_0>,
3057 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3058 summary: String,
3059 slash_command_output_sections: Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
3060}
3061
3062impl SavedContextV0_3_0 {
3063 const VERSION: &'static str = "0.3.0";
3064
3065 fn upgrade(self) -> SavedContext {
3066 SavedContext {
3067 id: self.id,
3068 zed: self.zed,
3069 version: SavedContext::VERSION.into(),
3070 text: self.text,
3071 messages: self
3072 .messages
3073 .into_iter()
3074 .filter_map(|message| {
3075 let metadata = self.message_metadata.get(&message.id)?;
3076 let timestamp = clock::Lamport {
3077 replica_id: ReplicaId::default(),
3078 value: message.id.0 as u32,
3079 };
3080 Some(SavedMessage {
3081 id: MessageId(timestamp),
3082 start: message.start,
3083 metadata: MessageMetadata {
3084 role: metadata.role,
3085 status: metadata.status.clone(),
3086 timestamp,
3087 cache: None,
3088 },
3089 })
3090 })
3091 .collect(),
3092 summary: self.summary,
3093 slash_command_output_sections: self.slash_command_output_sections,
3094 }
3095 }
3096}
3097
3098#[derive(Serialize, Deserialize)]
3099struct SavedContextV0_2_0 {
3100 id: Option<ContextId>,
3101 zed: String,
3102 version: String,
3103 text: String,
3104 messages: Vec<SavedMessagePreV0_4_0>,
3105 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3106 summary: String,
3107}
3108
3109impl SavedContextV0_2_0 {
3110 const VERSION: &'static str = "0.2.0";
3111
3112 fn upgrade(self) -> SavedContext {
3113 SavedContextV0_3_0 {
3114 id: self.id,
3115 zed: self.zed,
3116 version: SavedContextV0_3_0::VERSION.to_string(),
3117 text: self.text,
3118 messages: self.messages,
3119 message_metadata: self.message_metadata,
3120 summary: self.summary,
3121 slash_command_output_sections: Vec::new(),
3122 }
3123 .upgrade()
3124 }
3125}
3126
3127#[derive(Serialize, Deserialize)]
3128struct SavedContextV0_1_0 {
3129 id: Option<ContextId>,
3130 zed: String,
3131 version: String,
3132 text: String,
3133 messages: Vec<SavedMessagePreV0_4_0>,
3134 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3135 summary: String,
3136 api_url: Option<String>,
3137 model: OpenAiModel,
3138}
3139
3140impl SavedContextV0_1_0 {
3141 const VERSION: &'static str = "0.1.0";
3142
3143 fn upgrade(self) -> SavedContext {
3144 SavedContextV0_2_0 {
3145 id: self.id,
3146 zed: self.zed,
3147 version: SavedContextV0_2_0::VERSION.to_string(),
3148 text: self.text,
3149 messages: self.messages,
3150 message_metadata: self.message_metadata,
3151 summary: self.summary,
3152 }
3153 .upgrade()
3154 }
3155}
3156
3157#[derive(Clone)]
3158pub struct SavedContextMetadata {
3159 pub title: String,
3160 pub path: PathBuf,
3161 pub mtime: chrono::DateTime<chrono::Local>,
3162}