1#[cfg(test)]
2mod context_tests;
3
4use crate::{
5 prompts::PromptBuilder, slash_command::SlashCommandLine, MessageId, MessageStatus,
6 WorkflowStep, WorkflowStepEdit, WorkflowStepResolution, WorkflowSuggestionGroup,
7};
8use anyhow::{anyhow, Context as _, Result};
9use assistant_slash_command::{
10 SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry,
11};
12use assistant_tool::ToolRegistry;
13use client::{self, proto, telemetry::Telemetry};
14use clock::ReplicaId;
15use collections::{HashMap, HashSet};
16use feature_flags::{FeatureFlag, FeatureFlagAppExt};
17use fs::{Fs, RemoveOptions};
18use futures::{
19 future::{self, Shared},
20 FutureExt, StreamExt,
21};
22use gpui::{
23 AppContext, AsyncAppContext, Context as _, EventEmitter, Model, ModelContext, RenderImage,
24 SharedString, Subscription, Task,
25};
26
27use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset};
28use language_model::{
29 LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionEvent,
30 LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
31 LanguageModelRequestTool, LanguageModelToolResult, LanguageModelToolUse, MessageContent, Role,
32 StopReason,
33};
34use open_ai::Model as OpenAiModel;
35use paths::contexts_dir;
36use project::Project;
37use serde::{Deserialize, Serialize};
38use smallvec::SmallVec;
39use std::{
40 cmp::{self, max, Ordering},
41 fmt::Debug,
42 iter, mem,
43 ops::Range,
44 path::{Path, PathBuf},
45 str::FromStr as _,
46 sync::Arc,
47 time::{Duration, Instant},
48};
49use telemetry_events::AssistantKind;
50use text::BufferSnapshot;
51use util::{post_inc, TryFutureExt};
52use uuid::Uuid;
53
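/// A globally unique identifier for a context, backed by a UUID string so it
/// round-trips losslessly through the collaboration protocol. A minimal
/// round-trip sketch:
///
/// ```ignore
/// let id = ContextId::new();
/// let wire = id.to_proto();
/// assert_eq!(ContextId::from_proto(wire), id);
/// ```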
54#[derive(Clone, Eq, PartialEq, Hash, PartialOrd, Ord, Serialize, Deserialize)]
55pub struct ContextId(String);
56
57impl ContextId {
58 pub fn new() -> Self {
59 Self(Uuid::new_v4().to_string())
60 }
61
62 pub fn from_proto(id: String) -> Self {
63 Self(id)
64 }
65
66 pub fn to_proto(&self) -> String {
67 self.0.clone()
68 }
69}
70
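/// A replicated operation on a [`Context`].
///
/// Every variant except `BufferOperation` carries the version vector that was
/// current when the operation was generated, so peers can defer applying it
/// until its causal dependencies have been observed. `BufferOperation` wraps
/// the underlying text buffer's own CRDT operations.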
71#[derive(Clone, Debug)]
72pub enum ContextOperation {
73 InsertMessage {
74 anchor: MessageAnchor,
75 metadata: MessageMetadata,
76 version: clock::Global,
77 },
78 UpdateMessage {
79 message_id: MessageId,
80 metadata: MessageMetadata,
81 version: clock::Global,
82 },
83 UpdateSummary {
84 summary: ContextSummary,
85 version: clock::Global,
86 },
87 SlashCommandFinished {
88 id: SlashCommandId,
89 output_range: Range<language::Anchor>,
90 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
91 version: clock::Global,
92 },
93 BufferOperation(language::Operation),
94}
95
96impl ContextOperation {
97 pub fn from_proto(op: proto::ContextOperation) -> Result<Self> {
98 match op.variant.context("invalid variant")? {
99 proto::context_operation::Variant::InsertMessage(insert) => {
100 let message = insert.message.context("invalid message")?;
101 let id = MessageId(language::proto::deserialize_timestamp(
102 message.id.context("invalid id")?,
103 ));
104 Ok(Self::InsertMessage {
105 anchor: MessageAnchor {
106 id,
107 start: language::proto::deserialize_anchor(
108 message.start.context("invalid anchor")?,
109 )
110 .context("invalid anchor")?,
111 },
112 metadata: MessageMetadata {
113 role: Role::from_proto(message.role),
114 status: MessageStatus::from_proto(
115 message.status.context("invalid status")?,
116 ),
117 timestamp: id.0,
118 cache: None,
119 },
120 version: language::proto::deserialize_version(&insert.version),
121 })
122 }
123 proto::context_operation::Variant::UpdateMessage(update) => Ok(Self::UpdateMessage {
124 message_id: MessageId(language::proto::deserialize_timestamp(
125 update.message_id.context("invalid message id")?,
126 )),
127 metadata: MessageMetadata {
128 role: Role::from_proto(update.role),
129 status: MessageStatus::from_proto(update.status.context("invalid status")?),
130 timestamp: language::proto::deserialize_timestamp(
131 update.timestamp.context("invalid timestamp")?,
132 ),
133 cache: None,
134 },
135 version: language::proto::deserialize_version(&update.version),
136 }),
137 proto::context_operation::Variant::UpdateSummary(update) => Ok(Self::UpdateSummary {
138 summary: ContextSummary {
139 text: update.summary,
140 done: update.done,
141 timestamp: language::proto::deserialize_timestamp(
142 update.timestamp.context("invalid timestamp")?,
143 ),
144 },
145 version: language::proto::deserialize_version(&update.version),
146 }),
147 proto::context_operation::Variant::SlashCommandFinished(finished) => {
148 Ok(Self::SlashCommandFinished {
149 id: SlashCommandId(language::proto::deserialize_timestamp(
150 finished.id.context("invalid id")?,
151 )),
152 output_range: language::proto::deserialize_anchor_range(
153 finished.output_range.context("invalid range")?,
154 )?,
155 sections: finished
156 .sections
157 .into_iter()
158 .map(|section| {
159 Ok(SlashCommandOutputSection {
160 range: language::proto::deserialize_anchor_range(
161 section.range.context("invalid range")?,
162 )?,
163 icon: section.icon_name.parse()?,
164 label: section.label.into(),
165 })
166 })
167 .collect::<Result<Vec<_>>>()?,
168 version: language::proto::deserialize_version(&finished.version),
169 })
170 }
171 proto::context_operation::Variant::BufferOperation(op) => Ok(Self::BufferOperation(
172 language::proto::deserialize_operation(
173 op.operation.context("invalid buffer operation")?,
174 )?,
175 )),
176 }
177 }
178
179 pub fn to_proto(&self) -> proto::ContextOperation {
180 match self {
181 Self::InsertMessage {
182 anchor,
183 metadata,
184 version,
185 } => proto::ContextOperation {
186 variant: Some(proto::context_operation::Variant::InsertMessage(
187 proto::context_operation::InsertMessage {
188 message: Some(proto::ContextMessage {
189 id: Some(language::proto::serialize_timestamp(anchor.id.0)),
190 start: Some(language::proto::serialize_anchor(&anchor.start)),
191 role: metadata.role.to_proto() as i32,
192 status: Some(metadata.status.to_proto()),
193 }),
194 version: language::proto::serialize_version(version),
195 },
196 )),
197 },
198 Self::UpdateMessage {
199 message_id,
200 metadata,
201 version,
202 } => proto::ContextOperation {
203 variant: Some(proto::context_operation::Variant::UpdateMessage(
204 proto::context_operation::UpdateMessage {
205 message_id: Some(language::proto::serialize_timestamp(message_id.0)),
206 role: metadata.role.to_proto() as i32,
207 status: Some(metadata.status.to_proto()),
208 timestamp: Some(language::proto::serialize_timestamp(metadata.timestamp)),
209 version: language::proto::serialize_version(version),
210 },
211 )),
212 },
213 Self::UpdateSummary { summary, version } => proto::ContextOperation {
214 variant: Some(proto::context_operation::Variant::UpdateSummary(
215 proto::context_operation::UpdateSummary {
216 summary: summary.text.clone(),
217 done: summary.done,
218 timestamp: Some(language::proto::serialize_timestamp(summary.timestamp)),
219 version: language::proto::serialize_version(version),
220 },
221 )),
222 },
223 Self::SlashCommandFinished {
224 id,
225 output_range,
226 sections,
227 version,
228 } => proto::ContextOperation {
229 variant: Some(proto::context_operation::Variant::SlashCommandFinished(
230 proto::context_operation::SlashCommandFinished {
231 id: Some(language::proto::serialize_timestamp(id.0)),
232 output_range: Some(language::proto::serialize_anchor_range(
233 output_range.clone(),
234 )),
235 sections: sections
236 .iter()
237 .map(|section| {
238 let icon_name: &'static str = section.icon.into();
239 proto::SlashCommandOutputSection {
240 range: Some(language::proto::serialize_anchor_range(
241 section.range.clone(),
242 )),
243 icon_name: icon_name.to_string(),
244 label: section.label.to_string(),
245 }
246 })
247 .collect(),
248 version: language::proto::serialize_version(version),
249 },
250 )),
251 },
252 Self::BufferOperation(operation) => proto::ContextOperation {
253 variant: Some(proto::context_operation::Variant::BufferOperation(
254 proto::context_operation::BufferOperation {
255 operation: Some(language::proto::serialize_operation(operation)),
256 },
257 )),
258 },
259 }
260 }
261
262 fn timestamp(&self) -> clock::Lamport {
263 match self {
264 Self::InsertMessage { anchor, .. } => anchor.id.0,
265 Self::UpdateMessage { metadata, .. } => metadata.timestamp,
266 Self::UpdateSummary { summary, .. } => summary.timestamp,
267 Self::SlashCommandFinished { id, .. } => id.0,
268 Self::BufferOperation(_) => {
269 panic!("reading the timestamp of a buffer operation is not supported")
270 }
271 }
272 }
273
274 /// Returns the current version of the context operation.
275 pub fn version(&self) -> &clock::Global {
276 match self {
277 Self::InsertMessage { version, .. }
278 | Self::UpdateMessage { version, .. }
279 | Self::UpdateSummary { version, .. }
280 | Self::SlashCommandFinished { version, .. } => version,
281 Self::BufferOperation(_) => {
282 panic!("reading the version of a buffer operation is not supported")
283 }
284 }
285 }
286}
287
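/// Events emitted by a [`Context`] so that views and other subscribers can
/// react to message edits, summary changes, streamed completion text,
/// workflow-step and slash-command updates, and tool-use lifecycle changes.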
288#[derive(Debug, Clone)]
289pub enum ContextEvent {
290 ShowAssistError(SharedString),
291 MessagesEdited,
292 SummaryChanged,
293 StreamedCompletion,
294 WorkflowStepsUpdated {
295 removed: Vec<Range<language::Anchor>>,
296 updated: Vec<Range<language::Anchor>>,
297 },
298 PendingSlashCommandsUpdated {
299 removed: Vec<Range<language::Anchor>>,
300 updated: Vec<PendingSlashCommand>,
301 },
302 SlashCommandFinished {
303 output_range: Range<language::Anchor>,
304 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
305 run_commands_in_output: bool,
306 expand_result: bool,
307 },
308 UsePendingTools,
309 ToolFinished {
310 tool_use_id: Arc<str>,
311 output_range: Range<language::Anchor>,
312 },
313 Operation(ContextOperation),
314}
315
316#[derive(Clone, Default, Debug)]
317pub struct ContextSummary {
318 pub text: String,
319 done: bool,
320 timestamp: clock::Lamport,
321}
322
323#[derive(Clone, Debug, Eq, PartialEq)]
324pub struct MessageAnchor {
325 pub id: MessageId,
326 pub start: language::Anchor,
327}
328
329#[derive(Clone, Debug, Eq, PartialEq)]
330pub enum CacheStatus {
331 Pending,
332 Cached,
333}
334
335#[derive(Clone, Debug, Eq, PartialEq)]
336pub struct MessageCacheMetadata {
337 pub is_anchor: bool,
338 pub is_final_anchor: bool,
339 pub status: CacheStatus,
340 pub cached_at: clock::Global,
341}
342
343#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
344pub struct MessageMetadata {
345 pub role: Role,
346 pub status: MessageStatus,
347 pub(crate) timestamp: clock::Lamport,
348 #[serde(skip)]
349 pub cache: Option<MessageCacheMetadata>,
350}
351
352impl From<&Message> for MessageMetadata {
353 fn from(message: &Message) -> Self {
354 Self {
355 role: message.role,
356 status: message.status.clone(),
357 timestamp: message.id.0,
358 cache: message.cache.clone(),
359 }
360 }
361}
362
363impl MessageMetadata {
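    /// Returns true if the cached prefix for this message is still usable,
    /// i.e. the buffer has had no edits since `cached_at` within the given range.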
364 pub fn is_cache_valid(&self, buffer: &BufferSnapshot, range: &Range<usize>) -> bool {
365 let result = match &self.cache {
366 Some(MessageCacheMetadata { cached_at, .. }) => !buffer.has_edits_since_in_range(
367 &cached_at,
368 Range {
369 start: buffer.anchor_at(range.start, Bias::Right),
370 end: buffer.anchor_at(range.end, Bias::Left),
371 },
372 ),
373 _ => false,
374 };
375 result
376 }
377}
378
379#[derive(Clone, Debug)]
380pub struct Message {
381 pub offset_range: Range<usize>,
382 pub index_range: Range<usize>,
383 pub anchor_range: Range<language::Anchor>,
384 pub id: MessageId,
385 pub role: Role,
386 pub status: MessageStatus,
387 pub cache: Option<MessageCacheMetadata>,
388}
389
390#[derive(Debug, Clone)]
391pub enum Content {
392 Image {
393 anchor: language::Anchor,
394 image_id: u64,
395 render_image: Arc<RenderImage>,
396 image: Shared<Task<Option<LanguageModelImage>>>,
397 },
398 ToolUse {
399 range: Range<language::Anchor>,
400 tool_use: LanguageModelToolUse,
401 },
402 ToolResult {
403 range: Range<language::Anchor>,
404 tool_use_id: Arc<str>,
405 },
406}
407
408impl Content {
409 fn range(&self) -> Range<language::Anchor> {
410 match self {
411 Self::Image { anchor, .. } => *anchor..*anchor,
412 Self::ToolUse { range, .. } | Self::ToolResult { range, .. } => range.clone(),
413 }
414 }
415
416 fn cmp(&self, other: &Self, buffer: &BufferSnapshot) -> Ordering {
417 let self_range = self.range();
418 let other_range = other.range();
419 if self_range.end.cmp(&other_range.start, buffer).is_lt() {
420 Ordering::Less
421 } else if self_range.start.cmp(&other_range.end, buffer).is_gt() {
422 Ordering::Greater
423 } else {
424 Ordering::Equal
425 }
426 }
427}
428
429struct PendingCompletion {
430 id: usize,
431 assistant_message_id: MessageId,
432 _task: Task<()>,
433}
434
435#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
436pub struct SlashCommandId(clock::Lamport);
437
438#[derive(Clone, Debug)]
439pub struct XmlTag {
440 pub kind: XmlTagKind,
441 pub range: Range<text::Anchor>,
442 pub is_open_tag: bool,
443}
444
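/// The tag names recognized inside workflow steps. The `strum` derives map each
/// variant to its snake_case name, e.g. (sketch):
///
/// ```ignore
/// assert_eq!("step".parse::<XmlTagKind>(), Ok(XmlTagKind::Step));
/// assert_eq!(XmlTagKind::Description.as_ref(), "description");
/// ```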
445#[derive(Copy, Clone, Debug, strum::EnumString, PartialEq, Eq, strum::AsRefStr)]
446#[strum(serialize_all = "snake_case")]
447pub enum XmlTagKind {
448 Step,
449 Edit,
450 Path,
451 Search,
452 Within,
453 Operation,
454 Description,
455}
456
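/// The assistant conversation model: a Markdown buffer annotated with message
/// anchors and metadata, plus pending slash commands, tool uses, workflow steps,
/// and summary state. Changes are expressed as [`ContextOperation`]s so the
/// context can be replicated between collaborators.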
457pub struct Context {
458 id: ContextId,
459 timestamp: clock::Lamport,
460 version: clock::Global,
461 pending_ops: Vec<ContextOperation>,
462 operations: Vec<ContextOperation>,
463 buffer: Model<Buffer>,
464 pending_slash_commands: Vec<PendingSlashCommand>,
465 edits_since_last_parse: language::Subscription,
466 finished_slash_commands: HashSet<SlashCommandId>,
467 slash_command_output_sections: Vec<SlashCommandOutputSection<language::Anchor>>,
468 pending_tool_uses_by_id: HashMap<Arc<str>, PendingToolUse>,
469 message_anchors: Vec<MessageAnchor>,
470 contents: Vec<Content>,
471 messages_metadata: HashMap<MessageId, MessageMetadata>,
472 summary: Option<ContextSummary>,
473 pending_summary: Task<Option<()>>,
474 completion_count: usize,
475 pending_completions: Vec<PendingCompletion>,
476 token_count: Option<usize>,
477 pending_token_count: Task<Option<()>>,
478 pending_save: Task<Result<()>>,
479 pending_cache_warming_task: Task<Option<()>>,
480 path: Option<PathBuf>,
481 _subscriptions: Vec<Subscription>,
482 telemetry: Option<Arc<Telemetry>>,
483 language_registry: Arc<LanguageRegistry>,
484 workflow_steps: Vec<WorkflowStep>,
485 xml_tags: Vec<XmlTag>,
486 project: Option<Model<Project>>,
487 prompt_builder: Arc<PromptBuilder>,
488}
489
490trait ContextAnnotation {
491 fn range(&self) -> &Range<language::Anchor>;
492}
493
494impl ContextAnnotation for PendingSlashCommand {
495 fn range(&self) -> &Range<language::Anchor> {
496 &self.source_range
497 }
498}
499
500impl ContextAnnotation for WorkflowStep {
501 fn range(&self) -> &Range<language::Anchor> {
502 &self.range
503 }
504}
505
506impl ContextAnnotation for XmlTag {
507 fn range(&self) -> &Range<language::Anchor> {
508 &self.range
509 }
510}
511
512impl EventEmitter<ContextEvent> for Context {}
513
514impl Context {
515 pub fn local(
516 language_registry: Arc<LanguageRegistry>,
517 project: Option<Model<Project>>,
518 telemetry: Option<Arc<Telemetry>>,
519 prompt_builder: Arc<PromptBuilder>,
520 cx: &mut ModelContext<Self>,
521 ) -> Self {
522 Self::new(
523 ContextId::new(),
524 ReplicaId::default(),
525 language::Capability::ReadWrite,
526 language_registry,
527 prompt_builder,
528 project,
529 telemetry,
530 cx,
531 )
532 }
533
534 #[allow(clippy::too_many_arguments)]
535 pub fn new(
536 id: ContextId,
537 replica_id: ReplicaId,
538 capability: language::Capability,
539 language_registry: Arc<LanguageRegistry>,
540 prompt_builder: Arc<PromptBuilder>,
541 project: Option<Model<Project>>,
542 telemetry: Option<Arc<Telemetry>>,
543 cx: &mut ModelContext<Self>,
544 ) -> Self {
545 let buffer = cx.new_model(|_cx| {
546 let mut buffer = Buffer::remote(
547 language::BufferId::new(1).unwrap(),
548 replica_id,
549 capability,
550 "",
551 );
552 buffer.set_language_registry(language_registry.clone());
553 buffer
554 });
555 let edits_since_last_slash_command_parse =
556 buffer.update(cx, |buffer, _| buffer.subscribe());
557 let mut this = Self {
558 id,
559 timestamp: clock::Lamport::new(replica_id),
560 version: clock::Global::new(),
561 pending_ops: Vec::new(),
562 operations: Vec::new(),
563 message_anchors: Default::default(),
564 contents: Default::default(),
565 messages_metadata: Default::default(),
566 pending_slash_commands: Vec::new(),
567 finished_slash_commands: HashSet::default(),
568 pending_tool_uses_by_id: HashMap::default(),
569 slash_command_output_sections: Vec::new(),
570 edits_since_last_parse: edits_since_last_slash_command_parse,
571 summary: None,
572 pending_summary: Task::ready(None),
573 completion_count: Default::default(),
574 pending_completions: Default::default(),
575 token_count: None,
576 pending_token_count: Task::ready(None),
577 pending_cache_warming_task: Task::ready(None),
578 _subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)],
579 pending_save: Task::ready(Ok(())),
580 path: None,
581 buffer,
582 telemetry,
583 project,
584 language_registry,
585 workflow_steps: Vec::new(),
586 xml_tags: Vec::new(),
587 prompt_builder,
588 };
589
590 let first_message_id = MessageId(clock::Lamport {
591 replica_id: 0,
592 value: 0,
593 });
594 let message = MessageAnchor {
595 id: first_message_id,
596 start: language::Anchor::MIN,
597 };
598 this.messages_metadata.insert(
599 first_message_id,
600 MessageMetadata {
601 role: Role::User,
602 status: MessageStatus::Done,
603 timestamp: first_message_id.0,
604 cache: None,
605 },
606 );
607 this.message_anchors.push(message);
608
609 this.set_language(cx);
610 this.count_remaining_tokens(cx);
611 this
612 }
613
614 pub(crate) fn serialize(&self, cx: &AppContext) -> SavedContext {
615 let buffer = self.buffer.read(cx);
616 SavedContext {
617 id: Some(self.id.clone()),
618 zed: "context".into(),
619 version: SavedContext::VERSION.into(),
620 text: buffer.text(),
621 messages: self
622 .messages(cx)
623 .map(|message| SavedMessage {
624 id: message.id,
625 start: message.offset_range.start,
626 metadata: self.messages_metadata[&message.id].clone(),
627 })
628 .collect(),
629 summary: self
630 .summary
631 .as_ref()
632 .map(|summary| summary.text.clone())
633 .unwrap_or_default(),
634 slash_command_output_sections: self
635 .slash_command_output_sections
636 .iter()
637 .filter_map(|section| {
638 let range = section.range.to_offset(buffer);
639 if section.range.start.is_valid(buffer) && !range.is_empty() {
640 Some(assistant_slash_command::SlashCommandOutputSection {
641 range,
642 icon: section.icon,
643 label: section.label.clone(),
644 })
645 } else {
646 None
647 }
648 })
649 .collect(),
650 }
651 }
652
653 #[allow(clippy::too_many_arguments)]
654 pub fn deserialize(
655 saved_context: SavedContext,
656 path: PathBuf,
657 language_registry: Arc<LanguageRegistry>,
658 prompt_builder: Arc<PromptBuilder>,
659 project: Option<Model<Project>>,
660 telemetry: Option<Arc<Telemetry>>,
661 cx: &mut ModelContext<Self>,
662 ) -> Self {
663 let id = saved_context.id.clone().unwrap_or_else(ContextId::new);
664 let mut this = Self::new(
665 id,
666 ReplicaId::default(),
667 language::Capability::ReadWrite,
668 language_registry,
669 prompt_builder,
670 project,
671 telemetry,
672 cx,
673 );
674 this.path = Some(path);
675 this.buffer.update(cx, |buffer, cx| {
676 buffer.set_text(saved_context.text.as_str(), cx)
677 });
678 let operations = saved_context.into_ops(&this.buffer, cx);
679 this.apply_ops(operations, cx).unwrap();
680 this
681 }
682
683 pub fn id(&self) -> &ContextId {
684 &self.id
685 }
686
687 pub fn replica_id(&self) -> ReplicaId {
688 self.timestamp.replica_id
689 }
690
691 pub fn version(&self, cx: &AppContext) -> ContextVersion {
692 ContextVersion {
693 context: self.version.clone(),
694 buffer: self.buffer.read(cx).version(),
695 }
696 }
697
698 pub fn set_capability(
699 &mut self,
700 capability: language::Capability,
701 cx: &mut ModelContext<Self>,
702 ) {
703 self.buffer
704 .update(cx, |buffer, cx| buffer.set_capability(capability, cx));
705 }
706
707 fn next_timestamp(&mut self) -> clock::Lamport {
708 let timestamp = self.timestamp.tick();
709 self.version.observe(timestamp);
710 timestamp
711 }
712
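    /// Serializes every operation not yet observed by `since`, for sending to a
    /// peer: buffer operations first, followed by context operations (including
    /// any still-pending ones) in Lamport-timestamp order.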
713 pub fn serialize_ops(
714 &self,
715 since: &ContextVersion,
716 cx: &AppContext,
717 ) -> Task<Vec<proto::ContextOperation>> {
718 let buffer_ops = self
719 .buffer
720 .read(cx)
721 .serialize_ops(Some(since.buffer.clone()), cx);
722
723 let mut context_ops = self
724 .operations
725 .iter()
726 .filter(|op| !since.context.observed(op.timestamp()))
727 .cloned()
728 .collect::<Vec<_>>();
729 context_ops.extend(self.pending_ops.iter().cloned());
730
731 cx.background_executor().spawn(async move {
732 let buffer_ops = buffer_ops.await;
733 context_ops.sort_unstable_by_key(|op| op.timestamp());
734 buffer_ops
735 .into_iter()
736 .map(|op| proto::ContextOperation {
737 variant: Some(proto::context_operation::Variant::BufferOperation(
738 proto::context_operation::BufferOperation {
739 operation: Some(op),
740 },
741 )),
742 })
743 .chain(context_ops.into_iter().map(|op| op.to_proto()))
744 .collect()
745 })
746 }
747
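    /// Applies operations received from a peer. Buffer operations are forwarded
    /// to the underlying buffer immediately; all other operations are queued and
    /// flushed once their causal dependencies are satisfied.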
748 pub fn apply_ops(
749 &mut self,
750 ops: impl IntoIterator<Item = ContextOperation>,
751 cx: &mut ModelContext<Self>,
752 ) -> Result<()> {
753 let mut buffer_ops = Vec::new();
754 for op in ops {
755 match op {
756 ContextOperation::BufferOperation(buffer_op) => buffer_ops.push(buffer_op),
757 op => self.pending_ops.push(op),
758 }
759 }
760 self.buffer
761 .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx))?;
762 self.flush_ops(cx);
763
764 Ok(())
765 }
766
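    /// Drains `pending_ops` in timestamp order, applying each operation whose
    /// dependencies have been met (the rest are re-queued), and emits the
    /// corresponding events (messages edited, summary changed, slash command
    /// finished) as needed.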
767 fn flush_ops(&mut self, cx: &mut ModelContext<Context>) {
768 let mut changed_messages = HashSet::default();
769 let mut summary_changed = false;
770
771 self.pending_ops.sort_unstable_by_key(|op| op.timestamp());
772 for op in mem::take(&mut self.pending_ops) {
773 if !self.can_apply_op(&op, cx) {
774 self.pending_ops.push(op);
775 continue;
776 }
777
778 let timestamp = op.timestamp();
779 match op.clone() {
780 ContextOperation::InsertMessage {
781 anchor, metadata, ..
782 } => {
783 if self.messages_metadata.contains_key(&anchor.id) {
784 // We already applied this operation.
785 } else {
786 changed_messages.insert(anchor.id);
787 self.insert_message(anchor, metadata, cx);
788 }
789 }
790 ContextOperation::UpdateMessage {
791 message_id,
792 metadata: new_metadata,
793 ..
794 } => {
795 let metadata = self.messages_metadata.get_mut(&message_id).unwrap();
796 if new_metadata.timestamp > metadata.timestamp {
797 *metadata = new_metadata;
798 changed_messages.insert(message_id);
799 }
800 }
801 ContextOperation::UpdateSummary {
802 summary: new_summary,
803 ..
804 } => {
805 if self
806 .summary
807 .as_ref()
808 .map_or(true, |summary| new_summary.timestamp > summary.timestamp)
809 {
810 self.summary = Some(new_summary);
811 summary_changed = true;
812 }
813 }
814 ContextOperation::SlashCommandFinished {
815 id,
816 output_range,
817 sections,
818 ..
819 } => {
820 if self.finished_slash_commands.insert(id) {
821 let buffer = self.buffer.read(cx);
822 self.slash_command_output_sections
823 .extend(sections.iter().cloned());
824 self.slash_command_output_sections
825 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
826 cx.emit(ContextEvent::SlashCommandFinished {
827 output_range,
828 sections,
829 expand_result: false,
830 run_commands_in_output: false,
831 });
832 }
833 }
834 ContextOperation::BufferOperation(_) => unreachable!(),
835 }
836
837 self.version.observe(timestamp);
838 self.timestamp.observe(timestamp);
839 self.operations.push(op);
840 }
841
842 if !changed_messages.is_empty() {
843 self.message_roles_updated(changed_messages, cx);
844 cx.emit(ContextEvent::MessagesEdited);
845 cx.notify();
846 }
847
848 if summary_changed {
849 cx.emit(ContextEvent::SummaryChanged);
850 cx.notify();
851 }
852 }
853
854 fn can_apply_op(&self, op: &ContextOperation, cx: &AppContext) -> bool {
855 if !self.version.observed_all(op.version()) {
856 return false;
857 }
858
859 match op {
860 ContextOperation::InsertMessage { anchor, .. } => self
861 .buffer
862 .read(cx)
863 .version
864 .observed(anchor.start.timestamp),
865 ContextOperation::UpdateMessage { message_id, .. } => {
866 self.messages_metadata.contains_key(message_id)
867 }
868 ContextOperation::UpdateSummary { .. } => true,
869 ContextOperation::SlashCommandFinished {
870 output_range,
871 sections,
872 ..
873 } => {
874 let version = &self.buffer.read(cx).version;
875 sections
876 .iter()
877 .map(|section| &section.range)
878 .chain([output_range])
879 .all(|range| {
880 let observed_start = range.start == language::Anchor::MIN
881 || range.start == language::Anchor::MAX
882 || version.observed(range.start.timestamp);
883 let observed_end = range.end == language::Anchor::MIN
884 || range.end == language::Anchor::MAX
885 || version.observed(range.end.timestamp);
886 observed_start && observed_end
887 })
888 }
889 ContextOperation::BufferOperation(_) => {
890 panic!("buffer operations should always be applied")
891 }
892 }
893 }
894
895 fn push_op(&mut self, op: ContextOperation, cx: &mut ModelContext<Self>) {
896 self.operations.push(op.clone());
897 cx.emit(ContextEvent::Operation(op));
898 }
899
900 pub fn buffer(&self) -> &Model<Buffer> {
901 &self.buffer
902 }
903
904 pub fn language_registry(&self) -> Arc<LanguageRegistry> {
905 self.language_registry.clone()
906 }
907
908 pub fn project(&self) -> Option<Model<Project>> {
909 self.project.clone()
910 }
911
912 pub fn prompt_builder(&self) -> Arc<PromptBuilder> {
913 self.prompt_builder.clone()
914 }
915
916 pub fn path(&self) -> Option<&Path> {
917 self.path.as_deref()
918 }
919
920 pub fn summary(&self) -> Option<&ContextSummary> {
921 self.summary.as_ref()
922 }
923
924 pub(crate) fn workflow_step_containing(
925 &self,
926 offset: usize,
927 cx: &AppContext,
928 ) -> Option<&WorkflowStep> {
929 let buffer = self.buffer.read(cx);
930 let index = self
931 .workflow_steps
932 .binary_search_by(|step| {
933 let step_range = step.range.to_offset(&buffer);
934 if offset < step_range.start {
935 Ordering::Greater
936 } else if offset > step_range.end {
937 Ordering::Less
938 } else {
939 Ordering::Equal
940 }
941 })
942 .ok()?;
943 Some(&self.workflow_steps[index])
944 }
945
946 pub fn workflow_step_ranges(&self) -> impl Iterator<Item = Range<language::Anchor>> + '_ {
947 self.workflow_steps.iter().map(|step| step.range.clone())
948 }
949
950 pub(crate) fn workflow_step_for_range(
951 &self,
952 range: &Range<language::Anchor>,
953 cx: &AppContext,
954 ) -> Option<&WorkflowStep> {
955 let buffer = self.buffer.read(cx);
956 let index = self.workflow_step_index_for_range(range, buffer).ok()?;
957 Some(&self.workflow_steps[index])
958 }
959
960 fn workflow_step_index_for_range(
961 &self,
962 tagged_range: &Range<text::Anchor>,
963 buffer: &text::BufferSnapshot,
964 ) -> Result<usize, usize> {
965 self.workflow_steps
966 .binary_search_by(|probe| probe.range.cmp(&tagged_range, buffer))
967 }
968
969 pub fn pending_slash_commands(&self) -> &[PendingSlashCommand] {
970 &self.pending_slash_commands
971 }
972
973 pub fn slash_command_output_sections(&self) -> &[SlashCommandOutputSection<language::Anchor>] {
974 &self.slash_command_output_sections
975 }
976
977 pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> {
978 self.pending_tool_uses_by_id.values().collect()
979 }
980
981 pub fn get_tool_use_by_id(&self, id: &Arc<str>) -> Option<&PendingToolUse> {
982 self.pending_tool_uses_by_id.get(id)
983 }
984
985 fn set_language(&mut self, cx: &mut ModelContext<Self>) {
986 let markdown = self.language_registry.language_for_name("Markdown");
987 cx.spawn(|this, mut cx| async move {
988 let markdown = markdown.await?;
989 this.update(&mut cx, |this, cx| {
990 this.buffer
991 .update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx));
992 })
993 })
994 .detach_and_log_err(cx);
995 }
996
997 fn handle_buffer_event(
998 &mut self,
999 _: Model<Buffer>,
1000 event: &language::Event,
1001 cx: &mut ModelContext<Self>,
1002 ) {
1003 match event {
1004 language::Event::Operation(operation) => cx.emit(ContextEvent::Operation(
1005 ContextOperation::BufferOperation(operation.clone()),
1006 )),
1007 language::Event::Edited => {
1008 self.count_remaining_tokens(cx);
1009 self.reparse(cx);
1010 // Use `inclusive = true` to invalidate a step when an edit occurs
1011 // at the start/end of a parsed step.
1012 cx.emit(ContextEvent::MessagesEdited);
1013 }
1014 _ => {}
1015 }
1016 }
1017
1018 pub(crate) fn token_count(&self) -> Option<usize> {
1019 self.token_count
1020 }
1021
1022 pub(crate) fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
1023 let request = self.to_completion_request(cx);
1024 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
1025 return;
1026 };
1027 self.pending_token_count = cx.spawn(|this, mut cx| {
1028 async move {
1029 cx.background_executor()
1030 .timer(Duration::from_millis(200))
1031 .await;
1032
1033 let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
1034 this.update(&mut cx, |this, cx| {
1035 this.token_count = Some(token_count);
1036 this.start_cache_warming(&model, cx);
1037 cx.notify()
1038 })
1039 }
1040 .log_err()
1041 });
1042 }
1043
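    /// Decides which user messages should carry prompt-cache anchors: the
    /// largest user messages, up to one fewer than `max_cache_anchors` (one is
    /// reserved for the inline assistant), and none at all while the token count
    /// is below `min_total_token`. When `speculative` is true the last message is
    /// skipped, since it is likely to change. Messages after the final anchor are
    /// marked uncached. Returns true if a chosen anchor's cache was invalidated
    /// and therefore needs to be (re)warmed.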
1044 pub fn mark_cache_anchors(
1045 &mut self,
1046 cache_configuration: &Option<LanguageModelCacheConfiguration>,
1047 speculative: bool,
1048 cx: &mut ModelContext<Self>,
1049 ) -> bool {
1050 let cache_configuration =
1051 cache_configuration
1052 .as_ref()
1053 .unwrap_or(&LanguageModelCacheConfiguration {
1054 max_cache_anchors: 0,
1055 should_speculate: false,
1056 min_total_token: 0,
1057 });
1058
1059 let messages: Vec<Message> = self.messages(cx).collect();
1060
1061 let mut sorted_messages = messages.clone();
1062 if speculative {
1063 // Avoid caching the last message if this is a speculative cache fetch as
1064 // it's likely to change.
1065 sorted_messages.pop();
1066 }
1067 sorted_messages.retain(|m| m.role == Role::User);
1068 sorted_messages.sort_by(|a, b| b.offset_range.len().cmp(&a.offset_range.len()));
1069
1070 let cache_anchors = if self.token_count.unwrap_or(0) < cache_configuration.min_total_token {
1071 // If we haven't hit the minimum threshold to enable caching, don't cache anything.
1072 0
1073 } else {
1074 // Save 1 anchor for the inline assistant to use.
1075 max(cache_configuration.max_cache_anchors, 1) - 1
1076 };
1077 sorted_messages.truncate(cache_anchors);
1078
1079 let anchors: HashSet<MessageId> = sorted_messages
1080 .into_iter()
1081 .map(|message| message.id)
1082 .collect();
1083
1084 let buffer = self.buffer.read(cx).snapshot();
1085 let invalidated_caches: HashSet<MessageId> = messages
1086 .iter()
1087 .scan(false, |encountered_invalid, message| {
1088 let message_id = message.id;
1089 let is_invalid = self
1090 .messages_metadata
1091 .get(&message_id)
1092 .map_or(true, |metadata| {
1093 !metadata.is_cache_valid(&buffer, &message.offset_range)
1094 || *encountered_invalid
1095 });
1096 *encountered_invalid |= is_invalid;
1097 Some(if is_invalid { Some(message_id) } else { None })
1098 })
1099 .flatten()
1100 .collect();
1101
1102 let last_anchor = messages.iter().rev().find_map(|message| {
1103 if anchors.contains(&message.id) {
1104 Some(message.id)
1105 } else {
1106 None
1107 }
1108 });
1109
1110 let mut new_anchor_needs_caching = false;
1111 let current_version = &buffer.version;
1112 // If we have no anchors, mark all messages as not being cached.
1113 let mut hit_last_anchor = last_anchor.is_none();
1114
1115 for message in messages.iter() {
1116 if hit_last_anchor {
1117 self.update_metadata(message.id, cx, |metadata| metadata.cache = None);
1118 continue;
1119 }
1120
1121 if let Some(last_anchor) = last_anchor {
1122 if message.id == last_anchor {
1123 hit_last_anchor = true;
1124 }
1125 }
1126
1127 new_anchor_needs_caching = new_anchor_needs_caching
1128 || (invalidated_caches.contains(&message.id) && anchors.contains(&message.id));
1129
1130 self.update_metadata(message.id, cx, |metadata| {
1131 let cache_status = if invalidated_caches.contains(&message.id) {
1132 CacheStatus::Pending
1133 } else {
1134 metadata
1135 .cache
1136 .as_ref()
1137 .map_or(CacheStatus::Pending, |cm| cm.status.clone())
1138 };
1139 metadata.cache = Some(MessageCacheMetadata {
1140 is_anchor: anchors.contains(&message.id),
1141 is_final_anchor: hit_last_anchor,
1142 status: cache_status,
1143 cached_at: current_version.clone(),
1144 });
1145 });
1146 }
1147 new_anchor_needs_caching
1148 }
1149
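    /// Speculatively warms the model's prompt cache: if a new cache anchor needs
    /// caching, no completion is already pending, and the model's cache
    /// configuration allows speculation, this sends a throwaway request (the last
    /// message replaced with "Respond only with OK, nothing else.") and then
    /// marks pending cache entries as cached.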
1150 fn start_cache_warming(&mut self, model: &Arc<dyn LanguageModel>, cx: &mut ModelContext<Self>) {
1151 let cache_configuration = model.cache_configuration();
1152
1153 if !self.mark_cache_anchors(&cache_configuration, true, cx) {
1154 return;
1155 }
1156 if !self.pending_completions.is_empty() {
1157 return;
1158 }
1159 if let Some(cache_configuration) = cache_configuration {
1160 if !cache_configuration.should_speculate {
1161 return;
1162 }
1163 }
1164
1165 let request = {
1166 let mut req = self.to_completion_request(cx);
1167 // Skip the last message because it's likely to change and
1168 // therefore would be a waste to cache.
1169 req.messages.pop();
1170 req.messages.push(LanguageModelRequestMessage {
1171 role: Role::User,
1172 content: vec!["Respond only with OK, nothing else.".into()],
1173 cache: false,
1174 });
1175 req
1176 };
1177
1178 let model = Arc::clone(model);
1179 self.pending_cache_warming_task = cx.spawn(|this, mut cx| {
1180 async move {
1181 match model.stream_completion(request, &cx).await {
1182 Ok(mut stream) => {
1183 stream.next().await;
1184 log::info!("Cache warming completed successfully");
1185 }
1186 Err(e) => {
1187 log::warn!("Cache warming failed: {}", e);
1188 }
1189 };
1190 this.update(&mut cx, |this, cx| {
1191 this.update_cache_status_for_completion(cx);
1192 })
1193 .ok();
1194 anyhow::Ok(())
1195 }
1196 .log_err()
1197 });
1198 }
1199
1200 pub fn update_cache_status_for_completion(&mut self, cx: &mut ModelContext<Self>) {
1201 let cached_message_ids: Vec<MessageId> = self
1202 .messages_metadata
1203 .iter()
1204 .filter_map(|(message_id, metadata)| {
1205 metadata.cache.as_ref().and_then(|cache| {
1206 if cache.status == CacheStatus::Pending {
1207 Some(*message_id)
1208 } else {
1209 None
1210 }
1211 })
1212 })
1213 .collect();
1214
1215 for message_id in cached_message_ids {
1216 self.update_metadata(message_id, cx, |metadata| {
1217 if let Some(cache) = &mut metadata.cache {
1218 cache.status = CacheStatus::Cached;
1219 }
1220 });
1221 }
1222 cx.notify();
1223 }
1224
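    /// Re-scans the rows edited since the last parse (merging overlapping row
    /// ranges) for slash commands and workflow-step XML tags, and emits
    /// `PendingSlashCommandsUpdated` / `WorkflowStepsUpdated` events describing
    /// what changed.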
1225 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1226 let buffer = self.buffer.read(cx).text_snapshot();
1227 let mut row_ranges = self
1228 .edits_since_last_parse
1229 .consume()
1230 .into_iter()
1231 .map(|edit| {
1232 let start_row = buffer.offset_to_point(edit.new.start).row;
1233 let end_row = buffer.offset_to_point(edit.new.end).row + 1;
1234 start_row..end_row
1235 })
1236 .peekable();
1237
1238 let mut removed_slash_command_ranges = Vec::new();
1239 let mut updated_slash_commands = Vec::new();
1240 let mut removed_steps = Vec::new();
1241 let mut updated_steps = Vec::new();
1242 while let Some(mut row_range) = row_ranges.next() {
1243 while let Some(next_row_range) = row_ranges.peek() {
1244 if row_range.end >= next_row_range.start {
1245 row_range.end = next_row_range.end;
1246 row_ranges.next();
1247 } else {
1248 break;
1249 }
1250 }
1251
1252 let start = buffer.anchor_before(Point::new(row_range.start, 0));
1253 let end = buffer.anchor_after(Point::new(
1254 row_range.end - 1,
1255 buffer.line_len(row_range.end - 1),
1256 ));
1257
1258 self.reparse_slash_commands_in_range(
1259 start..end,
1260 &buffer,
1261 &mut updated_slash_commands,
1262 &mut removed_slash_command_ranges,
1263 cx,
1264 );
1265 self.reparse_workflow_steps_in_range(
1266 start..end,
1267 &buffer,
1268 &mut updated_steps,
1269 &mut removed_steps,
1270 cx,
1271 );
1272 }
1273
1274 if !updated_slash_commands.is_empty() || !removed_slash_command_ranges.is_empty() {
1275 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1276 removed: removed_slash_command_ranges,
1277 updated: updated_slash_commands,
1278 });
1279 }
1280
1281 if !updated_steps.is_empty() || !removed_steps.is_empty() {
1282 cx.emit(ContextEvent::WorkflowStepsUpdated {
1283 removed: removed_steps,
1284 updated: updated_steps,
1285 });
1286 }
1287 }
1288
1289 fn reparse_slash_commands_in_range(
1290 &mut self,
1291 range: Range<text::Anchor>,
1292 buffer: &BufferSnapshot,
1293 updated: &mut Vec<PendingSlashCommand>,
1294 removed: &mut Vec<Range<text::Anchor>>,
1295 cx: &AppContext,
1296 ) {
1297 let old_range = self.pending_command_indices_for_range(range.clone(), cx);
1298
1299 let mut new_commands = Vec::new();
1300 let mut lines = buffer.text_for_range(range).lines();
1301 let mut offset = lines.offset();
1302 while let Some(line) = lines.next() {
1303 if let Some(command_line) = SlashCommandLine::parse(line) {
1304 let name = &line[command_line.name.clone()];
1305 let arguments = command_line
1306 .arguments
1307 .iter()
1308 .filter_map(|argument_range| {
1309 if argument_range.is_empty() {
1310 None
1311 } else {
1312 line.get(argument_range.clone())
1313 }
1314 })
1315 .map(ToOwned::to_owned)
1316 .collect::<SmallVec<_>>();
1317 if let Some(command) = SlashCommandRegistry::global(cx).command(name) {
1318 if !command.requires_argument() || !arguments.is_empty() {
1319 let start_ix = offset + command_line.name.start - 1;
1320 let end_ix = offset
1321 + command_line
1322 .arguments
1323 .last()
1324 .map_or(command_line.name.end, |argument| argument.end);
1325 let source_range =
1326 buffer.anchor_after(start_ix)..buffer.anchor_after(end_ix);
1327 let pending_command = PendingSlashCommand {
1328 name: name.to_string(),
1329 arguments,
1330 source_range,
1331 status: PendingSlashCommandStatus::Idle,
1332 };
1333 updated.push(pending_command.clone());
1334 new_commands.push(pending_command);
1335 }
1336 }
1337 }
1338
1339 offset = lines.offset();
1340 }
1341
1342 let removed_commands = self.pending_slash_commands.splice(old_range, new_commands);
1343 removed.extend(removed_commands.map(|command| command.source_range));
1344 }
1345
1346 fn reparse_workflow_steps_in_range(
1347 &mut self,
1348 range: Range<text::Anchor>,
1349 buffer: &BufferSnapshot,
1350 updated: &mut Vec<Range<text::Anchor>>,
1351 removed: &mut Vec<Range<text::Anchor>>,
1352 cx: &mut ModelContext<Self>,
1353 ) {
1354 // Rebuild the XML tags in the edited range.
1355 let intersecting_tags_range =
1356 self.indices_intersecting_buffer_range(&self.xml_tags, range.clone(), cx);
1357 let new_tags = self.parse_xml_tags_in_range(buffer, range.clone(), cx);
1358 self.xml_tags
1359 .splice(intersecting_tags_range.clone(), new_tags);
1360
1361 // Find which steps intersect the changed range.
1362 let intersecting_steps_range =
1363 self.indices_intersecting_buffer_range(&self.workflow_steps, range.clone(), cx);
1364
1365 // Reparse all tags after the last unchanged step before the change.
1366 let mut tags_start_ix = 0;
1367 if let Some(preceding_unchanged_step) =
1368 self.workflow_steps[..intersecting_steps_range.start].last()
1369 {
1370 tags_start_ix = match self.xml_tags.binary_search_by(|tag| {
1371 tag.range
1372 .start
1373 .cmp(&preceding_unchanged_step.range.end, buffer)
1374 .then(Ordering::Less)
1375 }) {
1376 Ok(ix) | Err(ix) => ix,
1377 };
1378 }
1379
1380 // Rebuild the workflow steps in the edited range.
1381 let mut new_steps = self.parse_steps(tags_start_ix, range.end, buffer);
1382
1383 if let Some(project) = self.project() {
1384 for step in &mut new_steps {
1385 Self::resolve_workflow_step_internal(step, &project, cx);
1386 }
1387 }
1388
1389 updated.extend(new_steps.iter().map(|step| step.range.clone()));
1390 let removed_steps = self
1391 .workflow_steps
1392 .splice(intersecting_steps_range, new_steps);
1393 removed.extend(
1394 removed_steps
1395 .map(|step| step.range)
1396 .filter(|range| !updated.contains(&range)),
1397 );
1398 }
1399
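    /// Scans the given range for workflow XML tags such as `<step>` or
    /// `</edit>`, considering only lines that fall within assistant messages,
    /// and returns them with their anchor ranges and open/close flags.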
1400 fn parse_xml_tags_in_range(
1401 &self,
1402 buffer: &BufferSnapshot,
1403 range: Range<text::Anchor>,
1404 cx: &AppContext,
1405 ) -> Vec<XmlTag> {
1406 let mut messages = self.messages(cx).peekable();
1407
1408 let mut tags = Vec::new();
1409 let mut lines = buffer.text_for_range(range).lines();
1410 let mut offset = lines.offset();
1411
1412 while let Some(line) = lines.next() {
1413 while let Some(message) = messages.peek() {
1414 if offset < message.offset_range.end {
1415 break;
1416 } else {
1417 messages.next();
1418 }
1419 }
1420
1421 let is_assistant_message = messages
1422 .peek()
1423 .map_or(false, |message| message.role == Role::Assistant);
1424 if is_assistant_message {
1425 for (start_ix, _) in line.match_indices('<') {
1426 let mut name_start_ix = start_ix + 1;
1427 let closing_bracket_ix = line[start_ix..].find('>').map(|i| start_ix + i);
1428 if let Some(closing_bracket_ix) = closing_bracket_ix {
1429 let end_ix = closing_bracket_ix + 1;
1430 let mut is_open_tag = true;
1431 if line[name_start_ix..closing_bracket_ix].starts_with('/') {
1432 name_start_ix += 1;
1433 is_open_tag = false;
1434 }
1435 let tag_inner = &line[name_start_ix..closing_bracket_ix];
1436 let tag_name_len = tag_inner
1437 .find(|c: char| c.is_whitespace())
1438 .unwrap_or(tag_inner.len());
1439 if let Ok(kind) = XmlTagKind::from_str(&tag_inner[..tag_name_len]) {
1440 tags.push(XmlTag {
1441 range: buffer.anchor_after(offset + start_ix)
1442 ..buffer.anchor_before(offset + end_ix),
1443 is_open_tag,
1444 kind,
1445 });
1446 };
1447 }
1448 }
1449 }
1450
1451 offset = lines.offset();
1452 }
1453 tags
1454 }
1455
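    /// Builds workflow steps from the XML tags starting at `tags_start_ix`:
    /// matching `<step>`/`</step>` pairs become steps, nested `<edit>` blocks
    /// contribute path/operation/search/description fields, and a step that is
    /// still unterminated at the end of the scan is extended to the end of the
    /// buffer.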
1456 fn parse_steps(
1457 &mut self,
1458 tags_start_ix: usize,
1459 buffer_end: text::Anchor,
1460 buffer: &BufferSnapshot,
1461 ) -> Vec<WorkflowStep> {
1462 let mut new_steps = Vec::new();
1463 let mut pending_step = None;
1464 let mut edit_step_depth = 0;
1465 let mut tags = self.xml_tags[tags_start_ix..].iter().peekable();
1466 'tags: while let Some(tag) = tags.next() {
1467 if tag.range.start.cmp(&buffer_end, buffer).is_gt() && edit_step_depth == 0 {
1468 break;
1469 }
1470
1471 if tag.kind == XmlTagKind::Step && tag.is_open_tag {
1472 edit_step_depth += 1;
1473 let edit_start = tag.range.start;
1474 let mut edits = Vec::new();
1475 let mut step = WorkflowStep {
1476 range: edit_start..edit_start,
1477 leading_tags_end: tag.range.end,
1478 trailing_tag_start: None,
1479 edits: Default::default(),
1480 resolution: None,
1481 resolution_task: None,
1482 };
1483
1484 while let Some(tag) = tags.next() {
1485 step.trailing_tag_start.get_or_insert(tag.range.start);
1486
1487 if tag.kind == XmlTagKind::Step && !tag.is_open_tag {
1488 // step.trailing_tag_start = Some(tag.range.start);
1489 edit_step_depth -= 1;
1490 if edit_step_depth == 0 {
1491 step.range.end = tag.range.end;
1492 step.edits = edits.into();
1493 new_steps.push(step);
1494 continue 'tags;
1495 }
1496 }
1497
1498 if tag.kind == XmlTagKind::Edit && tag.is_open_tag {
1499 let mut path = None;
1500 let mut search = None;
1501 let mut operation = None;
1502 let mut description = None;
1503
1504 while let Some(tag) = tags.next() {
1505 if tag.kind == XmlTagKind::Edit && !tag.is_open_tag {
1506 edits.push(WorkflowStepEdit::new(
1507 path,
1508 operation,
1509 search,
1510 description,
1511 ));
1512 break;
1513 }
1514
1515 if tag.is_open_tag
1516 && [
1517 XmlTagKind::Path,
1518 XmlTagKind::Search,
1519 XmlTagKind::Operation,
1520 XmlTagKind::Description,
1521 ]
1522 .contains(&tag.kind)
1523 {
1524 let kind = tag.kind;
1525 let content_start = tag.range.end;
1526 if let Some(tag) = tags.peek() {
1527 if tag.kind == kind && !tag.is_open_tag {
1528 let tag = tags.next().unwrap();
1529 let content_end = tag.range.start;
1530 let mut content = buffer
1531 .text_for_range(content_start..content_end)
1532 .collect::<String>();
1533 content.truncate(content.trim_end().len());
1534 match kind {
1535 XmlTagKind::Path => path = Some(content),
1536 XmlTagKind::Operation => operation = Some(content),
1537 XmlTagKind::Search => {
1538 search = Some(content).filter(|s| !s.is_empty())
1539 }
1540 XmlTagKind::Description => {
1541 description =
1542 Some(content).filter(|s| !s.is_empty())
1543 }
1544 _ => {}
1545 }
1546 }
1547 }
1548 }
1549 }
1550 }
1551 }
1552
1553 pending_step = Some(step);
1554 }
1555 }
1556
1557 if let Some(mut pending_step) = pending_step {
1558 pending_step.range.end = text::Anchor::MAX;
1559 new_steps.push(pending_step);
1560 }
1561
1562 new_steps
1563 }
1564
1565 pub fn resolve_workflow_step(
1566 &mut self,
1567 tagged_range: Range<text::Anchor>,
1568 cx: &mut ModelContext<Self>,
1569 ) -> Option<()> {
1570 let index = self
1571 .workflow_step_index_for_range(&tagged_range, self.buffer.read(cx))
1572 .ok()?;
1573 let step = &mut self.workflow_steps[index];
1574 let project = self.project.as_ref()?;
1575 step.resolution.take();
1576 Self::resolve_workflow_step_internal(step, project, cx);
1577 None
1578 }
1579
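    /// Spawns a task that resolves the step's edits against the project, derives
    /// a title from the text between the step's tags, stores the resolution on
    /// the step, and emits a `WorkflowStepsUpdated` event for its range.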
1580 fn resolve_workflow_step_internal(
1581 step: &mut WorkflowStep,
1582 project: &Model<Project>,
1583 cx: &mut ModelContext<'_, Context>,
1584 ) {
1585 step.resolution_task = Some(cx.spawn({
1586 let range = step.range.clone();
1587 let edits = step.edits.clone();
1588 let project = project.clone();
1589 |this, mut cx| async move {
1590 let suggestion_groups =
1591 Self::compute_step_resolution(project, edits, &mut cx).await;
1592
1593 this.update(&mut cx, |this, cx| {
1594 let buffer = this.buffer.read(cx).text_snapshot();
1595 let ix = this.workflow_step_index_for_range(&range, &buffer).ok();
1596 if let Some(ix) = ix {
1597 let step = &mut this.workflow_steps[ix];
1598
1599 let resolution = suggestion_groups.map(|suggestion_groups| {
1600 let mut title = String::new();
1601 for mut chunk in buffer.text_for_range(
1602 step.leading_tags_end
1603 ..step.trailing_tag_start.unwrap_or(step.range.end),
1604 ) {
1605 if title.is_empty() {
1606 chunk = chunk.trim_start();
1607 }
1608 if let Some((prefix, _)) = chunk.split_once('\n') {
1609 title.push_str(prefix);
1610 break;
1611 } else {
1612 title.push_str(chunk);
1613 }
1614 }
1615
1616 WorkflowStepResolution {
1617 title,
1618 suggestion_groups,
1619 }
1620 });
1621
1622 step.resolution = Some(Arc::new(resolution));
1623 cx.emit(ContextEvent::WorkflowStepsUpdated {
1624 removed: vec![],
1625 updated: vec![range],
1626 })
1627 }
1628 })
1629 .ok();
1630 }
1631 }));
1632 }
1633
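    /// Resolves each edit into concrete suggestions, then groups them per
    /// buffer: suggestions are sorted, overlapping ones merged, and each is
    /// given a context range padded by five rows on either side; groups whose
    /// context ranges touch are merged together.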
1634 async fn compute_step_resolution(
1635 project: Model<Project>,
1636 edits: Arc<[Result<WorkflowStepEdit>]>,
1637 cx: &mut AsyncAppContext,
1638 ) -> Result<HashMap<Model<Buffer>, Vec<WorkflowSuggestionGroup>>> {
1639 let mut suggestion_tasks = Vec::new();
1640 for edit in edits.iter() {
1641 let edit = edit.as_ref().map_err(|e| anyhow!("{e}"))?;
1642 suggestion_tasks.push(edit.resolve(project.clone(), cx.clone()));
1643 }
1644
1645 // Expand the context ranges of each suggestion and group suggestions with overlapping context ranges.
1646 let suggestions = future::try_join_all(suggestion_tasks).await?;
1647
1648 let mut suggestions_by_buffer = HashMap::default();
1649 for (buffer, suggestion) in suggestions {
1650 suggestions_by_buffer
1651 .entry(buffer)
1652 .or_insert_with(Vec::new)
1653 .push(suggestion);
1654 }
1655
1656 let mut suggestion_groups_by_buffer = HashMap::default();
1657 for (buffer, mut suggestions) in suggestions_by_buffer {
1658 let mut suggestion_groups = Vec::<WorkflowSuggestionGroup>::new();
1659 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
1660 // Sort suggestions by their range so that earlier, larger ranges come first
1661 suggestions.sort_by(|a, b| a.range().cmp(&b.range(), &snapshot));
1662
1663 // Merge overlapping suggestions
1664 suggestions.dedup_by(|a, b| b.try_merge(a, &snapshot));
1665
1666 // Create context ranges for each suggestion
1667 for suggestion in suggestions {
1668 let context_range = {
1669 let suggestion_point_range = suggestion.range().to_point(&snapshot);
1670 let start_row = suggestion_point_range.start.row.saturating_sub(5);
1671 let end_row =
1672 cmp::min(suggestion_point_range.end.row + 5, snapshot.max_point().row);
1673 let start = snapshot.anchor_before(Point::new(start_row, 0));
1674 let end =
1675 snapshot.anchor_after(Point::new(end_row, snapshot.line_len(end_row)));
1676 start..end
1677 };
1678
1679 if let Some(last_group) = suggestion_groups.last_mut() {
1680 if last_group
1681 .context_range
1682 .end
1683 .cmp(&context_range.start, &snapshot)
1684 .is_ge()
1685 {
1686 // Merge with the previous group if context ranges overlap
1687 last_group.context_range.end = context_range.end;
1688 last_group.suggestions.push(suggestion);
1689 } else {
1690 // Create a new group
1691 suggestion_groups.push(WorkflowSuggestionGroup {
1692 context_range,
1693 suggestions: vec![suggestion],
1694 });
1695 }
1696 } else {
1697 // Create the first group
1698 suggestion_groups.push(WorkflowSuggestionGroup {
1699 context_range,
1700 suggestions: vec![suggestion],
1701 });
1702 }
1703 }
1704
1705 suggestion_groups_by_buffer.insert(buffer, suggestion_groups);
1706 }
1707
1708 Ok(suggestion_groups_by_buffer)
1709 }
1710
1711 pub fn pending_command_for_position(
1712 &mut self,
1713 position: language::Anchor,
1714 cx: &mut ModelContext<Self>,
1715 ) -> Option<&mut PendingSlashCommand> {
1716 let buffer = self.buffer.read(cx);
1717 match self
1718 .pending_slash_commands
1719 .binary_search_by(|probe| probe.source_range.end.cmp(&position, buffer))
1720 {
1721 Ok(ix) => Some(&mut self.pending_slash_commands[ix]),
1722 Err(ix) => {
1723 let cmd = self.pending_slash_commands.get_mut(ix)?;
1724 if position.cmp(&cmd.source_range.start, buffer).is_ge()
1725 && position.cmp(&cmd.source_range.end, buffer).is_le()
1726 {
1727 Some(cmd)
1728 } else {
1729 None
1730 }
1731 }
1732 }
1733 }
1734
1735 pub fn pending_commands_for_range(
1736 &self,
1737 range: Range<language::Anchor>,
1738 cx: &AppContext,
1739 ) -> &[PendingSlashCommand] {
1740 let range = self.pending_command_indices_for_range(range, cx);
1741 &self.pending_slash_commands[range]
1742 }
1743
1744 fn pending_command_indices_for_range(
1745 &self,
1746 range: Range<language::Anchor>,
1747 cx: &AppContext,
1748 ) -> Range<usize> {
1749 self.indices_intersecting_buffer_range(&self.pending_slash_commands, range, cx)
1750 }
1751
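    /// Returns the index range of annotations whose ranges intersect `range`,
    /// assuming `all_annotations` is sorted by buffer position.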
1752 fn indices_intersecting_buffer_range<T: ContextAnnotation>(
1753 &self,
1754 all_annotations: &[T],
1755 range: Range<language::Anchor>,
1756 cx: &AppContext,
1757 ) -> Range<usize> {
1758 let buffer = self.buffer.read(cx);
1759 let start_ix = match all_annotations
1760 .binary_search_by(|probe| probe.range().end.cmp(&range.start, &buffer))
1761 {
1762 Ok(ix) | Err(ix) => ix,
1763 };
1764 let end_ix = match all_annotations
1765 .binary_search_by(|probe| probe.range().start.cmp(&range.end, &buffer))
1766 {
1767 Ok(ix) => ix + 1,
1768 Err(ix) => ix,
1769 };
1770 start_ix..end_ix
1771 }
1772
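    /// Replaces a slash command's invocation text with its output once the given
    /// task resolves: section ranges are clamped to char boundaries, a trailing
    /// newline is appended if requested, the resulting sections are recorded, and
    /// a `SlashCommandFinished` operation/event is broadcast. On failure the
    /// pending command is marked with an error status instead; while the task is
    /// in flight it is held by the command's `Running` status.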
1773 pub fn insert_command_output(
1774 &mut self,
1775 command_range: Range<language::Anchor>,
1776 output: Task<Result<SlashCommandOutput>>,
1777 ensure_trailing_newline: bool,
1778 expand_result: bool,
1779 cx: &mut ModelContext<Self>,
1780 ) {
1781 self.reparse(cx);
1782
1783 let insert_output_task = cx.spawn(|this, mut cx| {
1784 let command_range = command_range.clone();
1785 async move {
1786 let output = output.await;
1787 this.update(&mut cx, |this, cx| match output {
1788 Ok(mut output) => {
1789 // Ensure section ranges are valid.
1790 for section in &mut output.sections {
1791 section.range.start = section.range.start.min(output.text.len());
1792 section.range.end = section.range.end.min(output.text.len());
1793 while !output.text.is_char_boundary(section.range.start) {
1794 section.range.start -= 1;
1795 }
1796 while !output.text.is_char_boundary(section.range.end) {
1797 section.range.end += 1;
1798 }
1799 }
1800
1801 // Ensure there is a newline after the last section.
1802 if ensure_trailing_newline {
1803 let has_newline_after_last_section =
1804 output.sections.last().map_or(false, |last_section| {
1805 output.text[last_section.range.end..].ends_with('\n')
1806 });
1807 if !has_newline_after_last_section {
1808 output.text.push('\n');
1809 }
1810 }
1811
1812 let version = this.version.clone();
1813 let command_id = SlashCommandId(this.next_timestamp());
1814 let (operation, event) = this.buffer.update(cx, |buffer, cx| {
1815 let start = command_range.start.to_offset(buffer);
1816 let old_end = command_range.end.to_offset(buffer);
1817 let new_end = start + output.text.len();
1818 buffer.edit([(start..old_end, output.text)], None, cx);
1819
1820 let mut sections = output
1821 .sections
1822 .into_iter()
1823 .map(|section| SlashCommandOutputSection {
1824 range: buffer.anchor_after(start + section.range.start)
1825 ..buffer.anchor_before(start + section.range.end),
1826 icon: section.icon,
1827 label: section.label,
1828 })
1829 .collect::<Vec<_>>();
1830 sections.sort_by(|a, b| a.range.cmp(&b.range, buffer));
1831
1832 this.slash_command_output_sections
1833 .extend(sections.iter().cloned());
1834 this.slash_command_output_sections
1835 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
1836
1837 let output_range =
1838 buffer.anchor_after(start)..buffer.anchor_before(new_end);
1839 this.finished_slash_commands.insert(command_id);
1840
1841 (
1842 ContextOperation::SlashCommandFinished {
1843 id: command_id,
1844 output_range: output_range.clone(),
1845 sections: sections.clone(),
1846 version,
1847 },
1848 ContextEvent::SlashCommandFinished {
1849 output_range,
1850 sections,
1851 run_commands_in_output: output.run_commands_in_text,
1852 expand_result,
1853 },
1854 )
1855 });
1856
1857 this.push_op(operation, cx);
1858 cx.emit(event);
1859 }
1860 Err(error) => {
1861 if let Some(pending_command) =
1862 this.pending_command_for_position(command_range.start, cx)
1863 {
1864 pending_command.status =
1865 PendingSlashCommandStatus::Error(error.to_string());
1866 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1867 removed: vec![pending_command.source_range.clone()],
1868 updated: vec![pending_command.clone()],
1869 });
1870 }
1871 }
1872 })
1873 .ok();
1874 }
1875 });
1876
1877 if let Some(pending_command) = self.pending_command_for_position(command_range.start, cx) {
1878 pending_command.status = PendingSlashCommandStatus::Running {
1879 _task: insert_output_task.shared(),
1880 };
1881 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1882 removed: vec![pending_command.source_range.clone()],
1883 updated: vec![pending_command.clone()],
1884 });
1885 }
1886 }
1887
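    /// Appends a tool's output to the end of the buffer once the given task
    /// resolves, records it as tool-result content, and emits `ToolFinished`;
    /// on failure the pending tool use is marked with an error status. While the
    /// task runs, the pending tool use holds onto it via its `Running` status.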
1888 pub fn insert_tool_output(
1889 &mut self,
1890 tool_use_id: Arc<str>,
1891 output: Task<Result<String>>,
1892 cx: &mut ModelContext<Self>,
1893 ) {
1894 let insert_output_task = cx.spawn(|this, mut cx| {
1895 let tool_use_id = tool_use_id.clone();
1896 async move {
1897 let output = output.await;
1898 this.update(&mut cx, |this, cx| match output {
1899 Ok(mut output) => {
1900 const NEWLINE: char = '\n';
1901
1902 if !output.ends_with(NEWLINE) {
1903 output.push(NEWLINE);
1904 }
1905
1906 let anchor_range = this.buffer.update(cx, |buffer, cx| {
1907 let insert_start = buffer.len().to_offset(buffer);
1908 let insert_end = insert_start;
1909
1910 let start = insert_start;
1911 let end = start + output.len() - NEWLINE.len_utf8();
1912
1913 buffer.edit([(insert_start..insert_end, output)], None, cx);
1914
1915 let output_range = buffer.anchor_after(start)..buffer.anchor_after(end);
1916
1917 output_range
1918 });
1919
1920 this.insert_content(
1921 Content::ToolResult {
1922 range: anchor_range.clone(),
1923 tool_use_id: tool_use_id.clone(),
1924 },
1925 cx,
1926 );
1927
1928 cx.emit(ContextEvent::ToolFinished {
1929 tool_use_id,
1930 output_range: anchor_range,
1931 });
1932 }
1933 Err(err) => {
1934 if let Some(tool_use) = this.pending_tool_uses_by_id.get_mut(&tool_use_id) {
1935 tool_use.status = PendingToolUseStatus::Error(err.to_string());
1936 }
1937 }
1938 })
1939 .ok();
1940 }
1941 });
1942
1943 if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) {
1944 tool_use.status = PendingToolUseStatus::Running {
1945 _task: insert_output_task.shared(),
1946 };
1947 }
1948 }
1949
1950 pub fn completion_provider_changed(&mut self, cx: &mut ModelContext<Self>) {
1951 self.count_remaining_tokens(cx);
1952 }
1953
1954 fn get_last_valid_message_id(&self, cx: &ModelContext<Self>) -> Option<MessageId> {
1955 self.message_anchors.iter().rev().find_map(|message| {
1956 message
1957 .start
1958 .is_valid(self.buffer.read(cx))
1959 .then_some(message.id)
1960 })
1961 }
1962
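    /// Starts a completion using the active provider and model: cache anchors are
    /// marked, a request is built (including tools when the tool-use feature flag
    /// is enabled), a pending assistant message plus an empty user message for
    /// the next reply are inserted, and the streamed response is written into the
    /// assistant message. Returns `None` when there is no active model,
    /// authenticated provider, or valid message to respond to.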
1963 pub fn assist(&mut self, cx: &mut ModelContext<Self>) -> Option<MessageAnchor> {
1964 let provider = LanguageModelRegistry::read_global(cx).active_provider()?;
1965 let model = LanguageModelRegistry::read_global(cx).active_model()?;
1966 let last_message_id = self.get_last_valid_message_id(cx)?;
1967
1968 if !provider.is_authenticated(cx) {
1969 log::info!("completion provider has no credentials");
1970 return None;
1971 }
1972 // Compute which messages to cache, including the last one.
1973 self.mark_cache_anchors(&model.cache_configuration(), false, cx);
1974
1975 let mut request = self.to_completion_request(cx);
1976
1977 if cx.has_flag::<ToolUseFeatureFlag>() {
1978 let tool_registry = ToolRegistry::global(cx);
1979 request.tools = tool_registry
1980 .tools()
1981 .into_iter()
1982 .map(|tool| LanguageModelRequestTool {
1983 name: tool.name(),
1984 description: tool.description(),
1985 input_schema: tool.input_schema(),
1986 })
1987 .collect();
1988 }
1989
1990 let assistant_message = self
1991 .insert_message_after(last_message_id, Role::Assistant, MessageStatus::Pending, cx)
1992 .unwrap();
1993
1994 // Queue up the user's next reply.
1995 let user_message = self
1996 .insert_message_after(assistant_message.id, Role::User, MessageStatus::Done, cx)
1997 .unwrap();
1998
1999 let pending_completion_id = post_inc(&mut self.completion_count);
2000
2001 let task = cx.spawn({
2002 |this, mut cx| async move {
2003 let stream = model.stream_completion(request, &cx);
2004 let assistant_message_id = assistant_message.id;
2005 let mut response_latency = None;
2006 let stream_completion = async {
2007 let request_start = Instant::now();
2008 let mut events = stream.await?;
2009 let mut stop_reason = StopReason::EndTurn;
2010
2011 while let Some(event) = events.next().await {
2012 if response_latency.is_none() {
2013 response_latency = Some(request_start.elapsed());
2014 }
2015 let event = event?;
2016
2017 this.update(&mut cx, |this, cx| {
2018 let message_ix = this
2019 .message_anchors
2020 .iter()
2021 .position(|message| message.id == assistant_message_id)?;
2022 this.buffer.update(cx, |buffer, cx| {
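                            // Stream new content to the end of this assistant message:
                            // just before the next valid message's start, or at the end
                            // of the buffer if there is none.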
2023 let message_old_end_offset = this.message_anchors[message_ix + 1..]
2024 .iter()
2025 .find(|message| message.start.is_valid(buffer))
2026 .map_or(buffer.len(), |message| {
2027 message.start.to_offset(buffer).saturating_sub(1)
2028 });
2029
2030 match event {
2031 LanguageModelCompletionEvent::Stop(reason) => {
2032 stop_reason = reason;
2033 }
2034 LanguageModelCompletionEvent::Text(chunk) => {
2035 buffer.edit(
2036 [(
2037 message_old_end_offset..message_old_end_offset,
2038 chunk,
2039 )],
2040 None,
2041 cx,
2042 );
2043 }
2044 LanguageModelCompletionEvent::ToolUse(tool_use) => {
2045 const NEWLINE: char = '\n';
2046
2047 let mut text = String::new();
2048 text.push(NEWLINE);
2049 text.push_str(
2050 &serde_json::to_string_pretty(&tool_use)
2051 .expect("failed to serialize tool use to JSON"),
2052 );
2053 text.push(NEWLINE);
2054 let text_len = text.len();
2055
2056 buffer.edit(
2057 [(
2058 message_old_end_offset..message_old_end_offset,
2059 text,
2060 )],
2061 None,
2062 cx,
2063 );
2064
2065 let start_ix = message_old_end_offset + NEWLINE.len_utf8();
2066 let end_ix =
2067 message_old_end_offset + text_len - NEWLINE.len_utf8();
2068 let source_range = buffer.anchor_after(start_ix)
2069 ..buffer.anchor_after(end_ix);
2070
2071 let tool_use_id: Arc<str> = tool_use.id.into();
2072 this.pending_tool_uses_by_id.insert(
2073 tool_use_id.clone(),
2074 PendingToolUse {
2075 id: tool_use_id,
2076 name: tool_use.name,
2077 input: tool_use.input,
2078 status: PendingToolUseStatus::Idle,
2079 source_range,
2080 },
2081 );
2082 }
2083 }
2084 });
2085
2086 cx.emit(ContextEvent::StreamedCompletion);
2087
2088 Some(())
2089 })?;
2090 smol::future::yield_now().await;
2091 }
2092 this.update(&mut cx, |this, cx| {
2093 this.pending_completions
2094 .retain(|completion| completion.id != pending_completion_id);
2095 this.summarize(false, cx);
2096 this.update_cache_status_for_completion(cx);
2097 })?;
2098
2099 anyhow::Ok(stop_reason)
2100 };
2101
2102 let result = stream_completion.await;
2103
2104 this.update(&mut cx, |this, cx| {
2105 let error_message = result
2106 .as_ref()
2107 .err()
2108 .map(|error| error.to_string().trim().to_string());
2109
2110 if let Some(error_message) = error_message.as_ref() {
2111 cx.emit(ContextEvent::ShowAssistError(SharedString::from(
2112 error_message.clone(),
2113 )));
2114 }
2115
2116 this.update_metadata(assistant_message_id, cx, |metadata| {
2117 if let Some(error_message) = error_message.as_ref() {
2118 metadata.status =
2119 MessageStatus::Error(SharedString::from(error_message.clone()));
2120 } else {
2121 metadata.status = MessageStatus::Done;
2122 }
2123 });
2124
2125 if let Some(telemetry) = this.telemetry.as_ref() {
2126 telemetry.report_assistant_event(
2127 Some(this.id.0.clone()),
2128 AssistantKind::Panel,
2129 model.telemetry_id(),
2130 response_latency,
2131 error_message,
2132 );
2133 }
2134
2135 if let Ok(stop_reason) = result {
2136 match stop_reason {
2137 StopReason::ToolUse => {
2138 cx.emit(ContextEvent::UsePendingTools);
2139 }
2140 StopReason::EndTurn => {}
2141 StopReason::MaxTokens => {}
2142 }
2143 }
2144 })
2145 .ok();
2146 }
2147 });
2148
2149 self.pending_completions.push(PendingCompletion {
2150 id: pending_completion_id,
2151 assistant_message_id: assistant_message.id,
2152 _task: task,
2153 });
2154
2155 Some(user_message)
2156 }
2157
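    /// Converts the context into a `LanguageModelRequest`, including only
    /// messages whose status is `Done` and interleaving each message's text
    /// with any image, tool-use, and tool-result content inside its range.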
2158 pub fn to_completion_request(&self, cx: &AppContext) -> LanguageModelRequest {
2159 let buffer = self.buffer.read(cx);
2160
2161 let mut contents = self.contents(cx).peekable();
2162
2163 fn collect_text_content(buffer: &Buffer, range: Range<usize>) -> Option<String> {
2164 let text: String = buffer.text_for_range(range.clone()).collect();
2165 if text.trim().is_empty() {
2166 None
2167 } else {
2168 Some(text)
2169 }
2170 }
2171
2172 let mut completion_request = LanguageModelRequest {
2173 messages: Vec::new(),
2174 tools: Vec::new(),
2175 stop: Vec::new(),
2176 temperature: 1.0,
2177 };
2178 for message in self.messages(cx) {
2179 if message.status != MessageStatus::Done {
2180 continue;
2181 }
2182
2183 let mut offset = message.offset_range.start;
2184 let mut request_message = LanguageModelRequestMessage {
2185 role: message.role,
2186 content: Vec::new(),
2187 cache: message
2188 .cache
2189 .as_ref()
2190 .map_or(false, |cache| cache.is_anchor),
2191 };
2192
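            // Emit the plain text preceding each piece of special content
            // (images, tool uses, tool results) and then the content itself,
            // stopping at the end of this message.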
2193 while let Some(content) = contents.peek() {
2194 if content
2195 .range()
2196 .end
2197 .cmp(&message.anchor_range.end, buffer)
2198 .is_lt()
2199 {
2200 let content = contents.next().unwrap();
2201 let range = content.range().to_offset(buffer);
2202 request_message.content.extend(
2203 collect_text_content(buffer, offset..range.start).map(MessageContent::Text),
2204 );
2205
2206 match content {
2207 Content::Image { image, .. } => {
2208 if let Some(image) = image.clone().now_or_never().flatten() {
2209 request_message
2210 .content
2211 .push(language_model::MessageContent::Image(image));
2212 }
2213 }
2214 Content::ToolUse { tool_use, .. } => {
2215 request_message
2216 .content
2217 .push(language_model::MessageContent::ToolUse(tool_use.clone()));
2218 }
2219 Content::ToolResult { tool_use_id, .. } => {
2220 request_message.content.push(
2221 language_model::MessageContent::ToolResult(
2222 LanguageModelToolResult {
2223 tool_use_id: tool_use_id.to_string(),
2224 is_error: false,
2225 content: collect_text_content(buffer, range.clone())
2226 .unwrap_or_default(),
2227 },
2228 ),
2229 );
2230 }
2231 }
2232
2233 offset = range.end;
2234 } else {
2235 break;
2236 }
2237 }
2238
2239 request_message.content.extend(
2240 collect_text_content(buffer, offset..message.offset_range.end)
2241 .map(MessageContent::Text),
2242 );
2243
2244 completion_request.messages.push(request_message);
2245 }
2246
2247 completion_request
2248 }
2249
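    /// Cancels the most recent pending completion, marking its assistant
    /// message as `Canceled` if it was still pending. Returns whether a
    /// completion was canceled.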
2250 pub fn cancel_last_assist(&mut self, cx: &mut ModelContext<Self>) -> bool {
2251 if let Some(pending_completion) = self.pending_completions.pop() {
2252 self.update_metadata(pending_completion.assistant_message_id, cx, |metadata| {
2253 if metadata.status == MessageStatus::Pending {
2254 metadata.status = MessageStatus::Canceled;
2255 }
2256 });
2257 true
2258 } else {
2259 false
2260 }
2261 }
2262
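    /// Advances the role of each of the given messages to the next role in the
    /// cycle, then re-parses workflow steps in the affected ranges.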
2263 pub fn cycle_message_roles(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
2264 for id in &ids {
2265 if let Some(metadata) = self.messages_metadata.get(id) {
2266 let role = metadata.role.cycle();
2267 self.update_metadata(*id, cx, |metadata| metadata.role = role);
2268 }
2269 }
2270
2271 self.message_roles_updated(ids, cx);
2272 }
2273
2274 fn message_roles_updated(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
2275 let mut ranges = Vec::new();
2276 for message in self.messages(cx) {
2277 if ids.contains(&message.id) {
2278 ranges.push(message.anchor_range.clone());
2279 }
2280 }
2281
2282 let buffer = self.buffer.read(cx).text_snapshot();
2283 let mut updated = Vec::new();
2284 let mut removed = Vec::new();
2285 for range in ranges {
2286 self.reparse_workflow_steps_in_range(range, &buffer, &mut updated, &mut removed, cx);
2287 }
2288
2289 if !updated.is_empty() || !removed.is_empty() {
2290 cx.emit(ContextEvent::WorkflowStepsUpdated { removed, updated })
2291 }
2292 }
2293
2294 pub fn update_metadata(
2295 &mut self,
2296 id: MessageId,
2297 cx: &mut ModelContext<Self>,
2298 f: impl FnOnce(&mut MessageMetadata),
2299 ) {
2300 let version = self.version.clone();
2301 let timestamp = self.next_timestamp();
2302 if let Some(metadata) = self.messages_metadata.get_mut(&id) {
2303 f(metadata);
2304 metadata.timestamp = timestamp;
2305 let operation = ContextOperation::UpdateMessage {
2306 message_id: id,
2307 metadata: metadata.clone(),
2308 version,
2309 };
2310 self.push_op(operation, cx);
2311 cx.emit(ContextEvent::MessagesEdited);
2312 cx.notify();
2313 }
2314 }
2315
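    /// Inserts a new message immediately after the given one, skipping any
    /// anchors that are no longer valid. Returns `None` if `message_id` is
    /// unknown.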
2316 pub fn insert_message_after(
2317 &mut self,
2318 message_id: MessageId,
2319 role: Role,
2320 status: MessageStatus,
2321 cx: &mut ModelContext<Self>,
2322 ) -> Option<MessageAnchor> {
2323 if let Some(prev_message_ix) = self
2324 .message_anchors
2325 .iter()
2326 .position(|message| message.id == message_id)
2327 {
2328 // Find the next valid message after the one we were given.
2329 let mut next_message_ix = prev_message_ix + 1;
2330 while let Some(next_message) = self.message_anchors.get(next_message_ix) {
2331 if next_message.start.is_valid(self.buffer.read(cx)) {
2332 break;
2333 }
2334 next_message_ix += 1;
2335 }
2336
2337 let start = self.buffer.update(cx, |buffer, cx| {
2338 let offset = self
2339 .message_anchors
2340 .get(next_message_ix)
2341 .map_or(buffer.len(), |message| {
2342 buffer.clip_offset(message.start.to_offset(buffer) - 1, Bias::Left)
2343 });
2344 buffer.edit([(offset..offset, "\n")], None, cx);
2345 buffer.anchor_before(offset + 1)
2346 });
2347
2348 let version = self.version.clone();
2349 let anchor = MessageAnchor {
2350 id: MessageId(self.next_timestamp()),
2351 start,
2352 };
2353 let metadata = MessageMetadata {
2354 role,
2355 status,
2356 timestamp: anchor.id.0,
2357 cache: None,
2358 };
2359 self.insert_message(anchor.clone(), metadata.clone(), cx);
2360 self.push_op(
2361 ContextOperation::InsertMessage {
2362 anchor: anchor.clone(),
2363 metadata,
2364 version,
2365 },
2366 cx,
2367 );
2368 Some(anchor)
2369 } else {
2370 None
2371 }
2372 }
2373
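    /// Inserts `content` into the ordered list of contents, replacing any
    /// existing entry that occupies the same position.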
2374 pub fn insert_content(&mut self, content: Content, cx: &mut ModelContext<Self>) {
2375 let buffer = self.buffer.read(cx);
2376 let insertion_ix = match self
2377 .contents
2378 .binary_search_by(|probe| probe.cmp(&content, buffer))
2379 {
2380 Ok(ix) => {
2381 self.contents.remove(ix);
2382 ix
2383 }
2384 Err(ix) => ix,
2385 };
2386 self.contents.insert(insertion_ix, content);
2387 cx.emit(ContextEvent::MessagesEdited);
2388 }
2389
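    /// Iterates over the contents whose anchors are still valid in the buffer.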
2390 pub fn contents<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Content> {
2391 let buffer = self.buffer.read(cx);
2392 self.contents
2393 .iter()
2394 .filter(|content| {
2395 let range = content.range();
2396 range.start.is_valid(buffer) && range.end.is_valid(buffer)
2397 })
2398 .cloned()
2399 }
2400
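    /// Splits the message containing `range` at its boundaries, inserting
    /// newlines where needed. Returns the newly created message anchors; both
    /// are `None` when the range spans more than one message.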
2401 pub fn split_message(
2402 &mut self,
2403 range: Range<usize>,
2404 cx: &mut ModelContext<Self>,
2405 ) -> (Option<MessageAnchor>, Option<MessageAnchor>) {
2406 let start_message = self.message_for_offset(range.start, cx);
2407 let end_message = self.message_for_offset(range.end, cx);
2408 if let Some((start_message, end_message)) = start_message.zip(end_message) {
2409 // Prevent splitting when range spans multiple messages.
2410 if start_message.id != end_message.id {
2411 return (None, None);
2412 }
2413
2414 let message = start_message;
2415 let role = message.role;
2416 let mut edited_buffer = false;
2417
2418 let mut suffix_start = None;
2419
2420 // TODO: why did this start panicking?
2421 if range.start > message.offset_range.start
2422 && range.end < message.offset_range.end.saturating_sub(1)
2423 {
2424 if self.buffer.read(cx).chars_at(range.end).next() == Some('\n') {
2425 suffix_start = Some(range.end + 1);
2426 } else if self.buffer.read(cx).reversed_chars_at(range.end).next() == Some('\n') {
2427 suffix_start = Some(range.end);
2428 }
2429 }
2430
2431 let version = self.version.clone();
2432 let suffix = if let Some(suffix_start) = suffix_start {
2433 MessageAnchor {
2434 id: MessageId(self.next_timestamp()),
2435 start: self.buffer.read(cx).anchor_before(suffix_start),
2436 }
2437 } else {
2438 self.buffer.update(cx, |buffer, cx| {
2439 buffer.edit([(range.end..range.end, "\n")], None, cx);
2440 });
2441 edited_buffer = true;
2442 MessageAnchor {
2443 id: MessageId(self.next_timestamp()),
2444 start: self.buffer.read(cx).anchor_before(range.end + 1),
2445 }
2446 };
2447
2448 let suffix_metadata = MessageMetadata {
2449 role,
2450 status: MessageStatus::Done,
2451 timestamp: suffix.id.0,
2452 cache: None,
2453 };
2454 self.insert_message(suffix.clone(), suffix_metadata.clone(), cx);
2455 self.push_op(
2456 ContextOperation::InsertMessage {
2457 anchor: suffix.clone(),
2458 metadata: suffix_metadata,
2459 version,
2460 },
2461 cx,
2462 );
2463
2464 let new_messages =
2465 if range.start == range.end || range.start == message.offset_range.start {
2466 (None, Some(suffix))
2467 } else {
2468 let mut prefix_end = None;
2469 if range.start > message.offset_range.start
2470 && range.end < message.offset_range.end.saturating_sub(1)
2471 {
2472 if self.buffer.read(cx).chars_at(range.start).next() == Some('\n') {
2473 prefix_end = Some(range.start + 1);
2474 } else if self.buffer.read(cx).reversed_chars_at(range.start).next()
2475 == Some('\n')
2476 {
2477 prefix_end = Some(range.start);
2478 }
2479 }
2480
2481 let version = self.version.clone();
2482 let selection = if let Some(prefix_end) = prefix_end {
2483 MessageAnchor {
2484 id: MessageId(self.next_timestamp()),
2485 start: self.buffer.read(cx).anchor_before(prefix_end),
2486 }
2487 } else {
2488 self.buffer.update(cx, |buffer, cx| {
2489 buffer.edit([(range.start..range.start, "\n")], None, cx)
2490 });
2491 edited_buffer = true;
2492 MessageAnchor {
2493 id: MessageId(self.next_timestamp()),
2494 start: self.buffer.read(cx).anchor_before(range.end + 1),
2495 }
2496 };
2497
2498 let selection_metadata = MessageMetadata {
2499 role,
2500 status: MessageStatus::Done,
2501 timestamp: selection.id.0,
2502 cache: None,
2503 };
2504 self.insert_message(selection.clone(), selection_metadata.clone(), cx);
2505 self.push_op(
2506 ContextOperation::InsertMessage {
2507 anchor: selection.clone(),
2508 metadata: selection_metadata,
2509 version,
2510 },
2511 cx,
2512 );
2513
2514 (Some(selection), Some(suffix))
2515 };
2516
2517 if !edited_buffer {
2518 cx.emit(ContextEvent::MessagesEdited);
2519 }
2520 new_messages
2521 } else {
2522 (None, None)
2523 }
2524 }
2525
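    /// Records the message's metadata and inserts its anchor at the correct
    /// sorted position among the existing message anchors.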
2526 fn insert_message(
2527 &mut self,
2528 new_anchor: MessageAnchor,
2529 new_metadata: MessageMetadata,
2530 cx: &mut ModelContext<Self>,
2531 ) {
2532 cx.emit(ContextEvent::MessagesEdited);
2533
2534 self.messages_metadata.insert(new_anchor.id, new_metadata);
2535
2536 let buffer = self.buffer.read(cx);
2537 let insertion_ix = self
2538 .message_anchors
2539 .iter()
2540 .position(|anchor| {
2541 let comparison = new_anchor.start.cmp(&anchor.start, buffer);
2542 comparison.is_lt() || (comparison.is_eq() && new_anchor.id > anchor.id)
2543 })
2544 .unwrap_or(self.message_anchors.len());
2545 self.message_anchors.insert(insertion_ix, new_anchor);
2546 }
2547
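    /// Asks the active model for a short, one-line title for the context.
    /// Unless `replace_old` is true, a summary is only requested once the
    /// context has at least two messages and no summary yet.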
2548 pub(super) fn summarize(&mut self, replace_old: bool, cx: &mut ModelContext<Self>) {
2549 let Some(provider) = LanguageModelRegistry::read_global(cx).active_provider() else {
2550 return;
2551 };
2552 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
2553 return;
2554 };
2555
2556 if replace_old || (self.message_anchors.len() >= 2 && self.summary.is_none()) {
2557 if !provider.is_authenticated(cx) {
2558 return;
2559 }
2560
2561 let mut request = self.to_completion_request(cx);
2562 request.messages.push(LanguageModelRequestMessage {
2563 role: Role::User,
2564 content: vec![
2565 "Summarize the context into a short title without punctuation.".into(),
2566 ],
2567 cache: false,
2568 });
2569
2570 self.pending_summary = cx.spawn(|this, mut cx| {
2571 async move {
2572 let stream = model.stream_completion_text(request, &cx);
2573 let mut messages = stream.await?;
2574
2575 let mut replaced = !replace_old;
2576 while let Some(message) = messages.next().await {
2577 let text = message?;
2578 let mut lines = text.lines();
2579 this.update(&mut cx, |this, cx| {
2580 let version = this.version.clone();
2581 let timestamp = this.next_timestamp();
2582 let summary = this.summary.get_or_insert(ContextSummary::default());
2583 if !replaced && replace_old {
2584 summary.text.clear();
2585 replaced = true;
2586 }
2587 summary.text.extend(lines.next());
2588 summary.timestamp = timestamp;
2589 let operation = ContextOperation::UpdateSummary {
2590 summary: summary.clone(),
2591 version,
2592 };
2593 this.push_op(operation, cx);
2594 cx.emit(ContextEvent::SummaryChanged);
2595 })?;
2596
2597 // Stop if the LLM generated multiple lines.
2598 if lines.next().is_some() {
2599 break;
2600 }
2601 }
2602
2603 this.update(&mut cx, |this, cx| {
2604 let version = this.version.clone();
2605 let timestamp = this.next_timestamp();
2606 if let Some(summary) = this.summary.as_mut() {
2607 summary.done = true;
2608 summary.timestamp = timestamp;
2609 let operation = ContextOperation::UpdateSummary {
2610 summary: summary.clone(),
2611 version,
2612 };
2613 this.push_op(operation, cx);
2614 cx.emit(ContextEvent::SummaryChanged);
2615 }
2616 })?;
2617
2618 anyhow::Ok(())
2619 }
2620 .log_err()
2621 });
2622 }
2623 }
2624
2625 fn message_for_offset(&self, offset: usize, cx: &AppContext) -> Option<Message> {
2626 self.messages_for_offsets([offset], cx).pop()
2627 }
2628
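    /// Returns the messages containing the given buffer offsets. Offsets are
    /// expected in ascending order; offsets that fall within the same message
    /// produce a single entry.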
2629 pub fn messages_for_offsets(
2630 &self,
2631 offsets: impl IntoIterator<Item = usize>,
2632 cx: &AppContext,
2633 ) -> Vec<Message> {
2634 let mut result = Vec::new();
2635
2636 let mut messages = self.messages(cx).peekable();
2637 let mut offsets = offsets.into_iter().peekable();
2638 let mut current_message = messages.next();
2639 while let Some(offset) = offsets.next() {
2640 // Locate the message that contains the offset.
2641 while current_message.as_ref().map_or(false, |message| {
2642 !message.offset_range.contains(&offset) && messages.peek().is_some()
2643 }) {
2644 current_message = messages.next();
2645 }
2646 let Some(message) = current_message.as_ref() else {
2647 break;
2648 };
2649
2650 // Skip offsets that are in the same message.
2651 while offsets.peek().map_or(false, |offset| {
2652 message.offset_range.contains(offset) || messages.peek().is_none()
2653 }) {
2654 offsets.next();
2655 }
2656
2657 result.push(message.clone());
2658 }
2659 result
2660 }
2661
2662 fn messages_from_anchors<'a>(
2663 &'a self,
2664 message_anchors: impl Iterator<Item = &'a MessageAnchor> + 'a,
2665 cx: &'a AppContext,
2666 ) -> impl 'a + Iterator<Item = Message> {
2667 let buffer = self.buffer.read(cx);
2668
2669 Self::messages_from_iters(buffer, &self.messages_metadata, message_anchors.enumerate())
2670 }
2671
2672 pub fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Message> {
2673 self.messages_from_anchors(self.message_anchors.iter(), cx)
2674 }
2675
2676 pub fn messages_from_iters<'a>(
2677 buffer: &'a Buffer,
2678 metadata: &'a HashMap<MessageId, MessageMetadata>,
2679 messages: impl Iterator<Item = (usize, &'a MessageAnchor)> + 'a,
2680 ) -> impl 'a + Iterator<Item = Message> {
2681 let mut messages = messages.peekable();
2682
2683 iter::from_fn(move || {
2684 if let Some((start_ix, message_anchor)) = messages.next() {
2685 let metadata = metadata.get(&message_anchor.id)?;
2686
2687 let message_start = message_anchor.start.to_offset(buffer);
2688 let mut message_end = None;
2689 let mut end_ix = start_ix;
2690 while let Some((_, next_message)) = messages.peek() {
2691 if next_message.start.is_valid(buffer) {
2692 message_end = Some(next_message.start);
2693 break;
2694 } else {
2695 end_ix += 1;
2696 messages.next();
2697 }
2698 }
2699 let message_end_anchor = message_end.unwrap_or(language::Anchor::MAX);
2700 let message_end = message_end_anchor.to_offset(buffer);
2701
2702 return Some(Message {
2703 index_range: start_ix..end_ix,
2704 offset_range: message_start..message_end,
2705 anchor_range: message_anchor.start..message_end_anchor,
2706 id: message_anchor.id,
2707 role: metadata.role,
2708 status: metadata.status.clone(),
2709 cache: metadata.cache.clone(),
2710 });
2711 }
2712 None
2713 })
2714 }
2715
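    /// Saves the context to `contexts_dir()` as `"{summary} - {n}.zed.json"`,
    /// choosing the smallest unused discriminant `n` and removing the
    /// previously saved file if the path changed. Remote contexts (non-default
    /// replica id) are not saved for now.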
2716 pub fn save(
2717 &mut self,
2718 debounce: Option<Duration>,
2719 fs: Arc<dyn Fs>,
2720 cx: &mut ModelContext<Context>,
2721 ) {
2722 if self.replica_id() != ReplicaId::default() {
2723 // Prevent saving a remote context for now.
2724 return;
2725 }
2726
2727 self.pending_save = cx.spawn(|this, mut cx| async move {
2728 if let Some(debounce) = debounce {
2729 cx.background_executor().timer(debounce).await;
2730 }
2731
2732 let (old_path, summary) = this.read_with(&cx, |this, _| {
2733 let path = this.path.clone();
2734 let summary = if let Some(summary) = this.summary.as_ref() {
2735 if summary.done {
2736 Some(summary.text.clone())
2737 } else {
2738 None
2739 }
2740 } else {
2741 None
2742 };
2743 (path, summary)
2744 })?;
2745
2746 if let Some(summary) = summary {
2747 let context = this.read_with(&cx, |this, cx| this.serialize(cx))?;
2748 let mut discriminant = 1;
2749 let mut new_path;
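            // Find a file name that isn't already taken.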
2750 loop {
2751 new_path = contexts_dir().join(&format!(
2752 "{} - {}.zed.json",
2753 summary.trim(),
2754 discriminant
2755 ));
2756 if fs.is_file(&new_path).await {
2757 discriminant += 1;
2758 } else {
2759 break;
2760 }
2761 }
2762
2763 fs.create_dir(contexts_dir().as_ref()).await?;
2764 fs.atomic_write(new_path.clone(), serde_json::to_string(&context).unwrap())
2765 .await?;
2766 if let Some(old_path) = old_path {
2767 if new_path != old_path {
2768 fs.remove_file(
2769 &old_path,
2770 RemoveOptions {
2771 recursive: false,
2772 ignore_if_not_exists: true,
2773 },
2774 )
2775 .await?;
2776 }
2777 }
2778
2779 this.update(&mut cx, |this, _| this.path = Some(new_path))?;
2780 }
2781
2782 Ok(())
2783 });
2784 }
2785
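    /// Replaces the summary with user-provided text and marks it as done.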
2786 pub(crate) fn custom_summary(&mut self, custom_summary: String, cx: &mut ModelContext<Self>) {
2787 let timestamp = self.next_timestamp();
2788 let summary = self.summary.get_or_insert(ContextSummary::default());
2789 summary.timestamp = timestamp;
2790 summary.done = true;
2791 summary.text = custom_summary;
2792 cx.emit(ContextEvent::SummaryChanged);
2793 }
2794}
2795
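/// Pairs the vector-clock versions of a context and its buffer, convertible to
/// and from the protobuf representation.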
2796#[derive(Debug, Default)]
2797pub struct ContextVersion {
2798 context: clock::Global,
2799 buffer: clock::Global,
2800}
2801
2802impl ContextVersion {
2803 pub fn from_proto(proto: &proto::ContextVersion) -> Self {
2804 Self {
2805 context: language::proto::deserialize_version(&proto.context_version),
2806 buffer: language::proto::deserialize_version(&proto.buffer_version),
2807 }
2808 }
2809
2810 pub fn to_proto(&self, context_id: ContextId) -> proto::ContextVersion {
2811 proto::ContextVersion {
2812 context_id: context_id.to_proto(),
2813 context_version: language::proto::serialize_version(&self.context),
2814 buffer_version: language::proto::serialize_version(&self.buffer),
2815 }
2816 }
2817}
2818
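/// A slash command invocation found in the buffer, along with its arguments
/// and the status of producing its output.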
2819#[derive(Debug, Clone)]
2820pub struct PendingSlashCommand {
2821 pub name: String,
2822 pub arguments: SmallVec<[String; 3]>,
2823 pub status: PendingSlashCommandStatus,
2824 pub source_range: Range<language::Anchor>,
2825}
2826
2827#[derive(Debug, Clone)]
2828pub enum PendingSlashCommandStatus {
2829 Idle,
2830 Running { _task: Shared<Task<()>> },
2831 Error(String),
2832}
2833
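/// Feature flag ("assistant-tool-use") gating whether tools are attached to
/// completion requests.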
2834pub(crate) struct ToolUseFeatureFlag;
2835
2836impl FeatureFlag for ToolUseFeatureFlag {
2837 const NAME: &'static str = "assistant-tool-use";
2838
2839 fn enabled_for_staff() -> bool {
2840 false
2841 }
2842}
2843
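/// A tool call requested by the model, identified by its id and recorded with
/// the buffer range where the request was inserted.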
2844#[derive(Debug, Clone)]
2845pub struct PendingToolUse {
2846 pub id: Arc<str>,
2847 pub name: String,
2848 pub input: serde_json::Value,
2849 pub status: PendingToolUseStatus,
2850 pub source_range: Range<language::Anchor>,
2851}
2852
2853#[derive(Debug, Clone)]
2854pub enum PendingToolUseStatus {
2855 Idle,
2856 Running { _task: Shared<Task<()>> },
2857 Error(String),
2858}
2859
2860impl PendingToolUseStatus {
2861 pub fn is_idle(&self) -> bool {
2862 matches!(self, PendingToolUseStatus::Idle)
2863 }
2864}
2865
2866#[derive(Serialize, Deserialize)]
2867pub struct SavedMessage {
2868 pub id: MessageId,
2869 pub start: usize,
2870 pub metadata: MessageMetadata,
2871}
2872
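/// The current on-disk representation of a context (format version 0.4.0).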
2873#[derive(Serialize, Deserialize)]
2874pub struct SavedContext {
2875 pub id: Option<ContextId>,
2876 pub zed: String,
2877 pub version: String,
2878 pub text: String,
2879 pub messages: Vec<SavedMessage>,
2880 pub summary: String,
2881 pub slash_command_output_sections:
2882 Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
2883}
2884
2885impl SavedContext {
2886 pub const VERSION: &'static str = "0.4.0";
2887
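    /// Deserializes a saved context from JSON, transparently upgrading older
    /// on-disk formats (0.1.0, 0.2.0, and 0.3.0) to the current version.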
2888 pub fn from_json(json: &str) -> Result<Self> {
2889 let saved_context_json = serde_json::from_str::<serde_json::Value>(json)?;
2890 match saved_context_json
2891 .get("version")
2892 .ok_or_else(|| anyhow!("version not found"))?
2893 {
2894 serde_json::Value::String(version) => match version.as_str() {
2895 SavedContext::VERSION => {
2896 Ok(serde_json::from_value::<SavedContext>(saved_context_json)?)
2897 }
2898 SavedContextV0_3_0::VERSION => {
2899 let saved_context =
2900 serde_json::from_value::<SavedContextV0_3_0>(saved_context_json)?;
2901 Ok(saved_context.upgrade())
2902 }
2903 SavedContextV0_2_0::VERSION => {
2904 let saved_context =
2905 serde_json::from_value::<SavedContextV0_2_0>(saved_context_json)?;
2906 Ok(saved_context.upgrade())
2907 }
2908 SavedContextV0_1_0::VERSION => {
2909 let saved_context =
2910 serde_json::from_value::<SavedContextV0_1_0>(saved_context_json)?;
2911 Ok(saved_context.upgrade())
2912 }
2913 _ => Err(anyhow!("unrecognized saved context version: {}", version)),
2914 },
2915 _ => Err(anyhow!("version not found on saved context")),
2916 }
2917 }
2918
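    /// Converts the saved context into the context operations needed to
    /// reconstruct its messages, slash command sections, and summary on top of
    /// the given buffer.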
2919 fn into_ops(
2920 self,
2921 buffer: &Model<Buffer>,
2922 cx: &mut ModelContext<Context>,
2923 ) -> Vec<ContextOperation> {
2924 let mut operations = Vec::new();
2925 let mut version = clock::Global::new();
2926 let mut next_timestamp = clock::Lamport::new(ReplicaId::default());
2927
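        // A message with the default Lamport timestamp is the context's
        // preexisting first message, so its metadata is applied below as an
        // `UpdateMessage` rather than inserted as a new message.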
2928 let mut first_message_metadata = None;
2929 for message in self.messages {
2930 if message.id == MessageId(clock::Lamport::default()) {
2931 first_message_metadata = Some(message.metadata);
2932 } else {
2933 operations.push(ContextOperation::InsertMessage {
2934 anchor: MessageAnchor {
2935 id: message.id,
2936 start: buffer.read(cx).anchor_before(message.start),
2937 },
2938 metadata: MessageMetadata {
2939 role: message.metadata.role,
2940 status: message.metadata.status,
2941 timestamp: message.metadata.timestamp,
2942 cache: None,
2943 },
2944 version: version.clone(),
2945 });
2946 version.observe(message.id.0);
2947 next_timestamp.observe(message.id.0);
2948 }
2949 }
2950
2951 if let Some(metadata) = first_message_metadata {
2952 let timestamp = next_timestamp.tick();
2953 operations.push(ContextOperation::UpdateMessage {
2954 message_id: MessageId(clock::Lamport::default()),
2955 metadata: MessageMetadata {
2956 role: metadata.role,
2957 status: metadata.status,
2958 timestamp,
2959 cache: None,
2960 },
2961 version: version.clone(),
2962 });
2963 version.observe(timestamp);
2964 }
2965
2966 let timestamp = next_timestamp.tick();
2967 operations.push(ContextOperation::SlashCommandFinished {
2968 id: SlashCommandId(timestamp),
2969 output_range: language::Anchor::MIN..language::Anchor::MAX,
2970 sections: self
2971 .slash_command_output_sections
2972 .into_iter()
2973 .map(|section| {
2974 let buffer = buffer.read(cx);
2975 SlashCommandOutputSection {
2976 range: buffer.anchor_after(section.range.start)
2977 ..buffer.anchor_before(section.range.end),
2978 icon: section.icon,
2979 label: section.label,
2980 }
2981 })
2982 .collect(),
2983 version: version.clone(),
2984 });
2985 version.observe(timestamp);
2986
2987 let timestamp = next_timestamp.tick();
2988 operations.push(ContextOperation::UpdateSummary {
2989 summary: ContextSummary {
2990 text: self.summary,
2991 done: true,
2992 timestamp,
2993 },
2994 version: version.clone(),
2995 });
2996 version.observe(timestamp);
2997
2998 operations
2999 }
3000}
3001
3002#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
3003struct SavedMessageIdPreV0_4_0(usize);
3004
3005#[derive(Serialize, Deserialize)]
3006struct SavedMessagePreV0_4_0 {
3007 id: SavedMessageIdPreV0_4_0,
3008 start: usize,
3009}
3010
3011#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
3012struct SavedMessageMetadataPreV0_4_0 {
3013 role: Role,
3014 status: MessageStatus,
3015}
3016
3017#[derive(Serialize, Deserialize)]
3018struct SavedContextV0_3_0 {
3019 id: Option<ContextId>,
3020 zed: String,
3021 version: String,
3022 text: String,
3023 messages: Vec<SavedMessagePreV0_4_0>,
3024 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3025 summary: String,
3026 slash_command_output_sections: Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
3027}
3028
3029impl SavedContextV0_3_0 {
3030 const VERSION: &'static str = "0.3.0";
3031
3032 fn upgrade(self) -> SavedContext {
3033 SavedContext {
3034 id: self.id,
3035 zed: self.zed,
3036 version: SavedContext::VERSION.into(),
3037 text: self.text,
3038 messages: self
3039 .messages
3040 .into_iter()
3041 .filter_map(|message| {
3042 let metadata = self.message_metadata.get(&message.id)?;
3043 let timestamp = clock::Lamport {
3044 replica_id: ReplicaId::default(),
3045 value: message.id.0 as u32,
3046 };
3047 Some(SavedMessage {
3048 id: MessageId(timestamp),
3049 start: message.start,
3050 metadata: MessageMetadata {
3051 role: metadata.role,
3052 status: metadata.status.clone(),
3053 timestamp,
3054 cache: None,
3055 },
3056 })
3057 })
3058 .collect(),
3059 summary: self.summary,
3060 slash_command_output_sections: self.slash_command_output_sections,
3061 }
3062 }
3063}
3064
3065#[derive(Serialize, Deserialize)]
3066struct SavedContextV0_2_0 {
3067 id: Option<ContextId>,
3068 zed: String,
3069 version: String,
3070 text: String,
3071 messages: Vec<SavedMessagePreV0_4_0>,
3072 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3073 summary: String,
3074}
3075
3076impl SavedContextV0_2_0 {
3077 const VERSION: &'static str = "0.2.0";
3078
3079 fn upgrade(self) -> SavedContext {
3080 SavedContextV0_3_0 {
3081 id: self.id,
3082 zed: self.zed,
3083 version: SavedContextV0_3_0::VERSION.to_string(),
3084 text: self.text,
3085 messages: self.messages,
3086 message_metadata: self.message_metadata,
3087 summary: self.summary,
3088 slash_command_output_sections: Vec::new(),
3089 }
3090 .upgrade()
3091 }
3092}
3093
3094#[derive(Serialize, Deserialize)]
3095struct SavedContextV0_1_0 {
3096 id: Option<ContextId>,
3097 zed: String,
3098 version: String,
3099 text: String,
3100 messages: Vec<SavedMessagePreV0_4_0>,
3101 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3102 summary: String,
3103 api_url: Option<String>,
3104 model: OpenAiModel,
3105}
3106
3107impl SavedContextV0_1_0 {
3108 const VERSION: &'static str = "0.1.0";
3109
3110 fn upgrade(self) -> SavedContext {
3111 SavedContextV0_2_0 {
3112 id: self.id,
3113 zed: self.zed,
3114 version: SavedContextV0_2_0::VERSION.to_string(),
3115 text: self.text,
3116 messages: self.messages,
3117 message_metadata: self.message_metadata,
3118 summary: self.summary,
3119 }
3120 .upgrade()
3121 }
3122}
3123
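/// The title, file path, and modification time of a context saved on disk.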
3124#[derive(Clone)]
3125pub struct SavedContextMetadata {
3126 pub title: String,
3127 pub path: PathBuf,
3128 pub mtime: chrono::DateTime<chrono::Local>,
3129}