1#[cfg(test)]
2mod context_tests;
3
4use crate::{
5 prompts::PromptBuilder, slash_command::SlashCommandLine, MessageId, MessageStatus,
6 WorkflowStep, WorkflowStepEdit, WorkflowStepResolution, WorkflowSuggestionGroup,
7};
8use anyhow::{anyhow, Context as _, Result};
9use assistant_slash_command::{
10 SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry,
11};
12use assistant_tool::ToolRegistry;
13use client::{self, proto, telemetry::Telemetry};
14use clock::ReplicaId;
15use collections::{HashMap, HashSet};
16use feature_flags::{FeatureFlag, FeatureFlagAppExt};
17use fs::{Fs, RemoveOptions};
18use futures::{
19 future::{self, Shared},
20 FutureExt, StreamExt,
21};
22use gpui::{
23 AppContext, AsyncAppContext, Context as _, EventEmitter, Image, Model, ModelContext,
24 RenderImage, SharedString, Subscription, Task,
25};
26
27use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset};
28use language_model::{
29 LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionEvent,
30 LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
31 LanguageModelRequestTool, LanguageModelToolResult, LanguageModelToolUse, MessageContent, Role,
32 StopReason,
33};
34use open_ai::Model as OpenAiModel;
35use paths::contexts_dir;
36use project::Project;
37use serde::{Deserialize, Serialize};
38use smallvec::SmallVec;
39use std::{
40 cmp::{self, max, Ordering},
41 collections::hash_map,
42 fmt::Debug,
43 iter, mem,
44 ops::Range,
45 path::{Path, PathBuf},
46 str::FromStr as _,
47 sync::Arc,
48 time::{Duration, Instant},
49};
50use telemetry_events::AssistantKind;
51use text::BufferSnapshot;
52use util::{post_inc, ResultExt, TryFutureExt};
53use uuid::Uuid;
54
55#[derive(Clone, Eq, PartialEq, Hash, PartialOrd, Ord, Serialize, Deserialize)]
56pub struct ContextId(String);
57
58impl ContextId {
59 pub fn new() -> Self {
60 Self(Uuid::new_v4().to_string())
61 }
62
63 pub fn from_proto(id: String) -> Self {
64 Self(id)
65 }
66
67 pub fn to_proto(&self) -> String {
68 self.0.clone()
69 }
70}
71
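/// A replicated change to a [`Context`] other than a plain buffer edit.
///
/// Every non-buffer variant carries the vector-clock `version` it was created
/// at, so a replica can defer applying it until it has observed everything the
/// operation depends on (see `can_apply_op` and `flush_ops`).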
72#[derive(Clone, Debug)]
73pub enum ContextOperation {
74 InsertMessage {
75 anchor: MessageAnchor,
76 metadata: MessageMetadata,
77 version: clock::Global,
78 },
79 UpdateMessage {
80 message_id: MessageId,
81 metadata: MessageMetadata,
82 version: clock::Global,
83 },
84 UpdateSummary {
85 summary: ContextSummary,
86 version: clock::Global,
87 },
88 SlashCommandFinished {
89 id: SlashCommandId,
90 output_range: Range<language::Anchor>,
91 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
92 version: clock::Global,
93 },
94 BufferOperation(language::Operation),
95}
96
97impl ContextOperation {
98 pub fn from_proto(op: proto::ContextOperation) -> Result<Self> {
99 match op.variant.context("invalid variant")? {
100 proto::context_operation::Variant::InsertMessage(insert) => {
101 let message = insert.message.context("invalid message")?;
102 let id = MessageId(language::proto::deserialize_timestamp(
103 message.id.context("invalid id")?,
104 ));
105 Ok(Self::InsertMessage {
106 anchor: MessageAnchor {
107 id,
108 start: language::proto::deserialize_anchor(
109 message.start.context("invalid anchor")?,
110 )
111 .context("invalid anchor")?,
112 },
113 metadata: MessageMetadata {
114 role: Role::from_proto(message.role),
115 status: MessageStatus::from_proto(
116 message.status.context("invalid status")?,
117 ),
118 timestamp: id.0,
119 cache: None,
120 },
121 version: language::proto::deserialize_version(&insert.version),
122 })
123 }
124 proto::context_operation::Variant::UpdateMessage(update) => Ok(Self::UpdateMessage {
125 message_id: MessageId(language::proto::deserialize_timestamp(
126 update.message_id.context("invalid message id")?,
127 )),
128 metadata: MessageMetadata {
129 role: Role::from_proto(update.role),
130 status: MessageStatus::from_proto(update.status.context("invalid status")?),
131 timestamp: language::proto::deserialize_timestamp(
132 update.timestamp.context("invalid timestamp")?,
133 ),
134 cache: None,
135 },
136 version: language::proto::deserialize_version(&update.version),
137 }),
138 proto::context_operation::Variant::UpdateSummary(update) => Ok(Self::UpdateSummary {
139 summary: ContextSummary {
140 text: update.summary,
141 done: update.done,
142 timestamp: language::proto::deserialize_timestamp(
143 update.timestamp.context("invalid timestamp")?,
144 ),
145 },
146 version: language::proto::deserialize_version(&update.version),
147 }),
148 proto::context_operation::Variant::SlashCommandFinished(finished) => {
149 Ok(Self::SlashCommandFinished {
150 id: SlashCommandId(language::proto::deserialize_timestamp(
151 finished.id.context("invalid id")?,
152 )),
153 output_range: language::proto::deserialize_anchor_range(
154 finished.output_range.context("invalid range")?,
155 )?,
156 sections: finished
157 .sections
158 .into_iter()
159 .map(|section| {
160 Ok(SlashCommandOutputSection {
161 range: language::proto::deserialize_anchor_range(
162 section.range.context("invalid range")?,
163 )?,
164 icon: section.icon_name.parse()?,
165 label: section.label.into(),
166 })
167 })
168 .collect::<Result<Vec<_>>>()?,
169 version: language::proto::deserialize_version(&finished.version),
170 })
171 }
172 proto::context_operation::Variant::BufferOperation(op) => Ok(Self::BufferOperation(
173 language::proto::deserialize_operation(
174 op.operation.context("invalid buffer operation")?,
175 )?,
176 )),
177 }
178 }
179
180 pub fn to_proto(&self) -> proto::ContextOperation {
181 match self {
182 Self::InsertMessage {
183 anchor,
184 metadata,
185 version,
186 } => proto::ContextOperation {
187 variant: Some(proto::context_operation::Variant::InsertMessage(
188 proto::context_operation::InsertMessage {
189 message: Some(proto::ContextMessage {
190 id: Some(language::proto::serialize_timestamp(anchor.id.0)),
191 start: Some(language::proto::serialize_anchor(&anchor.start)),
192 role: metadata.role.to_proto() as i32,
193 status: Some(metadata.status.to_proto()),
194 }),
195 version: language::proto::serialize_version(version),
196 },
197 )),
198 },
199 Self::UpdateMessage {
200 message_id,
201 metadata,
202 version,
203 } => proto::ContextOperation {
204 variant: Some(proto::context_operation::Variant::UpdateMessage(
205 proto::context_operation::UpdateMessage {
206 message_id: Some(language::proto::serialize_timestamp(message_id.0)),
207 role: metadata.role.to_proto() as i32,
208 status: Some(metadata.status.to_proto()),
209 timestamp: Some(language::proto::serialize_timestamp(metadata.timestamp)),
210 version: language::proto::serialize_version(version),
211 },
212 )),
213 },
214 Self::UpdateSummary { summary, version } => proto::ContextOperation {
215 variant: Some(proto::context_operation::Variant::UpdateSummary(
216 proto::context_operation::UpdateSummary {
217 summary: summary.text.clone(),
218 done: summary.done,
219 timestamp: Some(language::proto::serialize_timestamp(summary.timestamp)),
220 version: language::proto::serialize_version(version),
221 },
222 )),
223 },
224 Self::SlashCommandFinished {
225 id,
226 output_range,
227 sections,
228 version,
229 } => proto::ContextOperation {
230 variant: Some(proto::context_operation::Variant::SlashCommandFinished(
231 proto::context_operation::SlashCommandFinished {
232 id: Some(language::proto::serialize_timestamp(id.0)),
233 output_range: Some(language::proto::serialize_anchor_range(
234 output_range.clone(),
235 )),
236 sections: sections
237 .iter()
238 .map(|section| {
239 let icon_name: &'static str = section.icon.into();
240 proto::SlashCommandOutputSection {
241 range: Some(language::proto::serialize_anchor_range(
242 section.range.clone(),
243 )),
244 icon_name: icon_name.to_string(),
245 label: section.label.to_string(),
246 }
247 })
248 .collect(),
249 version: language::proto::serialize_version(version),
250 },
251 )),
252 },
253 Self::BufferOperation(operation) => proto::ContextOperation {
254 variant: Some(proto::context_operation::Variant::BufferOperation(
255 proto::context_operation::BufferOperation {
256 operation: Some(language::proto::serialize_operation(operation)),
257 },
258 )),
259 },
260 }
261 }
262
263 fn timestamp(&self) -> clock::Lamport {
264 match self {
265 Self::InsertMessage { anchor, .. } => anchor.id.0,
266 Self::UpdateMessage { metadata, .. } => metadata.timestamp,
267 Self::UpdateSummary { summary, .. } => summary.timestamp,
268 Self::SlashCommandFinished { id, .. } => id.0,
269 Self::BufferOperation(_) => {
270 panic!("reading the timestamp of a buffer operation is not supported")
271 }
272 }
273 }
274
275 /// Returns the current version of the context operation.
276 pub fn version(&self) -> &clock::Global {
277 match self {
278 Self::InsertMessage { version, .. }
279 | Self::UpdateMessage { version, .. }
280 | Self::UpdateSummary { version, .. }
281 | Self::SlashCommandFinished { version, .. } => version,
282 Self::BufferOperation(_) => {
283 panic!("reading the version of a buffer operation is not supported")
284 }
285 }
286 }
287}
288
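/// Events emitted by a [`Context`] to its observers (for example the assistant
/// panel UI) via `EventEmitter<ContextEvent>`.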
289#[derive(Debug, Clone)]
290pub enum ContextEvent {
291 ShowAssistError(SharedString),
292 MessagesEdited,
293 SummaryChanged,
294 StreamedCompletion,
295 WorkflowStepsUpdated {
296 removed: Vec<Range<language::Anchor>>,
297 updated: Vec<Range<language::Anchor>>,
298 },
299 PendingSlashCommandsUpdated {
300 removed: Vec<Range<language::Anchor>>,
301 updated: Vec<PendingSlashCommand>,
302 },
303 SlashCommandFinished {
304 output_range: Range<language::Anchor>,
305 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
306 run_commands_in_output: bool,
307 expand_result: bool,
308 },
309 UsePendingTools,
310 ToolFinished {
311 tool_use_id: Arc<str>,
312 output_range: Range<language::Anchor>,
313 },
314 Operation(ContextOperation),
315}
316
317#[derive(Clone, Default, Debug)]
318pub struct ContextSummary {
319 pub text: String,
320 done: bool,
321 timestamp: clock::Lamport,
322}
323
324#[derive(Clone, Debug, Eq, PartialEq)]
325pub struct MessageAnchor {
326 pub id: MessageId,
327 pub start: language::Anchor,
328}
329
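/// Prompt-caching state for a message chosen as a cache anchor: `Pending` until
/// a completion (or the speculative cache-warming request) comes back, then
/// `Cached`.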
330#[derive(Clone, Debug, Eq, PartialEq)]
331pub enum CacheStatus {
332 Pending,
333 Cached,
334}
335
336#[derive(Clone, Debug, Eq, PartialEq)]
337pub struct MessageCacheMetadata {
338 pub is_anchor: bool,
339 pub is_final_anchor: bool,
340 pub status: CacheStatus,
341 pub cached_at: clock::Global,
342}
343
344#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
345pub struct MessageMetadata {
346 pub role: Role,
347 pub status: MessageStatus,
348 pub(crate) timestamp: clock::Lamport,
349 #[serde(skip)]
350 pub cache: Option<MessageCacheMetadata>,
351}
352
353impl From<&Message> for MessageMetadata {
354 fn from(message: &Message) -> Self {
355 Self {
356 role: message.role,
357 status: message.status.clone(),
358 timestamp: message.id.0,
359 cache: message.cache.clone(),
360 }
361 }
362}
363
364impl MessageMetadata {
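    /// Returns whether this message's cache entry is still usable: true only if
    /// the buffer has no edits since `cached_at` inside the message's range.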
365 pub fn is_cache_valid(&self, buffer: &BufferSnapshot, range: &Range<usize>) -> bool {
366 let result = match &self.cache {
367 Some(MessageCacheMetadata { cached_at, .. }) => !buffer.has_edits_since_in_range(
368 &cached_at,
369 Range {
370 start: buffer.anchor_at(range.start, Bias::Right),
371 end: buffer.anchor_at(range.end, Bias::Left),
372 },
373 ),
374 _ => false,
375 };
376 result
377 }
378}
379
380#[derive(Clone, Debug)]
381pub struct Message {
382 pub offset_range: Range<usize>,
383 pub index_range: Range<usize>,
384 pub anchor_range: Range<language::Anchor>,
385 pub id: MessageId,
386 pub role: Role,
387 pub status: MessageStatus,
388 pub cache: Option<MessageCacheMetadata>,
389}
390
391#[derive(Debug, Clone)]
392pub enum Content {
393 Image {
394 anchor: language::Anchor,
395 image_id: u64,
396 render_image: Arc<RenderImage>,
397 image: Shared<Task<Option<LanguageModelImage>>>,
398 },
399 ToolUse {
400 range: Range<language::Anchor>,
401 tool_use: LanguageModelToolUse,
402 },
403 ToolResult {
404 range: Range<language::Anchor>,
405 tool_use_id: Arc<str>,
406 },
407}
408
409impl Content {
410 fn range(&self) -> Range<language::Anchor> {
411 match self {
412 Self::Image { anchor, .. } => *anchor..*anchor,
413 Self::ToolUse { range, .. } | Self::ToolResult { range, .. } => range.clone(),
414 }
415 }
416
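    /// Orders two content entries by buffer position, treating any overlap as
    /// `Ordering::Equal`.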
417 fn cmp(&self, other: &Self, buffer: &BufferSnapshot) -> Ordering {
418 let self_range = self.range();
419 let other_range = other.range();
420 if self_range.end.cmp(&other_range.start, buffer).is_lt() {
421 Ordering::Less
422 } else if self_range.start.cmp(&other_range.end, buffer).is_gt() {
423 Ordering::Greater
424 } else {
425 Ordering::Equal
426 }
427 }
428}
429
430struct PendingCompletion {
431 id: usize,
432 assistant_message_id: MessageId,
433 _task: Task<()>,
434}
435
436#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
437pub struct SlashCommandId(clock::Lamport);
438
439#[derive(Clone, Debug)]
440pub struct XmlTag {
441 pub kind: XmlTagKind,
442 pub range: Range<text::Anchor>,
443 pub is_open_tag: bool,
444}
445
446#[derive(Copy, Clone, Debug, strum::EnumString, PartialEq, Eq, strum::AsRefStr)]
447#[strum(serialize_all = "snake_case")]
448pub enum XmlTagKind {
449 Step,
450 Edit,
451 Path,
452 Search,
453 Within,
454 Operation,
455 Description,
456}
457
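/// A single assistant conversation: a Markdown buffer annotated with message
/// anchors and metadata, plus the state needed to run slash commands, tools,
/// and workflow steps, all replicated to collaborators as [`ContextOperation`]s.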
458pub struct Context {
459 id: ContextId,
460 timestamp: clock::Lamport,
461 version: clock::Global,
462 pending_ops: Vec<ContextOperation>,
463 operations: Vec<ContextOperation>,
464 buffer: Model<Buffer>,
465 pending_slash_commands: Vec<PendingSlashCommand>,
466 edits_since_last_parse: language::Subscription,
467 finished_slash_commands: HashSet<SlashCommandId>,
468 slash_command_output_sections: Vec<SlashCommandOutputSection<language::Anchor>>,
469 pending_tool_uses_by_id: HashMap<Arc<str>, PendingToolUse>,
470 message_anchors: Vec<MessageAnchor>,
471 images: HashMap<u64, (Arc<RenderImage>, Shared<Task<Option<LanguageModelImage>>>)>,
472 contents: Vec<Content>,
473 messages_metadata: HashMap<MessageId, MessageMetadata>,
474 summary: Option<ContextSummary>,
475 pending_summary: Task<Option<()>>,
476 completion_count: usize,
477 pending_completions: Vec<PendingCompletion>,
478 token_count: Option<usize>,
479 pending_token_count: Task<Option<()>>,
480 pending_save: Task<Result<()>>,
481 pending_cache_warming_task: Task<Option<()>>,
482 path: Option<PathBuf>,
483 _subscriptions: Vec<Subscription>,
484 telemetry: Option<Arc<Telemetry>>,
485 language_registry: Arc<LanguageRegistry>,
486 workflow_steps: Vec<WorkflowStep>,
487 xml_tags: Vec<XmlTag>,
488 project: Option<Model<Project>>,
489 prompt_builder: Arc<PromptBuilder>,
490}
491
492trait ContextAnnotation {
493 fn range(&self) -> &Range<language::Anchor>;
494}
495
496impl ContextAnnotation for PendingSlashCommand {
497 fn range(&self) -> &Range<language::Anchor> {
498 &self.source_range
499 }
500}
501
502impl ContextAnnotation for WorkflowStep {
503 fn range(&self) -> &Range<language::Anchor> {
504 &self.range
505 }
506}
507
508impl ContextAnnotation for XmlTag {
509 fn range(&self) -> &Range<language::Anchor> {
510 &self.range
511 }
512}
513
514impl EventEmitter<ContextEvent> for Context {}
515
516impl Context {
517 pub fn local(
518 language_registry: Arc<LanguageRegistry>,
519 project: Option<Model<Project>>,
520 telemetry: Option<Arc<Telemetry>>,
521 prompt_builder: Arc<PromptBuilder>,
522 cx: &mut ModelContext<Self>,
523 ) -> Self {
524 Self::new(
525 ContextId::new(),
526 ReplicaId::default(),
527 language::Capability::ReadWrite,
528 language_registry,
529 prompt_builder,
530 project,
531 telemetry,
532 cx,
533 )
534 }
535
536 #[allow(clippy::too_many_arguments)]
537 pub fn new(
538 id: ContextId,
539 replica_id: ReplicaId,
540 capability: language::Capability,
541 language_registry: Arc<LanguageRegistry>,
542 prompt_builder: Arc<PromptBuilder>,
543 project: Option<Model<Project>>,
544 telemetry: Option<Arc<Telemetry>>,
545 cx: &mut ModelContext<Self>,
546 ) -> Self {
547 let buffer = cx.new_model(|_cx| {
548 let mut buffer = Buffer::remote(
549 language::BufferId::new(1).unwrap(),
550 replica_id,
551 capability,
552 "",
553 );
554 buffer.set_language_registry(language_registry.clone());
555 buffer
556 });
557 let edits_since_last_slash_command_parse =
558 buffer.update(cx, |buffer, _| buffer.subscribe());
559 let mut this = Self {
560 id,
561 timestamp: clock::Lamport::new(replica_id),
562 version: clock::Global::new(),
563 pending_ops: Vec::new(),
564 operations: Vec::new(),
565 message_anchors: Default::default(),
566 contents: Default::default(),
567 images: Default::default(),
568 messages_metadata: Default::default(),
569 pending_slash_commands: Vec::new(),
570 finished_slash_commands: HashSet::default(),
571 pending_tool_uses_by_id: HashMap::default(),
572 slash_command_output_sections: Vec::new(),
573 edits_since_last_parse: edits_since_last_slash_command_parse,
574 summary: None,
575 pending_summary: Task::ready(None),
576 completion_count: Default::default(),
577 pending_completions: Default::default(),
578 token_count: None,
579 pending_token_count: Task::ready(None),
580 pending_cache_warming_task: Task::ready(None),
581 _subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)],
582 pending_save: Task::ready(Ok(())),
583 path: None,
584 buffer,
585 telemetry,
586 project,
587 language_registry,
588 workflow_steps: Vec::new(),
589 xml_tags: Vec::new(),
590 prompt_builder,
591 };
592
593 let first_message_id = MessageId(clock::Lamport {
594 replica_id: 0,
595 value: 0,
596 });
597 let message = MessageAnchor {
598 id: first_message_id,
599 start: language::Anchor::MIN,
600 };
601 this.messages_metadata.insert(
602 first_message_id,
603 MessageMetadata {
604 role: Role::User,
605 status: MessageStatus::Done,
606 timestamp: first_message_id.0,
607 cache: None,
608 },
609 );
610 this.message_anchors.push(message);
611
612 this.set_language(cx);
613 this.count_remaining_tokens(cx);
614 this
615 }
616
617 pub(crate) fn serialize(&self, cx: &AppContext) -> SavedContext {
618 let buffer = self.buffer.read(cx);
619 SavedContext {
620 id: Some(self.id.clone()),
621 zed: "context".into(),
622 version: SavedContext::VERSION.into(),
623 text: buffer.text(),
624 messages: self
625 .messages(cx)
626 .map(|message| SavedMessage {
627 id: message.id,
628 start: message.offset_range.start,
629 metadata: self.messages_metadata[&message.id].clone(),
630 })
631 .collect(),
632 summary: self
633 .summary
634 .as_ref()
635 .map(|summary| summary.text.clone())
636 .unwrap_or_default(),
637 slash_command_output_sections: self
638 .slash_command_output_sections
639 .iter()
640 .filter_map(|section| {
641 let range = section.range.to_offset(buffer);
642 if section.range.start.is_valid(buffer) && !range.is_empty() {
643 Some(assistant_slash_command::SlashCommandOutputSection {
644 range,
645 icon: section.icon,
646 label: section.label.clone(),
647 })
648 } else {
649 None
650 }
651 })
652 .collect(),
653 }
654 }
655
656 #[allow(clippy::too_many_arguments)]
657 pub fn deserialize(
658 saved_context: SavedContext,
659 path: PathBuf,
660 language_registry: Arc<LanguageRegistry>,
661 prompt_builder: Arc<PromptBuilder>,
662 project: Option<Model<Project>>,
663 telemetry: Option<Arc<Telemetry>>,
664 cx: &mut ModelContext<Self>,
665 ) -> Self {
666 let id = saved_context.id.clone().unwrap_or_else(ContextId::new);
667 let mut this = Self::new(
668 id,
669 ReplicaId::default(),
670 language::Capability::ReadWrite,
671 language_registry,
672 prompt_builder,
673 project,
674 telemetry,
675 cx,
676 );
677 this.path = Some(path);
678 this.buffer.update(cx, |buffer, cx| {
679 buffer.set_text(saved_context.text.as_str(), cx)
680 });
681 let operations = saved_context.into_ops(&this.buffer, cx);
682 this.apply_ops(operations, cx).unwrap();
683 this
684 }
685
686 pub fn id(&self) -> &ContextId {
687 &self.id
688 }
689
690 pub fn replica_id(&self) -> ReplicaId {
691 self.timestamp.replica_id
692 }
693
694 pub fn version(&self, cx: &AppContext) -> ContextVersion {
695 ContextVersion {
696 context: self.version.clone(),
697 buffer: self.buffer.read(cx).version(),
698 }
699 }
700
701 pub fn set_capability(
702 &mut self,
703 capability: language::Capability,
704 cx: &mut ModelContext<Self>,
705 ) {
706 self.buffer
707 .update(cx, |buffer, cx| buffer.set_capability(capability, cx));
708 }
709
710 fn next_timestamp(&mut self) -> clock::Lamport {
711 let timestamp = self.timestamp.tick();
712 self.version.observe(timestamp);
713 timestamp
714 }
715
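    /// Serializes the buffer and context operations that `since` has not yet
    /// observed (plus any still-pending context operations), sorted by
    /// timestamp, for sending to a remote peer.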
716 pub fn serialize_ops(
717 &self,
718 since: &ContextVersion,
719 cx: &AppContext,
720 ) -> Task<Vec<proto::ContextOperation>> {
721 let buffer_ops = self
722 .buffer
723 .read(cx)
724 .serialize_ops(Some(since.buffer.clone()), cx);
725
726 let mut context_ops = self
727 .operations
728 .iter()
729 .filter(|op| !since.context.observed(op.timestamp()))
730 .cloned()
731 .collect::<Vec<_>>();
732 context_ops.extend(self.pending_ops.iter().cloned());
733
734 cx.background_executor().spawn(async move {
735 let buffer_ops = buffer_ops.await;
736 context_ops.sort_unstable_by_key(|op| op.timestamp());
737 buffer_ops
738 .into_iter()
739 .map(|op| proto::ContextOperation {
740 variant: Some(proto::context_operation::Variant::BufferOperation(
741 proto::context_operation::BufferOperation {
742 operation: Some(op),
743 },
744 )),
745 })
746 .chain(context_ops.into_iter().map(|op| op.to_proto()))
747 .collect()
748 })
749 }
750
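    /// Ingests remote operations: buffer operations are forwarded to the
    /// underlying buffer immediately, while the rest are queued in
    /// `pending_ops` until `flush_ops` can apply them.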
751 pub fn apply_ops(
752 &mut self,
753 ops: impl IntoIterator<Item = ContextOperation>,
754 cx: &mut ModelContext<Self>,
755 ) -> Result<()> {
756 let mut buffer_ops = Vec::new();
757 for op in ops {
758 match op {
759 ContextOperation::BufferOperation(buffer_op) => buffer_ops.push(buffer_op),
760 op @ _ => self.pending_ops.push(op),
761 }
762 }
763 self.buffer
764 .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx))?;
765 self.flush_ops(cx);
766
767 Ok(())
768 }
769
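    /// Applies every pending operation whose dependencies have been observed,
    /// in Lamport-timestamp order, re-queueing the rest and emitting
    /// `MessagesEdited` / `SummaryChanged` as appropriate.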
770 fn flush_ops(&mut self, cx: &mut ModelContext<Context>) {
771 let mut changed_messages = HashSet::default();
772 let mut summary_changed = false;
773
774 self.pending_ops.sort_unstable_by_key(|op| op.timestamp());
775 for op in mem::take(&mut self.pending_ops) {
776 if !self.can_apply_op(&op, cx) {
777 self.pending_ops.push(op);
778 continue;
779 }
780
781 let timestamp = op.timestamp();
782 match op.clone() {
783 ContextOperation::InsertMessage {
784 anchor, metadata, ..
785 } => {
786 if self.messages_metadata.contains_key(&anchor.id) {
787 // We already applied this operation.
788 } else {
789 changed_messages.insert(anchor.id);
790 self.insert_message(anchor, metadata, cx);
791 }
792 }
793 ContextOperation::UpdateMessage {
794 message_id,
795 metadata: new_metadata,
796 ..
797 } => {
798 let metadata = self.messages_metadata.get_mut(&message_id).unwrap();
799 if new_metadata.timestamp > metadata.timestamp {
800 *metadata = new_metadata;
801 changed_messages.insert(message_id);
802 }
803 }
804 ContextOperation::UpdateSummary {
805 summary: new_summary,
806 ..
807 } => {
808 if self
809 .summary
810 .as_ref()
811 .map_or(true, |summary| new_summary.timestamp > summary.timestamp)
812 {
813 self.summary = Some(new_summary);
814 summary_changed = true;
815 }
816 }
817 ContextOperation::SlashCommandFinished {
818 id,
819 output_range,
820 sections,
821 ..
822 } => {
823 if self.finished_slash_commands.insert(id) {
824 let buffer = self.buffer.read(cx);
825 self.slash_command_output_sections
826 .extend(sections.iter().cloned());
827 self.slash_command_output_sections
828 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
829 cx.emit(ContextEvent::SlashCommandFinished {
830 output_range,
831 sections,
832 expand_result: false,
833 run_commands_in_output: false,
834 });
835 }
836 }
837 ContextOperation::BufferOperation(_) => unreachable!(),
838 }
839
840 self.version.observe(timestamp);
841 self.timestamp.observe(timestamp);
842 self.operations.push(op);
843 }
844
845 if !changed_messages.is_empty() {
846 self.message_roles_updated(changed_messages, cx);
847 cx.emit(ContextEvent::MessagesEdited);
848 cx.notify();
849 }
850
851 if summary_changed {
852 cx.emit(ContextEvent::SummaryChanged);
853 cx.notify();
854 }
855 }
856
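    /// Returns whether `op` can be applied yet: its version must be fully
    /// observed, and any buffer anchors or message ids it references must
    /// already exist locally.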
857 fn can_apply_op(&self, op: &ContextOperation, cx: &AppContext) -> bool {
858 if !self.version.observed_all(op.version()) {
859 return false;
860 }
861
862 match op {
863 ContextOperation::InsertMessage { anchor, .. } => self
864 .buffer
865 .read(cx)
866 .version
867 .observed(anchor.start.timestamp),
868 ContextOperation::UpdateMessage { message_id, .. } => {
869 self.messages_metadata.contains_key(message_id)
870 }
871 ContextOperation::UpdateSummary { .. } => true,
872 ContextOperation::SlashCommandFinished {
873 output_range,
874 sections,
875 ..
876 } => {
877 let version = &self.buffer.read(cx).version;
878 sections
879 .iter()
                    .map(|section| &section.range)
881 .chain([output_range])
882 .all(|range| {
883 let observed_start = range.start == language::Anchor::MIN
884 || range.start == language::Anchor::MAX
885 || version.observed(range.start.timestamp);
886 let observed_end = range.end == language::Anchor::MIN
887 || range.end == language::Anchor::MAX
888 || version.observed(range.end.timestamp);
889 observed_start && observed_end
890 })
891 }
892 ContextOperation::BufferOperation(_) => {
893 panic!("buffer operations should always be applied")
894 }
895 }
896 }
897
898 fn push_op(&mut self, op: ContextOperation, cx: &mut ModelContext<Self>) {
899 self.operations.push(op.clone());
900 cx.emit(ContextEvent::Operation(op));
901 }
902
903 pub fn buffer(&self) -> &Model<Buffer> {
904 &self.buffer
905 }
906
907 pub fn language_registry(&self) -> Arc<LanguageRegistry> {
908 self.language_registry.clone()
909 }
910
911 pub fn project(&self) -> Option<Model<Project>> {
912 self.project.clone()
913 }
914
915 pub fn prompt_builder(&self) -> Arc<PromptBuilder> {
916 self.prompt_builder.clone()
917 }
918
919 pub fn path(&self) -> Option<&Path> {
920 self.path.as_deref()
921 }
922
923 pub fn summary(&self) -> Option<&ContextSummary> {
924 self.summary.as_ref()
925 }
926
927 pub(crate) fn workflow_step_containing(
928 &self,
929 offset: usize,
930 cx: &AppContext,
931 ) -> Option<&WorkflowStep> {
932 let buffer = self.buffer.read(cx);
933 let index = self
934 .workflow_steps
935 .binary_search_by(|step| {
936 let step_range = step.range.to_offset(&buffer);
937 if offset < step_range.start {
938 Ordering::Greater
939 } else if offset > step_range.end {
940 Ordering::Less
941 } else {
942 Ordering::Equal
943 }
944 })
945 .ok()?;
946 Some(&self.workflow_steps[index])
947 }
948
949 pub fn workflow_step_ranges(&self) -> impl Iterator<Item = Range<language::Anchor>> + '_ {
950 self.workflow_steps.iter().map(|step| step.range.clone())
951 }
952
953 pub(crate) fn workflow_step_for_range(
954 &self,
955 range: &Range<language::Anchor>,
956 cx: &AppContext,
957 ) -> Option<&WorkflowStep> {
958 let buffer = self.buffer.read(cx);
959 let index = self.workflow_step_index_for_range(range, buffer).ok()?;
960 Some(&self.workflow_steps[index])
961 }
962
963 fn workflow_step_index_for_range(
964 &self,
965 tagged_range: &Range<text::Anchor>,
966 buffer: &text::BufferSnapshot,
967 ) -> Result<usize, usize> {
968 self.workflow_steps
969 .binary_search_by(|probe| probe.range.cmp(&tagged_range, buffer))
970 }
971
972 pub fn pending_slash_commands(&self) -> &[PendingSlashCommand] {
973 &self.pending_slash_commands
974 }
975
976 pub fn slash_command_output_sections(&self) -> &[SlashCommandOutputSection<language::Anchor>] {
977 &self.slash_command_output_sections
978 }
979
980 pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> {
981 self.pending_tool_uses_by_id.values().collect()
982 }
983
984 pub fn get_tool_use_by_id(&self, id: &Arc<str>) -> Option<&PendingToolUse> {
985 self.pending_tool_uses_by_id.get(id)
986 }
987
988 fn set_language(&mut self, cx: &mut ModelContext<Self>) {
989 let markdown = self.language_registry.language_for_name("Markdown");
990 cx.spawn(|this, mut cx| async move {
991 let markdown = markdown.await?;
992 this.update(&mut cx, |this, cx| {
993 this.buffer
994 .update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx));
995 })
996 })
997 .detach_and_log_err(cx);
998 }
999
1000 fn handle_buffer_event(
1001 &mut self,
1002 _: Model<Buffer>,
1003 event: &language::Event,
1004 cx: &mut ModelContext<Self>,
1005 ) {
1006 match event {
1007 language::Event::Operation(operation) => cx.emit(ContextEvent::Operation(
1008 ContextOperation::BufferOperation(operation.clone()),
1009 )),
1010 language::Event::Edited => {
1011 self.count_remaining_tokens(cx);
1012 self.reparse(cx);
1013 // Use `inclusive = true` to invalidate a step when an edit occurs
1014 // at the start/end of a parsed step.
1015 cx.emit(ContextEvent::MessagesEdited);
1016 }
1017 _ => {}
1018 }
1019 }
1020
1021 pub(crate) fn token_count(&self) -> Option<usize> {
1022 self.token_count
1023 }
1024
1025 pub(crate) fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
1026 let request = self.to_completion_request(cx);
1027 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
1028 return;
1029 };
1030 self.pending_token_count = cx.spawn(|this, mut cx| {
1031 async move {
1032 cx.background_executor()
1033 .timer(Duration::from_millis(200))
1034 .await;
1035
1036 let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
1037 this.update(&mut cx, |this, cx| {
1038 this.token_count = Some(token_count);
1039 this.start_cache_warming(&model, cx);
1040 cx.notify()
1041 })
1042 }
1043 .log_err()
1044 });
1045 }
1046
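    /// Selects which user messages should serve as prompt-cache anchors
    /// (largest messages first, reserving one anchor slot for the inline
    /// assistant), invalidates cache metadata for messages edited since they
    /// were cached, and returns whether a newly placed anchor still needs to
    /// be cached.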
1047 pub fn mark_cache_anchors(
1048 &mut self,
1049 cache_configuration: &Option<LanguageModelCacheConfiguration>,
1050 speculative: bool,
1051 cx: &mut ModelContext<Self>,
1052 ) -> bool {
1053 let cache_configuration =
1054 cache_configuration
1055 .as_ref()
1056 .unwrap_or(&LanguageModelCacheConfiguration {
1057 max_cache_anchors: 0,
1058 should_speculate: false,
1059 min_total_token: 0,
1060 });
1061
1062 let messages: Vec<Message> = self.messages(cx).collect();
1063
1064 let mut sorted_messages = messages.clone();
1065 if speculative {
1066 // Avoid caching the last message if this is a speculative cache fetch as
1067 // it's likely to change.
1068 sorted_messages.pop();
1069 }
1070 sorted_messages.retain(|m| m.role == Role::User);
1071 sorted_messages.sort_by(|a, b| b.offset_range.len().cmp(&a.offset_range.len()));
1072
1073 let cache_anchors = if self.token_count.unwrap_or(0) < cache_configuration.min_total_token {
            // If we haven't hit the minimum threshold to enable caching, don't cache anything.
1075 0
1076 } else {
1077 // Save 1 anchor for the inline assistant to use.
1078 max(cache_configuration.max_cache_anchors, 1) - 1
1079 };
1080 sorted_messages.truncate(cache_anchors);
1081
1082 let anchors: HashSet<MessageId> = sorted_messages
1083 .into_iter()
1084 .map(|message| message.id)
1085 .collect();
1086
1087 let buffer = self.buffer.read(cx).snapshot();
1088 let invalidated_caches: HashSet<MessageId> = messages
1089 .iter()
1090 .scan(false, |encountered_invalid, message| {
1091 let message_id = message.id;
1092 let is_invalid = self
1093 .messages_metadata
1094 .get(&message_id)
1095 .map_or(true, |metadata| {
1096 !metadata.is_cache_valid(&buffer, &message.offset_range)
1097 || *encountered_invalid
1098 });
1099 *encountered_invalid |= is_invalid;
1100 Some(if is_invalid { Some(message_id) } else { None })
1101 })
1102 .flatten()
1103 .collect();
1104
1105 let last_anchor = messages.iter().rev().find_map(|message| {
1106 if anchors.contains(&message.id) {
1107 Some(message.id)
1108 } else {
1109 None
1110 }
1111 });
1112
1113 let mut new_anchor_needs_caching = false;
1114 let current_version = &buffer.version;
1115 // If we have no anchors, mark all messages as not being cached.
1116 let mut hit_last_anchor = last_anchor.is_none();
1117
1118 for message in messages.iter() {
1119 if hit_last_anchor {
1120 self.update_metadata(message.id, cx, |metadata| metadata.cache = None);
1121 continue;
1122 }
1123
1124 if let Some(last_anchor) = last_anchor {
1125 if message.id == last_anchor {
1126 hit_last_anchor = true;
1127 }
1128 }
1129
1130 new_anchor_needs_caching = new_anchor_needs_caching
1131 || (invalidated_caches.contains(&message.id) && anchors.contains(&message.id));
1132
1133 self.update_metadata(message.id, cx, |metadata| {
1134 let cache_status = if invalidated_caches.contains(&message.id) {
1135 CacheStatus::Pending
1136 } else {
1137 metadata
1138 .cache
1139 .as_ref()
1140 .map_or(CacheStatus::Pending, |cm| cm.status.clone())
1141 };
1142 metadata.cache = Some(MessageCacheMetadata {
1143 is_anchor: anchors.contains(&message.id),
1144 is_final_anchor: hit_last_anchor,
1145 status: cache_status,
1146 cached_at: current_version.clone(),
1147 });
1148 });
1149 }
1150 new_anchor_needs_caching
1151 }
1152
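    /// Warms the provider-side prompt cache by sending a cheap throwaway
    /// request ("Respond only with OK, nothing else.") when the model supports
    /// speculative caching, new anchors need caching, and no completion is
    /// already pending.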
1153 fn start_cache_warming(&mut self, model: &Arc<dyn LanguageModel>, cx: &mut ModelContext<Self>) {
1154 let cache_configuration = model.cache_configuration();
1155
1156 if !self.mark_cache_anchors(&cache_configuration, true, cx) {
1157 return;
1158 }
1159 if !self.pending_completions.is_empty() {
1160 return;
1161 }
1162 if let Some(cache_configuration) = cache_configuration {
1163 if !cache_configuration.should_speculate {
1164 return;
1165 }
1166 }
1167
1168 let request = {
1169 let mut req = self.to_completion_request(cx);
1170 // Skip the last message because it's likely to change and
1171 // therefore would be a waste to cache.
1172 req.messages.pop();
1173 req.messages.push(LanguageModelRequestMessage {
1174 role: Role::User,
1175 content: vec!["Respond only with OK, nothing else.".into()],
1176 cache: false,
1177 });
1178 req
1179 };
1180
1181 let model = Arc::clone(model);
1182 self.pending_cache_warming_task = cx.spawn(|this, mut cx| {
1183 async move {
1184 match model.stream_completion(request, &cx).await {
1185 Ok(mut stream) => {
1186 stream.next().await;
1187 log::info!("Cache warming completed successfully");
1188 }
1189 Err(e) => {
1190 log::warn!("Cache warming failed: {}", e);
1191 }
1192 };
1193 this.update(&mut cx, |this, cx| {
1194 this.update_cache_status_for_completion(cx);
1195 })
1196 .ok();
1197 anyhow::Ok(())
1198 }
1199 .log_err()
1200 });
1201 }
1202
1203 pub fn update_cache_status_for_completion(&mut self, cx: &mut ModelContext<Self>) {
1204 let cached_message_ids: Vec<MessageId> = self
1205 .messages_metadata
1206 .iter()
1207 .filter_map(|(message_id, metadata)| {
1208 metadata.cache.as_ref().and_then(|cache| {
1209 if cache.status == CacheStatus::Pending {
1210 Some(*message_id)
1211 } else {
1212 None
1213 }
1214 })
1215 })
1216 .collect();
1217
1218 for message_id in cached_message_ids {
1219 self.update_metadata(message_id, cx, |metadata| {
1220 if let Some(cache) = &mut metadata.cache {
1221 cache.status = CacheStatus::Cached;
1222 }
1223 });
1224 }
1225 cx.notify();
1226 }
1227
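    /// Re-parses the regions edited since the last parse, coalescing
    /// overlapping row ranges and re-scanning them for slash commands and
    /// workflow-step XML tags, then emits update events for anything added or
    /// removed.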
1228 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1229 let buffer = self.buffer.read(cx).text_snapshot();
1230 let mut row_ranges = self
1231 .edits_since_last_parse
1232 .consume()
1233 .into_iter()
1234 .map(|edit| {
1235 let start_row = buffer.offset_to_point(edit.new.start).row;
1236 let end_row = buffer.offset_to_point(edit.new.end).row + 1;
1237 start_row..end_row
1238 })
1239 .peekable();
1240
1241 let mut removed_slash_command_ranges = Vec::new();
1242 let mut updated_slash_commands = Vec::new();
1243 let mut removed_steps = Vec::new();
1244 let mut updated_steps = Vec::new();
1245 while let Some(mut row_range) = row_ranges.next() {
1246 while let Some(next_row_range) = row_ranges.peek() {
1247 if row_range.end >= next_row_range.start {
1248 row_range.end = next_row_range.end;
1249 row_ranges.next();
1250 } else {
1251 break;
1252 }
1253 }
1254
1255 let start = buffer.anchor_before(Point::new(row_range.start, 0));
1256 let end = buffer.anchor_after(Point::new(
1257 row_range.end - 1,
1258 buffer.line_len(row_range.end - 1),
1259 ));
1260
1261 self.reparse_slash_commands_in_range(
1262 start..end,
1263 &buffer,
1264 &mut updated_slash_commands,
1265 &mut removed_slash_command_ranges,
1266 cx,
1267 );
1268 self.reparse_workflow_steps_in_range(
1269 start..end,
1270 &buffer,
1271 &mut updated_steps,
1272 &mut removed_steps,
1273 cx,
1274 );
1275 }
1276
1277 if !updated_slash_commands.is_empty() || !removed_slash_command_ranges.is_empty() {
1278 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1279 removed: removed_slash_command_ranges,
1280 updated: updated_slash_commands,
1281 });
1282 }
1283
1284 if !updated_steps.is_empty() || !removed_steps.is_empty() {
1285 cx.emit(ContextEvent::WorkflowStepsUpdated {
1286 removed: removed_steps,
1287 updated: updated_steps,
1288 });
1289 }
1290 }
1291
1292 fn reparse_slash_commands_in_range(
1293 &mut self,
1294 range: Range<text::Anchor>,
1295 buffer: &BufferSnapshot,
1296 updated: &mut Vec<PendingSlashCommand>,
1297 removed: &mut Vec<Range<text::Anchor>>,
1298 cx: &AppContext,
1299 ) {
1300 let old_range = self.pending_command_indices_for_range(range.clone(), cx);
1301
1302 let mut new_commands = Vec::new();
1303 let mut lines = buffer.text_for_range(range).lines();
1304 let mut offset = lines.offset();
1305 while let Some(line) = lines.next() {
1306 if let Some(command_line) = SlashCommandLine::parse(line) {
1307 let name = &line[command_line.name.clone()];
1308 let arguments = command_line
1309 .arguments
1310 .iter()
1311 .filter_map(|argument_range| {
1312 if argument_range.is_empty() {
1313 None
1314 } else {
1315 line.get(argument_range.clone())
1316 }
1317 })
1318 .map(ToOwned::to_owned)
1319 .collect::<SmallVec<_>>();
1320 if let Some(command) = SlashCommandRegistry::global(cx).command(name) {
1321 if !command.requires_argument() || !arguments.is_empty() {
1322 let start_ix = offset + command_line.name.start - 1;
1323 let end_ix = offset
1324 + command_line
1325 .arguments
1326 .last()
1327 .map_or(command_line.name.end, |argument| argument.end);
1328 let source_range =
1329 buffer.anchor_after(start_ix)..buffer.anchor_after(end_ix);
1330 let pending_command = PendingSlashCommand {
1331 name: name.to_string(),
1332 arguments,
1333 source_range,
1334 status: PendingSlashCommandStatus::Idle,
1335 };
1336 updated.push(pending_command.clone());
1337 new_commands.push(pending_command);
1338 }
1339 }
1340 }
1341
1342 offset = lines.offset();
1343 }
1344
1345 let removed_commands = self.pending_slash_commands.splice(old_range, new_commands);
1346 removed.extend(removed_commands.map(|command| command.source_range));
1347 }
1348
1349 fn reparse_workflow_steps_in_range(
1350 &mut self,
1351 range: Range<text::Anchor>,
1352 buffer: &BufferSnapshot,
1353 updated: &mut Vec<Range<text::Anchor>>,
1354 removed: &mut Vec<Range<text::Anchor>>,
1355 cx: &mut ModelContext<Self>,
1356 ) {
1357 // Rebuild the XML tags in the edited range.
1358 let intersecting_tags_range =
1359 self.indices_intersecting_buffer_range(&self.xml_tags, range.clone(), cx);
1360 let new_tags = self.parse_xml_tags_in_range(buffer, range.clone(), cx);
1361 self.xml_tags
1362 .splice(intersecting_tags_range.clone(), new_tags);
1363
1364 // Find which steps intersect the changed range.
1365 let intersecting_steps_range =
1366 self.indices_intersecting_buffer_range(&self.workflow_steps, range.clone(), cx);
1367
1368 // Reparse all tags after the last unchanged step before the change.
1369 let mut tags_start_ix = 0;
1370 if let Some(preceding_unchanged_step) =
1371 self.workflow_steps[..intersecting_steps_range.start].last()
1372 {
1373 tags_start_ix = match self.xml_tags.binary_search_by(|tag| {
1374 tag.range
1375 .start
1376 .cmp(&preceding_unchanged_step.range.end, buffer)
1377 .then(Ordering::Less)
1378 }) {
1379 Ok(ix) | Err(ix) => ix,
1380 };
1381 }
1382
1383 // Rebuild the edit suggestions in the range.
1384 let mut new_steps = self.parse_steps(tags_start_ix, range.end, buffer);
1385
1386 if let Some(project) = self.project() {
1387 for step in &mut new_steps {
1388 Self::resolve_workflow_step_internal(step, &project, cx);
1389 }
1390 }
1391
1392 updated.extend(new_steps.iter().map(|step| step.range.clone()));
1393 let removed_steps = self
1394 .workflow_steps
1395 .splice(intersecting_steps_range, new_steps);
1396 removed.extend(
1397 removed_steps
1398 .map(|step| step.range)
1399 .filter(|range| !updated.contains(&range)),
1400 );
1401 }
1402
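    /// Scans the given range for `<tag>` / `</tag>` markers whose names parse
    /// as an [`XmlTagKind`], considering only lines that fall inside assistant
    /// messages.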
1403 fn parse_xml_tags_in_range(
1404 &self,
1405 buffer: &BufferSnapshot,
1406 range: Range<text::Anchor>,
1407 cx: &AppContext,
1408 ) -> Vec<XmlTag> {
1409 let mut messages = self.messages(cx).peekable();
1410
1411 let mut tags = Vec::new();
1412 let mut lines = buffer.text_for_range(range).lines();
1413 let mut offset = lines.offset();
1414
1415 while let Some(line) = lines.next() {
1416 while let Some(message) = messages.peek() {
1417 if offset < message.offset_range.end {
1418 break;
1419 } else {
1420 messages.next();
1421 }
1422 }
1423
1424 let is_assistant_message = messages
1425 .peek()
1426 .map_or(false, |message| message.role == Role::Assistant);
1427 if is_assistant_message {
1428 for (start_ix, _) in line.match_indices('<') {
1429 let mut name_start_ix = start_ix + 1;
1430 let closing_bracket_ix = line[start_ix..].find('>').map(|i| start_ix + i);
1431 if let Some(closing_bracket_ix) = closing_bracket_ix {
1432 let end_ix = closing_bracket_ix + 1;
1433 let mut is_open_tag = true;
1434 if line[name_start_ix..closing_bracket_ix].starts_with('/') {
1435 name_start_ix += 1;
1436 is_open_tag = false;
1437 }
1438 let tag_inner = &line[name_start_ix..closing_bracket_ix];
1439 let tag_name_len = tag_inner
1440 .find(|c: char| c.is_whitespace())
1441 .unwrap_or(tag_inner.len());
1442 if let Ok(kind) = XmlTagKind::from_str(&tag_inner[..tag_name_len]) {
1443 tags.push(XmlTag {
1444 range: buffer.anchor_after(offset + start_ix)
1445 ..buffer.anchor_before(offset + end_ix),
1446 is_open_tag,
1447 kind,
1448 });
1449 };
1450 }
1451 }
1452 }
1453
1454 offset = lines.offset();
1455 }
1456 tags
1457 }
1458
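    /// Walks the parsed XML tags starting at `tags_start_ix`, grouping
    /// `<step>`...`</step>` blocks and their nested `<edit>`, `<path>`,
    /// `<search>`, `<operation>`, and `<description>` tags into
    /// [`WorkflowStep`]s. A step left unterminated at the end of the input is
    /// extended to the end of the buffer.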
1459 fn parse_steps(
1460 &mut self,
1461 tags_start_ix: usize,
1462 buffer_end: text::Anchor,
1463 buffer: &BufferSnapshot,
1464 ) -> Vec<WorkflowStep> {
1465 let mut new_steps = Vec::new();
1466 let mut pending_step = None;
1467 let mut edit_step_depth = 0;
1468 let mut tags = self.xml_tags[tags_start_ix..].iter().peekable();
1469 'tags: while let Some(tag) = tags.next() {
1470 if tag.range.start.cmp(&buffer_end, buffer).is_gt() && edit_step_depth == 0 {
1471 break;
1472 }
1473
1474 if tag.kind == XmlTagKind::Step && tag.is_open_tag {
1475 edit_step_depth += 1;
1476 let edit_start = tag.range.start;
1477 let mut edits = Vec::new();
1478 let mut step = WorkflowStep {
1479 range: edit_start..edit_start,
1480 leading_tags_end: tag.range.end,
1481 trailing_tag_start: None,
1482 edits: Default::default(),
1483 resolution: None,
1484 resolution_task: None,
1485 };
1486
1487 while let Some(tag) = tags.next() {
1488 step.trailing_tag_start.get_or_insert(tag.range.start);
1489
1490 if tag.kind == XmlTagKind::Step && !tag.is_open_tag {
1491 // step.trailing_tag_start = Some(tag.range.start);
1492 edit_step_depth -= 1;
1493 if edit_step_depth == 0 {
1494 step.range.end = tag.range.end;
1495 step.edits = edits.into();
1496 new_steps.push(step);
1497 continue 'tags;
1498 }
1499 }
1500
1501 if tag.kind == XmlTagKind::Edit && tag.is_open_tag {
1502 let mut path = None;
1503 let mut search = None;
1504 let mut operation = None;
1505 let mut description = None;
1506
1507 while let Some(tag) = tags.next() {
1508 if tag.kind == XmlTagKind::Edit && !tag.is_open_tag {
1509 edits.push(WorkflowStepEdit::new(
1510 path,
1511 operation,
1512 search,
1513 description,
1514 ));
1515 break;
1516 }
1517
1518 if tag.is_open_tag
1519 && [
1520 XmlTagKind::Path,
1521 XmlTagKind::Search,
1522 XmlTagKind::Operation,
1523 XmlTagKind::Description,
1524 ]
1525 .contains(&tag.kind)
1526 {
1527 let kind = tag.kind;
1528 let content_start = tag.range.end;
1529 if let Some(tag) = tags.peek() {
1530 if tag.kind == kind && !tag.is_open_tag {
1531 let tag = tags.next().unwrap();
1532 let content_end = tag.range.start;
1533 let mut content = buffer
1534 .text_for_range(content_start..content_end)
1535 .collect::<String>();
1536 content.truncate(content.trim_end().len());
1537 match kind {
1538 XmlTagKind::Path => path = Some(content),
1539 XmlTagKind::Operation => operation = Some(content),
1540 XmlTagKind::Search => {
1541 search = Some(content).filter(|s| !s.is_empty())
1542 }
1543 XmlTagKind::Description => {
1544 description =
1545 Some(content).filter(|s| !s.is_empty())
1546 }
1547 _ => {}
1548 }
1549 }
1550 }
1551 }
1552 }
1553 }
1554 }
1555
1556 pending_step = Some(step);
1557 }
1558 }
1559
1560 if let Some(mut pending_step) = pending_step {
1561 pending_step.range.end = text::Anchor::MAX;
1562 new_steps.push(pending_step);
1563 }
1564
1565 new_steps
1566 }
1567
1568 pub fn resolve_workflow_step(
1569 &mut self,
1570 tagged_range: Range<text::Anchor>,
1571 cx: &mut ModelContext<Self>,
1572 ) -> Option<()> {
1573 let index = self
1574 .workflow_step_index_for_range(&tagged_range, self.buffer.read(cx))
1575 .ok()?;
1576 let step = &mut self.workflow_steps[index];
1577 let project = self.project.as_ref()?;
1578 step.resolution.take();
1579 Self::resolve_workflow_step_internal(step, project, cx);
1580 None
1581 }
1582
1583 fn resolve_workflow_step_internal(
1584 step: &mut WorkflowStep,
1585 project: &Model<Project>,
1586 cx: &mut ModelContext<'_, Context>,
1587 ) {
1588 step.resolution_task = Some(cx.spawn({
1589 let range = step.range.clone();
1590 let edits = step.edits.clone();
1591 let project = project.clone();
1592 |this, mut cx| async move {
1593 let suggestion_groups =
1594 Self::compute_step_resolution(project, edits, &mut cx).await;
1595
1596 this.update(&mut cx, |this, cx| {
1597 let buffer = this.buffer.read(cx).text_snapshot();
1598 let ix = this.workflow_step_index_for_range(&range, &buffer).ok();
1599 if let Some(ix) = ix {
1600 let step = &mut this.workflow_steps[ix];
1601
1602 let resolution = suggestion_groups.map(|suggestion_groups| {
1603 let mut title = String::new();
1604 for mut chunk in buffer.text_for_range(
1605 step.leading_tags_end
1606 ..step.trailing_tag_start.unwrap_or(step.range.end),
1607 ) {
1608 if title.is_empty() {
1609 chunk = chunk.trim_start();
1610 }
1611 if let Some((prefix, _)) = chunk.split_once('\n') {
1612 title.push_str(prefix);
1613 break;
1614 } else {
1615 title.push_str(chunk);
1616 }
1617 }
1618
1619 WorkflowStepResolution {
1620 title,
1621 suggestion_groups,
1622 }
1623 });
1624
1625 step.resolution = Some(Arc::new(resolution));
1626 cx.emit(ContextEvent::WorkflowStepsUpdated {
1627 removed: vec![],
1628 updated: vec![range],
1629 })
1630 }
1631 })
1632 .ok();
1633 }
1634 }));
1635 }
1636
1637 async fn compute_step_resolution(
1638 project: Model<Project>,
1639 edits: Arc<[Result<WorkflowStepEdit>]>,
1640 cx: &mut AsyncAppContext,
1641 ) -> Result<HashMap<Model<Buffer>, Vec<WorkflowSuggestionGroup>>> {
1642 let mut suggestion_tasks = Vec::new();
1643 for edit in edits.iter() {
1644 let edit = edit.as_ref().map_err(|e| anyhow!("{e}"))?;
1645 suggestion_tasks.push(edit.resolve(project.clone(), cx.clone()));
1646 }
1647
1648 // Expand the context ranges of each suggestion and group suggestions with overlapping context ranges.
1649 let suggestions = future::try_join_all(suggestion_tasks).await?;
1650
1651 let mut suggestions_by_buffer = HashMap::default();
1652 for (buffer, suggestion) in suggestions {
1653 suggestions_by_buffer
1654 .entry(buffer)
1655 .or_insert_with(Vec::new)
1656 .push(suggestion);
1657 }
1658
1659 let mut suggestion_groups_by_buffer = HashMap::default();
1660 for (buffer, mut suggestions) in suggestions_by_buffer {
1661 let mut suggestion_groups = Vec::<WorkflowSuggestionGroup>::new();
1662 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
1663 // Sort suggestions by their range so that earlier, larger ranges come first
1664 suggestions.sort_by(|a, b| a.range().cmp(&b.range(), &snapshot));
1665
1666 // Merge overlapping suggestions
1667 suggestions.dedup_by(|a, b| b.try_merge(a, &snapshot));
1668
1669 // Create context ranges for each suggestion
1670 for suggestion in suggestions {
1671 let context_range = {
1672 let suggestion_point_range = suggestion.range().to_point(&snapshot);
1673 let start_row = suggestion_point_range.start.row.saturating_sub(5);
1674 let end_row =
1675 cmp::min(suggestion_point_range.end.row + 5, snapshot.max_point().row);
1676 let start = snapshot.anchor_before(Point::new(start_row, 0));
1677 let end =
1678 snapshot.anchor_after(Point::new(end_row, snapshot.line_len(end_row)));
1679 start..end
1680 };
1681
1682 if let Some(last_group) = suggestion_groups.last_mut() {
1683 if last_group
1684 .context_range
1685 .end
1686 .cmp(&context_range.start, &snapshot)
1687 .is_ge()
1688 {
1689 // Merge with the previous group if context ranges overlap
1690 last_group.context_range.end = context_range.end;
1691 last_group.suggestions.push(suggestion);
1692 } else {
1693 // Create a new group
1694 suggestion_groups.push(WorkflowSuggestionGroup {
1695 context_range,
1696 suggestions: vec![suggestion],
1697 });
1698 }
1699 } else {
1700 // Create the first group
1701 suggestion_groups.push(WorkflowSuggestionGroup {
1702 context_range,
1703 suggestions: vec![suggestion],
1704 });
1705 }
1706 }
1707
1708 suggestion_groups_by_buffer.insert(buffer, suggestion_groups);
1709 }
1710
1711 Ok(suggestion_groups_by_buffer)
1712 }
1713
1714 pub fn pending_command_for_position(
1715 &mut self,
1716 position: language::Anchor,
1717 cx: &mut ModelContext<Self>,
1718 ) -> Option<&mut PendingSlashCommand> {
1719 let buffer = self.buffer.read(cx);
1720 match self
1721 .pending_slash_commands
1722 .binary_search_by(|probe| probe.source_range.end.cmp(&position, buffer))
1723 {
1724 Ok(ix) => Some(&mut self.pending_slash_commands[ix]),
1725 Err(ix) => {
1726 let cmd = self.pending_slash_commands.get_mut(ix)?;
1727 if position.cmp(&cmd.source_range.start, buffer).is_ge()
1728 && position.cmp(&cmd.source_range.end, buffer).is_le()
1729 {
1730 Some(cmd)
1731 } else {
1732 None
1733 }
1734 }
1735 }
1736 }
1737
1738 pub fn pending_commands_for_range(
1739 &self,
1740 range: Range<language::Anchor>,
1741 cx: &AppContext,
1742 ) -> &[PendingSlashCommand] {
1743 let range = self.pending_command_indices_for_range(range, cx);
1744 &self.pending_slash_commands[range]
1745 }
1746
1747 fn pending_command_indices_for_range(
1748 &self,
1749 range: Range<language::Anchor>,
1750 cx: &AppContext,
1751 ) -> Range<usize> {
1752 self.indices_intersecting_buffer_range(&self.pending_slash_commands, range, cx)
1753 }
1754
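    /// Binary-searches a range-sorted slice of annotations for the contiguous
    /// index range whose annotations intersect `range` in the buffer.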
1755 fn indices_intersecting_buffer_range<T: ContextAnnotation>(
1756 &self,
1757 all_annotations: &[T],
1758 range: Range<language::Anchor>,
1759 cx: &AppContext,
1760 ) -> Range<usize> {
1761 let buffer = self.buffer.read(cx);
1762 let start_ix = match all_annotations
1763 .binary_search_by(|probe| probe.range().end.cmp(&range.start, &buffer))
1764 {
1765 Ok(ix) | Err(ix) => ix,
1766 };
1767 let end_ix = match all_annotations
1768 .binary_search_by(|probe| probe.range().start.cmp(&range.end, &buffer))
1769 {
1770 Ok(ix) => ix + 1,
1771 Err(ix) => ix,
1772 };
1773 start_ix..end_ix
1774 }
1775
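    /// Spawns a task that, once `output` resolves, replaces the slash
    /// command's source range with the command output (clamping section ranges
    /// to char boundaries), records the resulting sections, and emits
    /// `SlashCommandFinished`; on failure the pending command is marked as
    /// errored instead.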
1776 pub fn insert_command_output(
1777 &mut self,
1778 command_range: Range<language::Anchor>,
1779 output: Task<Result<SlashCommandOutput>>,
1780 ensure_trailing_newline: bool,
1781 expand_result: bool,
1782 cx: &mut ModelContext<Self>,
1783 ) {
1784 self.reparse(cx);
1785
1786 let insert_output_task = cx.spawn(|this, mut cx| {
1787 let command_range = command_range.clone();
1788 async move {
1789 let output = output.await;
1790 this.update(&mut cx, |this, cx| match output {
1791 Ok(mut output) => {
1792 // Ensure section ranges are valid.
1793 for section in &mut output.sections {
1794 section.range.start = section.range.start.min(output.text.len());
1795 section.range.end = section.range.end.min(output.text.len());
1796 while !output.text.is_char_boundary(section.range.start) {
1797 section.range.start -= 1;
1798 }
1799 while !output.text.is_char_boundary(section.range.end) {
1800 section.range.end += 1;
1801 }
1802 }
1803
1804 // Ensure there is a newline after the last section.
1805 if ensure_trailing_newline {
1806 let has_newline_after_last_section =
1807 output.sections.last().map_or(false, |last_section| {
1808 output.text[last_section.range.end..].ends_with('\n')
1809 });
1810 if !has_newline_after_last_section {
1811 output.text.push('\n');
1812 }
1813 }
1814
1815 let version = this.version.clone();
1816 let command_id = SlashCommandId(this.next_timestamp());
1817 let (operation, event) = this.buffer.update(cx, |buffer, cx| {
1818 let start = command_range.start.to_offset(buffer);
1819 let old_end = command_range.end.to_offset(buffer);
1820 let new_end = start + output.text.len();
1821 buffer.edit([(start..old_end, output.text)], None, cx);
1822
1823 let mut sections = output
1824 .sections
1825 .into_iter()
1826 .map(|section| SlashCommandOutputSection {
1827 range: buffer.anchor_after(start + section.range.start)
1828 ..buffer.anchor_before(start + section.range.end),
1829 icon: section.icon,
1830 label: section.label,
1831 })
1832 .collect::<Vec<_>>();
1833 sections.sort_by(|a, b| a.range.cmp(&b.range, buffer));
1834
1835 this.slash_command_output_sections
1836 .extend(sections.iter().cloned());
1837 this.slash_command_output_sections
1838 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
1839
1840 let output_range =
1841 buffer.anchor_after(start)..buffer.anchor_before(new_end);
1842 this.finished_slash_commands.insert(command_id);
1843
1844 (
1845 ContextOperation::SlashCommandFinished {
1846 id: command_id,
1847 output_range: output_range.clone(),
1848 sections: sections.clone(),
1849 version,
1850 },
1851 ContextEvent::SlashCommandFinished {
1852 output_range,
1853 sections,
1854 run_commands_in_output: output.run_commands_in_text,
1855 expand_result,
1856 },
1857 )
1858 });
1859
1860 this.push_op(operation, cx);
1861 cx.emit(event);
1862 }
1863 Err(error) => {
1864 if let Some(pending_command) =
1865 this.pending_command_for_position(command_range.start, cx)
1866 {
1867 pending_command.status =
1868 PendingSlashCommandStatus::Error(error.to_string());
1869 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1870 removed: vec![pending_command.source_range.clone()],
1871 updated: vec![pending_command.clone()],
1872 });
1873 }
1874 }
1875 })
1876 .ok();
1877 }
1878 });
1879
1880 if let Some(pending_command) = self.pending_command_for_position(command_range.start, cx) {
1881 pending_command.status = PendingSlashCommandStatus::Running {
1882 _task: insert_output_task.shared(),
1883 };
1884 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1885 removed: vec![pending_command.source_range.clone()],
1886 updated: vec![pending_command.clone()],
1887 });
1888 }
1889 }
1890
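    /// Spawns a task that appends the tool's output to the end of the buffer
    /// once it resolves, registers it as [`Content::ToolResult`], and emits
    /// `ToolFinished`; on failure the pending tool use is marked as errored.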
1891 pub fn insert_tool_output(
1892 &mut self,
1893 tool_use_id: Arc<str>,
1894 output: Task<Result<String>>,
1895 cx: &mut ModelContext<Self>,
1896 ) {
1897 let insert_output_task = cx.spawn(|this, mut cx| {
1898 let tool_use_id = tool_use_id.clone();
1899 async move {
1900 let output = output.await;
1901 this.update(&mut cx, |this, cx| match output {
1902 Ok(mut output) => {
1903 const NEWLINE: char = '\n';
1904
1905 if !output.ends_with(NEWLINE) {
1906 output.push(NEWLINE);
1907 }
1908
1909 let anchor_range = this.buffer.update(cx, |buffer, cx| {
1910 let insert_start = buffer.len().to_offset(buffer);
1911 let insert_end = insert_start;
1912
1913 let start = insert_start;
1914 let end = start + output.len() - NEWLINE.len_utf8();
1915
1916 buffer.edit([(insert_start..insert_end, output)], None, cx);
1917
1918 let output_range = buffer.anchor_after(start)..buffer.anchor_after(end);
1919
1920 output_range
1921 });
1922
1923 this.insert_content(
1924 Content::ToolResult {
1925 range: anchor_range.clone(),
1926 tool_use_id: tool_use_id.clone(),
1927 },
1928 cx,
1929 );
1930
1931 cx.emit(ContextEvent::ToolFinished {
1932 tool_use_id,
1933 output_range: anchor_range,
1934 });
1935 }
1936 Err(err) => {
1937 if let Some(tool_use) = this.pending_tool_uses_by_id.get_mut(&tool_use_id) {
1938 tool_use.status = PendingToolUseStatus::Error(err.to_string());
1939 }
1940 }
1941 })
1942 .ok();
1943 }
1944 });
1945
1946 if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) {
1947 tool_use.status = PendingToolUseStatus::Running {
1948 _task: insert_output_task.shared(),
1949 };
1950 }
1951 }
1952
1953 pub fn completion_provider_changed(&mut self, cx: &mut ModelContext<Self>) {
1954 self.count_remaining_tokens(cx);
1955 }
1956
1957 fn get_last_valid_message_id(&self, cx: &ModelContext<Self>) -> Option<MessageId> {
1958 self.message_anchors.iter().rev().find_map(|message| {
1959 message
1960 .start
1961 .is_valid(self.buffer.read(cx))
1962 .then_some(message.id)
1963 })
1964 }
1965
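    /// Starts a streamed completion using the active provider and model.
    ///
    /// Inserts a pending assistant message after the last valid message and queues an
    /// empty user message for the next reply, then streams text and tool-use events from
    /// the model into the buffer. Errors are surfaced via
    /// [`ContextEvent::ShowAssistError`] and recorded on the assistant message's status.
    /// Returns the anchor of the new user message, or `None` if there is no active
    /// provider or model, the provider is not authenticated, or there is no valid
    /// message to respond to.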
1966 pub fn assist(&mut self, cx: &mut ModelContext<Self>) -> Option<MessageAnchor> {
1967 let provider = LanguageModelRegistry::read_global(cx).active_provider()?;
1968 let model = LanguageModelRegistry::read_global(cx).active_model()?;
1969 let last_message_id = self.get_last_valid_message_id(cx)?;
1970
1971 if !provider.is_authenticated(cx) {
1972 log::info!("completion provider has no credentials");
1973 return None;
1974 }
1975 // Compute which messages to cache, including the last one.
1976 self.mark_cache_anchors(&model.cache_configuration(), false, cx);
1977
1978 let mut request = self.to_completion_request(cx);
1979
1980 if cx.has_flag::<ToolUseFeatureFlag>() {
1981 let tool_registry = ToolRegistry::global(cx);
1982 request.tools = tool_registry
1983 .tools()
1984 .into_iter()
1985 .map(|tool| LanguageModelRequestTool {
1986 name: tool.name(),
1987 description: tool.description(),
1988 input_schema: tool.input_schema(),
1989 })
1990 .collect();
1991 }
1992
1993 let assistant_message = self
1994 .insert_message_after(last_message_id, Role::Assistant, MessageStatus::Pending, cx)
1995 .unwrap();
1996
1997 // Queue up the user's next reply.
1998 let user_message = self
1999 .insert_message_after(assistant_message.id, Role::User, MessageStatus::Done, cx)
2000 .unwrap();
2001
2002 let pending_completion_id = post_inc(&mut self.completion_count);
2003
2004 let task = cx.spawn({
2005 |this, mut cx| async move {
2006 let stream = model.stream_completion(request, &cx);
2007 let assistant_message_id = assistant_message.id;
2008 let mut response_latency = None;
2009 let stream_completion = async {
2010 let request_start = Instant::now();
2011 let mut events = stream.await?;
2012 let mut stop_reason = StopReason::EndTurn;
2013
2014 while let Some(event) = events.next().await {
2015 if response_latency.is_none() {
2016 response_latency = Some(request_start.elapsed());
2017 }
2018 let event = event?;
2019
2020 this.update(&mut cx, |this, cx| {
2021 let message_ix = this
2022 .message_anchors
2023 .iter()
2024 .position(|message| message.id == assistant_message_id)?;
2025 this.buffer.update(cx, |buffer, cx| {
2026 let message_old_end_offset = this.message_anchors[message_ix + 1..]
2027 .iter()
2028 .find(|message| message.start.is_valid(buffer))
2029 .map_or(buffer.len(), |message| {
2030 message.start.to_offset(buffer).saturating_sub(1)
2031 });
2032
2033 match event {
2034 LanguageModelCompletionEvent::Stop(reason) => {
2035 stop_reason = reason;
2036 }
2037 LanguageModelCompletionEvent::Text(chunk) => {
2038 buffer.edit(
2039 [(
2040 message_old_end_offset..message_old_end_offset,
2041 chunk,
2042 )],
2043 None,
2044 cx,
2045 );
2046 }
2047 LanguageModelCompletionEvent::ToolUse(tool_use) => {
2048 const NEWLINE: char = '\n';
2049
2050 let mut text = String::new();
2051 text.push(NEWLINE);
2052 text.push_str(
2053 &serde_json::to_string_pretty(&tool_use)
2054 .expect("failed to serialize tool use to JSON"),
2055 );
2056 text.push(NEWLINE);
2057 let text_len = text.len();
2058
2059 buffer.edit(
2060 [(
2061 message_old_end_offset..message_old_end_offset,
2062 text,
2063 )],
2064 None,
2065 cx,
2066 );
2067
2068 let start_ix = message_old_end_offset + NEWLINE.len_utf8();
2069 let end_ix =
2070 message_old_end_offset + text_len - NEWLINE.len_utf8();
2071 let source_range = buffer.anchor_after(start_ix)
2072 ..buffer.anchor_after(end_ix);
2073
2074 let tool_use_id: Arc<str> = tool_use.id.into();
2075 this.pending_tool_uses_by_id.insert(
2076 tool_use_id.clone(),
2077 PendingToolUse {
2078 id: tool_use_id,
2079 name: tool_use.name,
2080 input: tool_use.input,
2081 status: PendingToolUseStatus::Idle,
2082 source_range,
2083 },
2084 );
2085 }
2086 }
2087 });
2088
2089 cx.emit(ContextEvent::StreamedCompletion);
2090
2091 Some(())
2092 })?;
2093 smol::future::yield_now().await;
2094 }
2095 this.update(&mut cx, |this, cx| {
2096 this.pending_completions
2097 .retain(|completion| completion.id != pending_completion_id);
2098 this.summarize(false, cx);
2099 this.update_cache_status_for_completion(cx);
2100 })?;
2101
2102 anyhow::Ok(stop_reason)
2103 };
2104
2105 let result = stream_completion.await;
2106
2107 this.update(&mut cx, |this, cx| {
2108 let error_message = result
2109 .as_ref()
2110 .err()
2111 .map(|error| error.to_string().trim().to_string());
2112
2113 if let Some(error_message) = error_message.as_ref() {
2114 cx.emit(ContextEvent::ShowAssistError(SharedString::from(
2115 error_message.clone(),
2116 )));
2117 }
2118
2119 this.update_metadata(assistant_message_id, cx, |metadata| {
2120 if let Some(error_message) = error_message.as_ref() {
2121 metadata.status =
2122 MessageStatus::Error(SharedString::from(error_message.clone()));
2123 } else {
2124 metadata.status = MessageStatus::Done;
2125 }
2126 });
2127
2128 if let Some(telemetry) = this.telemetry.as_ref() {
2129 telemetry.report_assistant_event(
2130 Some(this.id.0.clone()),
2131 AssistantKind::Panel,
2132 model.telemetry_id(),
2133 response_latency,
2134 error_message,
2135 );
2136 }
2137
2138 if let Ok(stop_reason) = result {
2139 match stop_reason {
2140 StopReason::ToolUse => {
2141 cx.emit(ContextEvent::UsePendingTools);
2142 }
2143 StopReason::EndTurn => {}
2144 StopReason::MaxTokens => {}
2145 }
2146 }
2147 })
2148 .ok();
2149 }
2150 });
2151
2152 self.pending_completions.push(PendingCompletion {
2153 id: pending_completion_id,
2154 assistant_message_id: assistant_message.id,
2155 _task: task,
2156 });
2157
2158 Some(user_message)
2159 }
2160
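    /// Builds a [`LanguageModelRequest`] from every message whose status is `Done`,
    /// interleaving plain text with any image, tool-use, and tool-result content that
    /// falls within each message's range. A request message's `cache` flag reflects
    /// whether the corresponding context message is a cache anchor.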
2161 pub fn to_completion_request(&self, cx: &AppContext) -> LanguageModelRequest {
2162 let buffer = self.buffer.read(cx);
2163
2164 let mut contents = self.contents(cx).peekable();
2165
2166 fn collect_text_content(buffer: &Buffer, range: Range<usize>) -> Option<String> {
2167 let text: String = buffer.text_for_range(range.clone()).collect();
2168 if text.trim().is_empty() {
2169 None
2170 } else {
2171 Some(text)
2172 }
2173 }
2174
2175 let mut completion_request = LanguageModelRequest {
2176 messages: Vec::new(),
2177 tools: Vec::new(),
2178 stop: Vec::new(),
2179 temperature: 1.0,
2180 };
2181 for message in self.messages(cx) {
2182 if message.status != MessageStatus::Done {
2183 continue;
2184 }
2185
2186 let mut offset = message.offset_range.start;
2187 let mut request_message = LanguageModelRequestMessage {
2188 role: message.role,
2189 content: Vec::new(),
2190 cache: message
2191 .cache
2192 .as_ref()
2193 .map_or(false, |cache| cache.is_anchor),
2194 };
2195
2196 while let Some(content) = contents.peek() {
2197 if content
2198 .range()
2199 .end
2200 .cmp(&message.anchor_range.end, buffer)
2201 .is_lt()
2202 {
2203 let content = contents.next().unwrap();
2204 let range = content.range().to_offset(buffer);
2205 request_message.content.extend(
2206 collect_text_content(buffer, offset..range.start).map(MessageContent::Text),
2207 );
2208
2209 match content {
2210 Content::Image { image, .. } => {
2211 if let Some(image) = image.clone().now_or_never().flatten() {
2212 request_message
2213 .content
2214 .push(language_model::MessageContent::Image(image));
2215 }
2216 }
2217 Content::ToolUse { tool_use, .. } => {
2218 request_message
2219 .content
2220 .push(language_model::MessageContent::ToolUse(tool_use.clone()));
2221 }
2222 Content::ToolResult { tool_use_id, .. } => {
2223 request_message.content.push(
2224 language_model::MessageContent::ToolResult(
2225 LanguageModelToolResult {
2226 tool_use_id: tool_use_id.to_string(),
2227 is_error: false,
2228 content: collect_text_content(buffer, range.clone())
2229 .unwrap_or_default(),
2230 },
2231 ),
2232 );
2233 }
2234 }
2235
2236 offset = range.end;
2237 } else {
2238 break;
2239 }
2240 }
2241
2242 request_message.content.extend(
2243 collect_text_content(buffer, offset..message.offset_range.end)
2244 .map(MessageContent::Text),
2245 );
2246
2247 completion_request.messages.push(request_message);
2248 }
2249
2250 completion_request
2251 }
2252
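    /// Removes the most recent pending completion (dropping its task) and marks its
    /// assistant message as canceled if it is still pending. Returns `true` if there was
    /// a completion to cancel.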
2253 pub fn cancel_last_assist(&mut self, cx: &mut ModelContext<Self>) -> bool {
2254 if let Some(pending_completion) = self.pending_completions.pop() {
2255 self.update_metadata(pending_completion.assistant_message_id, cx, |metadata| {
2256 if metadata.status == MessageStatus::Pending {
2257 metadata.status = MessageStatus::Canceled;
2258 }
2259 });
2260 true
2261 } else {
2262 false
2263 }
2264 }
2265
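    /// Cycles the role of each of the given messages and reparses workflow steps in the
    /// affected ranges, emitting [`ContextEvent::WorkflowStepsUpdated`] when any steps
    /// change.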
2266 pub fn cycle_message_roles(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
2267 for id in &ids {
2268 if let Some(metadata) = self.messages_metadata.get(id) {
2269 let role = metadata.role.cycle();
2270 self.update_metadata(*id, cx, |metadata| metadata.role = role);
2271 }
2272 }
2273
2274 self.message_roles_updated(ids, cx);
2275 }
2276
2277 fn message_roles_updated(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
2278 let mut ranges = Vec::new();
2279 for message in self.messages(cx) {
2280 if ids.contains(&message.id) {
2281 ranges.push(message.anchor_range.clone());
2282 }
2283 }
2284
2285 let buffer = self.buffer.read(cx).text_snapshot();
2286 let mut updated = Vec::new();
2287 let mut removed = Vec::new();
2288 for range in ranges {
2289 self.reparse_workflow_steps_in_range(range, &buffer, &mut updated, &mut removed, cx);
2290 }
2291
2292 if !updated.is_empty() || !removed.is_empty() {
2293 cx.emit(ContextEvent::WorkflowStepsUpdated { removed, updated })
2294 }
2295 }
2296
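    /// Applies `f` to the metadata of the given message, bumps its timestamp, and
    /// broadcasts the change as a [`ContextOperation::UpdateMessage`] operation. Does
    /// nothing if the message is unknown.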
2297 pub fn update_metadata(
2298 &mut self,
2299 id: MessageId,
2300 cx: &mut ModelContext<Self>,
2301 f: impl FnOnce(&mut MessageMetadata),
2302 ) {
2303 let version = self.version.clone();
2304 let timestamp = self.next_timestamp();
2305 if let Some(metadata) = self.messages_metadata.get_mut(&id) {
2306 f(metadata);
2307 metadata.timestamp = timestamp;
2308 let operation = ContextOperation::UpdateMessage {
2309 message_id: id,
2310 metadata: metadata.clone(),
2311 version,
2312 };
2313 self.push_op(operation, cx);
2314 cx.emit(ContextEvent::MessagesEdited);
2315 cx.notify();
2316 }
2317 }
2318
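    /// Inserts a new message with the given role and status immediately after the
    /// message with `message_id`, adding a newline to the buffer just before the next
    /// valid message (or at the end of the buffer). Returns `None` if `message_id` does
    /// not refer to a known message.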
2319 pub fn insert_message_after(
2320 &mut self,
2321 message_id: MessageId,
2322 role: Role,
2323 status: MessageStatus,
2324 cx: &mut ModelContext<Self>,
2325 ) -> Option<MessageAnchor> {
2326 if let Some(prev_message_ix) = self
2327 .message_anchors
2328 .iter()
2329 .position(|message| message.id == message_id)
2330 {
2331 // Find the next valid message after the one we were given.
2332 let mut next_message_ix = prev_message_ix + 1;
2333 while let Some(next_message) = self.message_anchors.get(next_message_ix) {
2334 if next_message.start.is_valid(self.buffer.read(cx)) {
2335 break;
2336 }
2337 next_message_ix += 1;
2338 }
2339
2340 let start = self.buffer.update(cx, |buffer, cx| {
2341 let offset = self
2342 .message_anchors
2343 .get(next_message_ix)
2344 .map_or(buffer.len(), |message| {
2345 buffer.clip_offset(message.start.to_offset(buffer) - 1, Bias::Left)
2346 });
2347 buffer.edit([(offset..offset, "\n")], None, cx);
2348 buffer.anchor_before(offset + 1)
2349 });
2350
2351 let version = self.version.clone();
2352 let anchor = MessageAnchor {
2353 id: MessageId(self.next_timestamp()),
2354 start,
2355 };
2356 let metadata = MessageMetadata {
2357 role,
2358 status,
2359 timestamp: anchor.id.0,
2360 cache: None,
2361 };
2362 self.insert_message(anchor.clone(), metadata.clone(), cx);
2363 self.push_op(
2364 ContextOperation::InsertMessage {
2365 anchor: anchor.clone(),
2366 metadata,
2367 version,
2368 },
2369 cx,
2370 );
2371 Some(anchor)
2372 } else {
2373 None
2374 }
2375 }
2376
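    /// Caches the render data and language-model encoding for `image`, keyed by its ID,
    /// so the image can later be attached to the context via
    /// [`Self::insert_image_content`].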
2377 pub fn insert_image(&mut self, image: Image, cx: &mut ModelContext<Self>) -> Option<()> {
2378 if let hash_map::Entry::Vacant(entry) = self.images.entry(image.id()) {
2379 entry.insert((
2380 image.to_image_data(cx).log_err()?,
2381 LanguageModelImage::from_image(image, cx).shared(),
2382 ));
2383 }
2384
2385 Some(())
2386 }
2387
2388 pub fn insert_image_content(
2389 &mut self,
2390 image_id: u64,
2391 anchor: language::Anchor,
2392 cx: &mut ModelContext<Self>,
2393 ) {
2394 if let Some((render_image, image)) = self.images.get(&image_id) {
2395 self.insert_content(
2396 Content::Image {
2397 anchor,
2398 image_id,
2399 image: image.clone(),
2400 render_image: render_image.clone(),
2401 },
2402 cx,
2403 );
2404 }
2405 }
2406
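    /// Inserts `content` into the ordered list of context contents, replacing any
    /// existing entry at the same position, and emits [`ContextEvent::MessagesEdited`].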
2407 pub fn insert_content(&mut self, content: Content, cx: &mut ModelContext<Self>) {
2408 let buffer = self.buffer.read(cx);
2409 let insertion_ix = match self
2410 .contents
2411 .binary_search_by(|probe| probe.cmp(&content, buffer))
2412 {
2413 Ok(ix) => {
2414 self.contents.remove(ix);
2415 ix
2416 }
2417 Err(ix) => ix,
2418 };
2419 self.contents.insert(insertion_ix, content);
2420 cx.emit(ContextEvent::MessagesEdited);
2421 }
2422
2423 pub fn contents<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Content> {
2424 let buffer = self.buffer.read(cx);
2425 self.contents
2426 .iter()
2427 .filter(|content| {
2428 let range = content.range();
2429 range.start.is_valid(buffer) && range.end.is_valid(buffer)
2430 })
2431 .cloned()
2432 }
2433
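    /// Splits the message containing `range`, inserting newlines where needed, and
    /// returns the resulting `(selection, suffix)` message anchors. The selection is
    /// `None` when the range is empty or starts at the beginning of the message, and
    /// both are `None` when the range spans multiple messages or no message at all.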
2434 pub fn split_message(
2435 &mut self,
2436 range: Range<usize>,
2437 cx: &mut ModelContext<Self>,
2438 ) -> (Option<MessageAnchor>, Option<MessageAnchor>) {
2439 let start_message = self.message_for_offset(range.start, cx);
2440 let end_message = self.message_for_offset(range.end, cx);
2441 if let Some((start_message, end_message)) = start_message.zip(end_message) {
2442 // Prevent splitting when range spans multiple messages.
2443 if start_message.id != end_message.id {
2444 return (None, None);
2445 }
2446
2447 let message = start_message;
2448 let role = message.role;
2449 let mut edited_buffer = false;
2450
2451 let mut suffix_start = None;
2452
2453            // TODO: why did this start panicking? The saturating_sub below guards against the suspected underflow for now.
2454 if range.start > message.offset_range.start
2455 && range.end < message.offset_range.end.saturating_sub(1)
2456 {
2457 if self.buffer.read(cx).chars_at(range.end).next() == Some('\n') {
2458 suffix_start = Some(range.end + 1);
2459 } else if self.buffer.read(cx).reversed_chars_at(range.end).next() == Some('\n') {
2460 suffix_start = Some(range.end);
2461 }
2462 }
2463
2464 let version = self.version.clone();
2465 let suffix = if let Some(suffix_start) = suffix_start {
2466 MessageAnchor {
2467 id: MessageId(self.next_timestamp()),
2468 start: self.buffer.read(cx).anchor_before(suffix_start),
2469 }
2470 } else {
2471 self.buffer.update(cx, |buffer, cx| {
2472 buffer.edit([(range.end..range.end, "\n")], None, cx);
2473 });
2474 edited_buffer = true;
2475 MessageAnchor {
2476 id: MessageId(self.next_timestamp()),
2477 start: self.buffer.read(cx).anchor_before(range.end + 1),
2478 }
2479 };
2480
2481 let suffix_metadata = MessageMetadata {
2482 role,
2483 status: MessageStatus::Done,
2484 timestamp: suffix.id.0,
2485 cache: None,
2486 };
2487 self.insert_message(suffix.clone(), suffix_metadata.clone(), cx);
2488 self.push_op(
2489 ContextOperation::InsertMessage {
2490 anchor: suffix.clone(),
2491 metadata: suffix_metadata,
2492 version,
2493 },
2494 cx,
2495 );
2496
2497 let new_messages =
2498 if range.start == range.end || range.start == message.offset_range.start {
2499 (None, Some(suffix))
2500 } else {
2501 let mut prefix_end = None;
2502 if range.start > message.offset_range.start
2503                    && range.end < message.offset_range.end.saturating_sub(1)
2504 {
2505 if self.buffer.read(cx).chars_at(range.start).next() == Some('\n') {
2506 prefix_end = Some(range.start + 1);
2507 } else if self.buffer.read(cx).reversed_chars_at(range.start).next()
2508 == Some('\n')
2509 {
2510 prefix_end = Some(range.start);
2511 }
2512 }
2513
2514 let version = self.version.clone();
2515 let selection = if let Some(prefix_end) = prefix_end {
2516 MessageAnchor {
2517 id: MessageId(self.next_timestamp()),
2518 start: self.buffer.read(cx).anchor_before(prefix_end),
2519 }
2520 } else {
2521 self.buffer.update(cx, |buffer, cx| {
2522 buffer.edit([(range.start..range.start, "\n")], None, cx)
2523 });
2524 edited_buffer = true;
2525 MessageAnchor {
2526 id: MessageId(self.next_timestamp()),
2527 start: self.buffer.read(cx).anchor_before(range.end + 1),
2528 }
2529 };
2530
2531 let selection_metadata = MessageMetadata {
2532 role,
2533 status: MessageStatus::Done,
2534 timestamp: selection.id.0,
2535 cache: None,
2536 };
2537 self.insert_message(selection.clone(), selection_metadata.clone(), cx);
2538 self.push_op(
2539 ContextOperation::InsertMessage {
2540 anchor: selection.clone(),
2541 metadata: selection_metadata,
2542 version,
2543 },
2544 cx,
2545 );
2546
2547 (Some(selection), Some(suffix))
2548 };
2549
2550 if !edited_buffer {
2551 cx.emit(ContextEvent::MessagesEdited);
2552 }
2553 new_messages
2554 } else {
2555 (None, None)
2556 }
2557 }
2558
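    /// Records the metadata for a new message and inserts its anchor, keeping
    /// `message_anchors` ordered by position in the buffer.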
2559 fn insert_message(
2560 &mut self,
2561 new_anchor: MessageAnchor,
2562 new_metadata: MessageMetadata,
2563 cx: &mut ModelContext<Self>,
2564 ) {
2565 cx.emit(ContextEvent::MessagesEdited);
2566
2567 self.messages_metadata.insert(new_anchor.id, new_metadata);
2568
2569 let buffer = self.buffer.read(cx);
2570 let insertion_ix = self
2571 .message_anchors
2572 .iter()
2573 .position(|anchor| {
2574 let comparison = new_anchor.start.cmp(&anchor.start, buffer);
2575 comparison.is_lt() || (comparison.is_eq() && new_anchor.id > anchor.id)
2576 })
2577 .unwrap_or(self.message_anchors.len());
2578 self.message_anchors.insert(insertion_ix, new_anchor);
2579 }
2580
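    /// Asks the active model for a one-line title for this context and streams it into
    /// the summary, broadcasting [`ContextOperation::UpdateSummary`] as text arrives.
    /// When `replace_old` is false, this only runs for contexts with at least two
    /// messages and no existing summary.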
2581 pub(super) fn summarize(&mut self, replace_old: bool, cx: &mut ModelContext<Self>) {
2582 let Some(provider) = LanguageModelRegistry::read_global(cx).active_provider() else {
2583 return;
2584 };
2585 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
2586 return;
2587 };
2588
2589 if replace_old || (self.message_anchors.len() >= 2 && self.summary.is_none()) {
2590 if !provider.is_authenticated(cx) {
2591 return;
2592 }
2593
2594 let mut request = self.to_completion_request(cx);
2595 request.messages.push(LanguageModelRequestMessage {
2596 role: Role::User,
2597 content: vec![
2598 "Summarize the context into a short title without punctuation.".into(),
2599 ],
2600 cache: false,
2601 });
2602
2603 self.pending_summary = cx.spawn(|this, mut cx| {
2604 async move {
2605 let stream = model.stream_completion_text(request, &cx);
2606 let mut messages = stream.await?;
2607
2608 let mut replaced = !replace_old;
2609 while let Some(message) = messages.next().await {
2610 let text = message?;
2611 let mut lines = text.lines();
2612 this.update(&mut cx, |this, cx| {
2613 let version = this.version.clone();
2614 let timestamp = this.next_timestamp();
2615 let summary = this.summary.get_or_insert(ContextSummary::default());
2616 if !replaced && replace_old {
2617 summary.text.clear();
2618 replaced = true;
2619 }
2620 summary.text.extend(lines.next());
2621 summary.timestamp = timestamp;
2622 let operation = ContextOperation::UpdateSummary {
2623 summary: summary.clone(),
2624 version,
2625 };
2626 this.push_op(operation, cx);
2627 cx.emit(ContextEvent::SummaryChanged);
2628 })?;
2629
2630 // Stop if the LLM generated multiple lines.
2631 if lines.next().is_some() {
2632 break;
2633 }
2634 }
2635
2636 this.update(&mut cx, |this, cx| {
2637 let version = this.version.clone();
2638 let timestamp = this.next_timestamp();
2639 if let Some(summary) = this.summary.as_mut() {
2640 summary.done = true;
2641 summary.timestamp = timestamp;
2642 let operation = ContextOperation::UpdateSummary {
2643 summary: summary.clone(),
2644 version,
2645 };
2646 this.push_op(operation, cx);
2647 cx.emit(ContextEvent::SummaryChanged);
2648 }
2649 })?;
2650
2651 anyhow::Ok(())
2652 }
2653 .log_err()
2654 });
2655 }
2656 }
2657
2658 fn message_for_offset(&self, offset: usize, cx: &AppContext) -> Option<Message> {
2659 self.messages_for_offsets([offset], cx).pop()
2660 }
2661
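    /// Returns the messages containing the given buffer offsets, which are expected in
    /// ascending order. Each message appears at most once, even if several offsets fall
    /// inside it.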
2662 pub fn messages_for_offsets(
2663 &self,
2664 offsets: impl IntoIterator<Item = usize>,
2665 cx: &AppContext,
2666 ) -> Vec<Message> {
2667 let mut result = Vec::new();
2668
2669 let mut messages = self.messages(cx).peekable();
2670 let mut offsets = offsets.into_iter().peekable();
2671 let mut current_message = messages.next();
2672 while let Some(offset) = offsets.next() {
2673 // Locate the message that contains the offset.
2674 while current_message.as_ref().map_or(false, |message| {
2675 !message.offset_range.contains(&offset) && messages.peek().is_some()
2676 }) {
2677 current_message = messages.next();
2678 }
2679 let Some(message) = current_message.as_ref() else {
2680 break;
2681 };
2682
2683                // Skip any remaining offsets that fall within this message (or all remaining offsets once we're on the last message).
2684 while offsets.peek().map_or(false, |offset| {
2685 message.offset_range.contains(offset) || messages.peek().is_none()
2686 }) {
2687 offsets.next();
2688 }
2689
2690 result.push(message.clone());
2691 }
2692 result
2693 }
2694
2695 fn messages_from_anchors<'a>(
2696 &'a self,
2697 message_anchors: impl Iterator<Item = &'a MessageAnchor> + 'a,
2698 cx: &'a AppContext,
2699 ) -> impl 'a + Iterator<Item = Message> {
2700 let buffer = self.buffer.read(cx);
2701
2702 Self::messages_from_iters(buffer, &self.messages_metadata, message_anchors.enumerate())
2703 }
2704
2705 pub fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Message> {
2706 self.messages_from_anchors(self.message_anchors.iter(), cx)
2707 }
2708
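    /// Low-level message iterator used by [`Self::messages`]: pairs each anchor with its
    /// metadata, folds anchors that are no longer valid in the buffer into the preceding
    /// message, and computes each message's index, offset, and anchor ranges.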
2709 pub fn messages_from_iters<'a>(
2710 buffer: &'a Buffer,
2711 metadata: &'a HashMap<MessageId, MessageMetadata>,
2712 messages: impl Iterator<Item = (usize, &'a MessageAnchor)> + 'a,
2713 ) -> impl 'a + Iterator<Item = Message> {
2714 let mut messages = messages.peekable();
2715
2716 iter::from_fn(move || {
2717 if let Some((start_ix, message_anchor)) = messages.next() {
2718 let metadata = metadata.get(&message_anchor.id)?;
2719
2720 let message_start = message_anchor.start.to_offset(buffer);
2721 let mut message_end = None;
2722 let mut end_ix = start_ix;
2723 while let Some((_, next_message)) = messages.peek() {
2724 if next_message.start.is_valid(buffer) {
2725 message_end = Some(next_message.start);
2726 break;
2727 } else {
2728 end_ix += 1;
2729 messages.next();
2730 }
2731 }
2732 let message_end_anchor = message_end.unwrap_or(language::Anchor::MAX);
2733 let message_end = message_end_anchor.to_offset(buffer);
2734
2735 return Some(Message {
2736 index_range: start_ix..end_ix,
2737 offset_range: message_start..message_end,
2738 anchor_range: message_anchor.start..message_end_anchor,
2739 id: message_anchor.id,
2740 role: metadata.role,
2741 status: metadata.status.clone(),
2742 cache: metadata.cache.clone(),
2743 });
2744 }
2745 None
2746 })
2747 }
2748
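    /// Serializes this context to disk under [`contexts_dir`], optionally debounced.
    /// Saving only happens once the summary is done; the file is named after the summary
    /// plus a numeric discriminant, and a previously saved file at a different path is
    /// removed. Contexts from remote replicas are never saved.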
2749 pub fn save(
2750 &mut self,
2751 debounce: Option<Duration>,
2752 fs: Arc<dyn Fs>,
2753 cx: &mut ModelContext<Context>,
2754 ) {
2755 if self.replica_id() != ReplicaId::default() {
2756 // Prevent saving a remote context for now.
2757 return;
2758 }
2759
2760 self.pending_save = cx.spawn(|this, mut cx| async move {
2761 if let Some(debounce) = debounce {
2762 cx.background_executor().timer(debounce).await;
2763 }
2764
2765 let (old_path, summary) = this.read_with(&cx, |this, _| {
2766 let path = this.path.clone();
2767 let summary = if let Some(summary) = this.summary.as_ref() {
2768 if summary.done {
2769 Some(summary.text.clone())
2770 } else {
2771 None
2772 }
2773 } else {
2774 None
2775 };
2776 (path, summary)
2777 })?;
2778
2779 if let Some(summary) = summary {
2780 let context = this.read_with(&cx, |this, cx| this.serialize(cx))?;
2781 let mut discriminant = 1;
2782 let mut new_path;
2783 loop {
2784 new_path = contexts_dir().join(&format!(
2785 "{} - {}.zed.json",
2786 summary.trim(),
2787 discriminant
2788 ));
2789 if fs.is_file(&new_path).await {
2790 discriminant += 1;
2791 } else {
2792 break;
2793 }
2794 }
2795
2796 fs.create_dir(contexts_dir().as_ref()).await?;
2797 fs.atomic_write(new_path.clone(), serde_json::to_string(&context).unwrap())
2798 .await?;
2799 if let Some(old_path) = old_path {
2800 if new_path != old_path {
2801 fs.remove_file(
2802 &old_path,
2803 RemoveOptions {
2804 recursive: false,
2805 ignore_if_not_exists: true,
2806 },
2807 )
2808 .await?;
2809 }
2810 }
2811
2812 this.update(&mut cx, |this, _| this.path = Some(new_path))?;
2813 }
2814
2815 Ok(())
2816 });
2817 }
2818
2819 pub(crate) fn custom_summary(&mut self, custom_summary: String, cx: &mut ModelContext<Self>) {
2820 let timestamp = self.next_timestamp();
2821 let summary = self.summary.get_or_insert(ContextSummary::default());
2822 summary.timestamp = timestamp;
2823 summary.done = true;
2824 summary.text = custom_summary;
2825 cx.emit(ContextEvent::SummaryChanged);
2826 }
2827}
2828
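/// The version of a context: one vector clock for its operations and one for its
/// underlying buffer, convertible to and from [`proto::ContextVersion`] for
/// synchronization between collaborators.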
2829#[derive(Debug, Default)]
2830pub struct ContextVersion {
2831 context: clock::Global,
2832 buffer: clock::Global,
2833}
2834
2835impl ContextVersion {
2836 pub fn from_proto(proto: &proto::ContextVersion) -> Self {
2837 Self {
2838 context: language::proto::deserialize_version(&proto.context_version),
2839 buffer: language::proto::deserialize_version(&proto.buffer_version),
2840 }
2841 }
2842
2843 pub fn to_proto(&self, context_id: ContextId) -> proto::ContextVersion {
2844 proto::ContextVersion {
2845 context_id: context_id.to_proto(),
2846 context_version: language::proto::serialize_version(&self.context),
2847 buffer_version: language::proto::serialize_version(&self.buffer),
2848 }
2849 }
2850}
2851
2852#[derive(Debug, Clone)]
2853pub struct PendingSlashCommand {
2854 pub name: String,
2855 pub arguments: SmallVec<[String; 3]>,
2856 pub status: PendingSlashCommandStatus,
2857 pub source_range: Range<language::Anchor>,
2858}
2859
2860#[derive(Debug, Clone)]
2861pub enum PendingSlashCommandStatus {
2862 Idle,
2863 Running { _task: Shared<Task<()>> },
2864 Error(String),
2865}
2866
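/// Feature flag gating tool use in the assistant. Disabled by default, including for
/// staff.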
2867pub(crate) struct ToolUseFeatureFlag;
2868
2869impl FeatureFlag for ToolUseFeatureFlag {
2870 const NAME: &'static str = "assistant-tool-use";
2871
2872 fn enabled_for_staff() -> bool {
2873 false
2874 }
2875}
2876
2877#[derive(Debug, Clone)]
2878pub struct PendingToolUse {
2879 pub id: Arc<str>,
2880 pub name: String,
2881 pub input: serde_json::Value,
2882 pub status: PendingToolUseStatus,
2883 pub source_range: Range<language::Anchor>,
2884}
2885
2886#[derive(Debug, Clone)]
2887pub enum PendingToolUseStatus {
2888 Idle,
2889 Running { _task: Shared<Task<()>> },
2890 Error(String),
2891}
2892
2893impl PendingToolUseStatus {
2894 pub fn is_idle(&self) -> bool {
2895 matches!(self, PendingToolUseStatus::Idle)
2896 }
2897}
2898
2899#[derive(Serialize, Deserialize)]
2900pub struct SavedMessage {
2901 pub id: MessageId,
2902 pub start: usize,
2903 pub metadata: MessageMetadata,
2904}
2905
2906#[derive(Serialize, Deserialize)]
2907pub struct SavedContext {
2908 pub id: Option<ContextId>,
2909 pub zed: String,
2910 pub version: String,
2911 pub text: String,
2912 pub messages: Vec<SavedMessage>,
2913 pub summary: String,
2914 pub slash_command_output_sections:
2915 Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
2916}
2917
2918impl SavedContext {
2919 pub const VERSION: &'static str = "0.4.0";
2920
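    /// Deserializes a saved context from JSON, transparently upgrading files written in
    /// the older 0.1.0, 0.2.0, and 0.3.0 formats to the current version.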
2921 pub fn from_json(json: &str) -> Result<Self> {
2922 let saved_context_json = serde_json::from_str::<serde_json::Value>(json)?;
2923 match saved_context_json
2924 .get("version")
2925 .ok_or_else(|| anyhow!("version not found"))?
2926 {
2927 serde_json::Value::String(version) => match version.as_str() {
2928 SavedContext::VERSION => {
2929 Ok(serde_json::from_value::<SavedContext>(saved_context_json)?)
2930 }
2931 SavedContextV0_3_0::VERSION => {
2932 let saved_context =
2933 serde_json::from_value::<SavedContextV0_3_0>(saved_context_json)?;
2934 Ok(saved_context.upgrade())
2935 }
2936 SavedContextV0_2_0::VERSION => {
2937 let saved_context =
2938 serde_json::from_value::<SavedContextV0_2_0>(saved_context_json)?;
2939 Ok(saved_context.upgrade())
2940 }
2941 SavedContextV0_1_0::VERSION => {
2942 let saved_context =
2943 serde_json::from_value::<SavedContextV0_1_0>(saved_context_json)?;
2944 Ok(saved_context.upgrade())
2945 }
2946 _ => Err(anyhow!("unrecognized saved context version: {}", version)),
2947 },
2948 _ => Err(anyhow!("version not found on saved context")),
2949 }
2950 }
2951
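    /// Converts this saved context into the sequence of [`ContextOperation`]s needed to
    /// reconstruct it: one insert per message (with the implicit first message expressed
    /// as a metadata update), followed by the restored slash command output sections and
    /// the saved summary, all stamped with a locally ticked Lamport clock.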
2952 fn into_ops(
2953 self,
2954 buffer: &Model<Buffer>,
2955 cx: &mut ModelContext<Context>,
2956 ) -> Vec<ContextOperation> {
2957 let mut operations = Vec::new();
2958 let mut version = clock::Global::new();
2959 let mut next_timestamp = clock::Lamport::new(ReplicaId::default());
2960
2961 let mut first_message_metadata = None;
2962 for message in self.messages {
2963 if message.id == MessageId(clock::Lamport::default()) {
2964 first_message_metadata = Some(message.metadata);
2965 } else {
2966 operations.push(ContextOperation::InsertMessage {
2967 anchor: MessageAnchor {
2968 id: message.id,
2969 start: buffer.read(cx).anchor_before(message.start),
2970 },
2971 metadata: MessageMetadata {
2972 role: message.metadata.role,
2973 status: message.metadata.status,
2974 timestamp: message.metadata.timestamp,
2975 cache: None,
2976 },
2977 version: version.clone(),
2978 });
2979 version.observe(message.id.0);
2980 next_timestamp.observe(message.id.0);
2981 }
2982 }
2983
2984 if let Some(metadata) = first_message_metadata {
2985 let timestamp = next_timestamp.tick();
2986 operations.push(ContextOperation::UpdateMessage {
2987 message_id: MessageId(clock::Lamport::default()),
2988 metadata: MessageMetadata {
2989 role: metadata.role,
2990 status: metadata.status,
2991 timestamp,
2992 cache: None,
2993 },
2994 version: version.clone(),
2995 });
2996 version.observe(timestamp);
2997 }
2998
2999 let timestamp = next_timestamp.tick();
3000 operations.push(ContextOperation::SlashCommandFinished {
3001 id: SlashCommandId(timestamp),
3002 output_range: language::Anchor::MIN..language::Anchor::MAX,
3003 sections: self
3004 .slash_command_output_sections
3005 .into_iter()
3006 .map(|section| {
3007 let buffer = buffer.read(cx);
3008 SlashCommandOutputSection {
3009 range: buffer.anchor_after(section.range.start)
3010 ..buffer.anchor_before(section.range.end),
3011 icon: section.icon,
3012 label: section.label,
3013 }
3014 })
3015 .collect(),
3016 version: version.clone(),
3017 });
3018 version.observe(timestamp);
3019
3020 let timestamp = next_timestamp.tick();
3021 operations.push(ContextOperation::UpdateSummary {
3022 summary: ContextSummary {
3023 text: self.summary,
3024 done: true,
3025 timestamp,
3026 },
3027 version: version.clone(),
3028 });
3029 version.observe(timestamp);
3030
3031 operations
3032 }
3033}
3034
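// Serialization types for contexts saved before format 0.4.0. Each older
// `SavedContextV0_x_0` upgrades to the next version via `upgrade`, chaining up to the
// current `SavedContext`.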
3035#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
3036struct SavedMessageIdPreV0_4_0(usize);
3037
3038#[derive(Serialize, Deserialize)]
3039struct SavedMessagePreV0_4_0 {
3040 id: SavedMessageIdPreV0_4_0,
3041 start: usize,
3042}
3043
3044#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
3045struct SavedMessageMetadataPreV0_4_0 {
3046 role: Role,
3047 status: MessageStatus,
3048}
3049
3050#[derive(Serialize, Deserialize)]
3051struct SavedContextV0_3_0 {
3052 id: Option<ContextId>,
3053 zed: String,
3054 version: String,
3055 text: String,
3056 messages: Vec<SavedMessagePreV0_4_0>,
3057 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3058 summary: String,
3059 slash_command_output_sections: Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
3060}
3061
3062impl SavedContextV0_3_0 {
3063 const VERSION: &'static str = "0.3.0";
3064
3065 fn upgrade(self) -> SavedContext {
3066 SavedContext {
3067 id: self.id,
3068 zed: self.zed,
3069 version: SavedContext::VERSION.into(),
3070 text: self.text,
3071 messages: self
3072 .messages
3073 .into_iter()
3074 .filter_map(|message| {
3075 let metadata = self.message_metadata.get(&message.id)?;
3076 let timestamp = clock::Lamport {
3077 replica_id: ReplicaId::default(),
3078 value: message.id.0 as u32,
3079 };
3080 Some(SavedMessage {
3081 id: MessageId(timestamp),
3082 start: message.start,
3083 metadata: MessageMetadata {
3084 role: metadata.role,
3085 status: metadata.status.clone(),
3086 timestamp,
3087 cache: None,
3088 },
3089 })
3090 })
3091 .collect(),
3092 summary: self.summary,
3093 slash_command_output_sections: self.slash_command_output_sections,
3094 }
3095 }
3096}
3097
3098#[derive(Serialize, Deserialize)]
3099struct SavedContextV0_2_0 {
3100 id: Option<ContextId>,
3101 zed: String,
3102 version: String,
3103 text: String,
3104 messages: Vec<SavedMessagePreV0_4_0>,
3105 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3106 summary: String,
3107}
3108
3109impl SavedContextV0_2_0 {
3110 const VERSION: &'static str = "0.2.0";
3111
3112 fn upgrade(self) -> SavedContext {
3113 SavedContextV0_3_0 {
3114 id: self.id,
3115 zed: self.zed,
3116 version: SavedContextV0_3_0::VERSION.to_string(),
3117 text: self.text,
3118 messages: self.messages,
3119 message_metadata: self.message_metadata,
3120 summary: self.summary,
3121 slash_command_output_sections: Vec::new(),
3122 }
3123 .upgrade()
3124 }
3125}
3126
3127#[derive(Serialize, Deserialize)]
3128struct SavedContextV0_1_0 {
3129 id: Option<ContextId>,
3130 zed: String,
3131 version: String,
3132 text: String,
3133 messages: Vec<SavedMessagePreV0_4_0>,
3134 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3135 summary: String,
3136 api_url: Option<String>,
3137 model: OpenAiModel,
3138}
3139
3140impl SavedContextV0_1_0 {
3141 const VERSION: &'static str = "0.1.0";
3142
3143 fn upgrade(self) -> SavedContext {
3144 SavedContextV0_2_0 {
3145 id: self.id,
3146 zed: self.zed,
3147 version: SavedContextV0_2_0::VERSION.to_string(),
3148 text: self.text,
3149 messages: self.messages,
3150 message_metadata: self.message_metadata,
3151 summary: self.summary,
3152 }
3153 .upgrade()
3154 }
3155}
3156
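/// Metadata for a context saved on disk: its title, path, and local modification time.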
3157#[derive(Clone)]
3158pub struct SavedContextMetadata {
3159 pub title: String,
3160 pub path: PathBuf,
3161 pub mtime: chrono::DateTime<chrono::Local>,
3162}