1#[cfg(test)]
2mod context_tests;
3
4use crate::{
5 prompts::PromptBuilder, slash_command::SlashCommandLine, workflow::WorkflowStep, MessageId,
6 MessageStatus,
7};
8use anyhow::{anyhow, Context as _, Result};
9use assistant_slash_command::{
10 SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry,
11};
12use client::{self, proto, telemetry::Telemetry};
13use clock::ReplicaId;
14use collections::{HashMap, HashSet};
15use fs::{Fs, RemoveOptions};
16use futures::{future::Shared, stream::FuturesUnordered, FutureExt, StreamExt};
17use gpui::{
18 AppContext, Context as _, EventEmitter, Image, Model, ModelContext, RenderImage, SharedString,
19 Subscription, Task,
20};
21
22use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset};
23use language_model::{
24 LanguageModel, LanguageModelCacheConfiguration, LanguageModelImage, LanguageModelRegistry,
25 LanguageModelRequest, LanguageModelRequestMessage, MessageContent, Role,
26};
27use open_ai::Model as OpenAiModel;
28use paths::{context_images_dir, contexts_dir};
29use project::Project;
30use serde::{Deserialize, Serialize};
31use smallvec::SmallVec;
32use std::{
33 cmp::{max, Ordering},
34 collections::hash_map,
35 fmt::Debug,
36 iter, mem,
37 ops::Range,
38 path::{Path, PathBuf},
39 sync::Arc,
40 time::{Duration, Instant},
41};
42use telemetry_events::AssistantKind;
43use text::BufferSnapshot;
44use util::{post_inc, ResultExt, TryFutureExt};
45use uuid::Uuid;
46
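/// A stable identifier for a context, backed by a UUID string so it can be
/// persisted to disk and exchanged with collaborators via protobuf.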
47#[derive(Clone, Eq, PartialEq, Hash, PartialOrd, Ord, Serialize, Deserialize)]
48pub struct ContextId(String);
49
50impl ContextId {
51 pub fn new() -> Self {
52 Self(Uuid::new_v4().to_string())
53 }
54
55 pub fn from_proto(id: String) -> Self {
56 Self(id)
57 }
58
59 pub fn to_proto(&self) -> String {
60 self.0.clone()
61 }
62}
63
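/// An operation that mutates a [`Context`] and can be replicated to other
/// collaborators. Every variant except `BufferOperation` records the context
/// version at which it was generated, so receivers can defer applying it until
/// its dependencies have been observed.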
64#[derive(Clone, Debug)]
65pub enum ContextOperation {
66 InsertMessage {
67 anchor: MessageAnchor,
68 metadata: MessageMetadata,
69 version: clock::Global,
70 },
71 UpdateMessage {
72 message_id: MessageId,
73 metadata: MessageMetadata,
74 version: clock::Global,
75 },
76 UpdateSummary {
77 summary: ContextSummary,
78 version: clock::Global,
79 },
80 SlashCommandFinished {
81 id: SlashCommandId,
82 output_range: Range<language::Anchor>,
83 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
84 version: clock::Global,
85 },
86 BufferOperation(language::Operation),
87}
88
89impl ContextOperation {
90 pub fn from_proto(op: proto::ContextOperation) -> Result<Self> {
91 match op.variant.context("invalid variant")? {
92 proto::context_operation::Variant::InsertMessage(insert) => {
93 let message = insert.message.context("invalid message")?;
94 let id = MessageId(language::proto::deserialize_timestamp(
95 message.id.context("invalid id")?,
96 ));
97 Ok(Self::InsertMessage {
98 anchor: MessageAnchor {
99 id,
100 start: language::proto::deserialize_anchor(
101 message.start.context("invalid anchor")?,
102 )
103 .context("invalid anchor")?,
104 },
105 metadata: MessageMetadata {
106 role: Role::from_proto(message.role),
107 status: MessageStatus::from_proto(
108 message.status.context("invalid status")?,
109 ),
110 timestamp: id.0,
111 cache: None,
112 },
113 version: language::proto::deserialize_version(&insert.version),
114 })
115 }
116 proto::context_operation::Variant::UpdateMessage(update) => Ok(Self::UpdateMessage {
117 message_id: MessageId(language::proto::deserialize_timestamp(
118 update.message_id.context("invalid message id")?,
119 )),
120 metadata: MessageMetadata {
121 role: Role::from_proto(update.role),
122 status: MessageStatus::from_proto(update.status.context("invalid status")?),
123 timestamp: language::proto::deserialize_timestamp(
124 update.timestamp.context("invalid timestamp")?,
125 ),
126 cache: None,
127 },
128 version: language::proto::deserialize_version(&update.version),
129 }),
130 proto::context_operation::Variant::UpdateSummary(update) => Ok(Self::UpdateSummary {
131 summary: ContextSummary {
132 text: update.summary,
133 done: update.done,
134 timestamp: language::proto::deserialize_timestamp(
135 update.timestamp.context("invalid timestamp")?,
136 ),
137 },
138 version: language::proto::deserialize_version(&update.version),
139 }),
140 proto::context_operation::Variant::SlashCommandFinished(finished) => {
141 Ok(Self::SlashCommandFinished {
142 id: SlashCommandId(language::proto::deserialize_timestamp(
143 finished.id.context("invalid id")?,
144 )),
145 output_range: language::proto::deserialize_anchor_range(
146 finished.output_range.context("invalid range")?,
147 )?,
148 sections: finished
149 .sections
150 .into_iter()
151 .map(|section| {
152 Ok(SlashCommandOutputSection {
153 range: language::proto::deserialize_anchor_range(
154 section.range.context("invalid range")?,
155 )?,
156 icon: section.icon_name.parse()?,
157 label: section.label.into(),
158 })
159 })
160 .collect::<Result<Vec<_>>>()?,
161 version: language::proto::deserialize_version(&finished.version),
162 })
163 }
164 proto::context_operation::Variant::BufferOperation(op) => Ok(Self::BufferOperation(
165 language::proto::deserialize_operation(
166 op.operation.context("invalid buffer operation")?,
167 )?,
168 )),
169 }
170 }
171
172 pub fn to_proto(&self) -> proto::ContextOperation {
173 match self {
174 Self::InsertMessage {
175 anchor,
176 metadata,
177 version,
178 } => proto::ContextOperation {
179 variant: Some(proto::context_operation::Variant::InsertMessage(
180 proto::context_operation::InsertMessage {
181 message: Some(proto::ContextMessage {
182 id: Some(language::proto::serialize_timestamp(anchor.id.0)),
183 start: Some(language::proto::serialize_anchor(&anchor.start)),
184 role: metadata.role.to_proto() as i32,
185 status: Some(metadata.status.to_proto()),
186 }),
187 version: language::proto::serialize_version(version),
188 },
189 )),
190 },
191 Self::UpdateMessage {
192 message_id,
193 metadata,
194 version,
195 } => proto::ContextOperation {
196 variant: Some(proto::context_operation::Variant::UpdateMessage(
197 proto::context_operation::UpdateMessage {
198 message_id: Some(language::proto::serialize_timestamp(message_id.0)),
199 role: metadata.role.to_proto() as i32,
200 status: Some(metadata.status.to_proto()),
201 timestamp: Some(language::proto::serialize_timestamp(metadata.timestamp)),
202 version: language::proto::serialize_version(version),
203 },
204 )),
205 },
206 Self::UpdateSummary { summary, version } => proto::ContextOperation {
207 variant: Some(proto::context_operation::Variant::UpdateSummary(
208 proto::context_operation::UpdateSummary {
209 summary: summary.text.clone(),
210 done: summary.done,
211 timestamp: Some(language::proto::serialize_timestamp(summary.timestamp)),
212 version: language::proto::serialize_version(version),
213 },
214 )),
215 },
216 Self::SlashCommandFinished {
217 id,
218 output_range,
219 sections,
220 version,
221 } => proto::ContextOperation {
222 variant: Some(proto::context_operation::Variant::SlashCommandFinished(
223 proto::context_operation::SlashCommandFinished {
224 id: Some(language::proto::serialize_timestamp(id.0)),
225 output_range: Some(language::proto::serialize_anchor_range(
226 output_range.clone(),
227 )),
228 sections: sections
229 .iter()
230 .map(|section| {
231 let icon_name: &'static str = section.icon.into();
232 proto::SlashCommandOutputSection {
233 range: Some(language::proto::serialize_anchor_range(
234 section.range.clone(),
235 )),
236 icon_name: icon_name.to_string(),
237 label: section.label.to_string(),
238 }
239 })
240 .collect(),
241 version: language::proto::serialize_version(version),
242 },
243 )),
244 },
245 Self::BufferOperation(operation) => proto::ContextOperation {
246 variant: Some(proto::context_operation::Variant::BufferOperation(
247 proto::context_operation::BufferOperation {
248 operation: Some(language::proto::serialize_operation(operation)),
249 },
250 )),
251 },
252 }
253 }
254
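    /// Returns the Lamport timestamp that identifies this operation.
    ///
    /// Panics for `BufferOperation`, whose timestamps are tracked by the
    /// underlying buffer.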
255 fn timestamp(&self) -> clock::Lamport {
256 match self {
257 Self::InsertMessage { anchor, .. } => anchor.id.0,
258 Self::UpdateMessage { metadata, .. } => metadata.timestamp,
259 Self::UpdateSummary { summary, .. } => summary.timestamp,
260 Self::SlashCommandFinished { id, .. } => id.0,
261 Self::BufferOperation(_) => {
262 panic!("reading the timestamp of a buffer operation is not supported")
263 }
264 }
265 }
266
267 /// Returns the current version of the context operation.
268 pub fn version(&self) -> &clock::Global {
269 match self {
270 Self::InsertMessage { version, .. }
271 | Self::UpdateMessage { version, .. }
272 | Self::UpdateSummary { version, .. }
273 | Self::SlashCommandFinished { version, .. } => version,
274 Self::BufferOperation(_) => {
275 panic!("reading the version of a buffer operation is not supported")
276 }
277 }
278 }
279}
280
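/// Events emitted by a [`Context`] so that subscribers can react to message
/// edits, streamed completions, summary updates, slash command progress, and
/// workflow step changes.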
281#[derive(Debug, Clone)]
282pub enum ContextEvent {
283 ShowAssistError(SharedString),
284 MessagesEdited,
285 SummaryChanged,
286 WorkflowStepsRemoved(Vec<Range<language::Anchor>>),
287 WorkflowStepUpdated(Range<language::Anchor>),
288 StreamedCompletion,
289 PendingSlashCommandsUpdated {
290 removed: Vec<Range<language::Anchor>>,
291 updated: Vec<PendingSlashCommand>,
292 },
293 SlashCommandFinished {
294 output_range: Range<language::Anchor>,
295 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
296 run_commands_in_output: bool,
297 expand_result: bool,
298 },
299 Operation(ContextOperation),
300}
301
302#[derive(Clone, Default, Debug)]
303pub struct ContextSummary {
304 pub text: String,
305 done: bool,
306 timestamp: clock::Lamport,
307}
308
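/// A message's identity together with the buffer anchor where its text starts.
/// A message extends from its anchor to the start of the next valid message
/// anchor (or to the end of the buffer).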
309#[derive(Clone, Debug, Eq, PartialEq)]
310pub struct MessageAnchor {
311 pub id: MessageId,
312 pub start: language::Anchor,
313}
314
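/// Whether a prompt-cache anchor has merely been requested (`Pending`) or
/// confirmed by a completed request (`Cached`).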
315#[derive(Clone, Debug, Eq, PartialEq)]
316pub enum CacheStatus {
317 Pending,
318 Cached,
319}
320
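/// Prompt-caching state for a message: whether it acts as a cache anchor,
/// whether it is the final anchor in the request, and the buffer version at
/// which it was last cached.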
321#[derive(Clone, Debug, Eq, PartialEq)]
322pub struct MessageCacheMetadata {
323 pub is_anchor: bool,
324 pub is_final_anchor: bool,
325 pub status: CacheStatus,
326 pub cached_at: clock::Global,
327}
328
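/// Per-message state replicated via `ContextOperation::UpdateMessage`. The
/// `cache` field is local-only and therefore skipped during serialization.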
329#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
330pub struct MessageMetadata {
331 pub role: Role,
332 pub status: MessageStatus,
333 timestamp: clock::Lamport,
334 #[serde(skip)]
335 pub cache: Option<MessageCacheMetadata>,
336}
337
338impl MessageMetadata {
339 pub fn is_cache_valid(&self, buffer: &BufferSnapshot, range: &Range<usize>) -> bool {
        match &self.cache {
            Some(MessageCacheMetadata { cached_at, .. }) => !buffer.has_edits_since_in_range(
                cached_at,
                Range {
                    start: buffer.anchor_at(range.start, Bias::Right),
                    end: buffer.anchor_at(range.end, Bias::Left),
                },
            ),
            _ => false,
        }
351 }
352}
353
354#[derive(Clone, Debug)]
355pub struct MessageImage {
356 image_id: u64,
357 image: Shared<Task<Option<LanguageModelImage>>>,
358}
359
360impl PartialEq for MessageImage {
361 fn eq(&self, other: &Self) -> bool {
362 self.image_id == other.image_id
363 }
364}
365
366impl Eq for MessageImage {}
367
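/// A fully resolved message: its metadata plus the offset range it occupies in
/// the buffer and the images anchored within that range.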
368#[derive(Clone, Debug)]
369pub struct Message {
370 pub image_offsets: SmallVec<[(usize, MessageImage); 1]>,
371 pub offset_range: Range<usize>,
372 pub index_range: Range<usize>,
373 pub id: MessageId,
374 pub anchor: language::Anchor,
375 pub role: Role,
376 pub status: MessageStatus,
377 pub cache: Option<MessageCacheMetadata>,
378}
379
380impl Message {
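    /// Converts this message into a request message for the language model,
    /// interleaving buffer text with any images that have already finished
    /// loading. Returns `None` if the message has neither non-whitespace text
    /// nor a loaded image.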
381 fn to_request_message(&self, buffer: &Buffer) -> Option<LanguageModelRequestMessage> {
382 let mut content = Vec::new();
383
384 let mut range_start = self.offset_range.start;
385 for (image_offset, message_image) in self.image_offsets.iter() {
386 if *image_offset != range_start {
387 if let Some(text) = Self::collect_text_content(buffer, range_start..*image_offset) {
388 content.push(text);
389 }
390 }
391
392 if let Some(image) = message_image.image.clone().now_or_never().flatten() {
393 content.push(language_model::MessageContent::Image(image));
394 }
395
396 range_start = *image_offset;
397 }
398 if range_start != self.offset_range.end {
399 if let Some(text) =
400 Self::collect_text_content(buffer, range_start..self.offset_range.end)
401 {
402 content.push(text);
403 }
404 }
405
406 if content.is_empty() {
407 return None;
408 }
409
410 Some(LanguageModelRequestMessage {
411 role: self.role,
412 content,
413 cache: self.cache.as_ref().map_or(false, |cache| cache.is_anchor),
414 })
415 }
416
417 fn collect_text_content(buffer: &Buffer, range: Range<usize>) -> Option<MessageContent> {
418 let text: String = buffer.text_for_range(range.clone()).collect();
419 if text.trim().is_empty() {
420 None
421 } else {
422 Some(MessageContent::Text(text))
423 }
424 }
425}
426
427#[derive(Clone, Debug)]
428pub struct ImageAnchor {
429 pub anchor: language::Anchor,
430 pub image_id: u64,
431 pub render_image: Arc<RenderImage>,
432 pub image: Shared<Task<Option<LanguageModelImage>>>,
433}
434
435struct PendingCompletion {
436 id: usize,
437 assistant_message_id: MessageId,
438 _task: Task<()>,
439}
440
441#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
442pub struct SlashCommandId(clock::Lamport);
443
444struct WorkflowStepEntry {
445 range: Range<language::Anchor>,
446 step: Model<WorkflowStep>,
447}
448
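/// The state behind a single assistant conversation: a Markdown buffer holding
/// the transcript plus replicated metadata for messages, images, slash
/// commands, workflow steps, summaries, and prompt caching.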
449pub struct Context {
450 id: ContextId,
451 timestamp: clock::Lamport,
452 version: clock::Global,
453 pending_ops: Vec<ContextOperation>,
454 operations: Vec<ContextOperation>,
455 buffer: Model<Buffer>,
456 pending_slash_commands: Vec<PendingSlashCommand>,
457 edits_since_last_slash_command_parse: language::Subscription,
458 finished_slash_commands: HashSet<SlashCommandId>,
459 slash_command_output_sections: Vec<SlashCommandOutputSection<language::Anchor>>,
460 message_anchors: Vec<MessageAnchor>,
461 images: HashMap<u64, (Arc<RenderImage>, Shared<Task<Option<LanguageModelImage>>>)>,
462 image_anchors: Vec<ImageAnchor>,
463 messages_metadata: HashMap<MessageId, MessageMetadata>,
464 summary: Option<ContextSummary>,
465 pending_summary: Task<Option<()>>,
466 completion_count: usize,
467 pending_completions: Vec<PendingCompletion>,
468 token_count: Option<usize>,
469 pending_token_count: Task<Option<()>>,
470 pending_save: Task<Result<()>>,
471 pending_cache_warming_task: Task<Option<()>>,
472 path: Option<PathBuf>,
473 _subscriptions: Vec<Subscription>,
474 telemetry: Option<Arc<Telemetry>>,
475 language_registry: Arc<LanguageRegistry>,
476 workflow_steps: Vec<WorkflowStepEntry>,
477 edits_since_last_workflow_step_prune: language::Subscription,
478 project: Option<Model<Project>>,
479 prompt_builder: Arc<PromptBuilder>,
480}
481
482impl EventEmitter<ContextEvent> for Context {}
483
484impl Context {
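    /// Creates a new, writable context owned by the local replica.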
485 pub fn local(
486 language_registry: Arc<LanguageRegistry>,
487 project: Option<Model<Project>>,
488 telemetry: Option<Arc<Telemetry>>,
489 prompt_builder: Arc<PromptBuilder>,
490 cx: &mut ModelContext<Self>,
491 ) -> Self {
492 Self::new(
493 ContextId::new(),
494 ReplicaId::default(),
495 language::Capability::ReadWrite,
496 language_registry,
497 prompt_builder,
498 project,
499 telemetry,
500 cx,
501 )
502 }
503
504 #[allow(clippy::too_many_arguments)]
505 pub fn new(
506 id: ContextId,
507 replica_id: ReplicaId,
508 capability: language::Capability,
509 language_registry: Arc<LanguageRegistry>,
510 prompt_builder: Arc<PromptBuilder>,
511 project: Option<Model<Project>>,
512 telemetry: Option<Arc<Telemetry>>,
513 cx: &mut ModelContext<Self>,
514 ) -> Self {
515 let buffer = cx.new_model(|_cx| {
516 let mut buffer = Buffer::remote(
517 language::BufferId::new(1).unwrap(),
518 replica_id,
519 capability,
520 "",
521 );
522 buffer.set_language_registry(language_registry.clone());
523 buffer
524 });
525 let edits_since_last_slash_command_parse =
526 buffer.update(cx, |buffer, _| buffer.subscribe());
527 let edits_since_last_workflow_step_prune =
528 buffer.update(cx, |buffer, _| buffer.subscribe());
529 let mut this = Self {
530 id,
531 timestamp: clock::Lamport::new(replica_id),
532 version: clock::Global::new(),
533 pending_ops: Vec::new(),
534 operations: Vec::new(),
535 message_anchors: Default::default(),
536 image_anchors: Default::default(),
537 images: Default::default(),
538 messages_metadata: Default::default(),
539 pending_slash_commands: Vec::new(),
540 finished_slash_commands: HashSet::default(),
541 slash_command_output_sections: Vec::new(),
542 edits_since_last_slash_command_parse,
543 summary: None,
544 pending_summary: Task::ready(None),
545 completion_count: Default::default(),
546 pending_completions: Default::default(),
547 token_count: None,
548 pending_token_count: Task::ready(None),
549 pending_cache_warming_task: Task::ready(None),
550 _subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)],
551 pending_save: Task::ready(Ok(())),
552 path: None,
553 buffer,
554 telemetry,
555 project,
556 language_registry,
557 workflow_steps: Vec::new(),
558 edits_since_last_workflow_step_prune,
559 prompt_builder,
560 };
561
562 let first_message_id = MessageId(clock::Lamport {
563 replica_id: 0,
564 value: 0,
565 });
566 let message = MessageAnchor {
567 id: first_message_id,
568 start: language::Anchor::MIN,
569 };
570 this.messages_metadata.insert(
571 first_message_id,
572 MessageMetadata {
573 role: Role::User,
574 status: MessageStatus::Done,
575 timestamp: first_message_id.0,
576 cache: None,
577 },
578 );
579 this.message_anchors.push(message);
580
581 this.set_language(cx);
582 this.count_remaining_tokens(cx);
583 this
584 }
585
586 pub(crate) fn serialize(&self, cx: &AppContext) -> SavedContext {
587 let buffer = self.buffer.read(cx);
588 SavedContext {
589 id: Some(self.id.clone()),
590 zed: "context".into(),
591 version: SavedContext::VERSION.into(),
592 text: buffer.text(),
593 messages: self
594 .messages(cx)
595 .map(|message| SavedMessage {
596 id: message.id,
597 start: message.offset_range.start,
598 metadata: self.messages_metadata[&message.id].clone(),
599 image_offsets: message
600 .image_offsets
601 .iter()
602 .map(|image_offset| (image_offset.0, image_offset.1.image_id))
603 .collect(),
604 })
605 .collect(),
606 summary: self
607 .summary
608 .as_ref()
609 .map(|summary| summary.text.clone())
610 .unwrap_or_default(),
611 slash_command_output_sections: self
612 .slash_command_output_sections
613 .iter()
614 .filter_map(|section| {
615 let range = section.range.to_offset(buffer);
616 if section.range.start.is_valid(buffer) && !range.is_empty() {
617 Some(assistant_slash_command::SlashCommandOutputSection {
618 range,
619 icon: section.icon,
620 label: section.label.clone(),
621 })
622 } else {
623 None
624 }
625 })
626 .collect(),
627 }
628 }
629
630 #[allow(clippy::too_many_arguments)]
631 pub fn deserialize(
632 saved_context: SavedContext,
633 path: PathBuf,
634 language_registry: Arc<LanguageRegistry>,
635 prompt_builder: Arc<PromptBuilder>,
636 project: Option<Model<Project>>,
637 telemetry: Option<Arc<Telemetry>>,
638 cx: &mut ModelContext<Self>,
639 ) -> Self {
        let id = saved_context.id.clone().unwrap_or_else(ContextId::new);
641 let mut this = Self::new(
642 id,
643 ReplicaId::default(),
644 language::Capability::ReadWrite,
645 language_registry,
646 prompt_builder,
647 project,
648 telemetry,
649 cx,
650 );
651 this.path = Some(path);
652 this.buffer.update(cx, |buffer, cx| {
653 buffer.set_text(saved_context.text.as_str(), cx)
654 });
655 let operations = saved_context.into_ops(&this.buffer, cx);
656 this.apply_ops(operations, cx).unwrap();
657 this
658 }
659
660 pub fn id(&self) -> &ContextId {
661 &self.id
662 }
663
664 pub fn replica_id(&self) -> ReplicaId {
665 self.timestamp.replica_id
666 }
667
668 pub fn version(&self, cx: &AppContext) -> ContextVersion {
669 ContextVersion {
670 context: self.version.clone(),
671 buffer: self.buffer.read(cx).version(),
672 }
673 }
674
675 pub fn set_capability(
676 &mut self,
677 capability: language::Capability,
678 cx: &mut ModelContext<Self>,
679 ) {
680 self.buffer
681 .update(cx, |buffer, cx| buffer.set_capability(capability, cx));
682 }
683
684 fn next_timestamp(&mut self) -> clock::Lamport {
685 let timestamp = self.timestamp.tick();
686 self.version.observe(timestamp);
687 timestamp
688 }
689
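    /// Serializes every operation that `since` has not yet observed, with
    /// buffer operations first and context operations sorted by timestamp.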
690 pub fn serialize_ops(
691 &self,
692 since: &ContextVersion,
693 cx: &AppContext,
694 ) -> Task<Vec<proto::ContextOperation>> {
695 let buffer_ops = self
696 .buffer
697 .read(cx)
698 .serialize_ops(Some(since.buffer.clone()), cx);
699
700 let mut context_ops = self
701 .operations
702 .iter()
703 .filter(|op| !since.context.observed(op.timestamp()))
704 .cloned()
705 .collect::<Vec<_>>();
706 context_ops.extend(self.pending_ops.iter().cloned());
707
708 cx.background_executor().spawn(async move {
709 let buffer_ops = buffer_ops.await;
710 context_ops.sort_unstable_by_key(|op| op.timestamp());
711 buffer_ops
712 .into_iter()
713 .map(|op| proto::ContextOperation {
714 variant: Some(proto::context_operation::Variant::BufferOperation(
715 proto::context_operation::BufferOperation {
716 operation: Some(op),
717 },
718 )),
719 })
720 .chain(context_ops.into_iter().map(|op| op.to_proto()))
721 .collect()
722 })
723 }
724
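    /// Applies incoming operations. Buffer operations are forwarded to the
    /// buffer immediately; context operations are queued and flushed once
    /// their dependencies have been observed.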
725 pub fn apply_ops(
726 &mut self,
727 ops: impl IntoIterator<Item = ContextOperation>,
728 cx: &mut ModelContext<Self>,
729 ) -> Result<()> {
730 let mut buffer_ops = Vec::new();
731 for op in ops {
732 match op {
733 ContextOperation::BufferOperation(buffer_op) => buffer_ops.push(buffer_op),
                op => self.pending_ops.push(op),
735 }
736 }
737 self.buffer
738 .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx))?;
739 self.flush_ops(cx);
740
741 Ok(())
742 }
743
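    /// Applies every queued operation whose dependencies are now satisfied,
    /// advancing the context version and emitting change events. Operations
    /// that still cannot be applied are returned to the queue.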
744 fn flush_ops(&mut self, cx: &mut ModelContext<Context>) {
745 let mut messages_changed = false;
746 let mut summary_changed = false;
747
748 self.pending_ops.sort_unstable_by_key(|op| op.timestamp());
749 for op in mem::take(&mut self.pending_ops) {
750 if !self.can_apply_op(&op, cx) {
751 self.pending_ops.push(op);
752 continue;
753 }
754
755 let timestamp = op.timestamp();
756 match op.clone() {
757 ContextOperation::InsertMessage {
758 anchor, metadata, ..
759 } => {
760 if self.messages_metadata.contains_key(&anchor.id) {
761 // We already applied this operation.
762 } else {
763 self.insert_message(anchor, metadata, cx);
764 messages_changed = true;
765 }
766 }
767 ContextOperation::UpdateMessage {
768 message_id,
769 metadata: new_metadata,
770 ..
771 } => {
772 let metadata = self.messages_metadata.get_mut(&message_id).unwrap();
773 if new_metadata.timestamp > metadata.timestamp {
774 *metadata = new_metadata;
775 messages_changed = true;
776 }
777 }
778 ContextOperation::UpdateSummary {
779 summary: new_summary,
780 ..
781 } => {
782 if self
783 .summary
784 .as_ref()
785 .map_or(true, |summary| new_summary.timestamp > summary.timestamp)
786 {
787 self.summary = Some(new_summary);
788 summary_changed = true;
789 }
790 }
791 ContextOperation::SlashCommandFinished {
792 id,
793 output_range,
794 sections,
795 ..
796 } => {
797 if self.finished_slash_commands.insert(id) {
798 let buffer = self.buffer.read(cx);
799 self.slash_command_output_sections
800 .extend(sections.iter().cloned());
801 self.slash_command_output_sections
802 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
803 cx.emit(ContextEvent::SlashCommandFinished {
804 output_range,
805 sections,
806 expand_result: false,
807 run_commands_in_output: false,
808 });
809 }
810 }
811 ContextOperation::BufferOperation(_) => unreachable!(),
812 }
813
814 self.version.observe(timestamp);
815 self.timestamp.observe(timestamp);
816 self.operations.push(op);
817 }
818
819 if messages_changed {
820 cx.emit(ContextEvent::MessagesEdited);
821 cx.notify();
822 }
823
824 if summary_changed {
825 cx.emit(ContextEvent::SummaryChanged);
826 cx.notify();
827 }
828 }
829
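    /// Reports whether an operation's dependencies (the context version it was
    /// created at, plus any buffer anchors or message ids it references) have
    /// been observed locally.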
830 fn can_apply_op(&self, op: &ContextOperation, cx: &AppContext) -> bool {
831 if !self.version.observed_all(op.version()) {
832 return false;
833 }
834
835 match op {
836 ContextOperation::InsertMessage { anchor, .. } => self
837 .buffer
838 .read(cx)
839 .version
840 .observed(anchor.start.timestamp),
841 ContextOperation::UpdateMessage { message_id, .. } => {
842 self.messages_metadata.contains_key(message_id)
843 }
844 ContextOperation::UpdateSummary { .. } => true,
845 ContextOperation::SlashCommandFinished {
846 output_range,
847 sections,
848 ..
849 } => {
850 let version = &self.buffer.read(cx).version;
851 sections
852 .iter()
                    .map(|section| &section.range)
854 .chain([output_range])
855 .all(|range| {
856 let observed_start = range.start == language::Anchor::MIN
857 || range.start == language::Anchor::MAX
858 || version.observed(range.start.timestamp);
859 let observed_end = range.end == language::Anchor::MIN
860 || range.end == language::Anchor::MAX
861 || version.observed(range.end.timestamp);
862 observed_start && observed_end
863 })
864 }
865 ContextOperation::BufferOperation(_) => {
866 panic!("buffer operations should always be applied")
867 }
868 }
869 }
870
871 fn push_op(&mut self, op: ContextOperation, cx: &mut ModelContext<Self>) {
872 self.operations.push(op.clone());
873 cx.emit(ContextEvent::Operation(op));
874 }
875
876 pub fn buffer(&self) -> &Model<Buffer> {
877 &self.buffer
878 }
879
880 pub fn language_registry(&self) -> Arc<LanguageRegistry> {
881 self.language_registry.clone()
882 }
883
884 pub fn project(&self) -> Option<Model<Project>> {
885 self.project.clone()
886 }
887
888 pub fn prompt_builder(&self) -> Arc<PromptBuilder> {
889 self.prompt_builder.clone()
890 }
891
892 pub fn path(&self) -> Option<&Path> {
893 self.path.as_deref()
894 }
895
896 pub fn summary(&self) -> Option<&ContextSummary> {
897 self.summary.as_ref()
898 }
899
900 pub fn workflow_step_containing(
901 &self,
902 offset: usize,
903 cx: &AppContext,
904 ) -> Option<(Range<language::Anchor>, Model<WorkflowStep>)> {
905 let buffer = self.buffer.read(cx);
906 let index = self
907 .workflow_steps
908 .binary_search_by(|step| {
909 let step_range = step.range.to_offset(&buffer);
910 if offset < step_range.start {
911 Ordering::Greater
912 } else if offset > step_range.end {
913 Ordering::Less
914 } else {
915 Ordering::Equal
916 }
917 })
918 .ok()?;
919 let step = &self.workflow_steps[index];
920 Some((step.range.clone(), step.step.clone()))
921 }
922
923 pub fn workflow_step_for_range(
924 &self,
925 range: Range<language::Anchor>,
926 cx: &AppContext,
927 ) -> Option<Model<WorkflowStep>> {
928 let buffer = self.buffer.read(cx);
929 let index = self.workflow_step_index_for_range(&range, buffer).ok()?;
930 Some(self.workflow_steps[index].step.clone())
931 }
932
933 pub fn workflow_step_index_for_range(
934 &self,
935 tagged_range: &Range<text::Anchor>,
936 buffer: &text::BufferSnapshot,
937 ) -> Result<usize, usize> {
938 self.workflow_steps
939 .binary_search_by(|probe| probe.range.cmp(&tagged_range, buffer))
940 }
941
942 pub fn pending_slash_commands(&self) -> &[PendingSlashCommand] {
943 &self.pending_slash_commands
944 }
945
946 pub fn slash_command_output_sections(&self) -> &[SlashCommandOutputSection<language::Anchor>] {
947 &self.slash_command_output_sections
948 }
949
950 fn set_language(&mut self, cx: &mut ModelContext<Self>) {
951 let markdown = self.language_registry.language_for_name("Markdown");
952 cx.spawn(|this, mut cx| async move {
953 let markdown = markdown.await?;
954 this.update(&mut cx, |this, cx| {
955 this.buffer
956 .update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx));
957 })
958 })
959 .detach_and_log_err(cx);
960 }
961
962 fn handle_buffer_event(
963 &mut self,
964 _: Model<Buffer>,
965 event: &language::Event,
966 cx: &mut ModelContext<Self>,
967 ) {
968 match event {
969 language::Event::Operation(operation) => cx.emit(ContextEvent::Operation(
970 ContextOperation::BufferOperation(operation.clone()),
971 )),
972 language::Event::Edited => {
973 self.count_remaining_tokens(cx);
974 self.reparse_slash_commands(cx);
975 // Use `inclusive = true` to invalidate a step when an edit occurs
976 // at the start/end of a parsed step.
977 self.prune_invalid_workflow_steps(true, cx);
978 cx.emit(ContextEvent::MessagesEdited);
979 }
980 _ => {}
981 }
982 }
983
984 pub(crate) fn token_count(&self) -> Option<usize> {
985 self.token_count
986 }
987
988 pub(crate) fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
989 let request = self.to_completion_request(cx);
990 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
991 return;
992 };
993 self.pending_token_count = cx.spawn(|this, mut cx| {
994 async move {
995 cx.background_executor()
996 .timer(Duration::from_millis(200))
997 .await;
998
999 let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
1000 this.update(&mut cx, |this, cx| {
1001 this.token_count = Some(token_count);
1002 this.start_cache_warming(&model, cx);
1003 cx.notify()
1004 })
1005 }
1006 .log_err()
1007 });
1008 }
1009
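    /// Selects which user messages should serve as prompt-cache anchors
    /// (largest messages first, up to the model's configured limit, reserving
    /// one anchor for the inline assistant) and updates each message's cache
    /// metadata. Returns true if a newly marked anchor still needs caching.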
1010 pub fn mark_cache_anchors(
1011 &mut self,
1012 cache_configuration: &Option<LanguageModelCacheConfiguration>,
1013 speculative: bool,
1014 cx: &mut ModelContext<Self>,
1015 ) -> bool {
1016 let cache_configuration =
1017 cache_configuration
1018 .as_ref()
1019 .unwrap_or(&LanguageModelCacheConfiguration {
1020 max_cache_anchors: 0,
1021 should_speculate: false,
1022 min_total_token: 0,
1023 });
1024
1025 let messages: Vec<Message> = self.messages(cx).collect();
1026
1027 let mut sorted_messages = messages.clone();
1028 if speculative {
1029 // Avoid caching the last message if this is a speculative cache fetch as
1030 // it's likely to change.
1031 sorted_messages.pop();
1032 }
1033 sorted_messages.retain(|m| m.role == Role::User);
1034 sorted_messages.sort_by(|a, b| b.offset_range.len().cmp(&a.offset_range.len()));
1035
1036 let cache_anchors = if self.token_count.unwrap_or(0) < cache_configuration.min_total_token {
            // If we haven't hit the minimum threshold to enable caching, don't cache anything.
1038 0
1039 } else {
1040 // Save 1 anchor for the inline assistant to use.
1041 max(cache_configuration.max_cache_anchors, 1) - 1
1042 };
1043 sorted_messages.truncate(cache_anchors);
1044
1045 let anchors: HashSet<MessageId> = sorted_messages
1046 .into_iter()
1047 .map(|message| message.id)
1048 .collect();
1049
1050 let buffer = self.buffer.read(cx).snapshot();
1051 let invalidated_caches: HashSet<MessageId> = messages
1052 .iter()
1053 .scan(false, |encountered_invalid, message| {
1054 let message_id = message.id;
1055 let is_invalid = self
1056 .messages_metadata
1057 .get(&message_id)
1058 .map_or(true, |metadata| {
1059 !metadata.is_cache_valid(&buffer, &message.offset_range)
1060 || *encountered_invalid
1061 });
1062 *encountered_invalid |= is_invalid;
1063 Some(if is_invalid { Some(message_id) } else { None })
1064 })
1065 .flatten()
1066 .collect();
1067
1068 let last_anchor = messages.iter().rev().find_map(|message| {
1069 if anchors.contains(&message.id) {
1070 Some(message.id)
1071 } else {
1072 None
1073 }
1074 });
1075
1076 let mut new_anchor_needs_caching = false;
1077 let current_version = &buffer.version;
1078 // If we have no anchors, mark all messages as not being cached.
1079 let mut hit_last_anchor = last_anchor.is_none();
1080
1081 for message in messages.iter() {
1082 if hit_last_anchor {
1083 self.update_metadata(message.id, cx, |metadata| metadata.cache = None);
1084 continue;
1085 }
1086
1087 if let Some(last_anchor) = last_anchor {
1088 if message.id == last_anchor {
1089 hit_last_anchor = true;
1090 }
1091 }
1092
1093 new_anchor_needs_caching = new_anchor_needs_caching
1094 || (invalidated_caches.contains(&message.id) && anchors.contains(&message.id));
1095
1096 self.update_metadata(message.id, cx, |metadata| {
1097 let cache_status = if invalidated_caches.contains(&message.id) {
1098 CacheStatus::Pending
1099 } else {
1100 metadata
1101 .cache
1102 .as_ref()
1103 .map_or(CacheStatus::Pending, |cm| cm.status.clone())
1104 };
1105 metadata.cache = Some(MessageCacheMetadata {
1106 is_anchor: anchors.contains(&message.id),
1107 is_final_anchor: hit_last_anchor,
1108 status: cache_status,
1109 cached_at: current_version.clone(),
1110 });
1111 });
1112 }
1113 new_anchor_needs_caching
1114 }
1115
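    /// When the active model supports speculative caching, fires a minimal
    /// background request so that newly marked cache anchors are warmed before
    /// the user sends their next message.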
1116 fn start_cache_warming(&mut self, model: &Arc<dyn LanguageModel>, cx: &mut ModelContext<Self>) {
1117 let cache_configuration = model.cache_configuration();
1118
1119 if !self.mark_cache_anchors(&cache_configuration, true, cx) {
1120 return;
1121 }
1122 if !self.pending_completions.is_empty() {
1123 return;
1124 }
1125 if let Some(cache_configuration) = cache_configuration {
1126 if !cache_configuration.should_speculate {
1127 return;
1128 }
1129 }
1130
1131 let request = {
1132 let mut req = self.to_completion_request(cx);
1133 // Skip the last message because it's likely to change and
1134 // therefore would be a waste to cache.
1135 req.messages.pop();
1136 req.messages.push(LanguageModelRequestMessage {
1137 role: Role::User,
1138 content: vec!["Respond only with OK, nothing else.".into()],
1139 cache: false,
1140 });
1141 req
1142 };
1143
1144 let model = Arc::clone(model);
1145 self.pending_cache_warming_task = cx.spawn(|this, mut cx| {
1146 async move {
1147 match model.stream_completion(request, &cx).await {
1148 Ok(mut stream) => {
1149 stream.next().await;
1150 log::info!("Cache warming completed successfully");
1151 }
1152 Err(e) => {
1153 log::warn!("Cache warming failed: {}", e);
1154 }
1155 };
1156 this.update(&mut cx, |this, cx| {
1157 this.update_cache_status_for_completion(cx);
1158 })
1159 .ok();
1160 anyhow::Ok(())
1161 }
1162 .log_err()
1163 });
1164 }
1165
1166 pub fn update_cache_status_for_completion(&mut self, cx: &mut ModelContext<Self>) {
1167 let cached_message_ids: Vec<MessageId> = self
1168 .messages_metadata
1169 .iter()
1170 .filter_map(|(message_id, metadata)| {
1171 metadata.cache.as_ref().and_then(|cache| {
1172 if cache.status == CacheStatus::Pending {
1173 Some(*message_id)
1174 } else {
1175 None
1176 }
1177 })
1178 })
1179 .collect();
1180
1181 for message_id in cached_message_ids {
1182 self.update_metadata(message_id, cx, |metadata| {
1183 if let Some(cache) = &mut metadata.cache {
1184 cache.status = CacheStatus::Cached;
1185 }
1186 });
1187 }
1188 cx.notify();
1189 }
1190
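    /// Re-parses the rows edited since the last parse, replacing the pending
    /// slash commands found in those rows and emitting
    /// `PendingSlashCommandsUpdated` when anything changed.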
1191 pub fn reparse_slash_commands(&mut self, cx: &mut ModelContext<Self>) {
1192 let buffer = self.buffer.read(cx);
1193 let mut row_ranges = self
1194 .edits_since_last_slash_command_parse
1195 .consume()
1196 .into_iter()
1197 .map(|edit| {
1198 let start_row = buffer.offset_to_point(edit.new.start).row;
1199 let end_row = buffer.offset_to_point(edit.new.end).row + 1;
1200 start_row..end_row
1201 })
1202 .peekable();
1203
1204 let mut removed = Vec::new();
1205 let mut updated = Vec::new();
1206 while let Some(mut row_range) = row_ranges.next() {
1207 while let Some(next_row_range) = row_ranges.peek() {
1208 if row_range.end >= next_row_range.start {
1209 row_range.end = next_row_range.end;
1210 row_ranges.next();
1211 } else {
1212 break;
1213 }
1214 }
1215
1216 let start = buffer.anchor_before(Point::new(row_range.start, 0));
1217 let end = buffer.anchor_after(Point::new(
1218 row_range.end - 1,
1219 buffer.line_len(row_range.end - 1),
1220 ));
1221
1222 let old_range = self.pending_command_indices_for_range(start..end, cx);
1223
1224 let mut new_commands = Vec::new();
1225 let mut lines = buffer.text_for_range(start..end).lines();
1226 let mut offset = lines.offset();
1227 while let Some(line) = lines.next() {
1228 if let Some(command_line) = SlashCommandLine::parse(line) {
1229 let name = &line[command_line.name.clone()];
1230 let arguments = command_line
1231 .arguments
1232 .iter()
1233 .filter_map(|argument_range| {
1234 if argument_range.is_empty() {
1235 None
1236 } else {
1237 line.get(argument_range.clone())
1238 }
1239 })
1240 .map(ToOwned::to_owned)
1241 .collect::<SmallVec<_>>();
1242 if let Some(command) = SlashCommandRegistry::global(cx).command(name) {
1243 if !command.requires_argument() || !arguments.is_empty() {
1244 let start_ix = offset + command_line.name.start - 1;
1245 let end_ix = offset
1246 + command_line
1247 .arguments
1248 .last()
1249 .map_or(command_line.name.end, |argument| argument.end);
1250 let source_range =
1251 buffer.anchor_after(start_ix)..buffer.anchor_after(end_ix);
1252 let pending_command = PendingSlashCommand {
1253 name: name.to_string(),
1254 arguments,
1255 source_range,
1256 status: PendingSlashCommandStatus::Idle,
1257 };
1258 updated.push(pending_command.clone());
1259 new_commands.push(pending_command);
1260 }
1261 }
1262 }
1263
1264 offset = lines.offset();
1265 }
1266
1267 let removed_commands = self.pending_slash_commands.splice(old_range, new_commands);
1268 removed.extend(removed_commands.map(|command| command.source_range));
1269 }
1270
1271 if !updated.is_empty() || !removed.is_empty() {
1272 cx.emit(ContextEvent::PendingSlashCommandsUpdated { removed, updated });
1273 }
1274 }
1275
1276 fn prune_invalid_workflow_steps(&mut self, inclusive: bool, cx: &mut ModelContext<Self>) {
1277 let mut removed = Vec::new();
1278
1279 for edit_range in self.edits_since_last_workflow_step_prune.consume() {
1280 let intersecting_range = self.find_intersecting_steps(edit_range.new, inclusive, cx);
1281 removed.extend(
1282 self.workflow_steps
1283 .drain(intersecting_range)
1284 .map(|step| step.range),
1285 );
1286 }
1287
1288 if !removed.is_empty() {
1289 cx.emit(ContextEvent::WorkflowStepsRemoved(removed));
1290 cx.notify();
1291 }
1292 }
1293
1294 fn find_intersecting_steps(
1295 &self,
1296 range: Range<usize>,
1297 inclusive: bool,
1298 cx: &AppContext,
1299 ) -> Range<usize> {
1300 let buffer = self.buffer.read(cx);
1301 let start_ix = match self.workflow_steps.binary_search_by(|probe| {
1302 probe
1303 .range
1304 .end
1305 .to_offset(buffer)
1306 .cmp(&range.start)
1307 .then(if inclusive {
1308 Ordering::Greater
1309 } else {
1310 Ordering::Less
1311 })
1312 }) {
1313 Ok(ix) | Err(ix) => ix,
1314 };
1315 let end_ix = match self.workflow_steps.binary_search_by(|probe| {
1316 probe
1317 .range
1318 .start
1319 .to_offset(buffer)
1320 .cmp(&range.end)
1321 .then(if inclusive {
1322 Ordering::Less
1323 } else {
1324 Ordering::Greater
1325 })
1326 }) {
1327 Ok(ix) | Err(ix) => ix,
1328 };
1329 start_ix..end_ix
1330 }
1331
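    /// Scans `range` for `<step>`/`</step>` tags, registers a workflow step
    /// for each newly tagged range, removes the tags from the buffer, and
    /// kicks off resolution of the new steps.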
1332 fn parse_workflow_steps_in_range(&mut self, range: Range<usize>, cx: &mut ModelContext<Self>) {
1333 let weak_self = cx.weak_model();
1334 let mut new_edit_steps = Vec::new();
1335 let mut edits = Vec::new();
1336
1337 let buffer = self.buffer.read(cx).snapshot();
1338 let mut message_lines = buffer.as_rope().chunks_in_range(range).lines();
1339 let mut in_step = false;
1340 let mut step_open_tag_start_ix = 0;
1341 let mut line_start_offset = message_lines.offset();
1342
1343 while let Some(line) = message_lines.next() {
1344 if let Some(step_start_index) = line.find("<step>") {
1345 if !in_step {
1346 in_step = true;
1347 step_open_tag_start_ix = line_start_offset + step_start_index;
1348 }
1349 }
1350
1351 if let Some(step_end_index) = line.find("</step>") {
1352 if in_step {
1353 let mut step_open_tag_end_ix = step_open_tag_start_ix + "<step>".len();
1354 if buffer.chars_at(step_open_tag_end_ix).next() == Some('\n') {
1355 step_open_tag_end_ix += 1;
1356 }
1357 let mut step_end_tag_start_ix = line_start_offset + step_end_index;
1358 let step_end_tag_end_ix = step_end_tag_start_ix + "</step>".len();
1359 if buffer.reversed_chars_at(step_end_tag_start_ix).next() == Some('\n') {
1360 step_end_tag_start_ix -= 1;
1361 }
1362 edits.push((step_open_tag_start_ix..step_open_tag_end_ix, ""));
1363 edits.push((step_end_tag_start_ix..step_end_tag_end_ix, ""));
1364 let tagged_range = buffer.anchor_after(step_open_tag_end_ix)
1365 ..buffer.anchor_before(step_end_tag_start_ix);
1366
1367 // Check if a step with the same range already exists
1368 let existing_step_index =
1369 self.workflow_step_index_for_range(&tagged_range, &buffer);
1370
1371 if let Err(ix) = existing_step_index {
1372 new_edit_steps.push((
1373 ix,
1374 WorkflowStepEntry {
1375 step: cx.new_model(|_| {
1376 WorkflowStep::new(tagged_range.clone(), weak_self.clone())
1377 }),
1378 range: tagged_range,
1379 },
1380 ));
1381 }
1382
1383 in_step = false;
1384 }
1385 }
1386
1387 line_start_offset = message_lines.offset();
1388 }
1389
1390 let mut updated = Vec::new();
1391 for (index, step) in new_edit_steps.into_iter().rev() {
1392 let step_range = step.range.clone();
1393 updated.push(step_range.clone());
1394 self.workflow_steps.insert(index, step);
1395 self.resolve_workflow_step(step_range, cx);
1396 }
1397
1398 // Delete <step> tags, making sure we don't accidentally invalidate
1399 // the step we just parsed.
1400 self.buffer
1401 .update(cx, |buffer, cx| buffer.edit(edits, None, cx));
1402 self.edits_since_last_workflow_step_prune.consume();
1403 }
1404
1405 pub fn resolve_workflow_step(
1406 &mut self,
1407 tagged_range: Range<language::Anchor>,
1408 cx: &mut ModelContext<Self>,
1409 ) {
1410 let Ok(step_index) = self
1411 .workflow_steps
1412 .binary_search_by(|step| step.range.cmp(&tagged_range, self.buffer.read(cx)))
1413 else {
1414 return;
1415 };
1416
1417 cx.emit(ContextEvent::WorkflowStepUpdated(tagged_range.clone()));
1418 cx.notify();
1419
1420 let resolution = self.workflow_steps[step_index].step.clone();
1421 cx.defer(move |cx| {
1422 resolution.update(cx, |resolution, cx| resolution.resolve(cx));
1423 });
1424 }
1425
1426 pub fn workflow_step_updated(
1427 &mut self,
1428 range: Range<language::Anchor>,
1429 cx: &mut ModelContext<Self>,
1430 ) {
1431 cx.emit(ContextEvent::WorkflowStepUpdated(range));
1432 cx.notify();
1433 }
1434
1435 pub fn pending_command_for_position(
1436 &mut self,
1437 position: language::Anchor,
1438 cx: &mut ModelContext<Self>,
1439 ) -> Option<&mut PendingSlashCommand> {
1440 let buffer = self.buffer.read(cx);
1441 match self
1442 .pending_slash_commands
1443 .binary_search_by(|probe| probe.source_range.end.cmp(&position, buffer))
1444 {
1445 Ok(ix) => Some(&mut self.pending_slash_commands[ix]),
1446 Err(ix) => {
1447 let cmd = self.pending_slash_commands.get_mut(ix)?;
1448 if position.cmp(&cmd.source_range.start, buffer).is_ge()
1449 && position.cmp(&cmd.source_range.end, buffer).is_le()
1450 {
1451 Some(cmd)
1452 } else {
1453 None
1454 }
1455 }
1456 }
1457 }
1458
1459 pub fn pending_commands_for_range(
1460 &self,
1461 range: Range<language::Anchor>,
1462 cx: &AppContext,
1463 ) -> &[PendingSlashCommand] {
1464 let range = self.pending_command_indices_for_range(range, cx);
1465 &self.pending_slash_commands[range]
1466 }
1467
1468 fn pending_command_indices_for_range(
1469 &self,
1470 range: Range<language::Anchor>,
1471 cx: &AppContext,
1472 ) -> Range<usize> {
1473 let buffer = self.buffer.read(cx);
1474 let start_ix = match self
1475 .pending_slash_commands
1476 .binary_search_by(|probe| probe.source_range.end.cmp(&range.start, &buffer))
1477 {
1478 Ok(ix) | Err(ix) => ix,
1479 };
1480 let end_ix = match self
1481 .pending_slash_commands
1482 .binary_search_by(|probe| probe.source_range.start.cmp(&range.end, &buffer))
1483 {
1484 Ok(ix) => ix + 1,
1485 Err(ix) => ix,
1486 };
1487 start_ix..end_ix
1488 }
1489
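    /// Replaces a slash command's source range with its output once the
    /// command resolves, records the resulting sections, and replicates a
    /// `SlashCommandFinished` operation. On failure, the pending command is
    /// marked as errored instead.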
1490 pub fn insert_command_output(
1491 &mut self,
1492 command_range: Range<language::Anchor>,
1493 output: Task<Result<SlashCommandOutput>>,
1494 ensure_trailing_newline: bool,
1495 expand_result: bool,
1496 cx: &mut ModelContext<Self>,
1497 ) {
1498 self.reparse_slash_commands(cx);
1499
1500 let insert_output_task = cx.spawn(|this, mut cx| {
1501 let command_range = command_range.clone();
1502 async move {
1503 let output = output.await;
1504 this.update(&mut cx, |this, cx| match output {
1505 Ok(mut output) => {
1506 // Ensure section ranges are valid.
1507 for section in &mut output.sections {
1508 section.range.start = section.range.start.min(output.text.len());
1509 section.range.end = section.range.end.min(output.text.len());
1510 while !output.text.is_char_boundary(section.range.start) {
1511 section.range.start -= 1;
1512 }
1513 while !output.text.is_char_boundary(section.range.end) {
1514 section.range.end += 1;
1515 }
1516 }
1517
1518 // Ensure there is a newline after the last section.
1519 if ensure_trailing_newline {
1520 let has_newline_after_last_section =
1521 output.sections.last().map_or(false, |last_section| {
1522 output.text[last_section.range.end..].ends_with('\n')
1523 });
1524 if !has_newline_after_last_section {
1525 output.text.push('\n');
1526 }
1527 }
1528
1529 let version = this.version.clone();
1530 let command_id = SlashCommandId(this.next_timestamp());
1531 let (operation, event) = this.buffer.update(cx, |buffer, cx| {
1532 let start = command_range.start.to_offset(buffer);
1533 let old_end = command_range.end.to_offset(buffer);
1534 let new_end = start + output.text.len();
1535 buffer.edit([(start..old_end, output.text)], None, cx);
1536
1537 let mut sections = output
1538 .sections
1539 .into_iter()
1540 .map(|section| SlashCommandOutputSection {
1541 range: buffer.anchor_after(start + section.range.start)
1542 ..buffer.anchor_before(start + section.range.end),
1543 icon: section.icon,
1544 label: section.label,
1545 })
1546 .collect::<Vec<_>>();
1547 sections.sort_by(|a, b| a.range.cmp(&b.range, buffer));
1548
1549 this.slash_command_output_sections
1550 .extend(sections.iter().cloned());
1551 this.slash_command_output_sections
1552 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
1553
1554 let output_range =
1555 buffer.anchor_after(start)..buffer.anchor_before(new_end);
1556 this.finished_slash_commands.insert(command_id);
1557
1558 (
1559 ContextOperation::SlashCommandFinished {
1560 id: command_id,
1561 output_range: output_range.clone(),
1562 sections: sections.clone(),
1563 version,
1564 },
1565 ContextEvent::SlashCommandFinished {
1566 output_range,
1567 sections,
1568 run_commands_in_output: output.run_commands_in_text,
1569 expand_result,
1570 },
1571 )
1572 });
1573
1574 this.push_op(operation, cx);
1575 cx.emit(event);
1576 }
1577 Err(error) => {
1578 if let Some(pending_command) =
1579 this.pending_command_for_position(command_range.start, cx)
1580 {
1581 pending_command.status =
1582 PendingSlashCommandStatus::Error(error.to_string());
1583 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1584 removed: vec![pending_command.source_range.clone()],
1585 updated: vec![pending_command.clone()],
1586 });
1587 }
1588 }
1589 })
1590 .ok();
1591 }
1592 });
1593
1594 if let Some(pending_command) = self.pending_command_for_position(command_range.start, cx) {
1595 pending_command.status = PendingSlashCommandStatus::Running {
1596 _task: insert_output_task.shared(),
1597 };
1598 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1599 removed: vec![pending_command.source_range.clone()],
1600 updated: vec![pending_command.clone()],
1601 });
1602 }
1603 }
1604
1605 pub fn completion_provider_changed(&mut self, cx: &mut ModelContext<Self>) {
1606 self.count_remaining_tokens(cx);
1607 }
1608
1609 fn get_last_valid_message_id(&self, cx: &ModelContext<Self>) -> Option<MessageId> {
1610 self.message_anchors.iter().rev().find_map(|message| {
1611 message
1612 .start
1613 .is_valid(self.buffer.read(cx))
1614 .then_some(message.id)
1615 })
1616 }
1617
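    /// Starts a completion: inserts a pending assistant message followed by an
    /// empty user message for the next reply, then streams the model's
    /// response into the assistant message, returning the new user message.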
1618 pub fn assist(&mut self, cx: &mut ModelContext<Self>) -> Option<MessageAnchor> {
1619 let provider = LanguageModelRegistry::read_global(cx).active_provider()?;
1620 let model = LanguageModelRegistry::read_global(cx).active_model()?;
1621 let last_message_id = self.get_last_valid_message_id(cx)?;
1622
1623 if !provider.is_authenticated(cx) {
1624 log::info!("completion provider has no credentials");
1625 return None;
1626 }
1627 // Compute which messages to cache, including the last one.
1628 self.mark_cache_anchors(&model.cache_configuration(), false, cx);
1629
1630 let request = self.to_completion_request(cx);
1631 let assistant_message = self
1632 .insert_message_after(last_message_id, Role::Assistant, MessageStatus::Pending, cx)
1633 .unwrap();
1634
1635 // Queue up the user's next reply.
1636 let user_message = self
1637 .insert_message_after(assistant_message.id, Role::User, MessageStatus::Done, cx)
1638 .unwrap();
1639
1640 let pending_completion_id = post_inc(&mut self.completion_count);
1641
1642 let task = cx.spawn({
1643 |this, mut cx| async move {
1644 let stream = model.stream_completion(request, &cx);
1645 let assistant_message_id = assistant_message.id;
1646 let mut response_latency = None;
1647 let stream_completion = async {
1648 let request_start = Instant::now();
1649 let mut chunks = stream.await?;
1650
1651 while let Some(chunk) = chunks.next().await {
1652 if response_latency.is_none() {
1653 response_latency = Some(request_start.elapsed());
1654 }
1655 let chunk = chunk?;
1656
1657 this.update(&mut cx, |this, cx| {
1658 let message_ix = this
1659 .message_anchors
1660 .iter()
1661 .position(|message| message.id == assistant_message_id)?;
1662 let message_range = this.buffer.update(cx, |buffer, cx| {
1663 let message_start_offset =
1664 this.message_anchors[message_ix].start.to_offset(buffer);
1665 let message_old_end_offset = this.message_anchors[message_ix + 1..]
1666 .iter()
1667 .find(|message| message.start.is_valid(buffer))
1668 .map_or(buffer.len(), |message| {
1669 message.start.to_offset(buffer).saturating_sub(1)
1670 });
1671 let message_new_end_offset = message_old_end_offset + chunk.len();
1672 buffer.edit(
1673 [(message_old_end_offset..message_old_end_offset, chunk)],
1674 None,
1675 cx,
1676 );
1677 message_start_offset..message_new_end_offset
1678 });
1679
1680 // Use `inclusive = false` as edits might occur at the end of a parsed step.
1681 this.prune_invalid_workflow_steps(false, cx);
1682 this.parse_workflow_steps_in_range(message_range, cx);
1683 cx.emit(ContextEvent::StreamedCompletion);
1684
1685 Some(())
1686 })?;
1687 smol::future::yield_now().await;
1688 }
1689 this.update(&mut cx, |this, cx| {
1690 this.pending_completions
1691 .retain(|completion| completion.id != pending_completion_id);
1692 this.summarize(false, cx);
1693 this.update_cache_status_for_completion(cx);
1694 })?;
1695
1696 anyhow::Ok(())
1697 };
1698
1699 let result = stream_completion.await;
1700
1701 this.update(&mut cx, |this, cx| {
1702 let error_message = result
1703 .err()
1704 .map(|error| error.to_string().trim().to_string());
1705
1706 if let Some(error_message) = error_message.as_ref() {
1707 cx.emit(ContextEvent::ShowAssistError(SharedString::from(
1708 error_message.clone(),
1709 )));
1710 }
1711
1712 this.update_metadata(assistant_message_id, cx, |metadata| {
1713 if let Some(error_message) = error_message.as_ref() {
1714 metadata.status =
1715 MessageStatus::Error(SharedString::from(error_message.clone()));
1716 } else {
1717 metadata.status = MessageStatus::Done;
1718 }
1719 });
1720
1721 if let Some(telemetry) = this.telemetry.as_ref() {
1722 telemetry.report_assistant_event(
1723 Some(this.id.0.clone()),
1724 AssistantKind::Panel,
1725 model.telemetry_id(),
1726 response_latency,
1727 error_message,
1728 );
1729 }
1730 })
1731 .ok();
1732 }
1733 });
1734
1735 self.pending_completions.push(PendingCompletion {
1736 id: pending_completion_id,
1737 assistant_message_id: assistant_message.id,
1738 _task: task,
1739 });
1740
1741 Some(user_message)
1742 }
1743
1744 pub fn to_completion_request(&self, cx: &AppContext) -> LanguageModelRequest {
1745 let buffer = self.buffer.read(cx);
1746 let request_messages = self
1747 .messages(cx)
1748 .filter(|message| message.status == MessageStatus::Done)
1749 .filter_map(|message| message.to_request_message(&buffer))
1750 .collect();
1751
1752 LanguageModelRequest {
1753 messages: request_messages,
1754 stop: vec![],
1755 temperature: 1.0,
1756 }
1757 }
1758
1759 pub fn cancel_last_assist(&mut self, cx: &mut ModelContext<Self>) -> bool {
1760 if let Some(pending_completion) = self.pending_completions.pop() {
1761 self.update_metadata(pending_completion.assistant_message_id, cx, |metadata| {
1762 if metadata.status == MessageStatus::Pending {
1763 metadata.status = MessageStatus::Canceled;
1764 }
1765 });
1766 true
1767 } else {
1768 false
1769 }
1770 }
1771
1772 pub fn cycle_message_roles(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
1773 for id in ids {
1774 if let Some(metadata) = self.messages_metadata.get(&id) {
1775 let role = metadata.role.cycle();
1776 self.update_metadata(id, cx, |metadata| metadata.role = role);
1777 }
1778 }
1779 }
1780
1781 pub fn update_metadata(
1782 &mut self,
1783 id: MessageId,
1784 cx: &mut ModelContext<Self>,
1785 f: impl FnOnce(&mut MessageMetadata),
1786 ) {
1787 let version = self.version.clone();
1788 let timestamp = self.next_timestamp();
1789 if let Some(metadata) = self.messages_metadata.get_mut(&id) {
1790 f(metadata);
1791 metadata.timestamp = timestamp;
1792 let operation = ContextOperation::UpdateMessage {
1793 message_id: id,
1794 metadata: metadata.clone(),
1795 version,
1796 };
1797 self.push_op(operation, cx);
1798 cx.emit(ContextEvent::MessagesEdited);
1799 cx.notify();
1800 }
1801 }
1802
1803 pub fn insert_message_after(
1804 &mut self,
1805 message_id: MessageId,
1806 role: Role,
1807 status: MessageStatus,
1808 cx: &mut ModelContext<Self>,
1809 ) -> Option<MessageAnchor> {
1810 if let Some(prev_message_ix) = self
1811 .message_anchors
1812 .iter()
1813 .position(|message| message.id == message_id)
1814 {
1815 // Find the next valid message after the one we were given.
1816 let mut next_message_ix = prev_message_ix + 1;
1817 while let Some(next_message) = self.message_anchors.get(next_message_ix) {
1818 if next_message.start.is_valid(self.buffer.read(cx)) {
1819 break;
1820 }
1821 next_message_ix += 1;
1822 }
1823
1824 let start = self.buffer.update(cx, |buffer, cx| {
1825 let offset = self
1826 .message_anchors
1827 .get(next_message_ix)
1828 .map_or(buffer.len(), |message| {
1829 buffer.clip_offset(message.start.to_offset(buffer) - 1, Bias::Left)
1830 });
1831 buffer.edit([(offset..offset, "\n")], None, cx);
1832 buffer.anchor_before(offset + 1)
1833 });
1834
1835 let version = self.version.clone();
1836 let anchor = MessageAnchor {
1837 id: MessageId(self.next_timestamp()),
1838 start,
1839 };
1840 let metadata = MessageMetadata {
1841 role,
1842 status,
1843 timestamp: anchor.id.0,
1844 cache: None,
1845 };
1846 self.insert_message(anchor.clone(), metadata.clone(), cx);
1847 self.push_op(
1848 ContextOperation::InsertMessage {
1849 anchor: anchor.clone(),
1850 metadata,
1851 version,
1852 },
1853 cx,
1854 );
1855 Some(anchor)
1856 } else {
1857 None
1858 }
1859 }
1860
1861 pub fn insert_image(&mut self, image: Image, cx: &mut ModelContext<Self>) -> Option<()> {
1862 if let hash_map::Entry::Vacant(entry) = self.images.entry(image.id()) {
1863 entry.insert((
1864 image.to_image_data(cx).log_err()?,
1865 LanguageModelImage::from_image(image, cx).shared(),
1866 ));
1867 }
1868
1869 Some(())
1870 }
1871
1872 pub fn insert_image_anchor(
1873 &mut self,
1874 image_id: u64,
1875 anchor: language::Anchor,
1876 cx: &mut ModelContext<Self>,
1877 ) -> bool {
1878 cx.emit(ContextEvent::MessagesEdited);
1879
1880 let buffer = self.buffer.read(cx);
1881 let insertion_ix = match self
1882 .image_anchors
1883 .binary_search_by(|existing_anchor| anchor.cmp(&existing_anchor.anchor, buffer))
1884 {
1885 Ok(ix) => ix,
1886 Err(ix) => ix,
1887 };
1888
1889 if let Some((render_image, image)) = self.images.get(&image_id) {
1890 self.image_anchors.insert(
1891 insertion_ix,
1892 ImageAnchor {
1893 anchor,
1894 image_id,
1895 image: image.clone(),
1896 render_image: render_image.clone(),
1897 },
1898 );
1899
1900 true
1901 } else {
1902 false
1903 }
1904 }
1905
1906 pub fn images<'a>(&'a self, _cx: &'a AppContext) -> impl 'a + Iterator<Item = ImageAnchor> {
1907 self.image_anchors.iter().cloned()
1908 }
1909
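    /// Splits the message containing `range` at the range's boundaries,
    /// inserting new message anchors (and newline separators where needed) and
    /// returning the newly created messages. Returns `(None, None)` if the
    /// range spans more than one message.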
1910 pub fn split_message(
1911 &mut self,
1912 range: Range<usize>,
1913 cx: &mut ModelContext<Self>,
1914 ) -> (Option<MessageAnchor>, Option<MessageAnchor>) {
1915 let start_message = self.message_for_offset(range.start, cx);
1916 let end_message = self.message_for_offset(range.end, cx);
1917 if let Some((start_message, end_message)) = start_message.zip(end_message) {
1918 // Prevent splitting when range spans multiple messages.
1919 if start_message.id != end_message.id {
1920 return (None, None);
1921 }
1922
1923 let message = start_message;
1924 let role = message.role;
1925 let mut edited_buffer = false;
1926
1927 let mut suffix_start = None;
1928
1929 // TODO: why did this start panicking?
1930 if range.start > message.offset_range.start
1931 && range.end < message.offset_range.end.saturating_sub(1)
1932 {
1933 if self.buffer.read(cx).chars_at(range.end).next() == Some('\n') {
1934 suffix_start = Some(range.end + 1);
1935 } else if self.buffer.read(cx).reversed_chars_at(range.end).next() == Some('\n') {
1936 suffix_start = Some(range.end);
1937 }
1938 }
1939
1940 let version = self.version.clone();
1941 let suffix = if let Some(suffix_start) = suffix_start {
1942 MessageAnchor {
1943 id: MessageId(self.next_timestamp()),
1944 start: self.buffer.read(cx).anchor_before(suffix_start),
1945 }
1946 } else {
1947 self.buffer.update(cx, |buffer, cx| {
1948 buffer.edit([(range.end..range.end, "\n")], None, cx);
1949 });
1950 edited_buffer = true;
1951 MessageAnchor {
1952 id: MessageId(self.next_timestamp()),
1953 start: self.buffer.read(cx).anchor_before(range.end + 1),
1954 }
1955 };
1956
1957 let suffix_metadata = MessageMetadata {
1958 role,
1959 status: MessageStatus::Done,
1960 timestamp: suffix.id.0,
1961 cache: None,
1962 };
1963 self.insert_message(suffix.clone(), suffix_metadata.clone(), cx);
1964 self.push_op(
1965 ContextOperation::InsertMessage {
1966 anchor: suffix.clone(),
1967 metadata: suffix_metadata,
1968 version,
1969 },
1970 cx,
1971 );
1972
1973 let new_messages =
1974 if range.start == range.end || range.start == message.offset_range.start {
1975 (None, Some(suffix))
1976 } else {
1977 let mut prefix_end = None;
1978 if range.start > message.offset_range.start
1979                        && range.end < message.offset_range.end.saturating_sub(1)
1980 {
1981 if self.buffer.read(cx).chars_at(range.start).next() == Some('\n') {
1982 prefix_end = Some(range.start + 1);
1983 } else if self.buffer.read(cx).reversed_chars_at(range.start).next()
1984 == Some('\n')
1985 {
1986 prefix_end = Some(range.start);
1987 }
1988 }
1989
1990 let version = self.version.clone();
1991 let selection = if let Some(prefix_end) = prefix_end {
1992 MessageAnchor {
1993 id: MessageId(self.next_timestamp()),
1994 start: self.buffer.read(cx).anchor_before(prefix_end),
1995 }
1996 } else {
1997 self.buffer.update(cx, |buffer, cx| {
1998 buffer.edit([(range.start..range.start, "\n")], None, cx)
1999 });
2000 edited_buffer = true;
2001 MessageAnchor {
2002 id: MessageId(self.next_timestamp()),
2003                            start: self.buffer.read(cx).anchor_before(range.start + 1),
2004 }
2005 };
2006
2007 let selection_metadata = MessageMetadata {
2008 role,
2009 status: MessageStatus::Done,
2010 timestamp: selection.id.0,
2011 cache: None,
2012 };
2013 self.insert_message(selection.clone(), selection_metadata.clone(), cx);
2014 self.push_op(
2015 ContextOperation::InsertMessage {
2016 anchor: selection.clone(),
2017 metadata: selection_metadata,
2018 version,
2019 },
2020 cx,
2021 );
2022
2023 (Some(selection), Some(suffix))
2024 };
2025
2026 if !edited_buffer {
2027 cx.emit(ContextEvent::MessagesEdited);
2028 }
2029 new_messages
2030 } else {
2031 (None, None)
2032 }
2033 }
2034
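    /// Records metadata for a new message and inserts its anchor into `message_anchors`,
    /// keeping the list ordered by buffer position (ties are broken by newer message id
    /// first). Emits `ContextEvent::MessagesEdited`.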
2035 fn insert_message(
2036 &mut self,
2037 new_anchor: MessageAnchor,
2038 new_metadata: MessageMetadata,
2039 cx: &mut ModelContext<Self>,
2040 ) {
2041 cx.emit(ContextEvent::MessagesEdited);
2042
2043 self.messages_metadata.insert(new_anchor.id, new_metadata);
2044
2045 let buffer = self.buffer.read(cx);
2046 let insertion_ix = self
2047 .message_anchors
2048 .iter()
2049 .position(|anchor| {
2050 let comparison = new_anchor.start.cmp(&anchor.start, buffer);
2051 comparison.is_lt() || (comparison.is_eq() && new_anchor.id > anchor.id)
2052 })
2053 .unwrap_or(self.message_anchors.len());
2054 self.message_anchors.insert(insertion_ix, new_anchor);
2055 }
2056
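    /// Asks the active language model for a short title for this context and streams the
    /// response into the context's summary, stopping after the first line. Requires an
    /// authenticated provider, and unless `replace_old` is set it only runs once at least
    /// two messages exist and no summary has been generated yet.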
2057 pub(super) fn summarize(&mut self, replace_old: bool, cx: &mut ModelContext<Self>) {
2058 let Some(provider) = LanguageModelRegistry::read_global(cx).active_provider() else {
2059 return;
2060 };
2061 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
2062 return;
2063 };
2064
2065 if replace_old || (self.message_anchors.len() >= 2 && self.summary.is_none()) {
2066 if !provider.is_authenticated(cx) {
2067 return;
2068 }
2069
2070 let messages = self
2071 .messages(cx)
2072 .filter_map(|message| message.to_request_message(self.buffer.read(cx)))
2073 .chain(Some(LanguageModelRequestMessage {
2074 role: Role::User,
2075 content: vec![
2076 "Summarize the context into a short title without punctuation.".into(),
2077 ],
2078 cache: false,
2079 }));
2080 let request = LanguageModelRequest {
2081 messages: messages.collect(),
2082 stop: vec![],
2083 temperature: 1.0,
2084 };
2085
2086 self.pending_summary = cx.spawn(|this, mut cx| {
2087 async move {
2088 let stream = model.stream_completion(request, &cx);
2089 let mut messages = stream.await?;
2090
2091 let mut replaced = !replace_old;
2092 while let Some(message) = messages.next().await {
2093 let text = message?;
2094 let mut lines = text.lines();
2095 this.update(&mut cx, |this, cx| {
2096 let version = this.version.clone();
2097 let timestamp = this.next_timestamp();
2098 let summary = this.summary.get_or_insert(ContextSummary::default());
2099 if !replaced && replace_old {
2100 summary.text.clear();
2101 replaced = true;
2102 }
2103 summary.text.extend(lines.next());
2104 summary.timestamp = timestamp;
2105 let operation = ContextOperation::UpdateSummary {
2106 summary: summary.clone(),
2107 version,
2108 };
2109 this.push_op(operation, cx);
2110 cx.emit(ContextEvent::SummaryChanged);
2111 })?;
2112
2113 // Stop if the LLM generated multiple lines.
2114 if lines.next().is_some() {
2115 break;
2116 }
2117 }
2118
2119 this.update(&mut cx, |this, cx| {
2120 let version = this.version.clone();
2121 let timestamp = this.next_timestamp();
2122 if let Some(summary) = this.summary.as_mut() {
2123 summary.done = true;
2124 summary.timestamp = timestamp;
2125 let operation = ContextOperation::UpdateSummary {
2126 summary: summary.clone(),
2127 version,
2128 };
2129 this.push_op(operation, cx);
2130 cx.emit(ContextEvent::SummaryChanged);
2131 }
2132 })?;
2133
2134 anyhow::Ok(())
2135 }
2136 .log_err()
2137 });
2138 }
2139 }
2140
2141 fn message_for_offset(&self, offset: usize, cx: &AppContext) -> Option<Message> {
2142 self.messages_for_offsets([offset], cx).pop()
2143 }
2144
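    /// Returns the messages containing the given buffer offsets. Offsets are expected in
    /// ascending order; each message appears at most once even if several offsets fall
    /// within it.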
2145 pub fn messages_for_offsets(
2146 &self,
2147 offsets: impl IntoIterator<Item = usize>,
2148 cx: &AppContext,
2149 ) -> Vec<Message> {
2150 let mut result = Vec::new();
2151
2152 let mut messages = self.messages(cx).peekable();
2153 let mut offsets = offsets.into_iter().peekable();
2154 let mut current_message = messages.next();
2155 while let Some(offset) = offsets.next() {
2156 // Locate the message that contains the offset.
2157 while current_message.as_ref().map_or(false, |message| {
2158 !message.offset_range.contains(&offset) && messages.peek().is_some()
2159 }) {
2160 current_message = messages.next();
2161 }
2162 let Some(message) = current_message.as_ref() else {
2163 break;
2164 };
2165
2166 // Skip offsets that are in the same message.
2167 while offsets.peek().map_or(false, |offset| {
2168 message.offset_range.contains(offset) || messages.peek().is_none()
2169 }) {
2170 offsets.next();
2171 }
2172
2173 result.push(message.clone());
2174 }
2175 result
2176 }
2177
2178 fn messages_from_anchors<'a>(
2179 &'a self,
2180 message_anchors: impl Iterator<Item = &'a MessageAnchor> + 'a,
2181 cx: &'a AppContext,
2182 ) -> impl 'a + Iterator<Item = Message> {
2183 let buffer = self.buffer.read(cx);
2184 let messages = message_anchors.enumerate();
2185 let images = self.image_anchors.iter();
2186
2187 Self::messages_from_iters(buffer, &self.messages_metadata, messages, images)
2188 }
2189
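    /// Returns all messages in this context, in buffer order.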
2190 pub fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Message> {
2191 self.messages_from_anchors(self.message_anchors.iter(), cx)
2192 }
2193
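    /// Builds `Message` values by walking message anchors and image anchors in lockstep.
    /// Each message extends from its anchor to the next valid message anchor (or the end of
    /// the buffer), invalid anchors are folded into the preceding message's index range, and
    /// image anchors that fall before a message's end are attached to it as image offsets.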
2194 pub fn messages_from_iters<'a>(
2195 buffer: &'a Buffer,
2196 metadata: &'a HashMap<MessageId, MessageMetadata>,
2197 messages: impl Iterator<Item = (usize, &'a MessageAnchor)> + 'a,
2198 images: impl Iterator<Item = &'a ImageAnchor> + 'a,
2199 ) -> impl 'a + Iterator<Item = Message> {
2200 let mut messages = messages.peekable();
2201 let mut images = images.peekable();
2202
2203 iter::from_fn(move || {
2204 if let Some((start_ix, message_anchor)) = messages.next() {
2205 let metadata = metadata.get(&message_anchor.id)?;
2206
2207 let message_start = message_anchor.start.to_offset(buffer);
2208 let mut message_end = None;
2209 let mut end_ix = start_ix;
2210 while let Some((_, next_message)) = messages.peek() {
2211 if next_message.start.is_valid(buffer) {
2212 message_end = Some(next_message.start);
2213 break;
2214 } else {
2215 end_ix += 1;
2216 messages.next();
2217 }
2218 }
2219 let message_end_anchor = message_end.unwrap_or(language::Anchor::MAX);
2220 let message_end = message_end_anchor.to_offset(buffer);
2221
2222 let mut image_offsets = SmallVec::new();
2223 while let Some(image_anchor) = images.peek() {
2224 if image_anchor.anchor.cmp(&message_end_anchor, buffer).is_lt() {
2225 image_offsets.push((
2226 image_anchor.anchor.to_offset(buffer),
2227 MessageImage {
2228 image_id: image_anchor.image_id,
2229 image: image_anchor.image.clone(),
2230 },
2231 ));
2232 images.next();
2233 } else {
2234 break;
2235 }
2236 }
2237
2238 return Some(Message {
2239 index_range: start_ix..end_ix,
2240 offset_range: message_start..message_end,
2241 id: message_anchor.id,
2242 anchor: message_anchor.start,
2243 role: metadata.role,
2244 status: metadata.status.clone(),
2245 cache: metadata.cache.clone(),
2246 image_offsets,
2247 });
2248 }
2249 None
2250 })
2251 }
2252
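    /// Persists the context as `<summary> - <n>.zed.json` under the contexts directory after
    /// serializing the images it references. Saving is skipped for remote replicas and for
    /// contexts whose summary has not finished generating; when the title (and therefore the
    /// path) changes, the previous file is removed.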
2253 pub fn save(
2254 &mut self,
2255 debounce: Option<Duration>,
2256 fs: Arc<dyn Fs>,
2257 cx: &mut ModelContext<Context>,
2258 ) {
2259 if self.replica_id() != ReplicaId::default() {
2260 // Prevent saving a remote context for now.
2261 return;
2262 }
2263
2264 self.pending_save = cx.spawn(|this, mut cx| async move {
2265 if let Some(debounce) = debounce {
2266 cx.background_executor().timer(debounce).await;
2267 }
2268
2269 let (old_path, summary) = this.read_with(&cx, |this, _| {
2270 let path = this.path.clone();
2271 let summary = if let Some(summary) = this.summary.as_ref() {
2272 if summary.done {
2273 Some(summary.text.clone())
2274 } else {
2275 None
2276 }
2277 } else {
2278 None
2279 };
2280 (path, summary)
2281 })?;
2282
2283 if let Some(summary) = summary {
2284 this.read_with(&cx, |this, cx| this.serialize_images(fs.clone(), cx))?
2285 .await;
2286
2287 let context = this.read_with(&cx, |this, cx| this.serialize(cx))?;
2288 let mut discriminant = 1;
2289 let mut new_path;
2290 loop {
2291 new_path = contexts_dir().join(&format!(
2292 "{} - {}.zed.json",
2293 summary.trim(),
2294 discriminant
2295 ));
2296 if fs.is_file(&new_path).await {
2297 discriminant += 1;
2298 } else {
2299 break;
2300 }
2301 }
2302
2303 fs.create_dir(contexts_dir().as_ref()).await?;
2304 fs.atomic_write(new_path.clone(), serde_json::to_string(&context).unwrap())
2305 .await?;
2306 if let Some(old_path) = old_path {
2307 if new_path != old_path {
2308 fs.remove_file(
2309 &old_path,
2310 RemoveOptions {
2311 recursive: false,
2312 ignore_if_not_exists: true,
2313 },
2314 )
2315 .await?;
2316 }
2317 }
2318
2319 this.update(&mut cx, |this, _| this.path = Some(new_path))?;
2320 }
2321
2322 Ok(())
2323 });
2324 }
2325
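    /// Writes each image's base64-encoded data to the context images directory as
    /// `<id>.png.base64`, skipping files that already exist. Runs on the background executor.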
2326 pub fn serialize_images(&self, fs: Arc<dyn Fs>, cx: &AppContext) -> Task<()> {
2327 let mut images_to_save = self
2328 .images
2329 .iter()
2330 .map(|(id, (_, llm_image))| {
2331 let fs = fs.clone();
2332 let llm_image = llm_image.clone();
2333 let id = *id;
2334 async move {
2335 if let Some(llm_image) = llm_image.await {
2336 let path: PathBuf =
2337 context_images_dir().join(&format!("{}.png.base64", id));
2338 if fs
2339 .metadata(path.as_path())
2340 .await
2341 .log_err()
2342 .flatten()
2343 .is_none()
2344 {
2345 fs.atomic_write(path, llm_image.source.to_string())
2346 .await
2347 .log_err();
2348 }
2349 }
2350 }
2351 })
2352 .collect::<FuturesUnordered<_>>();
2353 cx.background_executor().spawn(async move {
2354 if fs
2355 .create_dir(context_images_dir().as_ref())
2356 .await
2357 .log_err()
2358 .is_some()
2359 {
2360                while images_to_save.next().await.is_some() {}
2361 }
2362 })
2363 }
2364
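    /// Replaces the summary with a user-provided title, marking it as done and notifying
    /// observers that the summary changed.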
2365 pub(crate) fn custom_summary(&mut self, custom_summary: String, cx: &mut ModelContext<Self>) {
2366 let timestamp = self.next_timestamp();
2367 let summary = self.summary.get_or_insert(ContextSummary::default());
2368 summary.timestamp = timestamp;
2369 summary.done = true;
2370 summary.text = custom_summary;
2371 cx.emit(ContextEvent::SummaryChanged);
2372 }
2373}
2374
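/// The replication state of a context: the version vector of its operation log and the
/// version vector of its underlying buffer.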
2375#[derive(Debug, Default)]
2376pub struct ContextVersion {
2377 context: clock::Global,
2378 buffer: clock::Global,
2379}
2380
2381impl ContextVersion {
2382 pub fn from_proto(proto: &proto::ContextVersion) -> Self {
2383 Self {
2384 context: language::proto::deserialize_version(&proto.context_version),
2385 buffer: language::proto::deserialize_version(&proto.buffer_version),
2386 }
2387 }
2388
2389 pub fn to_proto(&self, context_id: ContextId) -> proto::ContextVersion {
2390 proto::ContextVersion {
2391 context_id: context_id.to_proto(),
2392 context_version: language::proto::serialize_version(&self.context),
2393 buffer_version: language::proto::serialize_version(&self.buffer),
2394 }
2395 }
2396}
2397
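/// A slash command parsed from the buffer that has not yet produced its final output, along
/// with its arguments, current status, and the range of text it occupies.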
2398#[derive(Debug, Clone)]
2399pub struct PendingSlashCommand {
2400 pub name: String,
2401 pub arguments: SmallVec<[String; 3]>,
2402 pub status: PendingSlashCommandStatus,
2403 pub source_range: Range<language::Anchor>,
2404}
2405
2406#[derive(Debug, Clone)]
2407pub enum PendingSlashCommandStatus {
2408 Idle,
2409 Running { _task: Shared<Task<()>> },
2410 Error(String),
2411}
2412
2413#[derive(Serialize, Deserialize)]
2414pub struct SavedMessage {
2415 pub id: MessageId,
2416 pub start: usize,
2417 pub metadata: MessageMetadata,
2418    // Defaulted for backwards compatibility with JSON files created before August 2024, which did not include this field.
2419    #[serde(default)]
2420    pub image_offsets: Vec<(usize, u64)>,
2421}
2422
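/// The on-disk representation of a context in the current format (see [`SavedContext::VERSION`]).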
2423#[derive(Serialize, Deserialize)]
2424pub struct SavedContext {
2425 pub id: Option<ContextId>,
2426 pub zed: String,
2427 pub version: String,
2428 pub text: String,
2429 pub messages: Vec<SavedMessage>,
2430 pub summary: String,
2431 pub slash_command_output_sections:
2432 Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
2433}
2434
2435impl SavedContext {
2436 pub const VERSION: &'static str = "0.4.0";
2437
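    /// Deserializes a saved context from JSON, transparently upgrading documents written in
    /// the 0.1.0, 0.2.0, and 0.3.0 formats to the current version.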
2438 pub fn from_json(json: &str) -> Result<Self> {
2439 let saved_context_json = serde_json::from_str::<serde_json::Value>(json)?;
2440 match saved_context_json
2441 .get("version")
2442 .ok_or_else(|| anyhow!("version not found"))?
2443 {
2444 serde_json::Value::String(version) => match version.as_str() {
2445 SavedContext::VERSION => {
2446 Ok(serde_json::from_value::<SavedContext>(saved_context_json)?)
2447 }
2448 SavedContextV0_3_0::VERSION => {
2449 let saved_context =
2450 serde_json::from_value::<SavedContextV0_3_0>(saved_context_json)?;
2451 Ok(saved_context.upgrade())
2452 }
2453 SavedContextV0_2_0::VERSION => {
2454 let saved_context =
2455 serde_json::from_value::<SavedContextV0_2_0>(saved_context_json)?;
2456 Ok(saved_context.upgrade())
2457 }
2458 SavedContextV0_1_0::VERSION => {
2459 let saved_context =
2460 serde_json::from_value::<SavedContextV0_1_0>(saved_context_json)?;
2461 Ok(saved_context.upgrade())
2462 }
2463 _ => Err(anyhow!("unrecognized saved context version: {}", version)),
2464 },
2465            _ => Err(anyhow!("saved context version is not a string")),
2466 }
2467 }
2468
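    /// Converts the saved context into the sequence of context operations needed to replay
    /// it into a fresh context: one insert per message (the initial message becomes an
    /// update instead), a single operation restoring slash command output sections, and a
    /// final summary update.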
2469 fn into_ops(
2470 self,
2471 buffer: &Model<Buffer>,
2472 cx: &mut ModelContext<Context>,
2473 ) -> Vec<ContextOperation> {
2474 let mut operations = Vec::new();
2475 let mut version = clock::Global::new();
2476 let mut next_timestamp = clock::Lamport::new(ReplicaId::default());
2477
2478 let mut first_message_metadata = None;
2479 for message in self.messages {
2480 if message.id == MessageId(clock::Lamport::default()) {
2481 first_message_metadata = Some(message.metadata);
2482 } else {
2483 operations.push(ContextOperation::InsertMessage {
2484 anchor: MessageAnchor {
2485 id: message.id,
2486 start: buffer.read(cx).anchor_before(message.start),
2487 },
2488 metadata: MessageMetadata {
2489 role: message.metadata.role,
2490 status: message.metadata.status,
2491 timestamp: message.metadata.timestamp,
2492 cache: None,
2493 },
2494 version: version.clone(),
2495 });
2496 version.observe(message.id.0);
2497 next_timestamp.observe(message.id.0);
2498 }
2499 }
2500
2501 if let Some(metadata) = first_message_metadata {
2502 let timestamp = next_timestamp.tick();
2503 operations.push(ContextOperation::UpdateMessage {
2504 message_id: MessageId(clock::Lamport::default()),
2505 metadata: MessageMetadata {
2506 role: metadata.role,
2507 status: metadata.status,
2508 timestamp,
2509 cache: None,
2510 },
2511 version: version.clone(),
2512 });
2513 version.observe(timestamp);
2514 }
2515
2516 let timestamp = next_timestamp.tick();
2517 operations.push(ContextOperation::SlashCommandFinished {
2518 id: SlashCommandId(timestamp),
2519 output_range: language::Anchor::MIN..language::Anchor::MAX,
2520 sections: self
2521 .slash_command_output_sections
2522 .into_iter()
2523 .map(|section| {
2524 let buffer = buffer.read(cx);
2525 SlashCommandOutputSection {
2526 range: buffer.anchor_after(section.range.start)
2527 ..buffer.anchor_before(section.range.end),
2528 icon: section.icon,
2529 label: section.label,
2530 }
2531 })
2532 .collect(),
2533 version: version.clone(),
2534 });
2535 version.observe(timestamp);
2536
2537 let timestamp = next_timestamp.tick();
2538 operations.push(ContextOperation::UpdateSummary {
2539 summary: ContextSummary {
2540 text: self.summary,
2541 done: true,
2542 timestamp,
2543 },
2544 version: version.clone(),
2545 });
2546 version.observe(timestamp);
2547
2548 operations
2549 }
2550}
2551
2552#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
2553struct SavedMessageIdPreV0_4_0(usize);
2554
2555#[derive(Serialize, Deserialize)]
2556struct SavedMessagePreV0_4_0 {
2557 id: SavedMessageIdPreV0_4_0,
2558 start: usize,
2559}
2560
2561#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
2562struct SavedMessageMetadataPreV0_4_0 {
2563 role: Role,
2564 status: MessageStatus,
2565}
2566
2567#[derive(Serialize, Deserialize)]
2568struct SavedContextV0_3_0 {
2569 id: Option<ContextId>,
2570 zed: String,
2571 version: String,
2572 text: String,
2573 messages: Vec<SavedMessagePreV0_4_0>,
2574 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
2575 summary: String,
2576 slash_command_output_sections: Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
2577}
2578
2579impl SavedContextV0_3_0 {
2580 const VERSION: &'static str = "0.3.0";
2581
2582 fn upgrade(self) -> SavedContext {
2583 SavedContext {
2584 id: self.id,
2585 zed: self.zed,
2586 version: SavedContext::VERSION.into(),
2587 text: self.text,
2588 messages: self
2589 .messages
2590 .into_iter()
2591 .filter_map(|message| {
2592 let metadata = self.message_metadata.get(&message.id)?;
2593 let timestamp = clock::Lamport {
2594 replica_id: ReplicaId::default(),
2595 value: message.id.0 as u32,
2596 };
2597 Some(SavedMessage {
2598 id: MessageId(timestamp),
2599 start: message.start,
2600 metadata: MessageMetadata {
2601 role: metadata.role,
2602 status: metadata.status.clone(),
2603 timestamp,
2604 cache: None,
2605 },
2606 image_offsets: Vec::new(),
2607 })
2608 })
2609 .collect(),
2610 summary: self.summary,
2611 slash_command_output_sections: self.slash_command_output_sections,
2612 }
2613 }
2614}
2615
2616#[derive(Serialize, Deserialize)]
2617struct SavedContextV0_2_0 {
2618 id: Option<ContextId>,
2619 zed: String,
2620 version: String,
2621 text: String,
2622 messages: Vec<SavedMessagePreV0_4_0>,
2623 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
2624 summary: String,
2625}
2626
2627impl SavedContextV0_2_0 {
2628 const VERSION: &'static str = "0.2.0";
2629
2630 fn upgrade(self) -> SavedContext {
2631 SavedContextV0_3_0 {
2632 id: self.id,
2633 zed: self.zed,
2634 version: SavedContextV0_3_0::VERSION.to_string(),
2635 text: self.text,
2636 messages: self.messages,
2637 message_metadata: self.message_metadata,
2638 summary: self.summary,
2639 slash_command_output_sections: Vec::new(),
2640 }
2641 .upgrade()
2642 }
2643}
2644
2645#[derive(Serialize, Deserialize)]
2646struct SavedContextV0_1_0 {
2647 id: Option<ContextId>,
2648 zed: String,
2649 version: String,
2650 text: String,
2651 messages: Vec<SavedMessagePreV0_4_0>,
2652 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
2653 summary: String,
2654 api_url: Option<String>,
2655 model: OpenAiModel,
2656}
2657
2658impl SavedContextV0_1_0 {
2659 const VERSION: &'static str = "0.1.0";
2660
2661 fn upgrade(self) -> SavedContext {
2662 SavedContextV0_2_0 {
2663 id: self.id,
2664 zed: self.zed,
2665 version: SavedContextV0_2_0::VERSION.to_string(),
2666 text: self.text,
2667 messages: self.messages,
2668 message_metadata: self.message_metadata,
2669 summary: self.summary,
2670 }
2671 .upgrade()
2672 }
2673}
2674
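/// Metadata describing a context file on disk: its display title, path, and last
/// modification time.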
2675#[derive(Clone)]
2676pub struct SavedContextMetadata {
2677 pub title: String,
2678 pub path: PathBuf,
2679 pub mtime: chrono::DateTime<chrono::Local>,
2680}