#[cfg(test)]
mod context_tests;

use crate::{
    prompts::PromptBuilder, slash_command::SlashCommandLine, workflow::WorkflowStep, MessageId,
    MessageStatus,
};
use anyhow::{anyhow, Context as _, Result};
use assistant_slash_command::{
    SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry,
};
use client::{self, proto, telemetry::Telemetry};
use clock::ReplicaId;
use collections::{HashMap, HashSet};
use fs::{Fs, RemoveOptions};
use futures::{future::Shared, stream::FuturesUnordered, FutureExt, StreamExt};
use gpui::{
    AppContext, Context as _, EventEmitter, Image, Model, ModelContext, RenderImage, SharedString,
    Subscription, Task,
};

use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset};
use language_model::{
    LanguageModel, LanguageModelCacheConfiguration, LanguageModelImage, LanguageModelRegistry,
    LanguageModelRequest, LanguageModelRequestMessage, MessageContent, Role,
};
use open_ai::Model as OpenAiModel;
use paths::{context_images_dir, contexts_dir};
use project::Project;
use serde::{Deserialize, Serialize};
use smallvec::SmallVec;
use std::{
    cmp::{max, Ordering},
    collections::hash_map,
    fmt::Debug,
    iter, mem,
    ops::Range,
    path::{Path, PathBuf},
    sync::Arc,
    time::{Duration, Instant},
};
use telemetry_events::AssistantKind;
use util::{post_inc, ResultExt, TryFutureExt};
use uuid::Uuid;
45
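/// A globally unique identifier for a context, shared with collaborators.
/// It wraps a UUID string; `from_proto`/`to_proto` pass that string through unchanged.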
46#[derive(Clone, Eq, PartialEq, Hash, PartialOrd, Ord, Serialize, Deserialize)]
47pub struct ContextId(String);
48
49impl ContextId {
50 pub fn new() -> Self {
51 Self(Uuid::new_v4().to_string())
52 }
53
54 pub fn from_proto(id: String) -> Self {
55 Self(id)
56 }
57
58 pub fn to_proto(&self) -> String {
59 self.0.clone()
60 }
61}
62
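/// A replicated operation on a [`Context`]. Operations are exchanged with
/// collaborators as protobuf messages and are only applied once the local
/// context has observed the version they were created against.
///
/// A rough sketch of the protobuf round-trip (error handling elided):
///
/// ```ignore
/// let message = op.to_proto();
/// let decoded = ContextOperation::from_proto(message)?;
/// ```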
63#[derive(Clone, Debug)]
64pub enum ContextOperation {
65 InsertMessage {
66 anchor: MessageAnchor,
67 metadata: MessageMetadata,
68 version: clock::Global,
69 },
70 UpdateMessage {
71 message_id: MessageId,
72 metadata: MessageMetadata,
73 version: clock::Global,
74 },
75 UpdateSummary {
76 summary: ContextSummary,
77 version: clock::Global,
78 },
79 SlashCommandFinished {
80 id: SlashCommandId,
81 output_range: Range<language::Anchor>,
82 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
83 version: clock::Global,
84 },
85 BufferOperation(language::Operation),
86}
87
88impl ContextOperation {
89 pub fn from_proto(op: proto::ContextOperation) -> Result<Self> {
90 match op.variant.context("invalid variant")? {
91 proto::context_operation::Variant::InsertMessage(insert) => {
92 let message = insert.message.context("invalid message")?;
93 let id = MessageId(language::proto::deserialize_timestamp(
94 message.id.context("invalid id")?,
95 ));
96 Ok(Self::InsertMessage {
97 anchor: MessageAnchor {
98 id,
99 start: language::proto::deserialize_anchor(
100 message.start.context("invalid anchor")?,
101 )
102 .context("invalid anchor")?,
103 },
104 metadata: MessageMetadata {
105 role: Role::from_proto(message.role),
106 status: MessageStatus::from_proto(
107 message.status.context("invalid status")?,
108 ),
109 timestamp: id.0,
110 should_cache: false,
111 is_cache_anchor: false,
112 },
113 version: language::proto::deserialize_version(&insert.version),
114 })
115 }
116 proto::context_operation::Variant::UpdateMessage(update) => Ok(Self::UpdateMessage {
117 message_id: MessageId(language::proto::deserialize_timestamp(
118 update.message_id.context("invalid message id")?,
119 )),
120 metadata: MessageMetadata {
121 role: Role::from_proto(update.role),
122 status: MessageStatus::from_proto(update.status.context("invalid status")?),
123 timestamp: language::proto::deserialize_timestamp(
124 update.timestamp.context("invalid timestamp")?,
125 ),
126 should_cache: false,
127 is_cache_anchor: false,
128 },
129 version: language::proto::deserialize_version(&update.version),
130 }),
131 proto::context_operation::Variant::UpdateSummary(update) => Ok(Self::UpdateSummary {
132 summary: ContextSummary {
133 text: update.summary,
134 done: update.done,
135 timestamp: language::proto::deserialize_timestamp(
136 update.timestamp.context("invalid timestamp")?,
137 ),
138 },
139 version: language::proto::deserialize_version(&update.version),
140 }),
141 proto::context_operation::Variant::SlashCommandFinished(finished) => {
142 Ok(Self::SlashCommandFinished {
143 id: SlashCommandId(language::proto::deserialize_timestamp(
144 finished.id.context("invalid id")?,
145 )),
146 output_range: language::proto::deserialize_anchor_range(
147 finished.output_range.context("invalid range")?,
148 )?,
149 sections: finished
150 .sections
151 .into_iter()
152 .map(|section| {
153 Ok(SlashCommandOutputSection {
154 range: language::proto::deserialize_anchor_range(
155 section.range.context("invalid range")?,
156 )?,
157 icon: section.icon_name.parse()?,
158 label: section.label.into(),
159 })
160 })
161 .collect::<Result<Vec<_>>>()?,
162 version: language::proto::deserialize_version(&finished.version),
163 })
164 }
165 proto::context_operation::Variant::BufferOperation(op) => Ok(Self::BufferOperation(
166 language::proto::deserialize_operation(
167 op.operation.context("invalid buffer operation")?,
168 )?,
169 )),
170 }
171 }
172
173 pub fn to_proto(&self) -> proto::ContextOperation {
174 match self {
175 Self::InsertMessage {
176 anchor,
177 metadata,
178 version,
179 } => proto::ContextOperation {
180 variant: Some(proto::context_operation::Variant::InsertMessage(
181 proto::context_operation::InsertMessage {
182 message: Some(proto::ContextMessage {
183 id: Some(language::proto::serialize_timestamp(anchor.id.0)),
184 start: Some(language::proto::serialize_anchor(&anchor.start)),
185 role: metadata.role.to_proto() as i32,
186 status: Some(metadata.status.to_proto()),
187 }),
188 version: language::proto::serialize_version(version),
189 },
190 )),
191 },
192 Self::UpdateMessage {
193 message_id,
194 metadata,
195 version,
196 } => proto::ContextOperation {
197 variant: Some(proto::context_operation::Variant::UpdateMessage(
198 proto::context_operation::UpdateMessage {
199 message_id: Some(language::proto::serialize_timestamp(message_id.0)),
200 role: metadata.role.to_proto() as i32,
201 status: Some(metadata.status.to_proto()),
202 timestamp: Some(language::proto::serialize_timestamp(metadata.timestamp)),
203 version: language::proto::serialize_version(version),
204 },
205 )),
206 },
207 Self::UpdateSummary { summary, version } => proto::ContextOperation {
208 variant: Some(proto::context_operation::Variant::UpdateSummary(
209 proto::context_operation::UpdateSummary {
210 summary: summary.text.clone(),
211 done: summary.done,
212 timestamp: Some(language::proto::serialize_timestamp(summary.timestamp)),
213 version: language::proto::serialize_version(version),
214 },
215 )),
216 },
217 Self::SlashCommandFinished {
218 id,
219 output_range,
220 sections,
221 version,
222 } => proto::ContextOperation {
223 variant: Some(proto::context_operation::Variant::SlashCommandFinished(
224 proto::context_operation::SlashCommandFinished {
225 id: Some(language::proto::serialize_timestamp(id.0)),
226 output_range: Some(language::proto::serialize_anchor_range(
227 output_range.clone(),
228 )),
229 sections: sections
230 .iter()
231 .map(|section| {
232 let icon_name: &'static str = section.icon.into();
233 proto::SlashCommandOutputSection {
234 range: Some(language::proto::serialize_anchor_range(
235 section.range.clone(),
236 )),
237 icon_name: icon_name.to_string(),
238 label: section.label.to_string(),
239 }
240 })
241 .collect(),
242 version: language::proto::serialize_version(version),
243 },
244 )),
245 },
246 Self::BufferOperation(operation) => proto::ContextOperation {
247 variant: Some(proto::context_operation::Variant::BufferOperation(
248 proto::context_operation::BufferOperation {
249 operation: Some(language::proto::serialize_operation(operation)),
250 },
251 )),
252 },
253 }
254 }
255
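    /// The Lamport timestamp that identifies and orders this operation. Buffer
    /// operations are ordered by the buffer itself, so asking for their timestamp
    /// here is a programmer error and panics.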
256 fn timestamp(&self) -> clock::Lamport {
257 match self {
258 Self::InsertMessage { anchor, .. } => anchor.id.0,
259 Self::UpdateMessage { metadata, .. } => metadata.timestamp,
260 Self::UpdateSummary { summary, .. } => summary.timestamp,
261 Self::SlashCommandFinished { id, .. } => id.0,
262 Self::BufferOperation(_) => {
263 panic!("reading the timestamp of a buffer operation is not supported")
264 }
265 }
266 }
267
    /// Returns the version vector this operation was created against. The operation
    /// can only be applied once a context has observed all of that version.
269 pub fn version(&self) -> &clock::Global {
270 match self {
271 Self::InsertMessage { version, .. }
272 | Self::UpdateMessage { version, .. }
273 | Self::UpdateSummary { version, .. }
274 | Self::SlashCommandFinished { version, .. } => version,
275 Self::BufferOperation(_) => {
276 panic!("reading the version of a buffer operation is not supported")
277 }
278 }
279 }
280}
281
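/// Events emitted by a [`Context`] so observers (such as the assistant panel UI)
/// can react to message edits, streamed completion chunks, summary changes, and
/// slash command / workflow step progress.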
282#[derive(Debug, Clone)]
283pub enum ContextEvent {
284 ShowAssistError(SharedString),
285 MessagesEdited,
286 SummaryChanged,
287 WorkflowStepsRemoved(Vec<Range<language::Anchor>>),
288 WorkflowStepUpdated(Range<language::Anchor>),
289 StreamedCompletion,
290 PendingSlashCommandsUpdated {
291 removed: Vec<Range<language::Anchor>>,
292 updated: Vec<PendingSlashCommand>,
293 },
294 SlashCommandFinished {
295 output_range: Range<language::Anchor>,
296 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
297 run_commands_in_output: bool,
298 expand_result: bool,
299 },
300 Operation(ContextOperation),
301}
302
303#[derive(Clone, Default, Debug)]
304pub struct ContextSummary {
305 pub text: String,
306 done: bool,
307 timestamp: clock::Lamport,
308}
309
310#[derive(Clone, Debug, Eq, PartialEq)]
311pub struct MessageAnchor {
312 pub id: MessageId,
313 pub start: language::Anchor,
314}
315
316#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
317pub struct MessageMetadata {
318 pub role: Role,
319 pub status: MessageStatus,
320 timestamp: clock::Lamport,
321 should_cache: bool,
322 is_cache_anchor: bool,
323}
324
325#[derive(Clone, Debug)]
326pub struct MessageImage {
327 image_id: u64,
328 image: Shared<Task<Option<LanguageModelImage>>>,
329}
330
331impl PartialEq for MessageImage {
332 fn eq(&self, other: &Self) -> bool {
333 self.image_id == other.image_id
334 }
335}
336
337impl Eq for MessageImage {}
338
339#[derive(Clone, Debug)]
340pub struct Message {
341 pub image_offsets: SmallVec<[(usize, MessageImage); 1]>,
342 pub offset_range: Range<usize>,
343 pub index_range: Range<usize>,
344 pub id: MessageId,
345 pub anchor: language::Anchor,
346 pub role: Role,
347 pub status: MessageStatus,
348 pub cache: bool,
349}
350
351impl Message {
352 fn to_request_message(&self, buffer: &Buffer) -> Option<LanguageModelRequestMessage> {
353 let mut content = Vec::new();
354
355 let mut range_start = self.offset_range.start;
356 for (image_offset, message_image) in self.image_offsets.iter() {
357 if *image_offset != range_start {
358 if let Some(text) = Self::collect_text_content(buffer, range_start..*image_offset) {
359 content.push(text);
360 }
361 }
362
363 if let Some(image) = message_image.image.clone().now_or_never().flatten() {
364 content.push(language_model::MessageContent::Image(image));
365 }
366
367 range_start = *image_offset;
368 }
369 if range_start != self.offset_range.end {
370 if let Some(text) =
371 Self::collect_text_content(buffer, range_start..self.offset_range.end)
372 {
373 content.push(text);
374 }
375 }
376
377 if content.is_empty() {
378 return None;
379 }
380
381 Some(LanguageModelRequestMessage {
382 role: self.role,
383 content,
384 cache: self.cache,
385 })
386 }
387
388 fn collect_text_content(buffer: &Buffer, range: Range<usize>) -> Option<MessageContent> {
389 let text: String = buffer.text_for_range(range.clone()).collect();
390 if text.trim().is_empty() {
391 None
392 } else {
393 Some(MessageContent::Text(text))
394 }
395 }
396}
397
398#[derive(Clone, Debug)]
399pub struct ImageAnchor {
400 pub anchor: language::Anchor,
401 pub image_id: u64,
402 pub render_image: Arc<RenderImage>,
403 pub image: Shared<Task<Option<LanguageModelImage>>>,
404}
405
406struct PendingCompletion {
407 id: usize,
408 assistant_message_id: MessageId,
409 _task: Task<()>,
410}
411
412#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
413pub struct SlashCommandId(clock::Lamport);
414
415struct WorkflowStepEntry {
416 range: Range<language::Anchor>,
417 step: Model<WorkflowStep>,
418}
419
420pub struct Context {
421 id: ContextId,
422 timestamp: clock::Lamport,
423 version: clock::Global,
424 pending_ops: Vec<ContextOperation>,
425 operations: Vec<ContextOperation>,
426 buffer: Model<Buffer>,
427 pending_slash_commands: Vec<PendingSlashCommand>,
428 edits_since_last_slash_command_parse: language::Subscription,
429 finished_slash_commands: HashSet<SlashCommandId>,
430 slash_command_output_sections: Vec<SlashCommandOutputSection<language::Anchor>>,
431 message_anchors: Vec<MessageAnchor>,
432 images: HashMap<u64, (Arc<RenderImage>, Shared<Task<Option<LanguageModelImage>>>)>,
433 image_anchors: Vec<ImageAnchor>,
434 messages_metadata: HashMap<MessageId, MessageMetadata>,
435 summary: Option<ContextSummary>,
436 pending_summary: Task<Option<()>>,
437 completion_count: usize,
438 pending_completions: Vec<PendingCompletion>,
439 token_count: Option<usize>,
440 pending_token_count: Task<Option<()>>,
441 pending_save: Task<Result<()>>,
442 pending_cache_warming_task: Task<Option<()>>,
443 path: Option<PathBuf>,
444 _subscriptions: Vec<Subscription>,
445 telemetry: Option<Arc<Telemetry>>,
446 language_registry: Arc<LanguageRegistry>,
447 workflow_steps: Vec<WorkflowStepEntry>,
448 edits_since_last_workflow_step_prune: language::Subscription,
449 project: Option<Model<Project>>,
450 prompt_builder: Arc<PromptBuilder>,
451}
452
453impl EventEmitter<ContextEvent> for Context {}
454
455impl Context {
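    /// Creates a context owned by the local replica with read-write capability.
    ///
    /// A minimal construction sketch (assuming `language_registry` and
    /// `prompt_builder` are already in scope inside a GPUI app):
    ///
    /// ```ignore
    /// let context = cx.new_model(|cx| {
    ///     Context::local(
    ///         language_registry.clone(),
    ///         None, // no project
    ///         None, // no telemetry
    ///         prompt_builder.clone(),
    ///         cx,
    ///     )
    /// });
    /// ```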
456 pub fn local(
457 language_registry: Arc<LanguageRegistry>,
458 project: Option<Model<Project>>,
459 telemetry: Option<Arc<Telemetry>>,
460 prompt_builder: Arc<PromptBuilder>,
461 cx: &mut ModelContext<Self>,
462 ) -> Self {
463 Self::new(
464 ContextId::new(),
465 ReplicaId::default(),
466 language::Capability::ReadWrite,
467 language_registry,
468 prompt_builder,
469 project,
470 telemetry,
471 cx,
472 )
473 }
474
475 #[allow(clippy::too_many_arguments)]
476 pub fn new(
477 id: ContextId,
478 replica_id: ReplicaId,
479 capability: language::Capability,
480 language_registry: Arc<LanguageRegistry>,
481 prompt_builder: Arc<PromptBuilder>,
482 project: Option<Model<Project>>,
483 telemetry: Option<Arc<Telemetry>>,
484 cx: &mut ModelContext<Self>,
485 ) -> Self {
486 let buffer = cx.new_model(|_cx| {
487 let mut buffer = Buffer::remote(
488 language::BufferId::new(1).unwrap(),
489 replica_id,
490 capability,
491 "",
492 );
493 buffer.set_language_registry(language_registry.clone());
494 buffer
495 });
496 let edits_since_last_slash_command_parse =
497 buffer.update(cx, |buffer, _| buffer.subscribe());
498 let edits_since_last_workflow_step_prune =
499 buffer.update(cx, |buffer, _| buffer.subscribe());
500 let mut this = Self {
501 id,
502 timestamp: clock::Lamport::new(replica_id),
503 version: clock::Global::new(),
504 pending_ops: Vec::new(),
505 operations: Vec::new(),
506 message_anchors: Default::default(),
507 image_anchors: Default::default(),
508 images: Default::default(),
509 messages_metadata: Default::default(),
510 pending_slash_commands: Vec::new(),
511 finished_slash_commands: HashSet::default(),
512 slash_command_output_sections: Vec::new(),
513 edits_since_last_slash_command_parse,
514 summary: None,
515 pending_summary: Task::ready(None),
516 completion_count: Default::default(),
517 pending_completions: Default::default(),
518 token_count: None,
519 pending_token_count: Task::ready(None),
520 pending_cache_warming_task: Task::ready(None),
521 _subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)],
522 pending_save: Task::ready(Ok(())),
523 path: None,
524 buffer,
525 telemetry,
526 project,
527 language_registry,
528 workflow_steps: Vec::new(),
529 edits_since_last_workflow_step_prune,
530 prompt_builder,
531 };
532
533 let first_message_id = MessageId(clock::Lamport {
534 replica_id: 0,
535 value: 0,
536 });
537 let message = MessageAnchor {
538 id: first_message_id,
539 start: language::Anchor::MIN,
540 };
541 this.messages_metadata.insert(
542 first_message_id,
543 MessageMetadata {
544 role: Role::User,
545 status: MessageStatus::Done,
546 timestamp: first_message_id.0,
547 should_cache: false,
548 is_cache_anchor: false,
549 },
550 );
551 this.message_anchors.push(message);
552
553 this.set_language(cx);
554 this.count_remaining_tokens(cx);
555 this
556 }
557
558 pub(crate) fn serialize(&self, cx: &AppContext) -> SavedContext {
559 let buffer = self.buffer.read(cx);
560 SavedContext {
561 id: Some(self.id.clone()),
562 zed: "context".into(),
563 version: SavedContext::VERSION.into(),
564 text: buffer.text(),
565 messages: self
566 .messages(cx)
567 .map(|message| SavedMessage {
568 id: message.id,
569 start: message.offset_range.start,
570 metadata: self.messages_metadata[&message.id].clone(),
571 image_offsets: message
572 .image_offsets
573 .iter()
574 .map(|image_offset| (image_offset.0, image_offset.1.image_id))
575 .collect(),
576 })
577 .collect(),
578 summary: self
579 .summary
580 .as_ref()
581 .map(|summary| summary.text.clone())
582 .unwrap_or_default(),
583 slash_command_output_sections: self
584 .slash_command_output_sections
585 .iter()
586 .filter_map(|section| {
587 let range = section.range.to_offset(buffer);
588 if section.range.start.is_valid(buffer) && !range.is_empty() {
589 Some(assistant_slash_command::SlashCommandOutputSection {
590 range,
591 icon: section.icon,
592 label: section.label.clone(),
593 })
594 } else {
595 None
596 }
597 })
598 .collect(),
599 }
600 }
601
602 #[allow(clippy::too_many_arguments)]
603 pub fn deserialize(
604 saved_context: SavedContext,
605 path: PathBuf,
606 language_registry: Arc<LanguageRegistry>,
607 prompt_builder: Arc<PromptBuilder>,
608 project: Option<Model<Project>>,
609 telemetry: Option<Arc<Telemetry>>,
610 cx: &mut ModelContext<Self>,
611 ) -> Self {
612 let id = saved_context.id.clone().unwrap_or_else(|| ContextId::new());
613 let mut this = Self::new(
614 id,
615 ReplicaId::default(),
616 language::Capability::ReadWrite,
617 language_registry,
618 prompt_builder,
619 project,
620 telemetry,
621 cx,
622 );
623 this.path = Some(path);
624 this.buffer.update(cx, |buffer, cx| {
625 buffer.set_text(saved_context.text.as_str(), cx)
626 });
627 let operations = saved_context.into_ops(&this.buffer, cx);
628 this.apply_ops(operations, cx).unwrap();
629 this
630 }
631
632 pub fn id(&self) -> &ContextId {
633 &self.id
634 }
635
636 pub fn replica_id(&self) -> ReplicaId {
637 self.timestamp.replica_id
638 }
639
640 pub fn version(&self, cx: &AppContext) -> ContextVersion {
641 ContextVersion {
642 context: self.version.clone(),
643 buffer: self.buffer.read(cx).version(),
644 }
645 }
646
647 pub fn set_capability(
648 &mut self,
649 capability: language::Capability,
650 cx: &mut ModelContext<Self>,
651 ) {
652 self.buffer
653 .update(cx, |buffer, cx| buffer.set_capability(capability, cx));
654 }
655
656 fn next_timestamp(&mut self) -> clock::Lamport {
657 let timestamp = self.timestamp.tick();
658 self.version.observe(timestamp);
659 timestamp
660 }
661
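    /// Serializes every operation that `since` has not yet observed so it can be sent
    /// to a collaborator: buffer operations first, then the remaining context
    /// operations sorted by Lamport timestamp.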
662 pub fn serialize_ops(
663 &self,
664 since: &ContextVersion,
665 cx: &AppContext,
666 ) -> Task<Vec<proto::ContextOperation>> {
667 let buffer_ops = self
668 .buffer
669 .read(cx)
670 .serialize_ops(Some(since.buffer.clone()), cx);
671
672 let mut context_ops = self
673 .operations
674 .iter()
675 .filter(|op| !since.context.observed(op.timestamp()))
676 .cloned()
677 .collect::<Vec<_>>();
678 context_ops.extend(self.pending_ops.iter().cloned());
679
680 cx.background_executor().spawn(async move {
681 let buffer_ops = buffer_ops.await;
682 context_ops.sort_unstable_by_key(|op| op.timestamp());
683 buffer_ops
684 .into_iter()
685 .map(|op| proto::ContextOperation {
686 variant: Some(proto::context_operation::Variant::BufferOperation(
687 proto::context_operation::BufferOperation {
688 operation: Some(op),
689 },
690 )),
691 })
692 .chain(context_ops.into_iter().map(|op| op.to_proto()))
693 .collect()
694 })
695 }
696
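    /// Applies operations received from a collaborator or reloaded from disk. Buffer
    /// operations are forwarded to the underlying buffer immediately; everything else
    /// is queued as pending and flushed once its dependencies have been observed.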
697 pub fn apply_ops(
698 &mut self,
699 ops: impl IntoIterator<Item = ContextOperation>,
700 cx: &mut ModelContext<Self>,
701 ) -> Result<()> {
702 let mut buffer_ops = Vec::new();
703 for op in ops {
704 match op {
705 ContextOperation::BufferOperation(buffer_op) => buffer_ops.push(buffer_op),
                op => self.pending_ops.push(op),
707 }
708 }
709 self.buffer
710 .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx))?;
711 self.flush_ops(cx);
712
713 Ok(())
714 }
715
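    /// Drains the pending operations in timestamp order, applying those whose causal
    /// dependencies are satisfied (see `can_apply_op`) and re-queueing the rest, then
    /// emits `MessagesEdited`/`SummaryChanged` if anything user-visible changed.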
716 fn flush_ops(&mut self, cx: &mut ModelContext<Context>) {
717 let mut messages_changed = false;
718 let mut summary_changed = false;
719
720 self.pending_ops.sort_unstable_by_key(|op| op.timestamp());
721 for op in mem::take(&mut self.pending_ops) {
722 if !self.can_apply_op(&op, cx) {
723 self.pending_ops.push(op);
724 continue;
725 }
726
727 let timestamp = op.timestamp();
728 match op.clone() {
729 ContextOperation::InsertMessage {
730 anchor, metadata, ..
731 } => {
732 if self.messages_metadata.contains_key(&anchor.id) {
733 // We already applied this operation.
734 } else {
735 self.insert_message(anchor, metadata, cx);
736 messages_changed = true;
737 }
738 }
739 ContextOperation::UpdateMessage {
740 message_id,
741 metadata: new_metadata,
742 ..
743 } => {
744 let metadata = self.messages_metadata.get_mut(&message_id).unwrap();
745 if new_metadata.timestamp > metadata.timestamp {
746 *metadata = new_metadata;
747 messages_changed = true;
748 }
749 }
750 ContextOperation::UpdateSummary {
751 summary: new_summary,
752 ..
753 } => {
754 if self
755 .summary
756 .as_ref()
757 .map_or(true, |summary| new_summary.timestamp > summary.timestamp)
758 {
759 self.summary = Some(new_summary);
760 summary_changed = true;
761 }
762 }
763 ContextOperation::SlashCommandFinished {
764 id,
765 output_range,
766 sections,
767 ..
768 } => {
769 if self.finished_slash_commands.insert(id) {
770 let buffer = self.buffer.read(cx);
771 self.slash_command_output_sections
772 .extend(sections.iter().cloned());
773 self.slash_command_output_sections
774 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
775 cx.emit(ContextEvent::SlashCommandFinished {
776 output_range,
777 sections,
778 expand_result: false,
779 run_commands_in_output: false,
780 });
781 }
782 }
783 ContextOperation::BufferOperation(_) => unreachable!(),
784 }
785
786 self.version.observe(timestamp);
787 self.timestamp.observe(timestamp);
788 self.operations.push(op);
789 }
790
791 if messages_changed {
792 cx.emit(ContextEvent::MessagesEdited);
793 cx.notify();
794 }
795
796 if summary_changed {
797 cx.emit(ContextEvent::SummaryChanged);
798 cx.notify();
799 }
800 }
801
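    /// An operation can only be applied once this context has observed the version it
    /// was created against, the buffer anchors it references resolve locally, and (for
    /// message updates) the target message already exists.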
802 fn can_apply_op(&self, op: &ContextOperation, cx: &AppContext) -> bool {
803 if !self.version.observed_all(op.version()) {
804 return false;
805 }
806
807 match op {
808 ContextOperation::InsertMessage { anchor, .. } => self
809 .buffer
810 .read(cx)
811 .version
812 .observed(anchor.start.timestamp),
813 ContextOperation::UpdateMessage { message_id, .. } => {
814 self.messages_metadata.contains_key(message_id)
815 }
816 ContextOperation::UpdateSummary { .. } => true,
817 ContextOperation::SlashCommandFinished {
818 output_range,
819 sections,
820 ..
821 } => {
822 let version = &self.buffer.read(cx).version;
823 sections
824 .iter()
                    .map(|section| &section.range)
826 .chain([output_range])
827 .all(|range| {
828 let observed_start = range.start == language::Anchor::MIN
829 || range.start == language::Anchor::MAX
830 || version.observed(range.start.timestamp);
831 let observed_end = range.end == language::Anchor::MIN
832 || range.end == language::Anchor::MAX
833 || version.observed(range.end.timestamp);
834 observed_start && observed_end
835 })
836 }
837 ContextOperation::BufferOperation(_) => {
838 panic!("buffer operations should always be applied")
839 }
840 }
841 }
842
843 fn push_op(&mut self, op: ContextOperation, cx: &mut ModelContext<Self>) {
844 self.operations.push(op.clone());
845 cx.emit(ContextEvent::Operation(op));
846 }
847
848 pub fn buffer(&self) -> &Model<Buffer> {
849 &self.buffer
850 }
851
852 pub fn language_registry(&self) -> Arc<LanguageRegistry> {
853 self.language_registry.clone()
854 }
855
856 pub fn project(&self) -> Option<Model<Project>> {
857 self.project.clone()
858 }
859
860 pub fn prompt_builder(&self) -> Arc<PromptBuilder> {
861 self.prompt_builder.clone()
862 }
863
864 pub fn path(&self) -> Option<&Path> {
865 self.path.as_deref()
866 }
867
868 pub fn summary(&self) -> Option<&ContextSummary> {
869 self.summary.as_ref()
870 }
871
872 pub fn workflow_step_containing(
873 &self,
874 offset: usize,
875 cx: &AppContext,
876 ) -> Option<(Range<language::Anchor>, Model<WorkflowStep>)> {
877 let buffer = self.buffer.read(cx);
878 let index = self
879 .workflow_steps
880 .binary_search_by(|step| {
881 let step_range = step.range.to_offset(&buffer);
882 if offset < step_range.start {
883 Ordering::Greater
884 } else if offset > step_range.end {
885 Ordering::Less
886 } else {
887 Ordering::Equal
888 }
889 })
890 .ok()?;
891 let step = &self.workflow_steps[index];
892 Some((step.range.clone(), step.step.clone()))
893 }
894
895 pub fn workflow_step_for_range(
896 &self,
897 range: Range<language::Anchor>,
898 cx: &AppContext,
899 ) -> Option<Model<WorkflowStep>> {
900 let buffer = self.buffer.read(cx);
901 let index = self.workflow_step_index_for_range(&range, buffer).ok()?;
902 Some(self.workflow_steps[index].step.clone())
903 }
904
905 pub fn workflow_step_index_for_range(
906 &self,
907 tagged_range: &Range<text::Anchor>,
908 buffer: &text::BufferSnapshot,
909 ) -> Result<usize, usize> {
910 self.workflow_steps
911 .binary_search_by(|probe| probe.range.cmp(&tagged_range, buffer))
912 }
913
914 pub fn pending_slash_commands(&self) -> &[PendingSlashCommand] {
915 &self.pending_slash_commands
916 }
917
918 pub fn slash_command_output_sections(&self) -> &[SlashCommandOutputSection<language::Anchor>] {
919 &self.slash_command_output_sections
920 }
921
922 fn set_language(&mut self, cx: &mut ModelContext<Self>) {
923 let markdown = self.language_registry.language_for_name("Markdown");
924 cx.spawn(|this, mut cx| async move {
925 let markdown = markdown.await?;
926 this.update(&mut cx, |this, cx| {
927 this.buffer
928 .update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx));
929 })
930 })
931 .detach_and_log_err(cx);
932 }
933
934 fn handle_buffer_event(
935 &mut self,
936 _: Model<Buffer>,
937 event: &language::Event,
938 cx: &mut ModelContext<Self>,
939 ) {
940 match event {
941 language::Event::Operation(operation) => cx.emit(ContextEvent::Operation(
942 ContextOperation::BufferOperation(operation.clone()),
943 )),
944 language::Event::Edited => {
945 self.count_remaining_tokens(cx);
946 self.reparse_slash_commands(cx);
947 // Use `inclusive = true` to invalidate a step when an edit occurs
948 // at the start/end of a parsed step.
949 self.prune_invalid_workflow_steps(true, cx);
950 cx.emit(ContextEvent::MessagesEdited);
951 }
952 _ => {}
953 }
954 }
955
956 pub(crate) fn token_count(&self) -> Option<usize> {
957 self.token_count
958 }
959
960 pub(crate) fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
961 let request = self.to_completion_request(cx);
962 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
963 return;
964 };
965 self.pending_token_count = cx.spawn(|this, mut cx| {
966 async move {
967 cx.background_executor()
968 .timer(Duration::from_millis(200))
969 .await;
970
971 let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
972 this.update(&mut cx, |this, cx| {
973 this.token_count = Some(token_count);
974 this.start_cache_warming(&model, cx);
975 cx.notify()
976 })
977 }
978 .log_err()
979 });
980 }
981
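    /// Marks the largest messages (those spanning at least 5,000 buffer offsets) to be
    /// cached by the model provider, reserving one cache anchor for the inline
    /// assistant. When `speculative` is true the final message is excluded, since it is
    /// still likely to change. Returns true if any message newly became cached.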
982 pub fn mark_longest_messages_for_cache(
983 &mut self,
984 cache_configuration: &Option<LanguageModelCacheConfiguration>,
985 speculative: bool,
986 cx: &mut ModelContext<Self>,
987 ) -> bool {
988 let cache_configuration =
989 cache_configuration
990 .as_ref()
991 .unwrap_or(&LanguageModelCacheConfiguration {
992 max_cache_anchors: 0,
993 should_speculate: false,
994 min_total_token: 0,
995 });
996
997 let messages: Vec<Message> = self
998 .messages_from_anchors(
999 self.message_anchors.iter().take(if speculative {
1000 self.message_anchors.len().saturating_sub(1)
1001 } else {
1002 self.message_anchors.len()
1003 }),
1004 cx,
1005 )
1006 .filter(|message| message.offset_range.len() >= 5_000)
1007 .collect();
1008
1009 let mut sorted_messages = messages.clone();
1010 sorted_messages.sort_by(|a, b| b.offset_range.len().cmp(&a.offset_range.len()));
1011 if cache_configuration.max_cache_anchors == 0 && cache_configuration.should_speculate {
1012 // Some models support caching, but don't support anchors. In that case we want to
1013 // mark the largest message as needing to be cached, but we will not mark it as an
1014 // anchor.
1015 sorted_messages.truncate(1);
1016 } else {
1017 // Save 1 anchor for the inline assistant.
1018 sorted_messages.truncate(max(cache_configuration.max_cache_anchors, 1) - 1);
1019 }
1020
1021 let longest_message_ids: HashSet<MessageId> = sorted_messages
1022 .into_iter()
1023 .map(|message| message.id)
1024 .collect();
1025
1026 let cache_deltas: HashSet<MessageId> = self
1027 .messages_metadata
1028 .iter()
1029 .filter_map(|(id, metadata)| {
1030 let should_cache = longest_message_ids.contains(id);
1031 let should_be_anchor = should_cache && cache_configuration.max_cache_anchors > 0;
1032 if metadata.should_cache != should_cache
1033 || metadata.is_cache_anchor != should_be_anchor
1034 {
1035 Some(*id)
1036 } else {
1037 None
1038 }
1039 })
1040 .collect();
1041
1042 let mut newly_cached_item = false;
1043 for id in cache_deltas {
1044 newly_cached_item = newly_cached_item || longest_message_ids.contains(&id);
1045 self.update_metadata(id, cx, |metadata| {
1046 metadata.should_cache = longest_message_ids.contains(&id);
1047 metadata.is_cache_anchor =
1048 metadata.should_cache && (cache_configuration.max_cache_anchors > 0);
1049 });
1050 }
1051 newly_cached_item
1052 }
1053
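    /// Speculatively warms the provider's prompt cache: if marking messages for caching
    /// changed anything, and the model's cache configuration allows speculation, a cheap
    /// throwaway request is sent so the shared prefix is cached before the user actually
    /// submits their next message.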
1054 fn start_cache_warming(&mut self, model: &Arc<dyn LanguageModel>, cx: &mut ModelContext<Self>) {
1055 let cache_configuration = model.cache_configuration();
1056 if !self.mark_longest_messages_for_cache(&cache_configuration, true, cx) {
1057 return;
1058 }
1059 if let Some(cache_configuration) = cache_configuration {
1060 if !cache_configuration.should_speculate {
1061 return;
1062 }
1063 }
1064
1065 let request = {
1066 let mut req = self.to_completion_request(cx);
1067 // Skip the last message because it's likely to change and
1068 // therefore would be a waste to cache.
1069 req.messages.pop();
1070 req.messages.push(LanguageModelRequestMessage {
1071 role: Role::User,
1072 content: vec!["Respond only with OK, nothing else.".into()],
1073 cache: false,
1074 });
1075 req
1076 };
1077
1078 let model = Arc::clone(model);
1079 self.pending_cache_warming_task = cx.spawn(|_, cx| {
1080 async move {
1081 match model.stream_completion(request, &cx).await {
1082 Ok(mut stream) => {
1083 stream.next().await;
1084 log::info!("Cache warming completed successfully");
1085 }
1086 Err(e) => {
1087 log::warn!("Cache warming failed: {}", e);
1088 }
1089 };
1090
1091 anyhow::Ok(())
1092 }
1093 .log_err()
1094 });
1095 }
1096
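    /// Re-parses slash commands, restricted to the rows edited since the last parse.
    /// Overlapping edited row ranges are merged, the pending commands within them are
    /// replaced with freshly parsed ones, and `PendingSlashCommandsUpdated` is emitted
    /// when any commands were added or removed.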
1097 pub fn reparse_slash_commands(&mut self, cx: &mut ModelContext<Self>) {
1098 let buffer = self.buffer.read(cx);
1099 let mut row_ranges = self
1100 .edits_since_last_slash_command_parse
1101 .consume()
1102 .into_iter()
1103 .map(|edit| {
1104 let start_row = buffer.offset_to_point(edit.new.start).row;
1105 let end_row = buffer.offset_to_point(edit.new.end).row + 1;
1106 start_row..end_row
1107 })
1108 .peekable();
1109
1110 let mut removed = Vec::new();
1111 let mut updated = Vec::new();
1112 while let Some(mut row_range) = row_ranges.next() {
1113 while let Some(next_row_range) = row_ranges.peek() {
1114 if row_range.end >= next_row_range.start {
1115 row_range.end = next_row_range.end;
1116 row_ranges.next();
1117 } else {
1118 break;
1119 }
1120 }
1121
1122 let start = buffer.anchor_before(Point::new(row_range.start, 0));
1123 let end = buffer.anchor_after(Point::new(
1124 row_range.end - 1,
1125 buffer.line_len(row_range.end - 1),
1126 ));
1127
1128 let old_range = self.pending_command_indices_for_range(start..end, cx);
1129
1130 let mut new_commands = Vec::new();
1131 let mut lines = buffer.text_for_range(start..end).lines();
1132 let mut offset = lines.offset();
1133 while let Some(line) = lines.next() {
1134 if let Some(command_line) = SlashCommandLine::parse(line) {
1135 let name = &line[command_line.name.clone()];
1136 let arguments = command_line
1137 .arguments
1138 .iter()
1139 .filter_map(|argument_range| {
1140 if argument_range.is_empty() {
1141 None
1142 } else {
1143 line.get(argument_range.clone())
1144 }
1145 })
1146 .map(ToOwned::to_owned)
1147 .collect::<SmallVec<_>>();
1148 if let Some(command) = SlashCommandRegistry::global(cx).command(name) {
1149 if !command.requires_argument() || !arguments.is_empty() {
1150 let start_ix = offset + command_line.name.start - 1;
1151 let end_ix = offset
1152 + command_line
1153 .arguments
1154 .last()
1155 .map_or(command_line.name.end, |argument| argument.end);
1156 let source_range =
1157 buffer.anchor_after(start_ix)..buffer.anchor_after(end_ix);
1158 let pending_command = PendingSlashCommand {
1159 name: name.to_string(),
1160 arguments,
1161 source_range,
1162 status: PendingSlashCommandStatus::Idle,
1163 };
1164 updated.push(pending_command.clone());
1165 new_commands.push(pending_command);
1166 }
1167 }
1168 }
1169
1170 offset = lines.offset();
1171 }
1172
1173 let removed_commands = self.pending_slash_commands.splice(old_range, new_commands);
1174 removed.extend(removed_commands.map(|command| command.source_range));
1175 }
1176
1177 if !updated.is_empty() || !removed.is_empty() {
1178 cx.emit(ContextEvent::PendingSlashCommandsUpdated { removed, updated });
1179 }
1180 }
1181
1182 fn prune_invalid_workflow_steps(&mut self, inclusive: bool, cx: &mut ModelContext<Self>) {
1183 let mut removed = Vec::new();
1184
1185 for edit_range in self.edits_since_last_workflow_step_prune.consume() {
1186 let intersecting_range = self.find_intersecting_steps(edit_range.new, inclusive, cx);
1187 removed.extend(
1188 self.workflow_steps
1189 .drain(intersecting_range)
1190 .map(|step| step.range),
1191 );
1192 }
1193
1194 if !removed.is_empty() {
1195 cx.emit(ContextEvent::WorkflowStepsRemoved(removed));
1196 cx.notify();
1197 }
1198 }
1199
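    /// Returns the index range of workflow steps whose text intersects `range`. With
    /// `inclusive` set, steps that merely touch the edited range at a boundary also
    /// count as intersecting.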
1200 fn find_intersecting_steps(
1201 &self,
1202 range: Range<usize>,
1203 inclusive: bool,
1204 cx: &AppContext,
1205 ) -> Range<usize> {
1206 let buffer = self.buffer.read(cx);
1207 let start_ix = match self.workflow_steps.binary_search_by(|probe| {
1208 probe
1209 .range
1210 .end
1211 .to_offset(buffer)
1212 .cmp(&range.start)
1213 .then(if inclusive {
1214 Ordering::Greater
1215 } else {
1216 Ordering::Less
1217 })
1218 }) {
1219 Ok(ix) | Err(ix) => ix,
1220 };
1221 let end_ix = match self.workflow_steps.binary_search_by(|probe| {
1222 probe
1223 .range
1224 .start
1225 .to_offset(buffer)
1226 .cmp(&range.end)
1227 .then(if inclusive {
1228 Ordering::Less
1229 } else {
1230 Ordering::Greater
1231 })
1232 }) {
1233 Ok(ix) | Err(ix) => ix,
1234 };
1235 start_ix..end_ix
1236 }
1237
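    /// Scans `range` for `<step>`...`</step>` tags streamed by the model. Each newly
    /// discovered tagged range is inserted as a `WorkflowStepEntry` and queued for
    /// resolution, and the tags themselves are then deleted from the buffer without
    /// invalidating the step that was just parsed.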
1238 fn parse_workflow_steps_in_range(&mut self, range: Range<usize>, cx: &mut ModelContext<Self>) {
1239 let weak_self = cx.weak_model();
1240 let mut new_edit_steps = Vec::new();
1241 let mut edits = Vec::new();
1242
1243 let buffer = self.buffer.read(cx).snapshot();
1244 let mut message_lines = buffer.as_rope().chunks_in_range(range).lines();
1245 let mut in_step = false;
1246 let mut step_open_tag_start_ix = 0;
1247 let mut line_start_offset = message_lines.offset();
1248
1249 while let Some(line) = message_lines.next() {
1250 if let Some(step_start_index) = line.find("<step>") {
1251 if !in_step {
1252 in_step = true;
1253 step_open_tag_start_ix = line_start_offset + step_start_index;
1254 }
1255 }
1256
1257 if let Some(step_end_index) = line.find("</step>") {
1258 if in_step {
1259 let mut step_open_tag_end_ix = step_open_tag_start_ix + "<step>".len();
1260 if buffer.chars_at(step_open_tag_end_ix).next() == Some('\n') {
1261 step_open_tag_end_ix += 1;
1262 }
1263 let mut step_end_tag_start_ix = line_start_offset + step_end_index;
1264 let step_end_tag_end_ix = step_end_tag_start_ix + "</step>".len();
1265 if buffer.reversed_chars_at(step_end_tag_start_ix).next() == Some('\n') {
1266 step_end_tag_start_ix -= 1;
1267 }
1268 edits.push((step_open_tag_start_ix..step_open_tag_end_ix, ""));
1269 edits.push((step_end_tag_start_ix..step_end_tag_end_ix, ""));
1270 let tagged_range = buffer.anchor_after(step_open_tag_end_ix)
1271 ..buffer.anchor_before(step_end_tag_start_ix);
1272
1273 // Check if a step with the same range already exists
1274 let existing_step_index =
1275 self.workflow_step_index_for_range(&tagged_range, &buffer);
1276
1277 if let Err(ix) = existing_step_index {
1278 new_edit_steps.push((
1279 ix,
1280 WorkflowStepEntry {
1281 step: cx.new_model(|_| {
1282 WorkflowStep::new(tagged_range.clone(), weak_self.clone())
1283 }),
1284 range: tagged_range,
1285 },
1286 ));
1287 }
1288
1289 in_step = false;
1290 }
1291 }
1292
1293 line_start_offset = message_lines.offset();
1294 }
1295
1296 let mut updated = Vec::new();
1297 for (index, step) in new_edit_steps.into_iter().rev() {
1298 let step_range = step.range.clone();
1299 updated.push(step_range.clone());
1300 self.workflow_steps.insert(index, step);
1301 self.resolve_workflow_step(step_range, cx);
1302 }
1303
1304 // Delete <step> tags, making sure we don't accidentally invalidate
1305 // the step we just parsed.
1306 self.buffer
1307 .update(cx, |buffer, cx| buffer.edit(edits, None, cx));
1308 self.edits_since_last_workflow_step_prune.consume();
1309 }
1310
1311 pub fn resolve_workflow_step(
1312 &mut self,
1313 tagged_range: Range<language::Anchor>,
1314 cx: &mut ModelContext<Self>,
1315 ) {
1316 let Ok(step_index) = self
1317 .workflow_steps
1318 .binary_search_by(|step| step.range.cmp(&tagged_range, self.buffer.read(cx)))
1319 else {
1320 return;
1321 };
1322
1323 cx.emit(ContextEvent::WorkflowStepUpdated(tagged_range.clone()));
1324 cx.notify();
1325
1326 let resolution = self.workflow_steps[step_index].step.clone();
1327 cx.defer(move |cx| {
1328 resolution.update(cx, |resolution, cx| resolution.resolve(cx));
1329 });
1330 }
1331
1332 pub fn workflow_step_updated(
1333 &mut self,
1334 range: Range<language::Anchor>,
1335 cx: &mut ModelContext<Self>,
1336 ) {
1337 cx.emit(ContextEvent::WorkflowStepUpdated(range));
1338 cx.notify();
1339 }
1340
1341 pub fn pending_command_for_position(
1342 &mut self,
1343 position: language::Anchor,
1344 cx: &mut ModelContext<Self>,
1345 ) -> Option<&mut PendingSlashCommand> {
1346 let buffer = self.buffer.read(cx);
1347 match self
1348 .pending_slash_commands
1349 .binary_search_by(|probe| probe.source_range.end.cmp(&position, buffer))
1350 {
1351 Ok(ix) => Some(&mut self.pending_slash_commands[ix]),
1352 Err(ix) => {
1353 let cmd = self.pending_slash_commands.get_mut(ix)?;
1354 if position.cmp(&cmd.source_range.start, buffer).is_ge()
1355 && position.cmp(&cmd.source_range.end, buffer).is_le()
1356 {
1357 Some(cmd)
1358 } else {
1359 None
1360 }
1361 }
1362 }
1363 }
1364
1365 pub fn pending_commands_for_range(
1366 &self,
1367 range: Range<language::Anchor>,
1368 cx: &AppContext,
1369 ) -> &[PendingSlashCommand] {
1370 let range = self.pending_command_indices_for_range(range, cx);
1371 &self.pending_slash_commands[range]
1372 }
1373
1374 fn pending_command_indices_for_range(
1375 &self,
1376 range: Range<language::Anchor>,
1377 cx: &AppContext,
1378 ) -> Range<usize> {
1379 let buffer = self.buffer.read(cx);
1380 let start_ix = match self
1381 .pending_slash_commands
1382 .binary_search_by(|probe| probe.source_range.end.cmp(&range.start, &buffer))
1383 {
1384 Ok(ix) | Err(ix) => ix,
1385 };
1386 let end_ix = match self
1387 .pending_slash_commands
1388 .binary_search_by(|probe| probe.source_range.start.cmp(&range.end, &buffer))
1389 {
1390 Ok(ix) => ix + 1,
1391 Err(ix) => ix,
1392 };
1393 start_ix..end_ix
1394 }
1395
1396 pub fn insert_command_output(
1397 &mut self,
1398 command_range: Range<language::Anchor>,
1399 output: Task<Result<SlashCommandOutput>>,
1400 ensure_trailing_newline: bool,
1401 expand_result: bool,
1402 cx: &mut ModelContext<Self>,
1403 ) {
1404 self.reparse_slash_commands(cx);
1405
1406 let insert_output_task = cx.spawn(|this, mut cx| {
1407 let command_range = command_range.clone();
1408 async move {
1409 let output = output.await;
1410 this.update(&mut cx, |this, cx| match output {
1411 Ok(mut output) => {
1412 // Ensure section ranges are valid.
1413 for section in &mut output.sections {
1414 section.range.start = section.range.start.min(output.text.len());
1415 section.range.end = section.range.end.min(output.text.len());
1416 while !output.text.is_char_boundary(section.range.start) {
1417 section.range.start -= 1;
1418 }
1419 while !output.text.is_char_boundary(section.range.end) {
1420 section.range.end += 1;
1421 }
1422 }
1423
1424 // Ensure there is a newline after the last section.
1425 if ensure_trailing_newline {
1426 let has_newline_after_last_section =
1427 output.sections.last().map_or(false, |last_section| {
1428 output.text[last_section.range.end..].ends_with('\n')
1429 });
1430 if !has_newline_after_last_section {
1431 output.text.push('\n');
1432 }
1433 }
1434
1435 let version = this.version.clone();
1436 let command_id = SlashCommandId(this.next_timestamp());
1437 let (operation, event) = this.buffer.update(cx, |buffer, cx| {
1438 let start = command_range.start.to_offset(buffer);
1439 let old_end = command_range.end.to_offset(buffer);
1440 let new_end = start + output.text.len();
1441 buffer.edit([(start..old_end, output.text)], None, cx);
1442
1443 let mut sections = output
1444 .sections
1445 .into_iter()
1446 .map(|section| SlashCommandOutputSection {
1447 range: buffer.anchor_after(start + section.range.start)
1448 ..buffer.anchor_before(start + section.range.end),
1449 icon: section.icon,
1450 label: section.label,
1451 })
1452 .collect::<Vec<_>>();
1453 sections.sort_by(|a, b| a.range.cmp(&b.range, buffer));
1454
1455 this.slash_command_output_sections
1456 .extend(sections.iter().cloned());
1457 this.slash_command_output_sections
1458 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
1459
1460 let output_range =
1461 buffer.anchor_after(start)..buffer.anchor_before(new_end);
1462 this.finished_slash_commands.insert(command_id);
1463
1464 (
1465 ContextOperation::SlashCommandFinished {
1466 id: command_id,
1467 output_range: output_range.clone(),
1468 sections: sections.clone(),
1469 version,
1470 },
1471 ContextEvent::SlashCommandFinished {
1472 output_range,
1473 sections,
1474 run_commands_in_output: output.run_commands_in_text,
1475 expand_result,
1476 },
1477 )
1478 });
1479
1480 this.push_op(operation, cx);
1481 cx.emit(event);
1482 }
1483 Err(error) => {
1484 if let Some(pending_command) =
1485 this.pending_command_for_position(command_range.start, cx)
1486 {
1487 pending_command.status =
1488 PendingSlashCommandStatus::Error(error.to_string());
1489 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1490 removed: vec![pending_command.source_range.clone()],
1491 updated: vec![pending_command.clone()],
1492 });
1493 }
1494 }
1495 })
1496 .ok();
1497 }
1498 });
1499
1500 if let Some(pending_command) = self.pending_command_for_position(command_range.start, cx) {
1501 pending_command.status = PendingSlashCommandStatus::Running {
1502 _task: insert_output_task.shared(),
1503 };
1504 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1505 removed: vec![pending_command.source_range.clone()],
1506 updated: vec![pending_command.clone()],
1507 });
1508 }
1509 }
1510
1511 pub fn completion_provider_changed(&mut self, cx: &mut ModelContext<Self>) {
1512 self.count_remaining_tokens(cx);
1513 }
1514
1515 fn get_last_valid_message_id(&self, cx: &ModelContext<Self>) -> Option<MessageId> {
1516 self.message_anchors.iter().rev().find_map(|message| {
1517 message
1518 .start
1519 .is_valid(self.buffer.read(cx))
1520 .then_some(message.id)
1521 })
1522 }
1523
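    /// Starts a streamed completion for the conversation so far. A pending assistant
    /// message is appended, followed by an empty user message to hold the next reply,
    /// and the model's response is streamed into the assistant message chunk by chunk.
    /// Returns the anchor of the new user message, or `None` if no authenticated
    /// model/provider is configured.
    ///
    /// A rough usage sketch (assuming `context: Model<Context>` is in scope):
    ///
    /// ```ignore
    /// context.update(cx, |context, cx| {
    ///     if let Some(_next_user_message) = context.assist(cx) {
    ///         // The reply streams into the buffer; observe
    ///         // `ContextEvent::StreamedCompletion` to react to new chunks.
    ///     }
    /// });
    /// ```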
1524 pub fn assist(&mut self, cx: &mut ModelContext<Self>) -> Option<MessageAnchor> {
1525 let provider = LanguageModelRegistry::read_global(cx).active_provider()?;
1526 let model = LanguageModelRegistry::read_global(cx).active_model()?;
1527 let last_message_id = self.get_last_valid_message_id(cx)?;
1528
1529 if !provider.is_authenticated(cx) {
1530 log::info!("completion provider has no credentials");
1531 return None;
1532 }
1533 // Compute which messages to cache, including the last one.
1534 self.mark_longest_messages_for_cache(&model.cache_configuration(), false, cx);
1535
1536 let request = self.to_completion_request(cx);
1537 let assistant_message = self
1538 .insert_message_after(last_message_id, Role::Assistant, MessageStatus::Pending, cx)
1539 .unwrap();
1540
1541 // Queue up the user's next reply.
1542 let user_message = self
1543 .insert_message_after(assistant_message.id, Role::User, MessageStatus::Done, cx)
1544 .unwrap();
1545
1546 let pending_completion_id = post_inc(&mut self.completion_count);
1547
1548 let task = cx.spawn({
1549 |this, mut cx| async move {
1550 let stream = model.stream_completion(request, &cx);
1551 let assistant_message_id = assistant_message.id;
1552 let mut response_latency = None;
1553 let stream_completion = async {
1554 let request_start = Instant::now();
1555 let mut chunks = stream.await?;
1556
1557 while let Some(chunk) = chunks.next().await {
1558 if response_latency.is_none() {
1559 response_latency = Some(request_start.elapsed());
1560 }
1561 let chunk = chunk?;
1562
1563 this.update(&mut cx, |this, cx| {
1564 let message_ix = this
1565 .message_anchors
1566 .iter()
1567 .position(|message| message.id == assistant_message_id)?;
1568 let message_range = this.buffer.update(cx, |buffer, cx| {
1569 let message_start_offset =
1570 this.message_anchors[message_ix].start.to_offset(buffer);
1571 let message_old_end_offset = this.message_anchors[message_ix + 1..]
1572 .iter()
1573 .find(|message| message.start.is_valid(buffer))
1574 .map_or(buffer.len(), |message| {
1575 message.start.to_offset(buffer).saturating_sub(1)
1576 });
1577 let message_new_end_offset = message_old_end_offset + chunk.len();
1578 buffer.edit(
1579 [(message_old_end_offset..message_old_end_offset, chunk)],
1580 None,
1581 cx,
1582 );
1583 message_start_offset..message_new_end_offset
1584 });
1585
1586 // Use `inclusive = false` as edits might occur at the end of a parsed step.
1587 this.prune_invalid_workflow_steps(false, cx);
1588 this.parse_workflow_steps_in_range(message_range, cx);
1589 cx.emit(ContextEvent::StreamedCompletion);
1590
1591 Some(())
1592 })?;
1593 smol::future::yield_now().await;
1594 }
1595 this.update(&mut cx, |this, cx| {
1596 this.pending_completions
1597 .retain(|completion| completion.id != pending_completion_id);
1598 this.summarize(false, cx);
1599 })?;
1600
1601 anyhow::Ok(())
1602 };
1603
1604 let result = stream_completion.await;
1605
1606 this.update(&mut cx, |this, cx| {
1607 let error_message = result
1608 .err()
1609 .map(|error| error.to_string().trim().to_string());
1610
1611 if let Some(error_message) = error_message.as_ref() {
1612 cx.emit(ContextEvent::ShowAssistError(SharedString::from(
1613 error_message.clone(),
1614 )));
1615 }
1616
1617 this.update_metadata(assistant_message_id, cx, |metadata| {
1618 if let Some(error_message) = error_message.as_ref() {
1619 metadata.status =
1620 MessageStatus::Error(SharedString::from(error_message.clone()));
1621 } else {
1622 metadata.status = MessageStatus::Done;
1623 }
1624 });
1625
1626 if let Some(telemetry) = this.telemetry.as_ref() {
1627 telemetry.report_assistant_event(
1628 Some(this.id.0.clone()),
1629 AssistantKind::Panel,
1630 model.telemetry_id(),
1631 response_latency,
1632 error_message,
1633 );
1634 }
1635 })
1636 .ok();
1637 }
1638 });
1639
1640 self.pending_completions.push(PendingCompletion {
1641 id: pending_completion_id,
1642 assistant_message_id: assistant_message.id,
1643 _task: task,
1644 });
1645
1646 Some(user_message)
1647 }
1648
1649 pub fn to_completion_request(&self, cx: &AppContext) -> LanguageModelRequest {
1650 let buffer = self.buffer.read(cx);
1651 let request_messages = self
1652 .messages(cx)
1653 .filter(|message| message.status == MessageStatus::Done)
1654 .filter_map(|message| message.to_request_message(&buffer))
1655 .collect();
1656
1657 LanguageModelRequest {
1658 messages: request_messages,
1659 stop: vec![],
1660 temperature: 1.0,
1661 }
1662 }
1663
1664 pub fn cancel_last_assist(&mut self, cx: &mut ModelContext<Self>) -> bool {
1665 if let Some(pending_completion) = self.pending_completions.pop() {
1666 self.update_metadata(pending_completion.assistant_message_id, cx, |metadata| {
1667 if metadata.status == MessageStatus::Pending {
1668 metadata.status = MessageStatus::Canceled;
1669 }
1670 });
1671 true
1672 } else {
1673 false
1674 }
1675 }
1676
1677 pub fn cycle_message_roles(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
1678 for id in ids {
1679 if let Some(metadata) = self.messages_metadata.get(&id) {
1680 let role = metadata.role.cycle();
1681 self.update_metadata(id, cx, |metadata| metadata.role = role);
1682 }
1683 }
1684 }
1685
1686 pub fn update_metadata(
1687 &mut self,
1688 id: MessageId,
1689 cx: &mut ModelContext<Self>,
1690 f: impl FnOnce(&mut MessageMetadata),
1691 ) {
1692 let version = self.version.clone();
1693 let timestamp = self.next_timestamp();
1694 if let Some(metadata) = self.messages_metadata.get_mut(&id) {
1695 f(metadata);
1696 metadata.timestamp = timestamp;
1697 let operation = ContextOperation::UpdateMessage {
1698 message_id: id,
1699 metadata: metadata.clone(),
1700 version,
1701 };
1702 self.push_op(operation, cx);
1703 cx.emit(ContextEvent::MessagesEdited);
1704 cx.notify();
1705 }
1706 }
1707
1708 pub fn insert_message_after(
1709 &mut self,
1710 message_id: MessageId,
1711 role: Role,
1712 status: MessageStatus,
1713 cx: &mut ModelContext<Self>,
1714 ) -> Option<MessageAnchor> {
1715 if let Some(prev_message_ix) = self
1716 .message_anchors
1717 .iter()
1718 .position(|message| message.id == message_id)
1719 {
1720 // Find the next valid message after the one we were given.
1721 let mut next_message_ix = prev_message_ix + 1;
1722 while let Some(next_message) = self.message_anchors.get(next_message_ix) {
1723 if next_message.start.is_valid(self.buffer.read(cx)) {
1724 break;
1725 }
1726 next_message_ix += 1;
1727 }
1728
1729 let start = self.buffer.update(cx, |buffer, cx| {
1730 let offset = self
1731 .message_anchors
1732 .get(next_message_ix)
1733 .map_or(buffer.len(), |message| {
1734 buffer.clip_offset(message.start.to_offset(buffer) - 1, Bias::Left)
1735 });
1736 buffer.edit([(offset..offset, "\n")], None, cx);
1737 buffer.anchor_before(offset + 1)
1738 });
1739
1740 let version = self.version.clone();
1741 let anchor = MessageAnchor {
1742 id: MessageId(self.next_timestamp()),
1743 start,
1744 };
1745 let metadata = MessageMetadata {
1746 role,
1747 status,
1748 timestamp: anchor.id.0,
1749 should_cache: false,
1750 is_cache_anchor: false,
1751 };
1752 self.insert_message(anchor.clone(), metadata.clone(), cx);
1753 self.push_op(
1754 ContextOperation::InsertMessage {
1755 anchor: anchor.clone(),
1756 metadata,
1757 version,
1758 },
1759 cx,
1760 );
1761 Some(anchor)
1762 } else {
1763 None
1764 }
1765 }
1766
1767 pub fn insert_image(&mut self, image: Image, cx: &mut ModelContext<Self>) -> Option<()> {
1768 if let hash_map::Entry::Vacant(entry) = self.images.entry(image.id()) {
1769 entry.insert((
1770 image.to_image_data(cx).log_err()?,
1771 LanguageModelImage::from_image(image, cx).shared(),
1772 ));
1773 }
1774
1775 Some(())
1776 }
1777
1778 pub fn insert_image_anchor(
1779 &mut self,
1780 image_id: u64,
1781 anchor: language::Anchor,
1782 cx: &mut ModelContext<Self>,
1783 ) -> bool {
1784 cx.emit(ContextEvent::MessagesEdited);
1785
1786 let buffer = self.buffer.read(cx);
1787 let insertion_ix = match self
1788 .image_anchors
1789 .binary_search_by(|existing_anchor| anchor.cmp(&existing_anchor.anchor, buffer))
1790 {
1791 Ok(ix) => ix,
1792 Err(ix) => ix,
1793 };
1794
1795 if let Some((render_image, image)) = self.images.get(&image_id) {
1796 self.image_anchors.insert(
1797 insertion_ix,
1798 ImageAnchor {
1799 anchor,
1800 image_id,
1801 image: image.clone(),
1802 render_image: render_image.clone(),
1803 },
1804 );
1805
1806 true
1807 } else {
1808 false
1809 }
1810 }
1811
1812 pub fn images<'a>(&'a self, _cx: &'a AppContext) -> impl 'a + Iterator<Item = ImageAnchor> {
1813 self.image_anchors.iter().cloned()
1814 }
1815
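    /// Splits the message containing `range` so that part of it can take on a different
    /// role, returning the newly created (selection, suffix) message anchors. Returns
    /// `(None, None)` when the range spans more than one message.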
1816 pub fn split_message(
1817 &mut self,
1818 range: Range<usize>,
1819 cx: &mut ModelContext<Self>,
1820 ) -> (Option<MessageAnchor>, Option<MessageAnchor>) {
1821 let start_message = self.message_for_offset(range.start, cx);
1822 let end_message = self.message_for_offset(range.end, cx);
1823 if let Some((start_message, end_message)) = start_message.zip(end_message) {
1824 // Prevent splitting when range spans multiple messages.
1825 if start_message.id != end_message.id {
1826 return (None, None);
1827 }
1828
1829 let message = start_message;
1830 let role = message.role;
1831 let mut edited_buffer = false;
1832
1833 let mut suffix_start = None;
1834
1835 // TODO: why did this start panicking?
1836 if range.start > message.offset_range.start
1837 && range.end < message.offset_range.end.saturating_sub(1)
1838 {
1839 if self.buffer.read(cx).chars_at(range.end).next() == Some('\n') {
1840 suffix_start = Some(range.end + 1);
1841 } else if self.buffer.read(cx).reversed_chars_at(range.end).next() == Some('\n') {
1842 suffix_start = Some(range.end);
1843 }
1844 }
1845
1846 let version = self.version.clone();
1847 let suffix = if let Some(suffix_start) = suffix_start {
1848 MessageAnchor {
1849 id: MessageId(self.next_timestamp()),
1850 start: self.buffer.read(cx).anchor_before(suffix_start),
1851 }
1852 } else {
1853 self.buffer.update(cx, |buffer, cx| {
1854 buffer.edit([(range.end..range.end, "\n")], None, cx);
1855 });
1856 edited_buffer = true;
1857 MessageAnchor {
1858 id: MessageId(self.next_timestamp()),
1859 start: self.buffer.read(cx).anchor_before(range.end + 1),
1860 }
1861 };
1862
1863 let suffix_metadata = MessageMetadata {
1864 role,
1865 status: MessageStatus::Done,
1866 timestamp: suffix.id.0,
1867 should_cache: false,
1868 is_cache_anchor: false,
1869 };
1870 self.insert_message(suffix.clone(), suffix_metadata.clone(), cx);
1871 self.push_op(
1872 ContextOperation::InsertMessage {
1873 anchor: suffix.clone(),
1874 metadata: suffix_metadata,
1875 version,
1876 },
1877 cx,
1878 );
1879
1880 let new_messages =
1881 if range.start == range.end || range.start == message.offset_range.start {
1882 (None, Some(suffix))
1883 } else {
1884 let mut prefix_end = None;
1885 if range.start > message.offset_range.start
                        && range.end < message.offset_range.end.saturating_sub(1)
1887 {
1888 if self.buffer.read(cx).chars_at(range.start).next() == Some('\n') {
1889 prefix_end = Some(range.start + 1);
1890 } else if self.buffer.read(cx).reversed_chars_at(range.start).next()
1891 == Some('\n')
1892 {
1893 prefix_end = Some(range.start);
1894 }
1895 }
1896
1897 let version = self.version.clone();
1898 let selection = if let Some(prefix_end) = prefix_end {
1899 MessageAnchor {
1900 id: MessageId(self.next_timestamp()),
1901 start: self.buffer.read(cx).anchor_before(prefix_end),
1902 }
1903 } else {
1904 self.buffer.update(cx, |buffer, cx| {
1905 buffer.edit([(range.start..range.start, "\n")], None, cx)
1906 });
1907 edited_buffer = true;
1908 MessageAnchor {
1909 id: MessageId(self.next_timestamp()),
1910 start: self.buffer.read(cx).anchor_before(range.end + 1),
1911 }
1912 };
1913
1914 let selection_metadata = MessageMetadata {
1915 role,
1916 status: MessageStatus::Done,
1917 timestamp: selection.id.0,
1918 should_cache: false,
1919 is_cache_anchor: false,
1920 };
1921 self.insert_message(selection.clone(), selection_metadata.clone(), cx);
1922 self.push_op(
1923 ContextOperation::InsertMessage {
1924 anchor: selection.clone(),
1925 metadata: selection_metadata,
1926 version,
1927 },
1928 cx,
1929 );
1930
1931 (Some(selection), Some(suffix))
1932 };
1933
1934 if !edited_buffer {
1935 cx.emit(ContextEvent::MessagesEdited);
1936 }
1937 new_messages
1938 } else {
1939 (None, None)
1940 }
1941 }
1942
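    /// Records the metadata for a new message and inserts its anchor into
    /// `message_anchors`, keeping the list ordered by buffer position (with
    /// message ids breaking ties), and emits `MessagesEdited`.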
1943 fn insert_message(
1944 &mut self,
1945 new_anchor: MessageAnchor,
1946 new_metadata: MessageMetadata,
1947 cx: &mut ModelContext<Self>,
1948 ) {
1949 cx.emit(ContextEvent::MessagesEdited);
1950
1951 self.messages_metadata.insert(new_anchor.id, new_metadata);
1952
1953 let buffer = self.buffer.read(cx);
1954 let insertion_ix = self
1955 .message_anchors
1956 .iter()
1957 .position(|anchor| {
1958 let comparison = new_anchor.start.cmp(&anchor.start, buffer);
1959 comparison.is_lt() || (comparison.is_eq() && new_anchor.id > anchor.id)
1960 })
1961 .unwrap_or(self.message_anchors.len());
1962 self.message_anchors.insert(insertion_ix, new_anchor);
1963 }
1964
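    /// Generates a short title for this context by asking the active language
    /// model to summarize the conversation, streaming the first line of the
    /// response into `self.summary` and emitting `SummaryChanged` as it
    /// arrives. Unless `replace_old` is true, this only runs when the context
    /// has at least two messages and no summary yet, and it requires the
    /// active provider to be authenticated.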
1965 pub(super) fn summarize(&mut self, replace_old: bool, cx: &mut ModelContext<Self>) {
1966 let Some(provider) = LanguageModelRegistry::read_global(cx).active_provider() else {
1967 return;
1968 };
1969 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
1970 return;
1971 };
1972
1973 if replace_old || (self.message_anchors.len() >= 2 && self.summary.is_none()) {
1974 if !provider.is_authenticated(cx) {
1975 return;
1976 }
1977
1978 let messages = self
1979 .messages(cx)
1980 .filter_map(|message| message.to_request_message(self.buffer.read(cx)))
1981 .chain(Some(LanguageModelRequestMessage {
1982 role: Role::User,
1983 content: vec![
1984 "Summarize the context into a short title without punctuation.".into(),
1985 ],
1986 cache: false,
1987 }));
1988 let request = LanguageModelRequest {
1989 messages: messages.collect(),
1990 stop: vec![],
1991 temperature: 1.0,
1992 };
1993
1994 self.pending_summary = cx.spawn(|this, mut cx| {
1995 async move {
1996 let stream = model.stream_completion(request, &cx);
1997 let mut messages = stream.await?;
1998
1999 let mut replaced = !replace_old;
2000 while let Some(message) = messages.next().await {
2001 let text = message?;
2002 let mut lines = text.lines();
2003 this.update(&mut cx, |this, cx| {
2004 let version = this.version.clone();
2005 let timestamp = this.next_timestamp();
2006 let summary = this.summary.get_or_insert(ContextSummary::default());
2007 if !replaced && replace_old {
2008 summary.text.clear();
2009 replaced = true;
2010 }
2011 summary.text.extend(lines.next());
2012 summary.timestamp = timestamp;
2013 let operation = ContextOperation::UpdateSummary {
2014 summary: summary.clone(),
2015 version,
2016 };
2017 this.push_op(operation, cx);
2018 cx.emit(ContextEvent::SummaryChanged);
2019 })?;
2020
2021 // Stop if the LLM generated multiple lines.
2022 if lines.next().is_some() {
2023 break;
2024 }
2025 }
2026
2027 this.update(&mut cx, |this, cx| {
2028 let version = this.version.clone();
2029 let timestamp = this.next_timestamp();
2030 if let Some(summary) = this.summary.as_mut() {
2031 summary.done = true;
2032 summary.timestamp = timestamp;
2033 let operation = ContextOperation::UpdateSummary {
2034 summary: summary.clone(),
2035 version,
2036 };
2037 this.push_op(operation, cx);
2038 cx.emit(ContextEvent::SummaryChanged);
2039 }
2040 })?;
2041
2042 anyhow::Ok(())
2043 }
2044 .log_err()
2045 });
2046 }
2047 }
2048
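    /// Returns the message containing the given buffer offset, if any.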
2049 fn message_for_offset(&self, offset: usize, cx: &AppContext) -> Option<Message> {
2050 self.messages_for_offsets([offset], cx).pop()
2051 }
2052
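    /// Returns the distinct messages containing the given buffer offsets.
    /// Offsets are processed in a single forward pass, so they are expected in
    /// ascending order; multiple offsets that fall inside the same message
    /// yield that message only once.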
2053 pub fn messages_for_offsets(
2054 &self,
2055 offsets: impl IntoIterator<Item = usize>,
2056 cx: &AppContext,
2057 ) -> Vec<Message> {
2058 let mut result = Vec::new();
2059
2060 let mut messages = self.messages(cx).peekable();
2061 let mut offsets = offsets.into_iter().peekable();
2062 let mut current_message = messages.next();
2063 while let Some(offset) = offsets.next() {
2064 // Locate the message that contains the offset.
2065 while current_message.as_ref().map_or(false, |message| {
2066 !message.offset_range.contains(&offset) && messages.peek().is_some()
2067 }) {
2068 current_message = messages.next();
2069 }
2070 let Some(message) = current_message.as_ref() else {
2071 break;
2072 };
2073
2074 // Skip offsets that are in the same message.
2075 while offsets.peek().map_or(false, |offset| {
2076 message.offset_range.contains(offset) || messages.peek().is_none()
2077 }) {
2078 offsets.next();
2079 }
2080
2081 result.push(message.clone());
2082 }
2083 result
2084 }
2085
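    /// Resolves the given message anchors against the current buffer, pairing
    /// each one with its metadata and with any images anchored in this context.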
2086 fn messages_from_anchors<'a>(
2087 &'a self,
2088 message_anchors: impl Iterator<Item = &'a MessageAnchor> + 'a,
2089 cx: &'a AppContext,
2090 ) -> impl 'a + Iterator<Item = Message> {
2091 let buffer = self.buffer.read(cx);
2092 let messages = message_anchors.enumerate();
2093 let images = self.image_anchors.iter();
2094
2095 Self::messages_from_iters(buffer, &self.messages_metadata, messages, images)
2096 }
2097
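    /// Iterates over all messages in this context, in buffer order.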
2098 pub fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Message> {
2099 self.messages_from_anchors(self.message_anchors.iter(), cx)
2100 }
2101
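    /// Core iterator used by `messages` and `messages_from_anchors`. Each
    /// message spans from its anchor to the next valid anchor (or the end of
    /// the buffer); consecutive anchors whose positions are no longer valid are
    /// folded into the preceding message's `index_range`, and images anchored
    /// before a message's end are attached to it as `image_offsets`.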
2102 pub fn messages_from_iters<'a>(
2103 buffer: &'a Buffer,
2104 metadata: &'a HashMap<MessageId, MessageMetadata>,
2105 messages: impl Iterator<Item = (usize, &'a MessageAnchor)> + 'a,
2106 images: impl Iterator<Item = &'a ImageAnchor> + 'a,
2107 ) -> impl 'a + Iterator<Item = Message> {
2108 let mut messages = messages.peekable();
2109 let mut images = images.peekable();
2110
2111 iter::from_fn(move || {
2112 if let Some((start_ix, message_anchor)) = messages.next() {
2113 let metadata = metadata.get(&message_anchor.id)?;
2114
2115 let message_start = message_anchor.start.to_offset(buffer);
2116 let mut message_end = None;
2117 let mut end_ix = start_ix;
2118 while let Some((_, next_message)) = messages.peek() {
2119 if next_message.start.is_valid(buffer) {
2120 message_end = Some(next_message.start);
2121 break;
2122 } else {
2123 end_ix += 1;
2124 messages.next();
2125 }
2126 }
2127 let message_end_anchor = message_end.unwrap_or(language::Anchor::MAX);
2128 let message_end = message_end_anchor.to_offset(buffer);
2129
2130 let mut image_offsets = SmallVec::new();
2131 while let Some(image_anchor) = images.peek() {
2132 if image_anchor.anchor.cmp(&message_end_anchor, buffer).is_lt() {
2133 image_offsets.push((
2134 image_anchor.anchor.to_offset(buffer),
2135 MessageImage {
2136 image_id: image_anchor.image_id,
2137 image: image_anchor.image.clone(),
2138 },
2139 ));
2140 images.next();
2141 } else {
2142 break;
2143 }
2144 }
2145
2146 return Some(Message {
2147 index_range: start_ix..end_ix,
2148 offset_range: message_start..message_end,
2149 id: message_anchor.id,
2150 anchor: message_anchor.start,
2151 role: metadata.role,
2152 status: metadata.status.clone(),
2153 cache: metadata.is_cache_anchor,
2154 image_offsets,
2155 });
2156 }
2157 None
2158 })
2159 }
2160
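    /// Persists this context as JSON under `contexts_dir()`, optionally after a
    /// debounce delay. Only local contexts with a finished summary are saved;
    /// the file name is derived from the summary plus a numeric discriminant to
    /// avoid collisions, and a previously saved file under a different name is
    /// removed. Attached images are serialized first via `serialize_images`.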
2161 pub fn save(
2162 &mut self,
2163 debounce: Option<Duration>,
2164 fs: Arc<dyn Fs>,
2165 cx: &mut ModelContext<Context>,
2166 ) {
2167 if self.replica_id() != ReplicaId::default() {
2168 // Prevent saving a remote context for now.
2169 return;
2170 }
2171
2172 self.pending_save = cx.spawn(|this, mut cx| async move {
2173 if let Some(debounce) = debounce {
2174 cx.background_executor().timer(debounce).await;
2175 }
2176
2177 let (old_path, summary) = this.read_with(&cx, |this, _| {
2178 let path = this.path.clone();
2179 let summary = if let Some(summary) = this.summary.as_ref() {
2180 if summary.done {
2181 Some(summary.text.clone())
2182 } else {
2183 None
2184 }
2185 } else {
2186 None
2187 };
2188 (path, summary)
2189 })?;
2190
2191 if let Some(summary) = summary {
2192 this.read_with(&cx, |this, cx| this.serialize_images(fs.clone(), cx))?
2193 .await;
2194
2195 let context = this.read_with(&cx, |this, cx| this.serialize(cx))?;
2196 let mut discriminant = 1;
2197 let mut new_path;
2198 loop {
2199 new_path = contexts_dir().join(&format!(
2200 "{} - {}.zed.json",
2201 summary.trim(),
2202 discriminant
2203 ));
2204 if fs.is_file(&new_path).await {
2205 discriminant += 1;
2206 } else {
2207 break;
2208 }
2209 }
2210
2211 fs.create_dir(contexts_dir().as_ref()).await?;
2212 fs.atomic_write(new_path.clone(), serde_json::to_string(&context).unwrap())
2213 .await?;
2214 if let Some(old_path) = old_path {
2215 if new_path != old_path {
2216 fs.remove_file(
2217 &old_path,
2218 RemoveOptions {
2219 recursive: false,
2220 ignore_if_not_exists: true,
2221 },
2222 )
2223 .await?;
2224 }
2225 }
2226
2227 this.update(&mut cx, |this, _| this.path = Some(new_path))?;
2228 }
2229
2230 Ok(())
2231 });
2232 }
2233
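    /// Writes each image attached to this context into `context_images_dir()`
    /// as a base64-encoded `.png.base64` file, skipping images that already
    /// exist on disk. Returns a background task that resolves once all writes
    /// have completed.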
2234 pub fn serialize_images(&self, fs: Arc<dyn Fs>, cx: &AppContext) -> Task<()> {
2235 let mut images_to_save = self
2236 .images
2237 .iter()
2238 .map(|(id, (_, llm_image))| {
2239 let fs = fs.clone();
2240 let llm_image = llm_image.clone();
2241 let id = *id;
2242 async move {
2243 if let Some(llm_image) = llm_image.await {
2244 let path: PathBuf =
2245 context_images_dir().join(&format!("{}.png.base64", id));
2246 if fs
2247 .metadata(path.as_path())
2248 .await
2249 .log_err()
2250 .flatten()
2251 .is_none()
2252 {
2253 fs.atomic_write(path, llm_image.source.to_string())
2254 .await
2255 .log_err();
2256 }
2257 }
2258 }
2259 })
2260 .collect::<FuturesUnordered<_>>();
2261 cx.background_executor().spawn(async move {
2262 if fs
2263 .create_dir(context_images_dir().as_ref())
2264 .await
2265 .log_err()
2266 .is_some()
2267 {
2268 while let Some(_) = images_to_save.next().await {}
2269 }
2270 })
2271 }
2272
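    /// Sets a user-provided summary, marking it as done and emitting
    /// `SummaryChanged`.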
2273 pub(crate) fn custom_summary(&mut self, custom_summary: String, cx: &mut ModelContext<Self>) {
2274 let timestamp = self.next_timestamp();
2275 let summary = self.summary.get_or_insert(ContextSummary::default());
2276 summary.timestamp = timestamp;
2277 summary.done = true;
2278 summary.text = custom_summary;
2279 cx.emit(ContextEvent::SummaryChanged);
2280 }
2281}
2282
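/// Version vectors for a context's operations and for its underlying buffer,
/// convertible to and from `proto::ContextVersion`.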
2283#[derive(Debug, Default)]
2284pub struct ContextVersion {
2285 context: clock::Global,
2286 buffer: clock::Global,
2287}
2288
2289impl ContextVersion {
2290 pub fn from_proto(proto: &proto::ContextVersion) -> Self {
2291 Self {
2292 context: language::proto::deserialize_version(&proto.context_version),
2293 buffer: language::proto::deserialize_version(&proto.buffer_version),
2294 }
2295 }
2296
2297 pub fn to_proto(&self, context_id: ContextId) -> proto::ContextVersion {
2298 proto::ContextVersion {
2299 context_id: context_id.to_proto(),
2300 context_version: language::proto::serialize_version(&self.context),
2301 buffer_version: language::proto::serialize_version(&self.buffer),
2302 }
2303 }
2304}
2305
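/// A slash command invocation parsed from the context buffer: its name,
/// arguments, execution status, and the source range it occupies.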
2306#[derive(Debug, Clone)]
2307pub struct PendingSlashCommand {
2308 pub name: String,
2309 pub arguments: SmallVec<[String; 3]>,
2310 pub status: PendingSlashCommandStatus,
2311 pub source_range: Range<language::Anchor>,
2312}
2313
2314#[derive(Debug, Clone)]
2315pub enum PendingSlashCommandStatus {
2316 Idle,
2317 Running { _task: Shared<Task<()>> },
2318 Error(String),
2319}
2320
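/// The serialized form of a single message in a saved context, including the
/// buffer offsets of any images attached to it.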
2321#[derive(Serialize, Deserialize)]
2322pub struct SavedMessage {
2323 pub id: MessageId,
2324 pub start: usize,
2325 pub metadata: MessageMetadata,
2326 // Defaulted for backwards compatibility with JSON files created before August 2024, which lack this field.
2327 #[serde(default)]
2328 pub image_offsets: Vec<(usize, u64)>,
2329}
2330
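/// The on-disk representation of a context, currently at format version 0.4.0.
/// Older versions are upgraded on load; see `SavedContext::from_json`.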
2331#[derive(Serialize, Deserialize)]
2332pub struct SavedContext {
2333 pub id: Option<ContextId>,
2334 pub zed: String,
2335 pub version: String,
2336 pub text: String,
2337 pub messages: Vec<SavedMessage>,
2338 pub summary: String,
2339 pub slash_command_output_sections:
2340 Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
2341}
2342
2343impl SavedContext {
2344 pub const VERSION: &'static str = "0.4.0";
2345
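    /// Deserializes a saved context from JSON, transparently upgrading any
    /// recognized legacy format (0.1.0 through 0.3.0) to the current version.
    /// Fails if the `version` field is missing or unrecognized.
    ///
    /// A minimal usage sketch (`json_text` is hypothetical, e.g. the contents
    /// of a file read from `contexts_dir()`):
    ///
    /// ```ignore
    /// let saved = SavedContext::from_json(&json_text)?;
    /// assert_eq!(saved.version, SavedContext::VERSION);
    /// ```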
2346 pub fn from_json(json: &str) -> Result<Self> {
2347 let saved_context_json = serde_json::from_str::<serde_json::Value>(json)?;
2348 match saved_context_json
2349 .get("version")
2350 .ok_or_else(|| anyhow!("version not found"))?
2351 {
2352 serde_json::Value::String(version) => match version.as_str() {
2353 SavedContext::VERSION => {
2354 Ok(serde_json::from_value::<SavedContext>(saved_context_json)?)
2355 }
2356 SavedContextV0_3_0::VERSION => {
2357 let saved_context =
2358 serde_json::from_value::<SavedContextV0_3_0>(saved_context_json)?;
2359 Ok(saved_context.upgrade())
2360 }
2361 SavedContextV0_2_0::VERSION => {
2362 let saved_context =
2363 serde_json::from_value::<SavedContextV0_2_0>(saved_context_json)?;
2364 Ok(saved_context.upgrade())
2365 }
2366 SavedContextV0_1_0::VERSION => {
2367 let saved_context =
2368 serde_json::from_value::<SavedContextV0_1_0>(saved_context_json)?;
2369 Ok(saved_context.upgrade())
2370 }
2371 _ => Err(anyhow!("unrecognized saved context version: {}", version)),
2372 },
2373 _ => Err(anyhow!("version not found on saved context")),
2374 }
2375 }
2376
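    /// Converts this saved snapshot into the `ContextOperation`s needed to
    /// reconstruct it on top of `buffer`: an `InsertMessage` for every message
    /// except the initial one, an `UpdateMessage` carrying the initial
    /// message's metadata, a single `SlashCommandFinished` with the saved
    /// output sections, and an `UpdateSummary` with the saved summary.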
2377 fn into_ops(
2378 self,
2379 buffer: &Model<Buffer>,
2380 cx: &mut ModelContext<Context>,
2381 ) -> Vec<ContextOperation> {
2382 let mut operations = Vec::new();
2383 let mut version = clock::Global::new();
2384 let mut next_timestamp = clock::Lamport::new(ReplicaId::default());
2385
2386 let mut first_message_metadata = None;
2387 for message in self.messages {
2388 if message.id == MessageId(clock::Lamport::default()) {
2389 first_message_metadata = Some(message.metadata);
2390 } else {
2391 operations.push(ContextOperation::InsertMessage {
2392 anchor: MessageAnchor {
2393 id: message.id,
2394 start: buffer.read(cx).anchor_before(message.start),
2395 },
2396 metadata: MessageMetadata {
2397 role: message.metadata.role,
2398 status: message.metadata.status,
2399 timestamp: message.metadata.timestamp,
2400 should_cache: false,
2401 is_cache_anchor: false,
2402 },
2403 version: version.clone(),
2404 });
2405 version.observe(message.id.0);
2406 next_timestamp.observe(message.id.0);
2407 }
2408 }
2409
2410 if let Some(metadata) = first_message_metadata {
2411 let timestamp = next_timestamp.tick();
2412 operations.push(ContextOperation::UpdateMessage {
2413 message_id: MessageId(clock::Lamport::default()),
2414 metadata: MessageMetadata {
2415 role: metadata.role,
2416 status: metadata.status,
2417 timestamp,
2418 should_cache: false,
2419 is_cache_anchor: false,
2420 },
2421 version: version.clone(),
2422 });
2423 version.observe(timestamp);
2424 }
2425
2426 let timestamp = next_timestamp.tick();
2427 operations.push(ContextOperation::SlashCommandFinished {
2428 id: SlashCommandId(timestamp),
2429 output_range: language::Anchor::MIN..language::Anchor::MAX,
2430 sections: self
2431 .slash_command_output_sections
2432 .into_iter()
2433 .map(|section| {
2434 let buffer = buffer.read(cx);
2435 SlashCommandOutputSection {
2436 range: buffer.anchor_after(section.range.start)
2437 ..buffer.anchor_before(section.range.end),
2438 icon: section.icon,
2439 label: section.label,
2440 }
2441 })
2442 .collect(),
2443 version: version.clone(),
2444 });
2445 version.observe(timestamp);
2446
2447 let timestamp = next_timestamp.tick();
2448 operations.push(ContextOperation::UpdateSummary {
2449 summary: ContextSummary {
2450 text: self.summary,
2451 done: true,
2452 timestamp,
2453 },
2454 version: version.clone(),
2455 });
2456 version.observe(timestamp);
2457
2458 operations
2459 }
2460}
2461
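// Legacy saved-context formats. Each version upgrades to the next newer one,
// so a 0.1.0 file is migrated as V0_1_0 -> V0_2_0 -> V0_3_0 -> SavedContext.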
2462#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
2463struct SavedMessageIdPreV0_4_0(usize);
2464
2465#[derive(Serialize, Deserialize)]
2466struct SavedMessagePreV0_4_0 {
2467 id: SavedMessageIdPreV0_4_0,
2468 start: usize,
2469}
2470
2471#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
2472struct SavedMessageMetadataPreV0_4_0 {
2473 role: Role,
2474 status: MessageStatus,
2475}
2476
2477#[derive(Serialize, Deserialize)]
2478struct SavedContextV0_3_0 {
2479 id: Option<ContextId>,
2480 zed: String,
2481 version: String,
2482 text: String,
2483 messages: Vec<SavedMessagePreV0_4_0>,
2484 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
2485 summary: String,
2486 slash_command_output_sections: Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
2487}
2488
2489impl SavedContextV0_3_0 {
2490 const VERSION: &'static str = "0.3.0";
2491
2492 fn upgrade(self) -> SavedContext {
2493 SavedContext {
2494 id: self.id,
2495 zed: self.zed,
2496 version: SavedContext::VERSION.into(),
2497 text: self.text,
2498 messages: self
2499 .messages
2500 .into_iter()
2501 .filter_map(|message| {
2502 let metadata = self.message_metadata.get(&message.id)?;
2503 let timestamp = clock::Lamport {
2504 replica_id: ReplicaId::default(),
2505 value: message.id.0 as u32,
2506 };
2507 Some(SavedMessage {
2508 id: MessageId(timestamp),
2509 start: message.start,
2510 metadata: MessageMetadata {
2511 role: metadata.role,
2512 status: metadata.status.clone(),
2513 timestamp,
2514 should_cache: false,
2515 is_cache_anchor: false,
2516 },
2517 image_offsets: Vec::new(),
2518 })
2519 })
2520 .collect(),
2521 summary: self.summary,
2522 slash_command_output_sections: self.slash_command_output_sections,
2523 }
2524 }
2525}
2526
2527#[derive(Serialize, Deserialize)]
2528struct SavedContextV0_2_0 {
2529 id: Option<ContextId>,
2530 zed: String,
2531 version: String,
2532 text: String,
2533 messages: Vec<SavedMessagePreV0_4_0>,
2534 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
2535 summary: String,
2536}
2537
2538impl SavedContextV0_2_0 {
2539 const VERSION: &'static str = "0.2.0";
2540
2541 fn upgrade(self) -> SavedContext {
2542 SavedContextV0_3_0 {
2543 id: self.id,
2544 zed: self.zed,
2545 version: SavedContextV0_3_0::VERSION.to_string(),
2546 text: self.text,
2547 messages: self.messages,
2548 message_metadata: self.message_metadata,
2549 summary: self.summary,
2550 slash_command_output_sections: Vec::new(),
2551 }
2552 .upgrade()
2553 }
2554}
2555
2556#[derive(Serialize, Deserialize)]
2557struct SavedContextV0_1_0 {
2558 id: Option<ContextId>,
2559 zed: String,
2560 version: String,
2561 text: String,
2562 messages: Vec<SavedMessagePreV0_4_0>,
2563 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
2564 summary: String,
2565 api_url: Option<String>,
2566 model: OpenAiModel,
2567}
2568
2569impl SavedContextV0_1_0 {
2570 const VERSION: &'static str = "0.1.0";
2571
2572 fn upgrade(self) -> SavedContext {
2573 SavedContextV0_2_0 {
2574 id: self.id,
2575 zed: self.zed,
2576 version: SavedContextV0_2_0::VERSION.to_string(),
2577 text: self.text,
2578 messages: self.messages,
2579 message_metadata: self.message_metadata,
2580 summary: self.summary,
2581 }
2582 .upgrade()
2583 }
2584}
2585
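/// Metadata describing a saved context file: its title, path on disk, and last
/// modification time.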
2586#[derive(Clone)]
2587pub struct SavedContextMetadata {
2588 pub title: String,
2589 pub path: PathBuf,
2590 pub mtime: chrono::DateTime<chrono::Local>,
2591}