1#[cfg(test)]
2mod context_tests;
3
4use crate::{
5 prompts::PromptBuilder,
6 slash_command::{file_command::FileCommandMetadata, SlashCommandLine},
7 AssistantEdit, AssistantPatch, AssistantPatchStatus, MessageId, MessageStatus,
8};
9use anyhow::{anyhow, Context as _, Result};
10use assistant_slash_command::{
11 SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry, SlashCommandResult,
12};
13use assistant_tool::ToolRegistry;
14use client::{self, proto, telemetry::Telemetry};
15use clock::ReplicaId;
16use collections::{HashMap, HashSet};
17use feature_flags::{FeatureFlag, FeatureFlagAppExt};
18use fs::{Fs, RemoveOptions};
19use futures::{future::Shared, FutureExt, StreamExt};
20use gpui::{
21 AppContext, Context as _, EventEmitter, Model, ModelContext, RenderImage, SharedString,
22 Subscription, Task,
23};
24
25use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset};
26use language_model::{
27 logging::report_assistant_event,
28 provider::cloud::{MaxMonthlySpendReachedError, PaymentRequiredError},
29 LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionEvent,
30 LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
31 LanguageModelRequestTool, LanguageModelToolResult, LanguageModelToolUse, MessageContent, Role,
32 StopReason,
33};
34use open_ai::Model as OpenAiModel;
35use paths::contexts_dir;
36use project::Project;
37use serde::{Deserialize, Serialize};
38use smallvec::SmallVec;
39use std::{
40 cmp::{max, Ordering},
41 fmt::Debug,
42 iter, mem,
43 ops::Range,
44 path::{Path, PathBuf},
45 str::FromStr as _,
46 sync::Arc,
47 time::{Duration, Instant},
48};
49use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase};
50use text::BufferSnapshot;
51use util::{post_inc, ResultExt, TryFutureExt};
52use uuid::Uuid;
53
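/// A globally unique identifier for a context, stored as a UUID string so it can
/// round-trip through protobuf messages unchanged.
///
/// A minimal usage sketch (hypothetical caller, not part of this file):
///
/// ```ignore
/// let id = ContextId::new();
/// let wire = id.to_proto();               // plain `String` for the wire format
/// let parsed = ContextId::from_proto(wire);
/// assert!(parsed == id);
/// ```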
54#[derive(Clone, Eq, PartialEq, Hash, PartialOrd, Ord, Serialize, Deserialize)]
55pub struct ContextId(String);
56
57impl ContextId {
58 pub fn new() -> Self {
59 Self(Uuid::new_v4().to_string())
60 }
61
62 pub fn from_proto(id: String) -> Self {
63 Self(id)
64 }
65
66 pub fn to_proto(&self) -> String {
67 self.0.clone()
68 }
69}
70
71#[derive(Clone, Copy, Debug, PartialEq, Eq)]
72pub enum RequestType {
73 /// Request a normal chat response from the model.
74 Chat,
75 /// Add a preamble to the message, which tells the model to return a structured response that suggests edits.
76 SuggestEdits,
77}
78
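/// A replicated operation on a context. The non-buffer variants carry the Lamport
/// timestamp of the originating change plus the vector-clock `version` that must be
/// fully observed before the operation can be applied (see `Context::can_apply_op`).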
79#[derive(Clone, Debug)]
80pub enum ContextOperation {
81 InsertMessage {
82 anchor: MessageAnchor,
83 metadata: MessageMetadata,
84 version: clock::Global,
85 },
86 UpdateMessage {
87 message_id: MessageId,
88 metadata: MessageMetadata,
89 version: clock::Global,
90 },
91 UpdateSummary {
92 summary: ContextSummary,
93 version: clock::Global,
94 },
95 SlashCommandFinished {
96 id: SlashCommandId,
97 output_range: Range<language::Anchor>,
98 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
99 version: clock::Global,
100 },
101 BufferOperation(language::Operation),
102}
103
104impl ContextOperation {
105 pub fn from_proto(op: proto::ContextOperation) -> Result<Self> {
106 match op.variant.context("invalid variant")? {
107 proto::context_operation::Variant::InsertMessage(insert) => {
108 let message = insert.message.context("invalid message")?;
109 let id = MessageId(language::proto::deserialize_timestamp(
110 message.id.context("invalid id")?,
111 ));
112 Ok(Self::InsertMessage {
113 anchor: MessageAnchor {
114 id,
115 start: language::proto::deserialize_anchor(
116 message.start.context("invalid anchor")?,
117 )
118 .context("invalid anchor")?,
119 },
120 metadata: MessageMetadata {
121 role: Role::from_proto(message.role),
122 status: MessageStatus::from_proto(
123 message.status.context("invalid status")?,
124 ),
125 timestamp: id.0,
126 cache: None,
127 },
128 version: language::proto::deserialize_version(&insert.version),
129 })
130 }
131 proto::context_operation::Variant::UpdateMessage(update) => Ok(Self::UpdateMessage {
132 message_id: MessageId(language::proto::deserialize_timestamp(
133 update.message_id.context("invalid message id")?,
134 )),
135 metadata: MessageMetadata {
136 role: Role::from_proto(update.role),
137 status: MessageStatus::from_proto(update.status.context("invalid status")?),
138 timestamp: language::proto::deserialize_timestamp(
139 update.timestamp.context("invalid timestamp")?,
140 ),
141 cache: None,
142 },
143 version: language::proto::deserialize_version(&update.version),
144 }),
145 proto::context_operation::Variant::UpdateSummary(update) => Ok(Self::UpdateSummary {
146 summary: ContextSummary {
147 text: update.summary,
148 done: update.done,
149 timestamp: language::proto::deserialize_timestamp(
150 update.timestamp.context("invalid timestamp")?,
151 ),
152 },
153 version: language::proto::deserialize_version(&update.version),
154 }),
155 proto::context_operation::Variant::SlashCommandFinished(finished) => {
156 Ok(Self::SlashCommandFinished {
157 id: SlashCommandId(language::proto::deserialize_timestamp(
158 finished.id.context("invalid id")?,
159 )),
160 output_range: language::proto::deserialize_anchor_range(
161 finished.output_range.context("invalid range")?,
162 )?,
163 sections: finished
164 .sections
165 .into_iter()
166 .map(|section| {
167 Ok(SlashCommandOutputSection {
168 range: language::proto::deserialize_anchor_range(
169 section.range.context("invalid range")?,
170 )?,
171 icon: section.icon_name.parse()?,
172 label: section.label.into(),
173 metadata: section
174 .metadata
175 .and_then(|metadata| serde_json::from_str(&metadata).log_err()),
176 })
177 })
178 .collect::<Result<Vec<_>>>()?,
179 version: language::proto::deserialize_version(&finished.version),
180 })
181 }
182 proto::context_operation::Variant::BufferOperation(op) => Ok(Self::BufferOperation(
183 language::proto::deserialize_operation(
184 op.operation.context("invalid buffer operation")?,
185 )?,
186 )),
187 }
188 }
189
190 pub fn to_proto(&self) -> proto::ContextOperation {
191 match self {
192 Self::InsertMessage {
193 anchor,
194 metadata,
195 version,
196 } => proto::ContextOperation {
197 variant: Some(proto::context_operation::Variant::InsertMessage(
198 proto::context_operation::InsertMessage {
199 message: Some(proto::ContextMessage {
200 id: Some(language::proto::serialize_timestamp(anchor.id.0)),
201 start: Some(language::proto::serialize_anchor(&anchor.start)),
202 role: metadata.role.to_proto() as i32,
203 status: Some(metadata.status.to_proto()),
204 }),
205 version: language::proto::serialize_version(version),
206 },
207 )),
208 },
209 Self::UpdateMessage {
210 message_id,
211 metadata,
212 version,
213 } => proto::ContextOperation {
214 variant: Some(proto::context_operation::Variant::UpdateMessage(
215 proto::context_operation::UpdateMessage {
216 message_id: Some(language::proto::serialize_timestamp(message_id.0)),
217 role: metadata.role.to_proto() as i32,
218 status: Some(metadata.status.to_proto()),
219 timestamp: Some(language::proto::serialize_timestamp(metadata.timestamp)),
220 version: language::proto::serialize_version(version),
221 },
222 )),
223 },
224 Self::UpdateSummary { summary, version } => proto::ContextOperation {
225 variant: Some(proto::context_operation::Variant::UpdateSummary(
226 proto::context_operation::UpdateSummary {
227 summary: summary.text.clone(),
228 done: summary.done,
229 timestamp: Some(language::proto::serialize_timestamp(summary.timestamp)),
230 version: language::proto::serialize_version(version),
231 },
232 )),
233 },
234 Self::SlashCommandFinished {
235 id,
236 output_range,
237 sections,
238 version,
239 } => proto::ContextOperation {
240 variant: Some(proto::context_operation::Variant::SlashCommandFinished(
241 proto::context_operation::SlashCommandFinished {
242 id: Some(language::proto::serialize_timestamp(id.0)),
243 output_range: Some(language::proto::serialize_anchor_range(
244 output_range.clone(),
245 )),
246 sections: sections
247 .iter()
248 .map(|section| {
249 let icon_name: &'static str = section.icon.into();
250 proto::SlashCommandOutputSection {
251 range: Some(language::proto::serialize_anchor_range(
252 section.range.clone(),
253 )),
254 icon_name: icon_name.to_string(),
255 label: section.label.to_string(),
256 metadata: section.metadata.as_ref().and_then(|metadata| {
257 serde_json::to_string(metadata).log_err()
258 }),
259 }
260 })
261 .collect(),
262 version: language::proto::serialize_version(version),
263 },
264 )),
265 },
266 Self::BufferOperation(operation) => proto::ContextOperation {
267 variant: Some(proto::context_operation::Variant::BufferOperation(
268 proto::context_operation::BufferOperation {
269 operation: Some(language::proto::serialize_operation(operation)),
270 },
271 )),
272 },
273 }
274 }
275
276 fn timestamp(&self) -> clock::Lamport {
277 match self {
278 Self::InsertMessage { anchor, .. } => anchor.id.0,
279 Self::UpdateMessage { metadata, .. } => metadata.timestamp,
280 Self::UpdateSummary { summary, .. } => summary.timestamp,
281 Self::SlashCommandFinished { id, .. } => id.0,
282 Self::BufferOperation(_) => {
283 panic!("reading the timestamp of a buffer operation is not supported")
284 }
285 }
286 }
287
288 /// Returns the current version of the context operation.
289 pub fn version(&self) -> &clock::Global {
290 match self {
291 Self::InsertMessage { version, .. }
292 | Self::UpdateMessage { version, .. }
293 | Self::UpdateSummary { version, .. }
294 | Self::SlashCommandFinished { version, .. } => version,
295 Self::BufferOperation(_) => {
296 panic!("reading the version of a buffer operation is not supported")
297 }
298 }
299 }
300}
301
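/// Events emitted by a [`Context`] so views (and, via `Operation`, the collaboration
/// layer) can react to message edits, streamed completions, and slash-command or
/// tool-use lifecycle changes.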
302#[derive(Debug, Clone)]
303pub enum ContextEvent {
304 ShowAssistError(SharedString),
305 ShowPaymentRequiredError,
306 ShowMaxMonthlySpendReachedError,
307 MessagesEdited,
308 SummaryChanged,
309 StreamedCompletion,
310 PatchesUpdated {
311 removed: Vec<Range<language::Anchor>>,
312 updated: Vec<Range<language::Anchor>>,
313 },
314 PendingSlashCommandsUpdated {
315 removed: Vec<Range<language::Anchor>>,
316 updated: Vec<PendingSlashCommand>,
317 },
318 SlashCommandFinished {
319 output_range: Range<language::Anchor>,
320 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
321 run_commands_in_output: bool,
322 expand_result: bool,
323 },
324 UsePendingTools,
325 ToolFinished {
326 tool_use_id: Arc<str>,
327 output_range: Range<language::Anchor>,
328 },
329 Operation(ContextOperation),
330}
331
332#[derive(Clone, Default, Debug)]
333pub struct ContextSummary {
334 pub text: String,
335 done: bool,
336 timestamp: clock::Lamport,
337}
338
339#[derive(Clone, Debug, Eq, PartialEq)]
340pub struct MessageAnchor {
341 pub id: MessageId,
342 pub start: language::Anchor,
343}
344
345#[derive(Clone, Debug, Eq, PartialEq)]
346pub enum CacheStatus {
347 Pending,
348 Cached,
349}
350
351#[derive(Clone, Debug, Eq, PartialEq)]
352pub struct MessageCacheMetadata {
353 pub is_anchor: bool,
354 pub is_final_anchor: bool,
355 pub status: CacheStatus,
356 pub cached_at: clock::Global,
357}
358
359#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
360pub struct MessageMetadata {
361 pub role: Role,
362 pub status: MessageStatus,
363 pub(crate) timestamp: clock::Lamport,
364 #[serde(skip)]
365 pub cache: Option<MessageCacheMetadata>,
366}
367
368impl From<&Message> for MessageMetadata {
369 fn from(message: &Message) -> Self {
370 Self {
371 role: message.role,
372 status: message.status.clone(),
373 timestamp: message.id.0,
374 cache: message.cache.clone(),
375 }
376 }
377}
378
379impl MessageMetadata {
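    /// Returns true if this message has cache metadata and the buffer has not been
    /// edited within `range` since `cached_at`, meaning the cached prefix is still
    /// reusable.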
380 pub fn is_cache_valid(&self, buffer: &BufferSnapshot, range: &Range<usize>) -> bool {
381 let result = match &self.cache {
382 Some(MessageCacheMetadata { cached_at, .. }) => !buffer.has_edits_since_in_range(
383 &cached_at,
384 Range {
385 start: buffer.anchor_at(range.start, Bias::Right),
386 end: buffer.anchor_at(range.end, Bias::Left),
387 },
388 ),
389 _ => false,
390 };
391 result
392 }
393}
394
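/// A resolved view of a message in the context buffer: its id, role, status, and
/// cache state, together with the buffer offsets, anchors, and message-anchor
/// indices it currently spans.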
395#[derive(Clone, Debug)]
396pub struct Message {
397 pub offset_range: Range<usize>,
398 pub index_range: Range<usize>,
399 pub anchor_range: Range<language::Anchor>,
400 pub id: MessageId,
401 pub role: Role,
402 pub status: MessageStatus,
403 pub cache: Option<MessageCacheMetadata>,
404}
405
406#[derive(Debug, Clone)]
407pub enum Content {
408 Image {
409 anchor: language::Anchor,
410 image_id: u64,
411 render_image: Arc<RenderImage>,
412 image: Shared<Task<Option<LanguageModelImage>>>,
413 },
414 ToolUse {
415 range: Range<language::Anchor>,
416 tool_use: LanguageModelToolUse,
417 },
418 ToolResult {
419 range: Range<language::Anchor>,
420 tool_use_id: Arc<str>,
421 },
422}
423
424impl Content {
425 fn range(&self) -> Range<language::Anchor> {
426 match self {
427 Self::Image { anchor, .. } => *anchor..*anchor,
428 Self::ToolUse { range, .. } | Self::ToolResult { range, .. } => range.clone(),
429 }
430 }
431
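    /// Orders two pieces of content by buffer position, treating any overlap as
    /// `Equal` so that a binary search can locate content intersecting a range.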
432 fn cmp(&self, other: &Self, buffer: &BufferSnapshot) -> Ordering {
433 let self_range = self.range();
434 let other_range = other.range();
435 if self_range.end.cmp(&other_range.start, buffer).is_lt() {
436 Ordering::Less
437 } else if self_range.start.cmp(&other_range.end, buffer).is_gt() {
438 Ordering::Greater
439 } else {
440 Ordering::Equal
441 }
442 }
443}
444
445struct PendingCompletion {
446 id: usize,
447 assistant_message_id: MessageId,
448 _task: Task<()>,
449}
450
451#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
452pub struct SlashCommandId(clock::Lamport);
453
454#[derive(Clone, Debug)]
455pub struct XmlTag {
456 pub kind: XmlTagKind,
457 pub range: Range<text::Anchor>,
458 pub is_open_tag: bool,
459}
460
461#[derive(Copy, Clone, Debug, strum::EnumString, PartialEq, Eq, strum::AsRefStr)]
462#[strum(serialize_all = "snake_case")]
463pub enum XmlTagKind {
464 Patch,
465 Title,
466 Edit,
467 Path,
468 Description,
469 OldText,
470 NewText,
471 Operation,
472}
473
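/// The state behind a single assistant conversation: a Markdown buffer holding the
/// transcript, message anchors and metadata layered on top of it, parsed slash
/// commands, patches, and tool uses, plus the CRDT bookkeeping (Lamport clock and
/// version vector) used to replicate the context between collaborators.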
474pub struct Context {
475 id: ContextId,
476 timestamp: clock::Lamport,
477 version: clock::Global,
478 pending_ops: Vec<ContextOperation>,
479 operations: Vec<ContextOperation>,
480 buffer: Model<Buffer>,
481 pending_slash_commands: Vec<PendingSlashCommand>,
482 edits_since_last_parse: language::Subscription,
483 finished_slash_commands: HashSet<SlashCommandId>,
484 slash_command_output_sections: Vec<SlashCommandOutputSection<language::Anchor>>,
485 pending_tool_uses_by_id: HashMap<Arc<str>, PendingToolUse>,
486 message_anchors: Vec<MessageAnchor>,
487 contents: Vec<Content>,
488 messages_metadata: HashMap<MessageId, MessageMetadata>,
489 summary: Option<ContextSummary>,
490 pending_summary: Task<Option<()>>,
491 completion_count: usize,
492 pending_completions: Vec<PendingCompletion>,
493 token_count: Option<usize>,
494 pending_token_count: Task<Option<()>>,
495 pending_save: Task<Result<()>>,
496 pending_cache_warming_task: Task<Option<()>>,
497 path: Option<PathBuf>,
498 _subscriptions: Vec<Subscription>,
499 telemetry: Option<Arc<Telemetry>>,
500 language_registry: Arc<LanguageRegistry>,
501 patches: Vec<AssistantPatch>,
502 xml_tags: Vec<XmlTag>,
503 project: Option<Model<Project>>,
504 prompt_builder: Arc<PromptBuilder>,
505}
506
507trait ContextAnnotation {
508 fn range(&self) -> &Range<language::Anchor>;
509}
510
511impl ContextAnnotation for PendingSlashCommand {
512 fn range(&self) -> &Range<language::Anchor> {
513 &self.source_range
514 }
515}
516
517impl ContextAnnotation for AssistantPatch {
518 fn range(&self) -> &Range<language::Anchor> {
519 &self.range
520 }
521}
522
523impl ContextAnnotation for XmlTag {
524 fn range(&self) -> &Range<language::Anchor> {
525 &self.range
526 }
527}
528
529impl EventEmitter<ContextEvent> for Context {}
530
531impl Context {
532 pub fn local(
533 language_registry: Arc<LanguageRegistry>,
534 project: Option<Model<Project>>,
535 telemetry: Option<Arc<Telemetry>>,
536 prompt_builder: Arc<PromptBuilder>,
537 cx: &mut ModelContext<Self>,
538 ) -> Self {
539 Self::new(
540 ContextId::new(),
541 ReplicaId::default(),
542 language::Capability::ReadWrite,
543 language_registry,
544 prompt_builder,
545 project,
546 telemetry,
547 cx,
548 )
549 }
550
551 #[allow(clippy::too_many_arguments)]
552 pub fn new(
553 id: ContextId,
554 replica_id: ReplicaId,
555 capability: language::Capability,
556 language_registry: Arc<LanguageRegistry>,
557 prompt_builder: Arc<PromptBuilder>,
558 project: Option<Model<Project>>,
559 telemetry: Option<Arc<Telemetry>>,
560 cx: &mut ModelContext<Self>,
561 ) -> Self {
562 let buffer = cx.new_model(|_cx| {
563 let mut buffer = Buffer::remote(
564 language::BufferId::new(1).unwrap(),
565 replica_id,
566 capability,
567 "",
568 );
569 buffer.set_language_registry(language_registry.clone());
570 buffer
571 });
572 let edits_since_last_slash_command_parse =
573 buffer.update(cx, |buffer, _| buffer.subscribe());
574 let mut this = Self {
575 id,
576 timestamp: clock::Lamport::new(replica_id),
577 version: clock::Global::new(),
578 pending_ops: Vec::new(),
579 operations: Vec::new(),
580 message_anchors: Default::default(),
581 contents: Default::default(),
582 messages_metadata: Default::default(),
583 pending_slash_commands: Vec::new(),
584 finished_slash_commands: HashSet::default(),
585 pending_tool_uses_by_id: HashMap::default(),
586 slash_command_output_sections: Vec::new(),
587 edits_since_last_parse: edits_since_last_slash_command_parse,
588 summary: None,
589 pending_summary: Task::ready(None),
590 completion_count: Default::default(),
591 pending_completions: Default::default(),
592 token_count: None,
593 pending_token_count: Task::ready(None),
594 pending_cache_warming_task: Task::ready(None),
595 _subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)],
596 pending_save: Task::ready(Ok(())),
597 path: None,
598 buffer,
599 telemetry,
600 project,
601 language_registry,
602 patches: Vec::new(),
603 xml_tags: Vec::new(),
604 prompt_builder,
605 };
606
607 let first_message_id = MessageId(clock::Lamport {
608 replica_id: 0,
609 value: 0,
610 });
611 let message = MessageAnchor {
612 id: first_message_id,
613 start: language::Anchor::MIN,
614 };
615 this.messages_metadata.insert(
616 first_message_id,
617 MessageMetadata {
618 role: Role::User,
619 status: MessageStatus::Done,
620 timestamp: first_message_id.0,
621 cache: None,
622 },
623 );
624 this.message_anchors.push(message);
625
626 this.set_language(cx);
627 this.count_remaining_tokens(cx);
628 this
629 }
630
631 pub(crate) fn serialize(&self, cx: &AppContext) -> SavedContext {
632 let buffer = self.buffer.read(cx);
633 SavedContext {
634 id: Some(self.id.clone()),
635 zed: "context".into(),
636 version: SavedContext::VERSION.into(),
637 text: buffer.text(),
638 messages: self
639 .messages(cx)
640 .map(|message| SavedMessage {
641 id: message.id,
642 start: message.offset_range.start,
643 metadata: self.messages_metadata[&message.id].clone(),
644 })
645 .collect(),
646 summary: self
647 .summary
648 .as_ref()
649 .map(|summary| summary.text.clone())
650 .unwrap_or_default(),
651 slash_command_output_sections: self
652 .slash_command_output_sections
653 .iter()
654 .filter_map(|section| {
655 if section.is_valid(buffer) {
656 let range = section.range.to_offset(buffer);
657 Some(assistant_slash_command::SlashCommandOutputSection {
658 range,
659 icon: section.icon,
660 label: section.label.clone(),
661 metadata: section.metadata.clone(),
662 })
663 } else {
664 None
665 }
666 })
667 .collect(),
668 }
669 }
670
671 #[allow(clippy::too_many_arguments)]
672 pub fn deserialize(
673 saved_context: SavedContext,
674 path: PathBuf,
675 language_registry: Arc<LanguageRegistry>,
676 prompt_builder: Arc<PromptBuilder>,
677 project: Option<Model<Project>>,
678 telemetry: Option<Arc<Telemetry>>,
679 cx: &mut ModelContext<Self>,
680 ) -> Self {
681 let id = saved_context.id.clone().unwrap_or_else(ContextId::new);
682 let mut this = Self::new(
683 id,
684 ReplicaId::default(),
685 language::Capability::ReadWrite,
686 language_registry,
687 prompt_builder,
688 project,
689 telemetry,
690 cx,
691 );
692 this.path = Some(path);
693 this.buffer.update(cx, |buffer, cx| {
694 buffer.set_text(saved_context.text.as_str(), cx)
695 });
696 let operations = saved_context.into_ops(&this.buffer, cx);
697 this.apply_ops(operations, cx);
698 this
699 }
700
701 pub fn id(&self) -> &ContextId {
702 &self.id
703 }
704
705 pub fn replica_id(&self) -> ReplicaId {
706 self.timestamp.replica_id
707 }
708
709 pub fn version(&self, cx: &AppContext) -> ContextVersion {
710 ContextVersion {
711 context: self.version.clone(),
712 buffer: self.buffer.read(cx).version(),
713 }
714 }
715
716 pub fn set_capability(
717 &mut self,
718 capability: language::Capability,
719 cx: &mut ModelContext<Self>,
720 ) {
721 self.buffer
722 .update(cx, |buffer, cx| buffer.set_capability(capability, cx));
723 }
724
725 fn next_timestamp(&mut self) -> clock::Lamport {
726 let timestamp = self.timestamp.tick();
727 self.version.observe(timestamp);
728 timestamp
729 }
730
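    /// Serializes every operation (buffer and context) that `since` has not yet
    /// observed, for catching up a remote replica. Buffer operations come first in
    /// the returned list; context operations are sorted by Lamport timestamp.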
731 pub fn serialize_ops(
732 &self,
733 since: &ContextVersion,
734 cx: &AppContext,
735 ) -> Task<Vec<proto::ContextOperation>> {
736 let buffer_ops = self
737 .buffer
738 .read(cx)
739 .serialize_ops(Some(since.buffer.clone()), cx);
740
741 let mut context_ops = self
742 .operations
743 .iter()
744 .filter(|op| !since.context.observed(op.timestamp()))
745 .cloned()
746 .collect::<Vec<_>>();
747 context_ops.extend(self.pending_ops.iter().cloned());
748
749 cx.background_executor().spawn(async move {
750 let buffer_ops = buffer_ops.await;
751 context_ops.sort_unstable_by_key(|op| op.timestamp());
752 buffer_ops
753 .into_iter()
754 .map(|op| proto::ContextOperation {
755 variant: Some(proto::context_operation::Variant::BufferOperation(
756 proto::context_operation::BufferOperation {
757 operation: Some(op),
758 },
759 )),
760 })
761 .chain(context_ops.into_iter().map(|op| op.to_proto()))
762 .collect()
763 })
764 }
765
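    /// Applies operations received from a remote replica. Buffer operations are
    /// forwarded to the underlying buffer immediately; context operations are queued
    /// in `pending_ops` and drained by `flush_ops` once their causal dependencies
    /// (version and any referenced anchors or messages) have been observed.
    ///
    /// A collaboration round-trip sketch (hypothetical `local`/`remote` models and a
    /// surrounding async context are assumed):
    ///
    /// ```ignore
    /// let ops = local.read(cx).serialize_ops(&remote_version, cx).await;
    /// remote.update(cx, |context, cx| {
    ///     let ops = ops
    ///         .into_iter()
    ///         .filter_map(|op| ContextOperation::from_proto(op).ok());
    ///     context.apply_ops(ops, cx);
    /// });
    /// ```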
766 pub fn apply_ops(
767 &mut self,
768 ops: impl IntoIterator<Item = ContextOperation>,
769 cx: &mut ModelContext<Self>,
770 ) {
771 let mut buffer_ops = Vec::new();
772 for op in ops {
773 match op {
774 ContextOperation::BufferOperation(buffer_op) => buffer_ops.push(buffer_op),
775 op => self.pending_ops.push(op),
776 }
777 }
778 self.buffer
779 .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx));
780 self.flush_ops(cx);
781 }
782
783 fn flush_ops(&mut self, cx: &mut ModelContext<Context>) {
784 let mut changed_messages = HashSet::default();
785 let mut summary_changed = false;
786
787 self.pending_ops.sort_unstable_by_key(|op| op.timestamp());
788 for op in mem::take(&mut self.pending_ops) {
789 if !self.can_apply_op(&op, cx) {
790 self.pending_ops.push(op);
791 continue;
792 }
793
794 let timestamp = op.timestamp();
795 match op.clone() {
796 ContextOperation::InsertMessage {
797 anchor, metadata, ..
798 } => {
799 if self.messages_metadata.contains_key(&anchor.id) {
800 // We already applied this operation.
801 } else {
802 changed_messages.insert(anchor.id);
803 self.insert_message(anchor, metadata, cx);
804 }
805 }
806 ContextOperation::UpdateMessage {
807 message_id,
808 metadata: new_metadata,
809 ..
810 } => {
811 let metadata = self.messages_metadata.get_mut(&message_id).unwrap();
812 if new_metadata.timestamp > metadata.timestamp {
813 *metadata = new_metadata;
814 changed_messages.insert(message_id);
815 }
816 }
817 ContextOperation::UpdateSummary {
818 summary: new_summary,
819 ..
820 } => {
821 if self
822 .summary
823 .as_ref()
824 .map_or(true, |summary| new_summary.timestamp > summary.timestamp)
825 {
826 self.summary = Some(new_summary);
827 summary_changed = true;
828 }
829 }
830 ContextOperation::SlashCommandFinished {
831 id,
832 output_range,
833 sections,
834 ..
835 } => {
836 if self.finished_slash_commands.insert(id) {
837 let buffer = self.buffer.read(cx);
838 self.slash_command_output_sections
839 .extend(sections.iter().cloned());
840 self.slash_command_output_sections
841 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
842 cx.emit(ContextEvent::SlashCommandFinished {
843 output_range,
844 sections,
845 expand_result: false,
846 run_commands_in_output: false,
847 });
848 }
849 }
850 ContextOperation::BufferOperation(_) => unreachable!(),
851 }
852
853 self.version.observe(timestamp);
854 self.timestamp.observe(timestamp);
855 self.operations.push(op);
856 }
857
858 if !changed_messages.is_empty() {
859 self.message_roles_updated(changed_messages, cx);
860 cx.emit(ContextEvent::MessagesEdited);
861 cx.notify();
862 }
863
864 if summary_changed {
865 cx.emit(ContextEvent::SummaryChanged);
866 cx.notify();
867 }
868 }
869
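    /// Returns whether `op` is causally ready to apply: its version must be fully
    /// observed, and any buffer anchors or message ids it references must already
    /// exist locally.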
870 fn can_apply_op(&self, op: &ContextOperation, cx: &AppContext) -> bool {
871 if !self.version.observed_all(op.version()) {
872 return false;
873 }
874
875 match op {
876 ContextOperation::InsertMessage { anchor, .. } => self
877 .buffer
878 .read(cx)
879 .version
880 .observed(anchor.start.timestamp),
881 ContextOperation::UpdateMessage { message_id, .. } => {
882 self.messages_metadata.contains_key(message_id)
883 }
884 ContextOperation::UpdateSummary { .. } => true,
885 ContextOperation::SlashCommandFinished {
886 output_range,
887 sections,
888 ..
889 } => {
890 let version = &self.buffer.read(cx).version;
891 sections
892 .iter()
893 .map(|section| &section.range)
894 .chain([output_range])
895 .all(|range| {
896 let observed_start = range.start == language::Anchor::MIN
897 || range.start == language::Anchor::MAX
898 || version.observed(range.start.timestamp);
899 let observed_end = range.end == language::Anchor::MIN
900 || range.end == language::Anchor::MAX
901 || version.observed(range.end.timestamp);
902 observed_start && observed_end
903 })
904 }
905 ContextOperation::BufferOperation(_) => {
906 panic!("buffer operations should always be applied")
907 }
908 }
909 }
910
911 fn push_op(&mut self, op: ContextOperation, cx: &mut ModelContext<Self>) {
912 self.operations.push(op.clone());
913 cx.emit(ContextEvent::Operation(op));
914 }
915
916 pub fn buffer(&self) -> &Model<Buffer> {
917 &self.buffer
918 }
919
920 pub fn language_registry(&self) -> Arc<LanguageRegistry> {
921 self.language_registry.clone()
922 }
923
924 pub fn project(&self) -> Option<Model<Project>> {
925 self.project.clone()
926 }
927
928 pub fn prompt_builder(&self) -> Arc<PromptBuilder> {
929 self.prompt_builder.clone()
930 }
931
932 pub fn path(&self) -> Option<&Path> {
933 self.path.as_deref()
934 }
935
936 pub fn summary(&self) -> Option<&ContextSummary> {
937 self.summary.as_ref()
938 }
939
940 pub(crate) fn patch_containing(
941 &self,
942 position: Point,
943 cx: &AppContext,
944 ) -> Option<&AssistantPatch> {
945 let buffer = self.buffer.read(cx);
946 let index = self.patches.binary_search_by(|patch| {
947 let patch_range = patch.range.to_point(&buffer);
948 if position < patch_range.start {
949 Ordering::Greater
950 } else if position > patch_range.end {
951 Ordering::Less
952 } else {
953 Ordering::Equal
954 }
955 });
956 if let Ok(ix) = index {
957 Some(&self.patches[ix])
958 } else {
959 None
960 }
961 }
962
963 pub fn patch_ranges(&self) -> impl Iterator<Item = Range<language::Anchor>> + '_ {
964 self.patches.iter().map(|patch| patch.range.clone())
965 }
966
967 pub(crate) fn patch_for_range(
968 &self,
969 range: &Range<language::Anchor>,
970 cx: &AppContext,
971 ) -> Option<&AssistantPatch> {
972 let buffer = self.buffer.read(cx);
973 let index = self.patch_index_for_range(range, buffer).ok()?;
974 Some(&self.patches[index])
975 }
976
977 fn patch_index_for_range(
978 &self,
979 tagged_range: &Range<text::Anchor>,
980 buffer: &text::BufferSnapshot,
981 ) -> Result<usize, usize> {
982 self.patches
983 .binary_search_by(|probe| probe.range.cmp(&tagged_range, buffer))
984 }
985
986 pub fn pending_slash_commands(&self) -> &[PendingSlashCommand] {
987 &self.pending_slash_commands
988 }
989
990 pub fn slash_command_output_sections(&self) -> &[SlashCommandOutputSection<language::Anchor>] {
991 &self.slash_command_output_sections
992 }
993
994 pub fn contains_files(&self, cx: &AppContext) -> bool {
995 let buffer = self.buffer.read(cx);
996 self.slash_command_output_sections.iter().any(|section| {
997 section.is_valid(buffer)
998 && section
999 .metadata
1000 .as_ref()
1001 .and_then(|metadata| {
1002 serde_json::from_value::<FileCommandMetadata>(metadata.clone()).ok()
1003 })
1004 .is_some()
1005 })
1006 }
1007
1008 pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> {
1009 self.pending_tool_uses_by_id.values().collect()
1010 }
1011
1012 pub fn get_tool_use_by_id(&self, id: &Arc<str>) -> Option<&PendingToolUse> {
1013 self.pending_tool_uses_by_id.get(id)
1014 }
1015
1016 fn set_language(&mut self, cx: &mut ModelContext<Self>) {
1017 let markdown = self.language_registry.language_for_name("Markdown");
1018 cx.spawn(|this, mut cx| async move {
1019 let markdown = markdown.await?;
1020 this.update(&mut cx, |this, cx| {
1021 this.buffer
1022 .update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx));
1023 })
1024 })
1025 .detach_and_log_err(cx);
1026 }
1027
1028 fn handle_buffer_event(
1029 &mut self,
1030 _: Model<Buffer>,
1031 event: &language::BufferEvent,
1032 cx: &mut ModelContext<Self>,
1033 ) {
1034 match event {
1035 language::BufferEvent::Operation {
1036 operation,
1037 is_local: true,
1038 } => cx.emit(ContextEvent::Operation(ContextOperation::BufferOperation(
1039 operation.clone(),
1040 ))),
1041 language::BufferEvent::Edited => {
1042 self.count_remaining_tokens(cx);
1043 self.reparse(cx);
1044 cx.emit(ContextEvent::MessagesEdited);
1045 }
1046 _ => {}
1047 }
1048 }
1049
1050 pub(crate) fn token_count(&self) -> Option<usize> {
1051 self.token_count
1052 }
1053
1054 pub(crate) fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
1055 // Assume it will be a Chat request, even though that takes fewer tokens (and risks going over the limit),
1056 // because otherwise you see in the UI that your empty message has a bunch of tokens already used.
1057 let request = self.to_completion_request(RequestType::Chat, cx);
1058 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
1059 return;
1060 };
1061 self.pending_token_count = cx.spawn(|this, mut cx| {
1062 async move {
1063 cx.background_executor()
1064 .timer(Duration::from_millis(200))
1065 .await;
1066
1067 let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
1068 this.update(&mut cx, |this, cx| {
1069 this.token_count = Some(token_count);
1070 this.start_cache_warming(&model, cx);
1071 cx.notify()
1072 })
1073 }
1074 .log_err()
1075 });
1076 }
1077
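    /// Chooses which messages should act as prompt-cache anchors and updates each
    /// message's `MessageCacheMetadata` accordingly.
    ///
    /// The longest user messages are preferred as anchors, capped at
    /// `max_cache_anchors - 1` (one anchor is reserved for the inline assistant),
    /// and nothing is cached until the conversation reaches `min_total_token`. When
    /// `speculative` is true the final message is excluded, since it is still likely
    /// to change. Returns true if a newly chosen anchor has an invalidated cache and
    /// therefore needs to be warmed again.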
1078 pub fn mark_cache_anchors(
1079 &mut self,
1080 cache_configuration: &Option<LanguageModelCacheConfiguration>,
1081 speculative: bool,
1082 cx: &mut ModelContext<Self>,
1083 ) -> bool {
1084 let cache_configuration =
1085 cache_configuration
1086 .as_ref()
1087 .unwrap_or(&LanguageModelCacheConfiguration {
1088 max_cache_anchors: 0,
1089 should_speculate: false,
1090 min_total_token: 0,
1091 });
1092
1093 let messages: Vec<Message> = self.messages(cx).collect();
1094
1095 let mut sorted_messages = messages.clone();
1096 if speculative {
1097 // Avoid caching the last message if this is a speculative cache fetch as
1098 // it's likely to change.
1099 sorted_messages.pop();
1100 }
1101 sorted_messages.retain(|m| m.role == Role::User);
1102 sorted_messages.sort_by(|a, b| b.offset_range.len().cmp(&a.offset_range.len()));
1103
1104 let cache_anchors = if self.token_count.unwrap_or(0) < cache_configuration.min_total_token {
1105 // If we haven't hit the minimum threshold to enable caching, don't cache anything.
1106 0
1107 } else {
1108 // Save 1 anchor for the inline assistant to use.
1109 max(cache_configuration.max_cache_anchors, 1) - 1
1110 };
1111 sorted_messages.truncate(cache_anchors);
1112
1113 let anchors: HashSet<MessageId> = sorted_messages
1114 .into_iter()
1115 .map(|message| message.id)
1116 .collect();
1117
1118 let buffer = self.buffer.read(cx).snapshot();
1119 let invalidated_caches: HashSet<MessageId> = messages
1120 .iter()
1121 .scan(false, |encountered_invalid, message| {
1122 let message_id = message.id;
1123 let is_invalid = self
1124 .messages_metadata
1125 .get(&message_id)
1126 .map_or(true, |metadata| {
1127 !metadata.is_cache_valid(&buffer, &message.offset_range)
1128 || *encountered_invalid
1129 });
1130 *encountered_invalid |= is_invalid;
1131 Some(if is_invalid { Some(message_id) } else { None })
1132 })
1133 .flatten()
1134 .collect();
1135
1136 let last_anchor = messages.iter().rev().find_map(|message| {
1137 if anchors.contains(&message.id) {
1138 Some(message.id)
1139 } else {
1140 None
1141 }
1142 });
1143
1144 let mut new_anchor_needs_caching = false;
1145 let current_version = &buffer.version;
1146 // If we have no anchors, mark all messages as not being cached.
1147 let mut hit_last_anchor = last_anchor.is_none();
1148
1149 for message in messages.iter() {
1150 if hit_last_anchor {
1151 self.update_metadata(message.id, cx, |metadata| metadata.cache = None);
1152 continue;
1153 }
1154
1155 if let Some(last_anchor) = last_anchor {
1156 if message.id == last_anchor {
1157 hit_last_anchor = true;
1158 }
1159 }
1160
1161 new_anchor_needs_caching = new_anchor_needs_caching
1162 || (invalidated_caches.contains(&message.id) && anchors.contains(&message.id));
1163
1164 self.update_metadata(message.id, cx, |metadata| {
1165 let cache_status = if invalidated_caches.contains(&message.id) {
1166 CacheStatus::Pending
1167 } else {
1168 metadata
1169 .cache
1170 .as_ref()
1171 .map_or(CacheStatus::Pending, |cm| cm.status.clone())
1172 };
1173 metadata.cache = Some(MessageCacheMetadata {
1174 is_anchor: anchors.contains(&message.id),
1175 is_final_anchor: hit_last_anchor,
1176 status: cache_status,
1177 cached_at: current_version.clone(),
1178 });
1179 });
1180 }
1181 new_anchor_needs_caching
1182 }
1183
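    /// Speculatively warms the provider's prompt cache by sending a throwaway
    /// "Respond only with OK" request built from the current conversation, so newly
    /// marked cache anchors are populated before the user sends their next message.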
1184 fn start_cache_warming(&mut self, model: &Arc<dyn LanguageModel>, cx: &mut ModelContext<Self>) {
1185 let cache_configuration = model.cache_configuration();
1186
1187 if !self.mark_cache_anchors(&cache_configuration, true, cx) {
1188 return;
1189 }
1190 if !self.pending_completions.is_empty() {
1191 return;
1192 }
1193 if let Some(cache_configuration) = cache_configuration {
1194 if !cache_configuration.should_speculate {
1195 return;
1196 }
1197 }
1198
1199 let request = {
1200 let mut req = self.to_completion_request(RequestType::Chat, cx);
1201 // Skip the last message because it's likely to change and
1202 // therefore would be a waste to cache.
1203 req.messages.pop();
1204 req.messages.push(LanguageModelRequestMessage {
1205 role: Role::User,
1206 content: vec!["Respond only with OK, nothing else.".into()],
1207 cache: false,
1208 });
1209 req
1210 };
1211
1212 let model = Arc::clone(model);
1213 self.pending_cache_warming_task = cx.spawn(|this, mut cx| {
1214 async move {
1215 match model.stream_completion(request, &cx).await {
1216 Ok(mut stream) => {
1217 stream.next().await;
1218 log::info!("Cache warming completed successfully");
1219 }
1220 Err(e) => {
1221 log::warn!("Cache warming failed: {}", e);
1222 }
1223 };
1224 this.update(&mut cx, |this, cx| {
1225 this.update_cache_status_for_completion(cx);
1226 })
1227 .ok();
1228 anyhow::Ok(())
1229 }
1230 .log_err()
1231 });
1232 }
1233
1234 pub fn update_cache_status_for_completion(&mut self, cx: &mut ModelContext<Self>) {
1235 let cached_message_ids: Vec<MessageId> = self
1236 .messages_metadata
1237 .iter()
1238 .filter_map(|(message_id, metadata)| {
1239 metadata.cache.as_ref().and_then(|cache| {
1240 if cache.status == CacheStatus::Pending {
1241 Some(*message_id)
1242 } else {
1243 None
1244 }
1245 })
1246 })
1247 .collect();
1248
1249 for message_id in cached_message_ids {
1250 self.update_metadata(message_id, cx, |metadata| {
1251 if let Some(cache) = &mut metadata.cache {
1252 cache.status = CacheStatus::Cached;
1253 }
1254 });
1255 }
1256 cx.notify();
1257 }
1258
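    /// Re-parses the regions of the buffer edited since the last parse, expanding
    /// each edit to whole lines and coalescing overlapping ranges, then updates the
    /// pending slash commands and assistant patches found in those regions.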
1259 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
1260 let buffer = self.buffer.read(cx).text_snapshot();
1261 let mut row_ranges = self
1262 .edits_since_last_parse
1263 .consume()
1264 .into_iter()
1265 .map(|edit| {
1266 let start_row = buffer.offset_to_point(edit.new.start).row;
1267 let end_row = buffer.offset_to_point(edit.new.end).row + 1;
1268 start_row..end_row
1269 })
1270 .peekable();
1271
1272 let mut removed_slash_command_ranges = Vec::new();
1273 let mut updated_slash_commands = Vec::new();
1274 let mut removed_patches = Vec::new();
1275 let mut updated_patches = Vec::new();
1276 while let Some(mut row_range) = row_ranges.next() {
1277 while let Some(next_row_range) = row_ranges.peek() {
1278 if row_range.end >= next_row_range.start {
1279 row_range.end = next_row_range.end;
1280 row_ranges.next();
1281 } else {
1282 break;
1283 }
1284 }
1285
1286 let start = buffer.anchor_before(Point::new(row_range.start, 0));
1287 let end = buffer.anchor_after(Point::new(
1288 row_range.end - 1,
1289 buffer.line_len(row_range.end - 1),
1290 ));
1291
1292 self.reparse_slash_commands_in_range(
1293 start..end,
1294 &buffer,
1295 &mut updated_slash_commands,
1296 &mut removed_slash_command_ranges,
1297 cx,
1298 );
1299 self.reparse_patches_in_range(
1300 start..end,
1301 &buffer,
1302 &mut updated_patches,
1303 &mut removed_patches,
1304 cx,
1305 );
1306 }
1307
1308 if !updated_slash_commands.is_empty() || !removed_slash_command_ranges.is_empty() {
1309 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1310 removed: removed_slash_command_ranges,
1311 updated: updated_slash_commands,
1312 });
1313 }
1314
1315 if !updated_patches.is_empty() || !removed_patches.is_empty() {
1316 cx.emit(ContextEvent::PatchesUpdated {
1317 removed: removed_patches,
1318 updated: updated_patches,
1319 });
1320 }
1321 }
1322
1323 fn reparse_slash_commands_in_range(
1324 &mut self,
1325 range: Range<text::Anchor>,
1326 buffer: &BufferSnapshot,
1327 updated: &mut Vec<PendingSlashCommand>,
1328 removed: &mut Vec<Range<text::Anchor>>,
1329 cx: &AppContext,
1330 ) {
1331 let old_range = self.pending_command_indices_for_range(range.clone(), cx);
1332
1333 let mut new_commands = Vec::new();
1334 let mut lines = buffer.text_for_range(range).lines();
1335 let mut offset = lines.offset();
1336 while let Some(line) = lines.next() {
1337 if let Some(command_line) = SlashCommandLine::parse(line) {
1338 let name = &line[command_line.name.clone()];
1339 let arguments = command_line
1340 .arguments
1341 .iter()
1342 .filter_map(|argument_range| {
1343 if argument_range.is_empty() {
1344 None
1345 } else {
1346 line.get(argument_range.clone())
1347 }
1348 })
1349 .map(ToOwned::to_owned)
1350 .collect::<SmallVec<_>>();
1351 if let Some(command) = SlashCommandRegistry::global(cx).command(name) {
1352 if !command.requires_argument() || !arguments.is_empty() {
1353 let start_ix = offset + command_line.name.start - 1;
1354 let end_ix = offset
1355 + command_line
1356 .arguments
1357 .last()
1358 .map_or(command_line.name.end, |argument| argument.end);
1359 let source_range =
1360 buffer.anchor_after(start_ix)..buffer.anchor_after(end_ix);
1361 let pending_command = PendingSlashCommand {
1362 name: name.to_string(),
1363 arguments,
1364 source_range,
1365 status: PendingSlashCommandStatus::Idle,
1366 };
1367 updated.push(pending_command.clone());
1368 new_commands.push(pending_command);
1369 }
1370 }
1371 }
1372
1373 offset = lines.offset();
1374 }
1375
1376 let removed_commands = self.pending_slash_commands.splice(old_range, new_commands);
1377 removed.extend(removed_commands.map(|command| command.source_range));
1378 }
1379
1380 fn reparse_patches_in_range(
1381 &mut self,
1382 range: Range<text::Anchor>,
1383 buffer: &BufferSnapshot,
1384 updated: &mut Vec<Range<text::Anchor>>,
1385 removed: &mut Vec<Range<text::Anchor>>,
1386 cx: &mut ModelContext<Self>,
1387 ) {
1388 // Rebuild the XML tags in the edited range.
1389 let intersecting_tags_range =
1390 self.indices_intersecting_buffer_range(&self.xml_tags, range.clone(), cx);
1391 let new_tags = self.parse_xml_tags_in_range(buffer, range.clone(), cx);
1392 self.xml_tags
1393 .splice(intersecting_tags_range.clone(), new_tags);
1394
1395 // Find which patches intersect the changed range.
1396 let intersecting_patches_range =
1397 self.indices_intersecting_buffer_range(&self.patches, range.clone(), cx);
1398
1399 // Reparse all tags after the last unchanged patch before the change.
1400 let mut tags_start_ix = 0;
1401 if let Some(preceding_unchanged_patch) =
1402 self.patches[..intersecting_patches_range.start].last()
1403 {
1404 tags_start_ix = match self.xml_tags.binary_search_by(|tag| {
1405 tag.range
1406 .start
1407 .cmp(&preceding_unchanged_patch.range.end, buffer)
1408 .then(Ordering::Less)
1409 }) {
1410 Ok(ix) | Err(ix) => ix,
1411 };
1412 }
1413
1414 // Rebuild the patches in the range.
1415 let new_patches = self.parse_patches(tags_start_ix, range.end, buffer, cx);
1416 updated.extend(new_patches.iter().map(|patch| patch.range.clone()));
1417 let removed_patches = self.patches.splice(intersecting_patches_range, new_patches);
1418 removed.extend(
1419 removed_patches
1420 .map(|patch| patch.range)
1421 .filter(|range| !updated.contains(&range)),
1422 );
1423 }
1424
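    /// Scans the assistant messages within `range` for `<tag>`/`</tag>` markers
    /// whose names match an [`XmlTagKind`], returning them in buffer order for
    /// patch parsing.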
1425 fn parse_xml_tags_in_range(
1426 &self,
1427 buffer: &BufferSnapshot,
1428 range: Range<text::Anchor>,
1429 cx: &AppContext,
1430 ) -> Vec<XmlTag> {
1431 let mut messages = self.messages(cx).peekable();
1432
1433 let mut tags = Vec::new();
1434 let mut lines = buffer.text_for_range(range).lines();
1435 let mut offset = lines.offset();
1436
1437 while let Some(line) = lines.next() {
1438 while let Some(message) = messages.peek() {
1439 if offset < message.offset_range.end {
1440 break;
1441 } else {
1442 messages.next();
1443 }
1444 }
1445
1446 let is_assistant_message = messages
1447 .peek()
1448 .map_or(false, |message| message.role == Role::Assistant);
1449 if is_assistant_message {
1450 for (start_ix, _) in line.match_indices('<') {
1451 let mut name_start_ix = start_ix + 1;
1452 let closing_bracket_ix = line[start_ix..].find('>').map(|i| start_ix + i);
1453 if let Some(closing_bracket_ix) = closing_bracket_ix {
1454 let end_ix = closing_bracket_ix + 1;
1455 let mut is_open_tag = true;
1456 if line[name_start_ix..closing_bracket_ix].starts_with('/') {
1457 name_start_ix += 1;
1458 is_open_tag = false;
1459 }
1460 let tag_inner = &line[name_start_ix..closing_bracket_ix];
1461 let tag_name_len = tag_inner
1462 .find(|c: char| c.is_whitespace())
1463 .unwrap_or(tag_inner.len());
1464 if let Ok(kind) = XmlTagKind::from_str(&tag_inner[..tag_name_len]) {
1465 tags.push(XmlTag {
1466 range: buffer.anchor_after(offset + start_ix)
1467 ..buffer.anchor_before(offset + end_ix),
1468 is_open_tag,
1469 kind,
1470 });
1471 };
1472 }
1473 }
1474 }
1475
1476 offset = lines.offset();
1477 }
1478 tags
1479 }
1480
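    /// Walks the parsed XML tags starting at `tags_start_ix` and assembles `<patch>`
    /// blocks into [`AssistantPatch`] values. A patch whose closing tag has not
    /// arrived yet is kept in a pending state, with its range extended to the end of
    /// the assistant message that contains it.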
1481 fn parse_patches(
1482 &mut self,
1483 tags_start_ix: usize,
1484 buffer_end: text::Anchor,
1485 buffer: &BufferSnapshot,
1486 cx: &AppContext,
1487 ) -> Vec<AssistantPatch> {
1488 let mut new_patches = Vec::new();
1489 let mut pending_patch = None;
1490 let mut patch_tag_depth = 0;
1491 let mut tags = self.xml_tags[tags_start_ix..].iter().peekable();
1492 'tags: while let Some(tag) = tags.next() {
1493 if tag.range.start.cmp(&buffer_end, buffer).is_gt() && patch_tag_depth == 0 {
1494 break;
1495 }
1496
1497 if tag.kind == XmlTagKind::Patch && tag.is_open_tag {
1498 patch_tag_depth += 1;
1499 let patch_start = tag.range.start;
1500 let mut edits = Vec::<Result<AssistantEdit>>::new();
1501 let mut patch = AssistantPatch {
1502 range: patch_start..patch_start,
1503 title: String::new().into(),
1504 edits: Default::default(),
1505 status: crate::AssistantPatchStatus::Pending,
1506 };
1507
1508 while let Some(tag) = tags.next() {
1509 if tag.kind == XmlTagKind::Patch && !tag.is_open_tag {
1510 patch_tag_depth -= 1;
1511 if patch_tag_depth == 0 {
1512 patch.range.end = tag.range.end;
1513
1514 // Include the line immediately after this </patch> tag if it's empty.
1515 let patch_end_offset = patch.range.end.to_offset(buffer);
1516 let mut patch_end_chars = buffer.chars_at(patch_end_offset);
1517 if patch_end_chars.next() == Some('\n')
1518 && patch_end_chars.next().map_or(true, |ch| ch == '\n')
1519 {
1520 let messages = self.messages_for_offsets(
1521 [patch_end_offset, patch_end_offset + 1],
1522 cx,
1523 );
1524 if messages.len() == 1 {
1525 patch.range.end = buffer.anchor_before(patch_end_offset + 1);
1526 }
1527 }
1528
1529 edits.sort_unstable_by(|a, b| {
1530 if let (Ok(a), Ok(b)) = (a, b) {
1531 a.path.cmp(&b.path)
1532 } else {
1533 Ordering::Equal
1534 }
1535 });
1536 patch.edits = edits.into();
1537 patch.status = AssistantPatchStatus::Ready;
1538 new_patches.push(patch);
1539 continue 'tags;
1540 }
1541 }
1542
1543 if tag.kind == XmlTagKind::Title && tag.is_open_tag {
1544 let content_start = tag.range.end;
1545 while let Some(tag) = tags.next() {
1546 if tag.kind == XmlTagKind::Title && !tag.is_open_tag {
1547 let content_end = tag.range.start;
1548 patch.title =
1549 trimmed_text_in_range(buffer, content_start..content_end)
1550 .into();
1551 break;
1552 }
1553 }
1554 }
1555
1556 if tag.kind == XmlTagKind::Edit && tag.is_open_tag {
1557 let mut path = None;
1558 let mut old_text = None;
1559 let mut new_text = None;
1560 let mut operation = None;
1561 let mut description = None;
1562
1563 while let Some(tag) = tags.next() {
1564 if tag.kind == XmlTagKind::Edit && !tag.is_open_tag {
1565 edits.push(AssistantEdit::new(
1566 path,
1567 operation,
1568 old_text,
1569 new_text,
1570 description,
1571 ));
1572 break;
1573 }
1574
1575 if tag.is_open_tag
1576 && [
1577 XmlTagKind::Path,
1578 XmlTagKind::OldText,
1579 XmlTagKind::NewText,
1580 XmlTagKind::Operation,
1581 XmlTagKind::Description,
1582 ]
1583 .contains(&tag.kind)
1584 {
1585 let kind = tag.kind;
1586 let content_start = tag.range.end;
1587 if let Some(tag) = tags.peek() {
1588 if tag.kind == kind && !tag.is_open_tag {
1589 let tag = tags.next().unwrap();
1590 let content_end = tag.range.start;
1591 let content = trimmed_text_in_range(
1592 buffer,
1593 content_start..content_end,
1594 );
1595 match kind {
1596 XmlTagKind::Path => path = Some(content),
1597 XmlTagKind::Operation => operation = Some(content),
1598 XmlTagKind::OldText => {
1599 old_text = Some(content).filter(|s| !s.is_empty())
1600 }
1601 XmlTagKind::NewText => {
1602 new_text = Some(content).filter(|s| !s.is_empty())
1603 }
1604 XmlTagKind::Description => {
1605 description =
1606 Some(content).filter(|s| !s.is_empty())
1607 }
1608 _ => {}
1609 }
1610 }
1611 }
1612 }
1613 }
1614 }
1615 }
1616
1617 patch.edits = edits.into();
1618 pending_patch = Some(patch);
1619 }
1620 }
1621
1622 if let Some(mut pending_patch) = pending_patch {
1623 let patch_start = pending_patch.range.start.to_offset(buffer);
1624 if let Some(message) = self.message_for_offset(patch_start, cx) {
1625 if message.anchor_range.end == text::Anchor::MAX {
1626 pending_patch.range.end = text::Anchor::MAX;
1627 } else {
1628 let message_end = buffer.anchor_after(message.offset_range.end - 1);
1629 pending_patch.range.end = message_end;
1630 }
1631 } else {
1632 pending_patch.range.end = text::Anchor::MAX;
1633 }
1634
1635 new_patches.push(pending_patch);
1636 }
1637
1638 new_patches
1639 }
1640
1641 pub fn pending_command_for_position(
1642 &mut self,
1643 position: language::Anchor,
1644 cx: &mut ModelContext<Self>,
1645 ) -> Option<&mut PendingSlashCommand> {
1646 let buffer = self.buffer.read(cx);
1647 match self
1648 .pending_slash_commands
1649 .binary_search_by(|probe| probe.source_range.end.cmp(&position, buffer))
1650 {
1651 Ok(ix) => Some(&mut self.pending_slash_commands[ix]),
1652 Err(ix) => {
1653 let cmd = self.pending_slash_commands.get_mut(ix)?;
1654 if position.cmp(&cmd.source_range.start, buffer).is_ge()
1655 && position.cmp(&cmd.source_range.end, buffer).is_le()
1656 {
1657 Some(cmd)
1658 } else {
1659 None
1660 }
1661 }
1662 }
1663 }
1664
1665 pub fn pending_commands_for_range(
1666 &self,
1667 range: Range<language::Anchor>,
1668 cx: &AppContext,
1669 ) -> &[PendingSlashCommand] {
1670 let range = self.pending_command_indices_for_range(range, cx);
1671 &self.pending_slash_commands[range]
1672 }
1673
1674 fn pending_command_indices_for_range(
1675 &self,
1676 range: Range<language::Anchor>,
1677 cx: &AppContext,
1678 ) -> Range<usize> {
1679 self.indices_intersecting_buffer_range(&self.pending_slash_commands, range, cx)
1680 }
1681
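    /// Binary-searches `all_annotations` (which must be ordered by range) for the
    /// half-open index range of annotations whose ranges intersect `range`.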
1682 fn indices_intersecting_buffer_range<T: ContextAnnotation>(
1683 &self,
1684 all_annotations: &[T],
1685 range: Range<language::Anchor>,
1686 cx: &AppContext,
1687 ) -> Range<usize> {
1688 let buffer = self.buffer.read(cx);
1689 let start_ix = match all_annotations
1690 .binary_search_by(|probe| probe.range().end.cmp(&range.start, &buffer))
1691 {
1692 Ok(ix) | Err(ix) => ix,
1693 };
1694 let end_ix = match all_annotations
1695 .binary_search_by(|probe| probe.range().start.cmp(&range.end, &buffer))
1696 {
1697 Ok(ix) => ix + 1,
1698 Err(ix) => ix,
1699 };
1700 start_ix..end_ix
1701 }
1702
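    /// Replaces the slash command invocation at `command_range` with the command's
    /// output once `output` resolves, records the resulting sections, and emits both
    /// a replicated `SlashCommandFinished` operation and the matching event. While
    /// the task runs the command is marked `Running`; on failure its status becomes
    /// `Error`.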
1703 pub fn insert_command_output(
1704 &mut self,
1705 command_range: Range<language::Anchor>,
1706 output: Task<SlashCommandResult>,
1707 ensure_trailing_newline: bool,
1708 expand_result: bool,
1709 cx: &mut ModelContext<Self>,
1710 ) {
1711 self.reparse(cx);
1712
1713 let insert_output_task = cx.spawn(|this, mut cx| {
1714 let command_range = command_range.clone();
1715 async move {
1716 let output = output.await;
1717 let output = match output {
1718 Ok(output) => SlashCommandOutput::from_event_stream(output).await,
1719 Err(err) => Err(err),
1720 };
1721 this.update(&mut cx, |this, cx| match output {
1722 Ok(mut output) => {
1723 output.ensure_valid_section_ranges();
1724
1725 // Ensure there is a newline after the last section.
1726 if ensure_trailing_newline {
1727 let has_newline_after_last_section =
1728 output.sections.last().map_or(false, |last_section| {
1729 output.text[last_section.range.end..].ends_with('\n')
1730 });
1731 if !has_newline_after_last_section {
1732 output.text.push('\n');
1733 }
1734 }
1735
1736 let version = this.version.clone();
1737 let command_id = SlashCommandId(this.next_timestamp());
1738 let (operation, event) = this.buffer.update(cx, |buffer, cx| {
1739 let start = command_range.start.to_offset(buffer);
1740 let old_end = command_range.end.to_offset(buffer);
1741 let new_end = start + output.text.len();
1742 buffer.edit([(start..old_end, output.text)], None, cx);
1743
1744 let mut sections = output
1745 .sections
1746 .into_iter()
1747 .map(|section| SlashCommandOutputSection {
1748 range: buffer.anchor_after(start + section.range.start)
1749 ..buffer.anchor_before(start + section.range.end),
1750 icon: section.icon,
1751 label: section.label,
1752 metadata: section.metadata,
1753 })
1754 .collect::<Vec<_>>();
1755 sections.sort_by(|a, b| a.range.cmp(&b.range, buffer));
1756
1757 this.slash_command_output_sections
1758 .extend(sections.iter().cloned());
1759 this.slash_command_output_sections
1760 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
1761
1762 let output_range =
1763 buffer.anchor_after(start)..buffer.anchor_before(new_end);
1764 this.finished_slash_commands.insert(command_id);
1765
1766 (
1767 ContextOperation::SlashCommandFinished {
1768 id: command_id,
1769 output_range: output_range.clone(),
1770 sections: sections.clone(),
1771 version,
1772 },
1773 ContextEvent::SlashCommandFinished {
1774 output_range,
1775 sections,
1776 run_commands_in_output: output.run_commands_in_text,
1777 expand_result,
1778 },
1779 )
1780 });
1781
1782 this.push_op(operation, cx);
1783 cx.emit(event);
1784 }
1785 Err(error) => {
1786 if let Some(pending_command) =
1787 this.pending_command_for_position(command_range.start, cx)
1788 {
1789 pending_command.status =
1790 PendingSlashCommandStatus::Error(error.to_string());
1791 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1792 removed: vec![pending_command.source_range.clone()],
1793 updated: vec![pending_command.clone()],
1794 });
1795 }
1796 }
1797 })
1798 .ok();
1799 }
1800 });
1801
1802 if let Some(pending_command) = self.pending_command_for_position(command_range.start, cx) {
1803 pending_command.status = PendingSlashCommandStatus::Running {
1804 _task: insert_output_task.shared(),
1805 };
1806 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1807 removed: vec![pending_command.source_range.clone()],
1808 updated: vec![pending_command.clone()],
1809 });
1810 }
1811 }
1812
1813 pub fn insert_tool_output(
1814 &mut self,
1815 tool_use_id: Arc<str>,
1816 output: Task<Result<String>>,
1817 cx: &mut ModelContext<Self>,
1818 ) {
1819 let insert_output_task = cx.spawn(|this, mut cx| {
1820 let tool_use_id = tool_use_id.clone();
1821 async move {
1822 let output = output.await;
1823 this.update(&mut cx, |this, cx| match output {
1824 Ok(mut output) => {
1825 const NEWLINE: char = '\n';
1826
1827 if !output.ends_with(NEWLINE) {
1828 output.push(NEWLINE);
1829 }
1830
1831 let anchor_range = this.buffer.update(cx, |buffer, cx| {
1832 let insert_start = buffer.len().to_offset(buffer);
1833 let insert_end = insert_start;
1834
1835 let start = insert_start;
1836 let end = start + output.len() - NEWLINE.len_utf8();
1837
1838 buffer.edit([(insert_start..insert_end, output)], None, cx);
1839
1840 let output_range = buffer.anchor_after(start)..buffer.anchor_after(end);
1841
1842 output_range
1843 });
1844
1845 this.insert_content(
1846 Content::ToolResult {
1847 range: anchor_range.clone(),
1848 tool_use_id: tool_use_id.clone(),
1849 },
1850 cx,
1851 );
1852
1853 cx.emit(ContextEvent::ToolFinished {
1854 tool_use_id,
1855 output_range: anchor_range,
1856 });
1857 }
1858 Err(err) => {
1859 if let Some(tool_use) = this.pending_tool_uses_by_id.get_mut(&tool_use_id) {
1860 tool_use.status = PendingToolUseStatus::Error(err.to_string());
1861 }
1862 }
1863 })
1864 .ok();
1865 }
1866 });
1867
1868 if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) {
1869 tool_use.status = PendingToolUseStatus::Running {
1870 _task: insert_output_task.shared(),
1871 };
1872 }
1873 }
1874
1875 pub fn completion_provider_changed(&mut self, cx: &mut ModelContext<Self>) {
1876 self.count_remaining_tokens(cx);
1877 }
1878
1879 fn get_last_valid_message_id(&self, cx: &ModelContext<Self>) -> Option<MessageId> {
1880 self.message_anchors.iter().rev().find_map(|message| {
1881 message
1882 .start
1883 .is_valid(self.buffer.read(cx))
1884 .then_some(message.id)
1885 })
1886 }
1887
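    /// Starts a completion for the current conversation: marks cache anchors, builds
    /// the request (attaching tools when the tool-use feature flag is enabled),
    /// inserts a pending assistant message followed by an empty user message for the
    /// next reply, and streams the model's response into the buffer, reporting
    /// errors and telemetry when the stream ends.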
1888 pub fn assist(
1889 &mut self,
1890 request_type: RequestType,
1891 cx: &mut ModelContext<Self>,
1892 ) -> Option<MessageAnchor> {
1893 let model_registry = LanguageModelRegistry::read_global(cx);
1894 let provider = model_registry.active_provider()?;
1895 let model = model_registry.active_model()?;
1896 let last_message_id = self.get_last_valid_message_id(cx)?;
1897
1898 if !provider.is_authenticated(cx) {
1899 log::info!("completion provider has no credentials");
1900 return None;
1901 }
1902 // Compute which messages to cache, including the last one.
1903 self.mark_cache_anchors(&model.cache_configuration(), false, cx);
1904
1905 let mut request = self.to_completion_request(request_type, cx);
1906
1907 if cx.has_flag::<ToolUseFeatureFlag>() {
1908 let tool_registry = ToolRegistry::global(cx);
1909 request.tools = tool_registry
1910 .tools()
1911 .into_iter()
1912 .map(|tool| LanguageModelRequestTool {
1913 name: tool.name(),
1914 description: tool.description(),
1915 input_schema: tool.input_schema(),
1916 })
1917 .collect();
1918 }
1919
1920 let assistant_message = self
1921 .insert_message_after(last_message_id, Role::Assistant, MessageStatus::Pending, cx)
1922 .unwrap();
1923
1924 // Queue up the user's next reply.
1925 let user_message = self
1926 .insert_message_after(assistant_message.id, Role::User, MessageStatus::Done, cx)
1927 .unwrap();
1928
1929 let pending_completion_id = post_inc(&mut self.completion_count);
1930
1931 let task = cx.spawn({
1932 |this, mut cx| async move {
1933 let stream = model.stream_completion(request, &cx);
1934 let assistant_message_id = assistant_message.id;
1935 let mut response_latency = None;
1936 let stream_completion = async {
1937 let request_start = Instant::now();
1938 let mut events = stream.await?;
1939 let mut stop_reason = StopReason::EndTurn;
1940
1941 while let Some(event) = events.next().await {
1942 if response_latency.is_none() {
1943 response_latency = Some(request_start.elapsed());
1944 }
1945 let event = event?;
1946
1947 this.update(&mut cx, |this, cx| {
1948 let message_ix = this
1949 .message_anchors
1950 .iter()
1951 .position(|message| message.id == assistant_message_id)?;
1952 this.buffer.update(cx, |buffer, cx| {
1953 let message_old_end_offset = this.message_anchors[message_ix + 1..]
1954 .iter()
1955 .find(|message| message.start.is_valid(buffer))
1956 .map_or(buffer.len(), |message| {
1957 message.start.to_offset(buffer).saturating_sub(1)
1958 });
1959
1960 match event {
1961 LanguageModelCompletionEvent::StartMessage { .. } => {}
1962 LanguageModelCompletionEvent::Stop(reason) => {
1963 stop_reason = reason;
1964 }
1965 LanguageModelCompletionEvent::Text(chunk) => {
1966 buffer.edit(
1967 [(
1968 message_old_end_offset..message_old_end_offset,
1969 chunk,
1970 )],
1971 None,
1972 cx,
1973 );
1974 }
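// Render the tool-use request as pretty-printed JSON surrounded by
// newlines, and record the JSON's range (without those newlines) as a
// pending tool use so it can be resolved later.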
1975 LanguageModelCompletionEvent::ToolUse(tool_use) => {
1976 const NEWLINE: char = '\n';
1977
1978 let mut text = String::new();
1979 text.push(NEWLINE);
1980 text.push_str(
1981 &serde_json::to_string_pretty(&tool_use)
1982 .expect("failed to serialize tool use to JSON"),
1983 );
1984 text.push(NEWLINE);
1985 let text_len = text.len();
1986
1987 buffer.edit(
1988 [(
1989 message_old_end_offset..message_old_end_offset,
1990 text,
1991 )],
1992 None,
1993 cx,
1994 );
1995
1996 let start_ix = message_old_end_offset + NEWLINE.len_utf8();
1997 let end_ix =
1998 message_old_end_offset + text_len - NEWLINE.len_utf8();
1999 let source_range = buffer.anchor_after(start_ix)
2000 ..buffer.anchor_after(end_ix);
2001
2002 let tool_use_id: Arc<str> = tool_use.id.into();
2003 this.pending_tool_uses_by_id.insert(
2004 tool_use_id.clone(),
2005 PendingToolUse {
2006 id: tool_use_id,
2007 name: tool_use.name,
2008 input: tool_use.input,
2009 status: PendingToolUseStatus::Idle,
2010 source_range,
2011 },
2012 );
2013 }
2014 }
2015 });
2016
2017 cx.emit(ContextEvent::StreamedCompletion);
2018
2019 Some(())
2020 })?;
2021 smol::future::yield_now().await;
2022 }
2023 this.update(&mut cx, |this, cx| {
2024 this.pending_completions
2025 .retain(|completion| completion.id != pending_completion_id);
2026 this.summarize(false, cx);
2027 this.update_cache_status_for_completion(cx);
2028 })?;
2029
2030 anyhow::Ok(stop_reason)
2031 };
2032
2033 let result = stream_completion.await;
2034
2035 this.update(&mut cx, |this, cx| {
2036 let error_message = if let Some(error) = result.as_ref().err() {
2037 if error.is::<PaymentRequiredError>() {
2038 cx.emit(ContextEvent::ShowPaymentRequiredError);
2039 this.update_metadata(assistant_message_id, cx, |metadata| {
2040 metadata.status = MessageStatus::Canceled;
2041 });
2042 Some(error.to_string())
2043 } else if error.is::<MaxMonthlySpendReachedError>() {
2044 cx.emit(ContextEvent::ShowMaxMonthlySpendReachedError);
2045 this.update_metadata(assistant_message_id, cx, |metadata| {
2046 metadata.status = MessageStatus::Canceled;
2047 });
2048 Some(error.to_string())
2049 } else {
2050 let error_message = error.to_string().trim().to_string();
2051 cx.emit(ContextEvent::ShowAssistError(SharedString::from(
2052 error_message.clone(),
2053 )));
2054 this.update_metadata(assistant_message_id, cx, |metadata| {
2055 metadata.status =
2056 MessageStatus::Error(SharedString::from(error_message.clone()));
2057 });
2058 Some(error_message)
2059 }
2060 } else {
2061 this.update_metadata(assistant_message_id, cx, |metadata| {
2062 metadata.status = MessageStatus::Done;
2063 });
2064 None
2065 };
2066
2067 let language_name = this
2068 .buffer
2069 .read(cx)
2070 .language()
2071 .map(|language| language.name());
2072 report_assistant_event(
2073 AssistantEvent {
2074 conversation_id: Some(this.id.0.clone()),
2075 kind: AssistantKind::Panel,
2076 phase: AssistantPhase::Response,
2077 message_id: None,
2078 model: model.telemetry_id(),
2079 model_provider: model.provider_id().to_string(),
2080 response_latency,
2081 error_message,
2082 language_name: language_name.map(|name| name.to_proto()),
2083 },
2084 this.telemetry.clone(),
2085 cx.http_client(),
2086 model.api_key(cx),
2087 cx.background_executor(),
2088 );
2089
2090 if let Ok(stop_reason) = result {
2091 match stop_reason {
2092 StopReason::ToolUse => {
2093 cx.emit(ContextEvent::UsePendingTools);
2094 }
2095 StopReason::EndTurn => {}
2096 StopReason::MaxTokens => {}
2097 }
2098 }
2099 })
2100 .ok();
2101 }
2102 });
2103
2104 self.pending_completions.push(PendingCompletion {
2105 id: pending_completion_id,
2106 assistant_message_id: assistant_message.id,
2107 _task: task,
2108 });
2109
2110 Some(user_message)
2111 }
2112
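/// Builds a completion request from every message marked `Done`, interleaving each
/// message's text with any image, tool-use, and tool-result content that falls
/// within it. For `SuggestEdits` requests, a preamble is inserted just before the
/// last user message.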
2113 pub fn to_completion_request(
2114 &self,
2115 request_type: RequestType,
2116 cx: &AppContext,
2117 ) -> LanguageModelRequest {
2118 let buffer = self.buffer.read(cx);
2119
2120 let mut contents = self.contents(cx).peekable();
2121
2122 fn collect_text_content(buffer: &Buffer, range: Range<usize>) -> Option<String> {
2123 let text: String = buffer.text_for_range(range.clone()).collect();
2124 if text.trim().is_empty() {
2125 None
2126 } else {
2127 Some(text)
2128 }
2129 }
2130
2131 let mut completion_request = LanguageModelRequest {
2132 messages: Vec::new(),
2133 tools: Vec::new(),
2134 stop: Vec::new(),
2135 temperature: None,
2136 };
2137 for message in self.messages(cx) {
2138 if message.status != MessageStatus::Done {
2139 continue;
2140 }
2141
2142 let mut offset = message.offset_range.start;
2143 let mut request_message = LanguageModelRequestMessage {
2144 role: message.role,
2145 content: Vec::new(),
2146 cache: message
2147 .cache
2148 .as_ref()
2149 .map_or(false, |cache| cache.is_anchor),
2150 };
2151
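// Consume any content (images, tool uses, tool results) that ends inside
// this message, emitting the plain text between consecutive pieces.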
2152 while let Some(content) = contents.peek() {
2153 if content
2154 .range()
2155 .end
2156 .cmp(&message.anchor_range.end, buffer)
2157 .is_lt()
2158 {
2159 let content = contents.next().unwrap();
2160 let range = content.range().to_offset(buffer);
2161 request_message.content.extend(
2162 collect_text_content(buffer, offset..range.start).map(MessageContent::Text),
2163 );
2164
2165 match content {
2166 Content::Image { image, .. } => {
2167 if let Some(image) = image.clone().now_or_never().flatten() {
2168 request_message
2169 .content
2170 .push(language_model::MessageContent::Image(image));
2171 }
2172 }
2173 Content::ToolUse { tool_use, .. } => {
2174 request_message
2175 .content
2176 .push(language_model::MessageContent::ToolUse(tool_use.clone()));
2177 }
2178 Content::ToolResult { tool_use_id, .. } => {
2179 request_message.content.push(
2180 language_model::MessageContent::ToolResult(
2181 LanguageModelToolResult {
2182 tool_use_id: tool_use_id.to_string(),
2183 is_error: false,
2184 content: collect_text_content(buffer, range.clone())
2185 .unwrap_or_default(),
2186 },
2187 ),
2188 );
2189 }
2190 }
2191
2192 offset = range.end;
2193 } else {
2194 break;
2195 }
2196 }
2197
2198 request_message.content.extend(
2199 collect_text_content(buffer, offset..message.offset_range.end)
2200 .map(MessageContent::Text),
2201 );
2202
2203 completion_request.messages.push(request_message);
2204 }
2205
2206 if let RequestType::SuggestEdits = request_type {
2207 if let Ok(preamble) = self.prompt_builder.generate_suggest_edits_prompt() {
2208 let last_elem_index = completion_request.messages.len();
2209
2210 completion_request
2211 .messages
2212 .push(LanguageModelRequestMessage {
2213 role: Role::User,
2214 content: vec![MessageContent::Text(preamble)],
2215 cache: false,
2216 });
2217
2218 // The preamble message should be sent right before the last actual user message.
2219 completion_request
2220 .messages
2221 .swap(last_elem_index, last_elem_index.saturating_sub(1));
2222 }
2223 }
2224
2225 completion_request
2226 }
2227
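/// Cancels the most recently started completion, marking its assistant message as
/// canceled if it is still pending. Returns whether a pending completion was found.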
2228 pub fn cancel_last_assist(&mut self, cx: &mut ModelContext<Self>) -> bool {
2229 if let Some(pending_completion) = self.pending_completions.pop() {
2230 self.update_metadata(pending_completion.assistant_message_id, cx, |metadata| {
2231 if metadata.status == MessageStatus::Pending {
2232 metadata.status = MessageStatus::Canceled;
2233 }
2234 });
2235 true
2236 } else {
2237 false
2238 }
2239 }
2240
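/// Cycles the role of each of the given messages and reparses any patches that
/// overlap the affected message ranges.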
2241 pub fn cycle_message_roles(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
2242 for id in &ids {
2243 if let Some(metadata) = self.messages_metadata.get(id) {
2244 let role = metadata.role.cycle();
2245 self.update_metadata(*id, cx, |metadata| metadata.role = role);
2246 }
2247 }
2248
2249 self.message_roles_updated(ids, cx);
2250 }
2251
2252 fn message_roles_updated(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
2253 let mut ranges = Vec::new();
2254 for message in self.messages(cx) {
2255 if ids.contains(&message.id) {
2256 ranges.push(message.anchor_range.clone());
2257 }
2258 }
2259
2260 let buffer = self.buffer.read(cx).text_snapshot();
2261 let mut updated = Vec::new();
2262 let mut removed = Vec::new();
2263 for range in ranges {
2264 self.reparse_patches_in_range(range, &buffer, &mut updated, &mut removed, cx);
2265 }
2266
2267 if !updated.is_empty() || !removed.is_empty() {
2268 cx.emit(ContextEvent::PatchesUpdated { removed, updated })
2269 }
2270 }
2271
2272 pub fn update_metadata(
2273 &mut self,
2274 id: MessageId,
2275 cx: &mut ModelContext<Self>,
2276 f: impl FnOnce(&mut MessageMetadata),
2277 ) {
2278 let version = self.version.clone();
2279 let timestamp = self.next_timestamp();
2280 if let Some(metadata) = self.messages_metadata.get_mut(&id) {
2281 f(metadata);
2282 metadata.timestamp = timestamp;
2283 let operation = ContextOperation::UpdateMessage {
2284 message_id: id,
2285 metadata: metadata.clone(),
2286 version,
2287 };
2288 self.push_op(operation, cx);
2289 cx.emit(ContextEvent::MessagesEdited);
2290 cx.notify();
2291 }
2292 }
2293
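/// Inserts a new message anchor immediately after the given message, adding a
/// newline to the buffer to host it. Returns `None` if the given message no longer
/// exists.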
2294 pub fn insert_message_after(
2295 &mut self,
2296 message_id: MessageId,
2297 role: Role,
2298 status: MessageStatus,
2299 cx: &mut ModelContext<Self>,
2300 ) -> Option<MessageAnchor> {
2301 if let Some(prev_message_ix) = self
2302 .message_anchors
2303 .iter()
2304 .position(|message| message.id == message_id)
2305 {
2306 // Find the next valid message after the one we were given.
2307 let mut next_message_ix = prev_message_ix + 1;
2308 while let Some(next_message) = self.message_anchors.get(next_message_ix) {
2309 if next_message.start.is_valid(self.buffer.read(cx)) {
2310 break;
2311 }
2312 next_message_ix += 1;
2313 }
2314
2315 let start = self.buffer.update(cx, |buffer, cx| {
2316 let offset = self
2317 .message_anchors
2318 .get(next_message_ix)
2319 .map_or(buffer.len(), |message| {
2320 buffer.clip_offset(message.start.to_offset(buffer) - 1, Bias::Left)
2321 });
2322 buffer.edit([(offset..offset, "\n")], None, cx);
2323 buffer.anchor_before(offset + 1)
2324 });
2325
2326 let version = self.version.clone();
2327 let anchor = MessageAnchor {
2328 id: MessageId(self.next_timestamp()),
2329 start,
2330 };
2331 let metadata = MessageMetadata {
2332 role,
2333 status,
2334 timestamp: anchor.id.0,
2335 cache: None,
2336 };
2337 self.insert_message(anchor.clone(), metadata.clone(), cx);
2338 self.push_op(
2339 ContextOperation::InsertMessage {
2340 anchor: anchor.clone(),
2341 metadata,
2342 version,
2343 },
2344 cx,
2345 );
2346 Some(anchor)
2347 } else {
2348 None
2349 }
2350 }
2351
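/// Inserts `content` into the ordered content list, replacing any existing entry
/// that compares equal to it.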
2352 pub fn insert_content(&mut self, content: Content, cx: &mut ModelContext<Self>) {
2353 let buffer = self.buffer.read(cx);
2354 let insertion_ix = match self
2355 .contents
2356 .binary_search_by(|probe| probe.cmp(&content, buffer))
2357 {
2358 Ok(ix) => {
2359 self.contents.remove(ix);
2360 ix
2361 }
2362 Err(ix) => ix,
2363 };
2364 self.contents.insert(insertion_ix, content);
2365 cx.emit(ContextEvent::MessagesEdited);
2366 }
2367
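/// Returns the content entries whose anchors are still valid in the buffer.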
2368 pub fn contents<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Content> {
2369 let buffer = self.buffer.read(cx);
2370 self.contents
2371 .iter()
2372 .filter(|content| {
2373 let range = content.range();
2374 range.start.is_valid(buffer) && range.end.is_valid(buffer)
2375 })
2376 .cloned()
2377 }
2378
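/// Splits the message containing `range` at the range's boundaries, reusing an
/// adjacent newline when one exists and inserting one otherwise. Returns the
/// anchors of the newly created message(s), or `(None, None)` if the range spans
/// multiple messages.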
2379 pub fn split_message(
2380 &mut self,
2381 range: Range<usize>,
2382 cx: &mut ModelContext<Self>,
2383 ) -> (Option<MessageAnchor>, Option<MessageAnchor>) {
2384 let start_message = self.message_for_offset(range.start, cx);
2385 let end_message = self.message_for_offset(range.end, cx);
2386 if let Some((start_message, end_message)) = start_message.zip(end_message) {
2387 // Prevent splitting when range spans multiple messages.
2388 if start_message.id != end_message.id {
2389 return (None, None);
2390 }
2391
2392 let message = start_message;
2393 let role = message.role;
2394 let mut edited_buffer = false;
2395
2396 let mut suffix_start = None;
2397
2398 // TODO: Figure out why this started panicking; `saturating_sub(1)` below avoids the underflow in the meantime.
2399 if range.start > message.offset_range.start
2400 && range.end < message.offset_range.end.saturating_sub(1)
2401 {
2402 if self.buffer.read(cx).chars_at(range.end).next() == Some('\n') {
2403 suffix_start = Some(range.end + 1);
2404 } else if self.buffer.read(cx).reversed_chars_at(range.end).next() == Some('\n') {
2405 suffix_start = Some(range.end);
2406 }
2407 }
2408
2409 let version = self.version.clone();
2410 let suffix = if let Some(suffix_start) = suffix_start {
2411 MessageAnchor {
2412 id: MessageId(self.next_timestamp()),
2413 start: self.buffer.read(cx).anchor_before(suffix_start),
2414 }
2415 } else {
2416 self.buffer.update(cx, |buffer, cx| {
2417 buffer.edit([(range.end..range.end, "\n")], None, cx);
2418 });
2419 edited_buffer = true;
2420 MessageAnchor {
2421 id: MessageId(self.next_timestamp()),
2422 start: self.buffer.read(cx).anchor_before(range.end + 1),
2423 }
2424 };
2425
2426 let suffix_metadata = MessageMetadata {
2427 role,
2428 status: MessageStatus::Done,
2429 timestamp: suffix.id.0,
2430 cache: None,
2431 };
2432 self.insert_message(suffix.clone(), suffix_metadata.clone(), cx);
2433 self.push_op(
2434 ContextOperation::InsertMessage {
2435 anchor: suffix.clone(),
2436 metadata: suffix_metadata,
2437 version,
2438 },
2439 cx,
2440 );
2441
2442 let new_messages =
2443 if range.start == range.end || range.start == message.offset_range.start {
2444 (None, Some(suffix))
2445 } else {
2446 let mut prefix_end = None;
2447 if range.start > message.offset_range.start
2448 && range.end < message.offset_range.end - 1
2449 {
2450 if self.buffer.read(cx).chars_at(range.start).next() == Some('\n') {
2451 prefix_end = Some(range.start + 1);
2452 } else if self.buffer.read(cx).reversed_chars_at(range.start).next()
2453 == Some('\n')
2454 {
2455 prefix_end = Some(range.start);
2456 }
2457 }
2458
2459 let version = self.version.clone();
2460 let selection = if let Some(prefix_end) = prefix_end {
2461 MessageAnchor {
2462 id: MessageId(self.next_timestamp()),
2463 start: self.buffer.read(cx).anchor_before(prefix_end),
2464 }
2465 } else {
2466 self.buffer.update(cx, |buffer, cx| {
2467 buffer.edit([(range.start..range.start, "\n")], None, cx)
2468 });
2469 edited_buffer = true;
2470 MessageAnchor {
2471 id: MessageId(self.next_timestamp()),
2472 start: self.buffer.read(cx).anchor_before(range.end + 1),
2473 }
2474 };
2475
2476 let selection_metadata = MessageMetadata {
2477 role,
2478 status: MessageStatus::Done,
2479 timestamp: selection.id.0,
2480 cache: None,
2481 };
2482 self.insert_message(selection.clone(), selection_metadata.clone(), cx);
2483 self.push_op(
2484 ContextOperation::InsertMessage {
2485 anchor: selection.clone(),
2486 metadata: selection_metadata,
2487 version,
2488 },
2489 cx,
2490 );
2491
2492 (Some(selection), Some(suffix))
2493 };
2494
2495 if !edited_buffer {
2496 cx.emit(ContextEvent::MessagesEdited);
2497 }
2498 new_messages
2499 } else {
2500 (None, None)
2501 }
2502 }
2503
2504 fn insert_message(
2505 &mut self,
2506 new_anchor: MessageAnchor,
2507 new_metadata: MessageMetadata,
2508 cx: &mut ModelContext<Self>,
2509 ) {
2510 cx.emit(ContextEvent::MessagesEdited);
2511
2512 self.messages_metadata.insert(new_anchor.id, new_metadata);
2513
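// Keep `message_anchors` sorted by buffer position (ties broken by message ID).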
2514 let buffer = self.buffer.read(cx);
2515 let insertion_ix = self
2516 .message_anchors
2517 .iter()
2518 .position(|anchor| {
2519 let comparison = new_anchor.start.cmp(&anchor.start, buffer);
2520 comparison.is_lt() || (comparison.is_eq() && new_anchor.id > anchor.id)
2521 })
2522 .unwrap_or(self.message_anchors.len());
2523 self.message_anchors.insert(insertion_ix, new_anchor);
2524 }
2525
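/// Asks the active model for a short title for the conversation and streams the
/// first line of its reply into the context summary. When `replace_old` is false,
/// this runs only once the context has at least two messages and no summary yet.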
2526 pub(super) fn summarize(&mut self, replace_old: bool, cx: &mut ModelContext<Self>) {
2527 let Some(provider) = LanguageModelRegistry::read_global(cx).active_provider() else {
2528 return;
2529 };
2530 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
2531 return;
2532 };
2533
2534 if replace_old || (self.message_anchors.len() >= 2 && self.summary.is_none()) {
2535 if !provider.is_authenticated(cx) {
2536 return;
2537 }
2538
2539 let mut request = self.to_completion_request(RequestType::Chat, cx);
2540 request.messages.push(LanguageModelRequestMessage {
2541 role: Role::User,
2542 content: vec![
2543 "Generate a concise 3-7 word title for this conversation, omitting punctuation"
2544 .into(),
2545 ],
2546 cache: false,
2547 });
2548
2549 self.pending_summary = cx.spawn(|this, mut cx| {
2550 async move {
2551 let stream = model.stream_completion_text(request, &cx);
2552 let mut messages = stream.await?;
2553
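// When regenerating, clear the old summary text once before appending
// streamed chunks.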
2554 let mut replaced = !replace_old;
2555 while let Some(message) = messages.stream.next().await {
2556 let text = message?;
2557 let mut lines = text.lines();
2558 this.update(&mut cx, |this, cx| {
2559 let version = this.version.clone();
2560 let timestamp = this.next_timestamp();
2561 let summary = this.summary.get_or_insert(ContextSummary::default());
2562 if !replaced && replace_old {
2563 summary.text.clear();
2564 replaced = true;
2565 }
2566 summary.text.extend(lines.next());
2567 summary.timestamp = timestamp;
2568 let operation = ContextOperation::UpdateSummary {
2569 summary: summary.clone(),
2570 version,
2571 };
2572 this.push_op(operation, cx);
2573 cx.emit(ContextEvent::SummaryChanged);
2574 })?;
2575
2576 // Stop if the LLM generated multiple lines.
2577 if lines.next().is_some() {
2578 break;
2579 }
2580 }
2581
2582 this.update(&mut cx, |this, cx| {
2583 let version = this.version.clone();
2584 let timestamp = this.next_timestamp();
2585 if let Some(summary) = this.summary.as_mut() {
2586 summary.done = true;
2587 summary.timestamp = timestamp;
2588 let operation = ContextOperation::UpdateSummary {
2589 summary: summary.clone(),
2590 version,
2591 };
2592 this.push_op(operation, cx);
2593 cx.emit(ContextEvent::SummaryChanged);
2594 }
2595 })?;
2596
2597 anyhow::Ok(())
2598 }
2599 .log_err()
2600 });
2601 }
2602 }
2603
2604 fn message_for_offset(&self, offset: usize, cx: &AppContext) -> Option<Message> {
2605 self.messages_for_offsets([offset], cx).pop()
2606 }
2607
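/// Returns the message containing each of the given offsets, in order and without
/// duplicates. Offsets are expected in ascending buffer order.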
2608 pub fn messages_for_offsets(
2609 &self,
2610 offsets: impl IntoIterator<Item = usize>,
2611 cx: &AppContext,
2612 ) -> Vec<Message> {
2613 let mut result = Vec::new();
2614
2615 let mut messages = self.messages(cx).peekable();
2616 let mut offsets = offsets.into_iter().peekable();
2617 let mut current_message = messages.next();
2618 while let Some(offset) = offsets.next() {
2619 // Locate the message that contains the offset.
2620 while current_message.as_ref().map_or(false, |message| {
2621 !message.offset_range.contains(&offset) && messages.peek().is_some()
2622 }) {
2623 current_message = messages.next();
2624 }
2625 let Some(message) = current_message.as_ref() else {
2626 break;
2627 };
2628
2629 // Skip offsets that are in the same message.
2630 while offsets.peek().map_or(false, |offset| {
2631 message.offset_range.contains(offset) || messages.peek().is_none()
2632 }) {
2633 offsets.next();
2634 }
2635
2636 result.push(message.clone());
2637 }
2638 result
2639 }
2640
2641 fn messages_from_anchors<'a>(
2642 &'a self,
2643 message_anchors: impl Iterator<Item = &'a MessageAnchor> + 'a,
2644 cx: &'a AppContext,
2645 ) -> impl 'a + Iterator<Item = Message> {
2646 let buffer = self.buffer.read(cx);
2647
2648 Self::messages_from_iters(buffer, &self.messages_metadata, message_anchors.enumerate())
2649 }
2650
2651 pub fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Message> {
2652 self.messages_from_anchors(self.message_anchors.iter(), cx)
2653 }
2654
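/// Produces `Message` values from anchor/metadata pairs, folding anchors whose
/// start is no longer valid into the preceding message and ending each message at
/// the next valid anchor (or at the end of the buffer).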
2655 pub fn messages_from_iters<'a>(
2656 buffer: &'a Buffer,
2657 metadata: &'a HashMap<MessageId, MessageMetadata>,
2658 messages: impl Iterator<Item = (usize, &'a MessageAnchor)> + 'a,
2659 ) -> impl 'a + Iterator<Item = Message> {
2660 let mut messages = messages.peekable();
2661
2662 iter::from_fn(move || {
2663 if let Some((start_ix, message_anchor)) = messages.next() {
2664 let metadata = metadata.get(&message_anchor.id)?;
2665
2666 let message_start = message_anchor.start.to_offset(buffer);
2667 let mut message_end = None;
2668 let mut end_ix = start_ix;
2669 while let Some((_, next_message)) = messages.peek() {
2670 if next_message.start.is_valid(buffer) {
2671 message_end = Some(next_message.start);
2672 break;
2673 } else {
2674 end_ix += 1;
2675 messages.next();
2676 }
2677 }
2678 let message_end_anchor = message_end.unwrap_or(language::Anchor::MAX);
2679 let message_end = message_end_anchor.to_offset(buffer);
2680
2681 return Some(Message {
2682 index_range: start_ix..end_ix,
2683 offset_range: message_start..message_end,
2684 anchor_range: message_anchor.start..message_end_anchor,
2685 id: message_anchor.id,
2686 role: metadata.role,
2687 status: metadata.status.clone(),
2688 cache: metadata.cache.clone(),
2689 });
2690 }
2691 None
2692 })
2693 }
2694
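/// Serializes the context into `contexts_dir()` once a completed summary is
/// available, optionally debouncing the write and removing the previously saved
/// file when the path changes. Remote (non-default replica) contexts are never
/// saved.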
2695 pub fn save(
2696 &mut self,
2697 debounce: Option<Duration>,
2698 fs: Arc<dyn Fs>,
2699 cx: &mut ModelContext<Context>,
2700 ) {
2701 if self.replica_id() != ReplicaId::default() {
2702 // Prevent saving a remote context for now.
2703 return;
2704 }
2705
2706 self.pending_save = cx.spawn(|this, mut cx| async move {
2707 if let Some(debounce) = debounce {
2708 cx.background_executor().timer(debounce).await;
2709 }
2710
2711 let (old_path, summary) = this.read_with(&cx, |this, _| {
2712 let path = this.path.clone();
2713 let summary = if let Some(summary) = this.summary.as_ref() {
2714 if summary.done {
2715 Some(summary.text.clone())
2716 } else {
2717 None
2718 }
2719 } else {
2720 None
2721 };
2722 (path, summary)
2723 })?;
2724
2725 if let Some(summary) = summary {
2726 let context = this.read_with(&cx, |this, cx| this.serialize(cx))?;
2727 let mut discriminant = 1;
2728 let mut new_path;
2729 loop {
2730 new_path = contexts_dir().join(format!(
2731 "{} - {}.zed.json",
2732 summary.trim(),
2733 discriminant
2734 ));
2735 if fs.is_file(&new_path).await {
2736 discriminant += 1;
2737 } else {
2738 break;
2739 }
2740 }
2741
2742 fs.create_dir(contexts_dir().as_ref()).await?;
2743 fs.atomic_write(new_path.clone(), serde_json::to_string(&context).unwrap())
2744 .await?;
2745 if let Some(old_path) = old_path {
2746 if new_path != old_path {
2747 fs.remove_file(
2748 &old_path,
2749 RemoveOptions {
2750 recursive: false,
2751 ignore_if_not_exists: true,
2752 },
2753 )
2754 .await?;
2755 }
2756 }
2757
2758 this.update(&mut cx, |this, _| this.path = Some(new_path))?;
2759 }
2760
2761 Ok(())
2762 });
2763 }
2764
2765 pub(crate) fn custom_summary(&mut self, custom_summary: String, cx: &mut ModelContext<Self>) {
2766 let timestamp = self.next_timestamp();
2767 let summary = self.summary.get_or_insert(ContextSummary::default());
2768 summary.timestamp = timestamp;
2769 summary.done = true;
2770 summary.text = custom_summary;
2771 cx.emit(ContextEvent::SummaryChanged);
2772 }
2773}
2774
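/// Collects the text in `range`, stripping leading newlines and trailing whitespace.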
2775fn trimmed_text_in_range(buffer: &BufferSnapshot, range: Range<text::Anchor>) -> String {
2776 let mut is_start = true;
2777 let mut content = buffer
2778 .text_for_range(range)
2779 .map(|mut chunk| {
2780 if is_start {
2781 chunk = chunk.trim_start_matches('\n');
2782 if !chunk.is_empty() {
2783 is_start = false;
2784 }
2785 }
2786 chunk
2787 })
2788 .collect::<String>();
2789 content.truncate(content.trim_end().len());
2790 content
2791}
2792
2793#[derive(Debug, Default)]
2794pub struct ContextVersion {
2795 context: clock::Global,
2796 buffer: clock::Global,
2797}
2798
2799impl ContextVersion {
2800 pub fn from_proto(proto: &proto::ContextVersion) -> Self {
2801 Self {
2802 context: language::proto::deserialize_version(&proto.context_version),
2803 buffer: language::proto::deserialize_version(&proto.buffer_version),
2804 }
2805 }
2806
2807 pub fn to_proto(&self, context_id: ContextId) -> proto::ContextVersion {
2808 proto::ContextVersion {
2809 context_id: context_id.to_proto(),
2810 context_version: language::proto::serialize_version(&self.context),
2811 buffer_version: language::proto::serialize_version(&self.buffer),
2812 }
2813 }
2814}
2815
2816#[derive(Debug, Clone)]
2817pub struct PendingSlashCommand {
2818 pub name: String,
2819 pub arguments: SmallVec<[String; 3]>,
2820 pub status: PendingSlashCommandStatus,
2821 pub source_range: Range<language::Anchor>,
2822}
2823
2824#[derive(Debug, Clone)]
2825pub enum PendingSlashCommandStatus {
2826 Idle,
2827 Running { _task: Shared<Task<()>> },
2828 Error(String),
2829}
2830
2831pub(crate) struct ToolUseFeatureFlag;
2832
2833impl FeatureFlag for ToolUseFeatureFlag {
2834 const NAME: &'static str = "assistant-tool-use";
2835
2836 fn enabled_for_staff() -> bool {
2837 false
2838 }
2839}
2840
2841#[derive(Debug, Clone)]
2842pub struct PendingToolUse {
2843 pub id: Arc<str>,
2844 pub name: String,
2845 pub input: serde_json::Value,
2846 pub status: PendingToolUseStatus,
2847 pub source_range: Range<language::Anchor>,
2848}
2849
2850#[derive(Debug, Clone)]
2851pub enum PendingToolUseStatus {
2852 Idle,
2853 Running { _task: Shared<Task<()>> },
2854 Error(String),
2855}
2856
2857impl PendingToolUseStatus {
2858 pub fn is_idle(&self) -> bool {
2859 matches!(self, PendingToolUseStatus::Idle)
2860 }
2861}
2862
2863#[derive(Serialize, Deserialize)]
2864pub struct SavedMessage {
2865 pub id: MessageId,
2866 pub start: usize,
2867 pub metadata: MessageMetadata,
2868}
2869
2870#[derive(Serialize, Deserialize)]
2871pub struct SavedContext {
2872 pub id: Option<ContextId>,
2873 pub zed: String,
2874 pub version: String,
2875 pub text: String,
2876 pub messages: Vec<SavedMessage>,
2877 pub summary: String,
2878 pub slash_command_output_sections:
2879 Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
2880}
2881
2882impl SavedContext {
2883 pub const VERSION: &'static str = "0.4.0";
2884
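/// Deserializes a saved context from JSON, transparently upgrading contexts saved
/// by older versions (0.1.0 through 0.3.0) to the current format.
///
/// Illustrative usage (not compiled; the path is hypothetical):
///
/// ```ignore
/// let json = std::fs::read_to_string("conversation.zed.json")?;
/// let context = SavedContext::from_json(&json)?;
/// ```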
2885 pub fn from_json(json: &str) -> Result<Self> {
2886 let saved_context_json = serde_json::from_str::<serde_json::Value>(json)?;
2887 match saved_context_json
2888 .get("version")
2889 .ok_or_else(|| anyhow!("version not found"))?
2890 {
2891 serde_json::Value::String(version) => match version.as_str() {
2892 SavedContext::VERSION => {
2893 Ok(serde_json::from_value::<SavedContext>(saved_context_json)?)
2894 }
2895 SavedContextV0_3_0::VERSION => {
2896 let saved_context =
2897 serde_json::from_value::<SavedContextV0_3_0>(saved_context_json)?;
2898 Ok(saved_context.upgrade())
2899 }
2900 SavedContextV0_2_0::VERSION => {
2901 let saved_context =
2902 serde_json::from_value::<SavedContextV0_2_0>(saved_context_json)?;
2903 Ok(saved_context.upgrade())
2904 }
2905 SavedContextV0_1_0::VERSION => {
2906 let saved_context =
2907 serde_json::from_value::<SavedContextV0_1_0>(saved_context_json)?;
2908 Ok(saved_context.upgrade())
2909 }
2910 _ => Err(anyhow!("unrecognized saved context version: {}", version)),
2911 },
2912 _ => Err(anyhow!("version not found on saved context")),
2913 }
2914 }
2915
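/// Converts the saved context into the sequence of operations needed to rebuild
/// it: one insertion per message, a metadata update for the implicit first
/// message, the restored slash-command output sections, and the summary.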
2916 fn into_ops(
2917 self,
2918 buffer: &Model<Buffer>,
2919 cx: &mut ModelContext<Context>,
2920 ) -> Vec<ContextOperation> {
2921 let mut operations = Vec::new();
2922 let mut version = clock::Global::new();
2923 let mut next_timestamp = clock::Lamport::new(ReplicaId::default());
2924
2925 let mut first_message_metadata = None;
2926 for message in self.messages {
2927 if message.id == MessageId(clock::Lamport::default()) {
2928 first_message_metadata = Some(message.metadata);
2929 } else {
2930 operations.push(ContextOperation::InsertMessage {
2931 anchor: MessageAnchor {
2932 id: message.id,
2933 start: buffer.read(cx).anchor_before(message.start),
2934 },
2935 metadata: MessageMetadata {
2936 role: message.metadata.role,
2937 status: message.metadata.status,
2938 timestamp: message.metadata.timestamp,
2939 cache: None,
2940 },
2941 version: version.clone(),
2942 });
2943 version.observe(message.id.0);
2944 next_timestamp.observe(message.id.0);
2945 }
2946 }
2947
2948 if let Some(metadata) = first_message_metadata {
2949 let timestamp = next_timestamp.tick();
2950 operations.push(ContextOperation::UpdateMessage {
2951 message_id: MessageId(clock::Lamport::default()),
2952 metadata: MessageMetadata {
2953 role: metadata.role,
2954 status: metadata.status,
2955 timestamp,
2956 cache: None,
2957 },
2958 version: version.clone(),
2959 });
2960 version.observe(timestamp);
2961 }
2962
2963 let timestamp = next_timestamp.tick();
2964 operations.push(ContextOperation::SlashCommandFinished {
2965 id: SlashCommandId(timestamp),
2966 output_range: language::Anchor::MIN..language::Anchor::MAX,
2967 sections: self
2968 .slash_command_output_sections
2969 .into_iter()
2970 .map(|section| {
2971 let buffer = buffer.read(cx);
2972 SlashCommandOutputSection {
2973 range: buffer.anchor_after(section.range.start)
2974 ..buffer.anchor_before(section.range.end),
2975 icon: section.icon,
2976 label: section.label,
2977 metadata: section.metadata,
2978 }
2979 })
2980 .collect(),
2981 version: version.clone(),
2982 });
2983 version.observe(timestamp);
2984
2985 let timestamp = next_timestamp.tick();
2986 operations.push(ContextOperation::UpdateSummary {
2987 summary: ContextSummary {
2988 text: self.summary,
2989 done: true,
2990 timestamp,
2991 },
2992 version: version.clone(),
2993 });
2994 version.observe(timestamp);
2995
2996 operations
2997 }
2998}
2999
3000#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
3001struct SavedMessageIdPreV0_4_0(usize);
3002
3003#[derive(Serialize, Deserialize)]
3004struct SavedMessagePreV0_4_0 {
3005 id: SavedMessageIdPreV0_4_0,
3006 start: usize,
3007}
3008
3009#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
3010struct SavedMessageMetadataPreV0_4_0 {
3011 role: Role,
3012 status: MessageStatus,
3013}
3014
3015#[derive(Serialize, Deserialize)]
3016struct SavedContextV0_3_0 {
3017 id: Option<ContextId>,
3018 zed: String,
3019 version: String,
3020 text: String,
3021 messages: Vec<SavedMessagePreV0_4_0>,
3022 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3023 summary: String,
3024 slash_command_output_sections: Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
3025}
3026
3027impl SavedContextV0_3_0 {
3028 const VERSION: &'static str = "0.3.0";
3029
3030 fn upgrade(self) -> SavedContext {
3031 SavedContext {
3032 id: self.id,
3033 zed: self.zed,
3034 version: SavedContext::VERSION.into(),
3035 text: self.text,
3036 messages: self
3037 .messages
3038 .into_iter()
3039 .filter_map(|message| {
3040 let metadata = self.message_metadata.get(&message.id)?;
3041 let timestamp = clock::Lamport {
3042 replica_id: ReplicaId::default(),
3043 value: message.id.0 as u32,
3044 };
3045 Some(SavedMessage {
3046 id: MessageId(timestamp),
3047 start: message.start,
3048 metadata: MessageMetadata {
3049 role: metadata.role,
3050 status: metadata.status.clone(),
3051 timestamp,
3052 cache: None,
3053 },
3054 })
3055 })
3056 .collect(),
3057 summary: self.summary,
3058 slash_command_output_sections: self.slash_command_output_sections,
3059 }
3060 }
3061}
3062
3063#[derive(Serialize, Deserialize)]
3064struct SavedContextV0_2_0 {
3065 id: Option<ContextId>,
3066 zed: String,
3067 version: String,
3068 text: String,
3069 messages: Vec<SavedMessagePreV0_4_0>,
3070 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3071 summary: String,
3072}
3073
3074impl SavedContextV0_2_0 {
3075 const VERSION: &'static str = "0.2.0";
3076
3077 fn upgrade(self) -> SavedContext {
3078 SavedContextV0_3_0 {
3079 id: self.id,
3080 zed: self.zed,
3081 version: SavedContextV0_3_0::VERSION.to_string(),
3082 text: self.text,
3083 messages: self.messages,
3084 message_metadata: self.message_metadata,
3085 summary: self.summary,
3086 slash_command_output_sections: Vec::new(),
3087 }
3088 .upgrade()
3089 }
3090}
3091
3092#[derive(Serialize, Deserialize)]
3093struct SavedContextV0_1_0 {
3094 id: Option<ContextId>,
3095 zed: String,
3096 version: String,
3097 text: String,
3098 messages: Vec<SavedMessagePreV0_4_0>,
3099 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
3100 summary: String,
3101 api_url: Option<String>,
3102 model: OpenAiModel,
3103}
3104
3105impl SavedContextV0_1_0 {
3106 const VERSION: &'static str = "0.1.0";
3107
3108 fn upgrade(self) -> SavedContext {
3109 SavedContextV0_2_0 {
3110 id: self.id,
3111 zed: self.zed,
3112 version: SavedContextV0_2_0::VERSION.to_string(),
3113 text: self.text,
3114 messages: self.messages,
3115 message_metadata: self.message_metadata,
3116 summary: self.summary,
3117 }
3118 .upgrade()
3119 }
3120}
3121
3122#[derive(Clone)]
3123pub struct SavedContextMetadata {
3124 pub title: String,
3125 pub path: PathBuf,
3126 pub mtime: chrono::DateTime<chrono::Local>,
3127}