1use crate::{
2 prompts::PromptBuilder, slash_command::SlashCommandLine, AssistantPanel, InitialInsertion,
3 InlineAssistId, InlineAssistant, MessageId, MessageStatus,
4};
5use anyhow::{anyhow, Context as _, Result};
6use assistant_slash_command::{
7 SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry,
8};
9use client::{self, proto, telemetry::Telemetry};
10use clock::ReplicaId;
11use collections::{HashMap, HashSet};
12use editor::Editor;
13use fs::{Fs, RemoveOptions};
14use futures::{
15 future::{self, Shared},
16 FutureExt, StreamExt,
17};
18use gpui::{
19 AppContext, Context as _, EventEmitter, Model, ModelContext, Subscription, Task, UpdateGlobal,
20 View, WeakView,
21};
22use language::{
23 AnchorRangeExt, Bias, Buffer, BufferSnapshot, LanguageRegistry, OffsetRangeExt, ParseStatus,
24 Point, ToOffset,
25};
26use language_model::{
27 LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, LanguageModelTool,
28 Role,
29};
30use open_ai::Model as OpenAiModel;
31use paths::contexts_dir;
32use project::Project;
33use schemars::JsonSchema;
34use serde::{Deserialize, Serialize};
35use std::{
36 cmp,
37 fmt::Debug,
38 iter, mem,
39 ops::Range,
40 path::{Path, PathBuf},
41 sync::Arc,
42 time::{Duration, Instant},
43};
44use telemetry_events::AssistantKind;
45use ui::{SharedString, WindowContext};
46use util::{post_inc, ResultExt, TryFutureExt};
47use uuid::Uuid;
48use workspace::Workspace;
49
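/// Unique identifier for an assistant context. It round-trips through the
/// collaboration protocol as a plain string.
///
/// A minimal usage sketch (illustrative only, using the API defined directly below):
/// ```ignore
/// let id = ContextId::new();          // backed by a freshly generated UUID v4
/// let wire = id.to_proto();           // sent over RPC as a plain string
/// let roundtripped = ContextId::from_proto(wire);
/// ```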
50#[derive(Clone, Eq, PartialEq, Hash, PartialOrd, Ord, Serialize, Deserialize)]
51pub struct ContextId(String);
52
53impl ContextId {
54 pub fn new() -> Self {
55 Self(Uuid::new_v4().to_string())
56 }
57
58 pub fn from_proto(id: String) -> Self {
59 Self(id)
60 }
61
62 pub fn to_proto(&self) -> String {
63 self.0.clone()
64 }
65}
66
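/// A replicated operation on a `Context`. Every non-buffer variant carries the vector
/// clock (`version`) it depends on plus a Lamport timestamp used for ordering and
/// deduplication; text edits are forwarded as wrapped `language::Operation`s.
///
/// Round-tripping through the wire format (illustrative only):
/// ```ignore
/// let wire = op.to_proto();
/// let same = ContextOperation::from_proto(wire)?;
/// ```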
67#[derive(Clone, Debug)]
68pub enum ContextOperation {
69 InsertMessage {
70 anchor: MessageAnchor,
71 metadata: MessageMetadata,
72 version: clock::Global,
73 },
74 UpdateMessage {
75 message_id: MessageId,
76 metadata: MessageMetadata,
77 version: clock::Global,
78 },
79 UpdateSummary {
80 summary: ContextSummary,
81 version: clock::Global,
82 },
83 SlashCommandFinished {
84 id: SlashCommandId,
85 output_range: Range<language::Anchor>,
86 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
87 version: clock::Global,
88 },
89 BufferOperation(language::Operation),
90}
91
92impl ContextOperation {
93 pub fn from_proto(op: proto::ContextOperation) -> Result<Self> {
94 match op.variant.context("invalid variant")? {
95 proto::context_operation::Variant::InsertMessage(insert) => {
96 let message = insert.message.context("invalid message")?;
97 let id = MessageId(language::proto::deserialize_timestamp(
98 message.id.context("invalid id")?,
99 ));
100 Ok(Self::InsertMessage {
101 anchor: MessageAnchor {
102 id,
103 start: language::proto::deserialize_anchor(
104 message.start.context("invalid anchor")?,
105 )
106 .context("invalid anchor")?,
107 },
108 metadata: MessageMetadata {
109 role: Role::from_proto(message.role),
110 status: MessageStatus::from_proto(
111 message.status.context("invalid status")?,
112 ),
113 timestamp: id.0,
114 },
115 version: language::proto::deserialize_version(&insert.version),
116 })
117 }
118 proto::context_operation::Variant::UpdateMessage(update) => Ok(Self::UpdateMessage {
119 message_id: MessageId(language::proto::deserialize_timestamp(
120 update.message_id.context("invalid message id")?,
121 )),
122 metadata: MessageMetadata {
123 role: Role::from_proto(update.role),
124 status: MessageStatus::from_proto(update.status.context("invalid status")?),
125 timestamp: language::proto::deserialize_timestamp(
126 update.timestamp.context("invalid timestamp")?,
127 ),
128 },
129 version: language::proto::deserialize_version(&update.version),
130 }),
131 proto::context_operation::Variant::UpdateSummary(update) => Ok(Self::UpdateSummary {
132 summary: ContextSummary {
133 text: update.summary,
134 done: update.done,
135 timestamp: language::proto::deserialize_timestamp(
136 update.timestamp.context("invalid timestamp")?,
137 ),
138 },
139 version: language::proto::deserialize_version(&update.version),
140 }),
141 proto::context_operation::Variant::SlashCommandFinished(finished) => {
142 Ok(Self::SlashCommandFinished {
143 id: SlashCommandId(language::proto::deserialize_timestamp(
144 finished.id.context("invalid id")?,
145 )),
146 output_range: language::proto::deserialize_anchor_range(
147 finished.output_range.context("invalid range")?,
148 )?,
149 sections: finished
150 .sections
151 .into_iter()
152 .map(|section| {
153 Ok(SlashCommandOutputSection {
154 range: language::proto::deserialize_anchor_range(
155 section.range.context("invalid range")?,
156 )?,
157 icon: section.icon_name.parse()?,
158 label: section.label.into(),
159 })
160 })
161 .collect::<Result<Vec<_>>>()?,
162 version: language::proto::deserialize_version(&finished.version),
163 })
164 }
165 proto::context_operation::Variant::BufferOperation(op) => Ok(Self::BufferOperation(
166 language::proto::deserialize_operation(
167 op.operation.context("invalid buffer operation")?,
168 )?,
169 )),
170 }
171 }
172
173 pub fn to_proto(&self) -> proto::ContextOperation {
174 match self {
175 Self::InsertMessage {
176 anchor,
177 metadata,
178 version,
179 } => proto::ContextOperation {
180 variant: Some(proto::context_operation::Variant::InsertMessage(
181 proto::context_operation::InsertMessage {
182 message: Some(proto::ContextMessage {
183 id: Some(language::proto::serialize_timestamp(anchor.id.0)),
184 start: Some(language::proto::serialize_anchor(&anchor.start)),
185 role: metadata.role.to_proto() as i32,
186 status: Some(metadata.status.to_proto()),
187 }),
188 version: language::proto::serialize_version(version),
189 },
190 )),
191 },
192 Self::UpdateMessage {
193 message_id,
194 metadata,
195 version,
196 } => proto::ContextOperation {
197 variant: Some(proto::context_operation::Variant::UpdateMessage(
198 proto::context_operation::UpdateMessage {
199 message_id: Some(language::proto::serialize_timestamp(message_id.0)),
200 role: metadata.role.to_proto() as i32,
201 status: Some(metadata.status.to_proto()),
202 timestamp: Some(language::proto::serialize_timestamp(metadata.timestamp)),
203 version: language::proto::serialize_version(version),
204 },
205 )),
206 },
207 Self::UpdateSummary { summary, version } => proto::ContextOperation {
208 variant: Some(proto::context_operation::Variant::UpdateSummary(
209 proto::context_operation::UpdateSummary {
210 summary: summary.text.clone(),
211 done: summary.done,
212 timestamp: Some(language::proto::serialize_timestamp(summary.timestamp)),
213 version: language::proto::serialize_version(version),
214 },
215 )),
216 },
217 Self::SlashCommandFinished {
218 id,
219 output_range,
220 sections,
221 version,
222 } => proto::ContextOperation {
223 variant: Some(proto::context_operation::Variant::SlashCommandFinished(
224 proto::context_operation::SlashCommandFinished {
225 id: Some(language::proto::serialize_timestamp(id.0)),
226 output_range: Some(language::proto::serialize_anchor_range(
227 output_range.clone(),
228 )),
229 sections: sections
230 .iter()
231 .map(|section| {
232 let icon_name: &'static str = section.icon.into();
233 proto::SlashCommandOutputSection {
234 range: Some(language::proto::serialize_anchor_range(
235 section.range.clone(),
236 )),
237 icon_name: icon_name.to_string(),
238 label: section.label.to_string(),
239 }
240 })
241 .collect(),
242 version: language::proto::serialize_version(version),
243 },
244 )),
245 },
246 Self::BufferOperation(operation) => proto::ContextOperation {
247 variant: Some(proto::context_operation::Variant::BufferOperation(
248 proto::context_operation::BufferOperation {
249 operation: Some(language::proto::serialize_operation(operation)),
250 },
251 )),
252 },
253 }
254 }
255
256 fn timestamp(&self) -> clock::Lamport {
257 match self {
258 Self::InsertMessage { anchor, .. } => anchor.id.0,
259 Self::UpdateMessage { metadata, .. } => metadata.timestamp,
260 Self::UpdateSummary { summary, .. } => summary.timestamp,
261 Self::SlashCommandFinished { id, .. } => id.0,
262 Self::BufferOperation(_) => {
263 panic!("reading the timestamp of a buffer operation is not supported")
264 }
265 }
266 }
267
268 /// Returns the current version of the context operation.
269 pub fn version(&self) -> &clock::Global {
270 match self {
271 Self::InsertMessage { version, .. }
272 | Self::UpdateMessage { version, .. }
273 | Self::UpdateSummary { version, .. }
274 | Self::SlashCommandFinished { version, .. } => version,
275 Self::BufferOperation(_) => {
276 panic!("reading the version of a buffer operation is not supported")
277 }
278 }
279 }
280}
281
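/// Events emitted by a `Context`. Most variants drive UI updates in the assistant panel;
/// `Operation` carries locally generated `ContextOperation`s so observers can replicate
/// or persist them.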
282#[derive(Debug, Clone)]
283pub enum ContextEvent {
284 AssistError(String),
285 MessagesEdited,
286 SummaryChanged,
287 WorkflowStepsRemoved(Vec<Range<language::Anchor>>),
288 WorkflowStepUpdated(Range<language::Anchor>),
289 StreamedCompletion,
290 PendingSlashCommandsUpdated {
291 removed: Vec<Range<language::Anchor>>,
292 updated: Vec<PendingSlashCommand>,
293 },
294 SlashCommandFinished {
295 output_range: Range<language::Anchor>,
296 sections: Vec<SlashCommandOutputSection<language::Anchor>>,
297 run_commands_in_output: bool,
298 },
299 Operation(ContextOperation),
300}
301
302#[derive(Clone, Default, Debug)]
303pub struct ContextSummary {
304 pub text: String,
305 done: bool,
306 timestamp: clock::Lamport,
307}
308
309#[derive(Clone, Debug, Eq, PartialEq)]
310pub struct MessageAnchor {
311 pub id: MessageId,
312 pub start: language::Anchor,
313}
314
315#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
316pub struct MessageMetadata {
317 pub role: Role,
318 status: MessageStatus,
319 timestamp: clock::Lamport,
320}
321
322#[derive(Clone, Debug, PartialEq, Eq)]
323pub struct Message {
324 pub offset_range: Range<usize>,
325 pub index_range: Range<usize>,
326 pub id: MessageId,
327 pub anchor: language::Anchor,
328 pub role: Role,
329 pub status: MessageStatus,
330}
331
332impl Message {
333 fn to_request_message(&self, buffer: &Buffer) -> LanguageModelRequestMessage {
334 LanguageModelRequestMessage {
335 role: self.role,
336 content: buffer.text_for_range(self.offset_range.clone()).collect(),
337 }
338 }
339}
340
341struct PendingCompletion {
342 id: usize,
343 _task: Task<()>,
344}
345
346#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
347pub struct SlashCommandId(clock::Lamport);
348
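/// A `<step>`-tagged region of the assistant's output, tracked while (and after) the
/// model resolves it into concrete edit suggestions.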
349#[derive(Debug)]
350pub struct WorkflowStep {
351 pub tagged_range: Range<language::Anchor>,
352 pub status: WorkflowStepStatus,
353}
354
355#[derive(Clone, Debug, Eq, PartialEq)]
356pub struct ResolvedWorkflowStep {
357 pub title: String,
358 pub suggestions: HashMap<Model<Buffer>, Vec<WorkflowSuggestionGroup>>,
359}
360
361pub enum WorkflowStepStatus {
362 Pending(Task<Option<()>>),
363 Resolved(ResolvedWorkflowStep),
364 Error(Arc<anyhow::Error>),
365}
366
367impl WorkflowStepStatus {
368 pub fn into_resolved(&self) -> Option<Result<ResolvedWorkflowStep, Arc<anyhow::Error>>> {
369 match self {
370 WorkflowStepStatus::Resolved(resolved) => Some(Ok(resolved.clone())),
371 WorkflowStepStatus::Error(error) => Some(Err(error.clone())),
372 WorkflowStepStatus::Pending(_) => None,
373 }
374 }
375}
376
377#[derive(Clone, Debug, Eq, PartialEq)]
378pub struct WorkflowSuggestionGroup {
379 pub context_range: Range<language::Anchor>,
380 pub suggestions: Vec<WorkflowSuggestion>,
381}
382
383#[derive(Clone, Debug, Eq, PartialEq)]
384pub enum WorkflowSuggestion {
385 Update {
386 range: Range<language::Anchor>,
387 description: String,
388 },
389 CreateFile {
390 description: String,
391 },
392 InsertSiblingBefore {
393 position: language::Anchor,
394 description: String,
395 },
396 InsertSiblingAfter {
397 position: language::Anchor,
398 description: String,
399 },
400 PrependChild {
401 position: language::Anchor,
402 description: String,
403 },
404 AppendChild {
405 position: language::Anchor,
406 description: String,
407 },
408 Delete {
409 range: Range<language::Anchor>,
410 },
411}
412
413impl WorkflowSuggestion {
414 pub fn range(&self) -> Range<language::Anchor> {
415 match self {
416 WorkflowSuggestion::Update { range, .. } => range.clone(),
417 WorkflowSuggestion::CreateFile { .. } => language::Anchor::MIN..language::Anchor::MAX,
418 WorkflowSuggestion::InsertSiblingBefore { position, .. }
419 | WorkflowSuggestion::InsertSiblingAfter { position, .. }
420 | WorkflowSuggestion::PrependChild { position, .. }
421 | WorkflowSuggestion::AppendChild { position, .. } => *position..*position,
422 WorkflowSuggestion::Delete { range } => range.clone(),
423 }
424 }
425
426 pub fn description(&self) -> Option<&str> {
427 match self {
428 WorkflowSuggestion::Update { description, .. }
429 | WorkflowSuggestion::CreateFile { description }
430 | WorkflowSuggestion::InsertSiblingBefore { description, .. }
431 | WorkflowSuggestion::InsertSiblingAfter { description, .. }
432 | WorkflowSuggestion::PrependChild { description, .. }
433 | WorkflowSuggestion::AppendChild { description, .. } => Some(description),
434 WorkflowSuggestion::Delete { .. } => None,
435 }
436 }
437
438 fn description_mut(&mut self) -> Option<&mut String> {
439 match self {
440 WorkflowSuggestion::Update { description, .. }
441 | WorkflowSuggestion::CreateFile { description }
442 | WorkflowSuggestion::InsertSiblingBefore { description, .. }
443 | WorkflowSuggestion::InsertSiblingAfter { description, .. }
444 | WorkflowSuggestion::PrependChild { description, .. }
445 | WorkflowSuggestion::AppendChild { description, .. } => Some(description),
446 WorkflowSuggestion::Delete { .. } => None,
447 }
448 }
449
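    /// Folds `other` into `self` when `self`'s range fully contains `other`'s, concatenating
    /// the descriptions, and returns whether the merge happened. Callers sort containing
    /// ranges first (see `resolve_workflow_step`) so `dedup_by` can use this to collapse
    /// overlapping suggestions.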
450 fn try_merge(&mut self, other: &Self, buffer: &BufferSnapshot) -> bool {
451 let range = self.range();
452 let other_range = other.range();
453
454 // Don't merge if we don't contain the other suggestion.
455 if range.start.cmp(&other_range.start, buffer).is_gt()
456 || range.end.cmp(&other_range.end, buffer).is_lt()
457 {
458 return false;
459 }
460
461 if let Some(description) = self.description_mut() {
462 if let Some(other_description) = other.description() {
463 description.push('\n');
464 description.push_str(other_description);
465 }
466 }
467 true
468 }
469
470 pub fn show(
471 &self,
472 editor: &View<Editor>,
473 excerpt_id: editor::ExcerptId,
474 workspace: &WeakView<Workspace>,
475 assistant_panel: &View<AssistantPanel>,
476 cx: &mut WindowContext,
477 ) -> Option<InlineAssistId> {
478 let mut initial_transaction_id = None;
479 let initial_prompt;
480 let suggestion_range;
481 let buffer = editor.read(cx).buffer().clone();
482 let snapshot = buffer.read(cx).snapshot(cx);
483
484 match self {
485 WorkflowSuggestion::Update { range, description } => {
486 initial_prompt = description.clone();
487 suggestion_range = snapshot.anchor_in_excerpt(excerpt_id, range.start)?
488 ..snapshot.anchor_in_excerpt(excerpt_id, range.end)?;
489 }
490 WorkflowSuggestion::CreateFile { description } => {
491 initial_prompt = description.clone();
492 suggestion_range = editor::Anchor::min()..editor::Anchor::min();
493 }
494 WorkflowSuggestion::InsertSiblingBefore {
495 position,
496 description,
497 } => {
498 let position = snapshot.anchor_in_excerpt(excerpt_id, *position)?;
499 initial_prompt = description.clone();
500 suggestion_range = buffer.update(cx, |buffer, cx| {
501 buffer.start_transaction(cx);
502 let line_start = buffer.insert_empty_line(position, true, true, cx);
503 initial_transaction_id = buffer.end_transaction(cx);
504 buffer.refresh_preview(cx);
505
506 let line_start = buffer.read(cx).anchor_before(line_start);
507 line_start..line_start
508 });
509 }
510 WorkflowSuggestion::InsertSiblingAfter {
511 position,
512 description,
513 } => {
514 let position = snapshot.anchor_in_excerpt(excerpt_id, *position)?;
515 initial_prompt = description.clone();
516 suggestion_range = buffer.update(cx, |buffer, cx| {
517 buffer.start_transaction(cx);
518 let line_start = buffer.insert_empty_line(position, true, true, cx);
519 initial_transaction_id = buffer.end_transaction(cx);
520 buffer.refresh_preview(cx);
521
522 let line_start = buffer.read(cx).anchor_before(line_start);
523 line_start..line_start
524 });
525 }
526 WorkflowSuggestion::PrependChild {
527 position,
528 description,
529 } => {
530 let position = snapshot.anchor_in_excerpt(excerpt_id, *position)?;
531 initial_prompt = description.clone();
532 suggestion_range = buffer.update(cx, |buffer, cx| {
533 buffer.start_transaction(cx);
534 let line_start = buffer.insert_empty_line(position, false, true, cx);
535 initial_transaction_id = buffer.end_transaction(cx);
536 buffer.refresh_preview(cx);
537
538 let line_start = buffer.read(cx).anchor_before(line_start);
539 line_start..line_start
540 });
541 }
542 WorkflowSuggestion::AppendChild {
543 position,
544 description,
545 } => {
546 let position = snapshot.anchor_in_excerpt(excerpt_id, *position)?;
547 initial_prompt = description.clone();
548 suggestion_range = buffer.update(cx, |buffer, cx| {
549 buffer.start_transaction(cx);
550 let line_start = buffer.insert_empty_line(position, true, false, cx);
551 initial_transaction_id = buffer.end_transaction(cx);
552 buffer.refresh_preview(cx);
553
554 let line_start = buffer.read(cx).anchor_before(line_start);
555 line_start..line_start
556 });
557 }
558 WorkflowSuggestion::Delete { range } => {
559 initial_prompt = "Delete".to_string();
560 suggestion_range = snapshot.anchor_in_excerpt(excerpt_id, range.start)?
561 ..snapshot.anchor_in_excerpt(excerpt_id, range.end)?;
562 }
563 }
564
565 InlineAssistant::update_global(cx, |inline_assistant, cx| {
566 Some(inline_assistant.suggest_assist(
567 editor,
568 suggestion_range,
569 initial_prompt,
570 initial_transaction_id,
571 Some(workspace.clone()),
572 Some(assistant_panel),
573 cx,
574 ))
575 })
576 }
577}
578
579impl Debug for WorkflowStepStatus {
580 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
581 match self {
582 WorkflowStepStatus::Pending(_) => write!(f, "WorkflowStepStatus::Pending"),
583 WorkflowStepStatus::Resolved(ResolvedWorkflowStep { title, suggestions }) => f
584 .debug_struct("WorkflowStepStatus::Resolved")
585 .field("title", title)
586 .field("suggestions", suggestions)
587 .finish(),
588 WorkflowStepStatus::Error(error) => f
589 .debug_tuple("WorkflowStepStatus::Error")
590 .field(error)
591 .finish(),
592 }
593 }
594}
595
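/// The model behind a single assistant conversation: a Markdown buffer plus the message
/// anchors, metadata, slash command state, workflow steps, and replication bookkeeping
/// (version, pending and applied operations) layered on top of it.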
596pub struct Context {
597 id: ContextId,
598 timestamp: clock::Lamport,
599 version: clock::Global,
600 pending_ops: Vec<ContextOperation>,
601 operations: Vec<ContextOperation>,
602 buffer: Model<Buffer>,
603 pending_slash_commands: Vec<PendingSlashCommand>,
604 edits_since_last_slash_command_parse: language::Subscription,
605 finished_slash_commands: HashSet<SlashCommandId>,
606 slash_command_output_sections: Vec<SlashCommandOutputSection<language::Anchor>>,
607 message_anchors: Vec<MessageAnchor>,
608 messages_metadata: HashMap<MessageId, MessageMetadata>,
609 summary: Option<ContextSummary>,
610 pending_summary: Task<Option<()>>,
611 completion_count: usize,
612 pending_completions: Vec<PendingCompletion>,
613 token_count: Option<usize>,
614 pending_token_count: Task<Option<()>>,
615 pending_save: Task<Result<()>>,
616 path: Option<PathBuf>,
617 _subscriptions: Vec<Subscription>,
618 telemetry: Option<Arc<Telemetry>>,
619 language_registry: Arc<LanguageRegistry>,
620 workflow_steps: Vec<WorkflowStep>,
621 project: Option<Model<Project>>,
622 prompt_builder: Arc<PromptBuilder>,
623}
624
625impl EventEmitter<ContextEvent> for Context {}
626
627impl Context {
628 pub fn local(
629 language_registry: Arc<LanguageRegistry>,
630 project: Option<Model<Project>>,
631 telemetry: Option<Arc<Telemetry>>,
632 prompt_builder: Arc<PromptBuilder>,
633 cx: &mut ModelContext<Self>,
634 ) -> Self {
635 Self::new(
636 ContextId::new(),
637 ReplicaId::default(),
638 language::Capability::ReadWrite,
639 language_registry,
640 prompt_builder,
641 project,
642 telemetry,
643 cx,
644 )
645 }
646
647 #[allow(clippy::too_many_arguments)]
648 pub fn new(
649 id: ContextId,
650 replica_id: ReplicaId,
651 capability: language::Capability,
652 language_registry: Arc<LanguageRegistry>,
653 prompt_builder: Arc<PromptBuilder>,
654 project: Option<Model<Project>>,
655 telemetry: Option<Arc<Telemetry>>,
656 cx: &mut ModelContext<Self>,
657 ) -> Self {
658 let buffer = cx.new_model(|_cx| {
659 let mut buffer = Buffer::remote(
660 language::BufferId::new(1).unwrap(),
661 replica_id,
662 capability,
663 "",
664 );
665 buffer.set_language_registry(language_registry.clone());
666 buffer
667 });
668 let edits_since_last_slash_command_parse =
669 buffer.update(cx, |buffer, _| buffer.subscribe());
670 let mut this = Self {
671 id,
672 timestamp: clock::Lamport::new(replica_id),
673 version: clock::Global::new(),
674 pending_ops: Vec::new(),
675 operations: Vec::new(),
676 message_anchors: Default::default(),
677 messages_metadata: Default::default(),
678 pending_slash_commands: Vec::new(),
679 finished_slash_commands: HashSet::default(),
680 slash_command_output_sections: Vec::new(),
681 edits_since_last_slash_command_parse,
682 summary: None,
683 pending_summary: Task::ready(None),
684 completion_count: Default::default(),
685 pending_completions: Default::default(),
686 token_count: None,
687 pending_token_count: Task::ready(None),
688 _subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)],
689 pending_save: Task::ready(Ok(())),
690 path: None,
691 buffer,
692 telemetry,
693 project,
694 language_registry,
695 workflow_steps: Vec::new(),
696 prompt_builder,
697 };
698
699 let first_message_id = MessageId(clock::Lamport {
700 replica_id: 0,
701 value: 0,
702 });
703 let message = MessageAnchor {
704 id: first_message_id,
705 start: language::Anchor::MIN,
706 };
707 this.messages_metadata.insert(
708 first_message_id,
709 MessageMetadata {
710 role: Role::User,
711 status: MessageStatus::Done,
712 timestamp: first_message_id.0,
713 },
714 );
715 this.message_anchors.push(message);
716
717 this.set_language(cx);
718 this.count_remaining_tokens(cx);
719 this
720 }
721
722 fn serialize(&self, cx: &AppContext) -> SavedContext {
723 let buffer = self.buffer.read(cx);
724 SavedContext {
725 id: Some(self.id.clone()),
726 zed: "context".into(),
727 version: SavedContext::VERSION.into(),
728 text: buffer.text(),
729 messages: self
730 .messages(cx)
731 .map(|message| SavedMessage {
732 id: message.id,
733 start: message.offset_range.start,
734 metadata: self.messages_metadata[&message.id].clone(),
735 })
736 .collect(),
737 summary: self
738 .summary
739 .as_ref()
740 .map(|summary| summary.text.clone())
741 .unwrap_or_default(),
742 slash_command_output_sections: self
743 .slash_command_output_sections
744 .iter()
745 .filter_map(|section| {
746 let range = section.range.to_offset(buffer);
747 if section.range.start.is_valid(buffer) && !range.is_empty() {
748 Some(assistant_slash_command::SlashCommandOutputSection {
749 range,
750 icon: section.icon,
751 label: section.label.clone(),
752 })
753 } else {
754 None
755 }
756 })
757 .collect(),
758 }
759 }
760
761 #[allow(clippy::too_many_arguments)]
762 pub fn deserialize(
763 saved_context: SavedContext,
764 path: PathBuf,
765 language_registry: Arc<LanguageRegistry>,
766 prompt_builder: Arc<PromptBuilder>,
767 project: Option<Model<Project>>,
768 telemetry: Option<Arc<Telemetry>>,
769 cx: &mut ModelContext<Self>,
770 ) -> Self {
        let id = saved_context.id.clone().unwrap_or_else(ContextId::new);
772 let mut this = Self::new(
773 id,
774 ReplicaId::default(),
775 language::Capability::ReadWrite,
776 language_registry,
777 prompt_builder,
778 project,
779 telemetry,
780 cx,
781 );
782 this.path = Some(path);
783 this.buffer.update(cx, |buffer, cx| {
784 buffer.set_text(saved_context.text.as_str(), cx)
785 });
786 let operations = saved_context.into_ops(&this.buffer, cx);
787 this.apply_ops(operations, cx).unwrap();
788 this
789 }
790
791 pub fn id(&self) -> &ContextId {
792 &self.id
793 }
794
795 pub fn replica_id(&self) -> ReplicaId {
796 self.timestamp.replica_id
797 }
798
799 pub fn version(&self, cx: &AppContext) -> ContextVersion {
800 ContextVersion {
801 context: self.version.clone(),
802 buffer: self.buffer.read(cx).version(),
803 }
804 }
805
806 pub fn set_capability(
807 &mut self,
808 capability: language::Capability,
809 cx: &mut ModelContext<Self>,
810 ) {
811 self.buffer
812 .update(cx, |buffer, cx| buffer.set_capability(capability, cx));
813 }
814
815 fn next_timestamp(&mut self) -> clock::Lamport {
816 let timestamp = self.timestamp.tick();
817 self.version.observe(timestamp);
818 timestamp
819 }
820
821 pub fn serialize_ops(
822 &self,
823 since: &ContextVersion,
824 cx: &AppContext,
825 ) -> Task<Vec<proto::ContextOperation>> {
826 let buffer_ops = self
827 .buffer
828 .read(cx)
829 .serialize_ops(Some(since.buffer.clone()), cx);
830
831 let mut context_ops = self
832 .operations
833 .iter()
834 .filter(|op| !since.context.observed(op.timestamp()))
835 .cloned()
836 .collect::<Vec<_>>();
837 context_ops.extend(self.pending_ops.iter().cloned());
838
839 cx.background_executor().spawn(async move {
840 let buffer_ops = buffer_ops.await;
841 context_ops.sort_unstable_by_key(|op| op.timestamp());
842 buffer_ops
843 .into_iter()
844 .map(|op| proto::ContextOperation {
845 variant: Some(proto::context_operation::Variant::BufferOperation(
846 proto::context_operation::BufferOperation {
847 operation: Some(op),
848 },
849 )),
850 })
851 .chain(context_ops.into_iter().map(|op| op.to_proto()))
852 .collect()
853 })
854 }
855
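    /// Applies operations received from another replica. Buffer operations are handed to the
    /// underlying buffer immediately; everything else is queued in `pending_ops` and applied
    /// by `flush_ops` in timestamp order once `can_apply_op` sees all of its dependencies.
    ///
    /// Hypothetical call site (illustrative only; `protos`, `context`, and `cx` are assumed bindings):
    /// ```ignore
    /// let ops = protos
    ///     .into_iter()
    ///     .map(ContextOperation::from_proto)
    ///     .collect::<Result<Vec<_>>>()?;
    /// context.update(cx, |context, cx| context.apply_ops(ops, cx))?;
    /// ```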
856 pub fn apply_ops(
857 &mut self,
858 ops: impl IntoIterator<Item = ContextOperation>,
859 cx: &mut ModelContext<Self>,
860 ) -> Result<()> {
861 let mut buffer_ops = Vec::new();
862 for op in ops {
863 match op {
864 ContextOperation::BufferOperation(buffer_op) => buffer_ops.push(buffer_op),
                op => self.pending_ops.push(op),
866 }
867 }
868 self.buffer
869 .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx))?;
870 self.flush_ops(cx);
871
872 Ok(())
873 }
874
875 fn flush_ops(&mut self, cx: &mut ModelContext<Context>) {
876 let mut messages_changed = false;
877 let mut summary_changed = false;
878
879 self.pending_ops.sort_unstable_by_key(|op| op.timestamp());
880 for op in mem::take(&mut self.pending_ops) {
881 if !self.can_apply_op(&op, cx) {
882 self.pending_ops.push(op);
883 continue;
884 }
885
886 let timestamp = op.timestamp();
887 match op.clone() {
888 ContextOperation::InsertMessage {
889 anchor, metadata, ..
890 } => {
891 if self.messages_metadata.contains_key(&anchor.id) {
892 // We already applied this operation.
893 } else {
894 self.insert_message(anchor, metadata, cx);
895 messages_changed = true;
896 }
897 }
898 ContextOperation::UpdateMessage {
899 message_id,
900 metadata: new_metadata,
901 ..
902 } => {
903 let metadata = self.messages_metadata.get_mut(&message_id).unwrap();
904 if new_metadata.timestamp > metadata.timestamp {
905 *metadata = new_metadata;
906 messages_changed = true;
907 }
908 }
909 ContextOperation::UpdateSummary {
910 summary: new_summary,
911 ..
912 } => {
913 if self
914 .summary
915 .as_ref()
916 .map_or(true, |summary| new_summary.timestamp > summary.timestamp)
917 {
918 self.summary = Some(new_summary);
919 summary_changed = true;
920 }
921 }
922 ContextOperation::SlashCommandFinished {
923 id,
924 output_range,
925 sections,
926 ..
927 } => {
928 if self.finished_slash_commands.insert(id) {
929 let buffer = self.buffer.read(cx);
930 self.slash_command_output_sections
931 .extend(sections.iter().cloned());
932 self.slash_command_output_sections
933 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
934 cx.emit(ContextEvent::SlashCommandFinished {
935 output_range,
936 sections,
937 run_commands_in_output: false,
938 });
939 }
940 }
941 ContextOperation::BufferOperation(_) => unreachable!(),
942 }
943
944 self.version.observe(timestamp);
945 self.timestamp.observe(timestamp);
946 self.operations.push(op);
947 }
948
949 if messages_changed {
950 cx.emit(ContextEvent::MessagesEdited);
951 cx.notify();
952 }
953
954 if summary_changed {
955 cx.emit(ContextEvent::SummaryChanged);
956 cx.notify();
957 }
958 }
959
960 fn can_apply_op(&self, op: &ContextOperation, cx: &AppContext) -> bool {
961 if !self.version.observed_all(op.version()) {
962 return false;
963 }
964
965 match op {
966 ContextOperation::InsertMessage { anchor, .. } => self
967 .buffer
968 .read(cx)
969 .version
970 .observed(anchor.start.timestamp),
971 ContextOperation::UpdateMessage { message_id, .. } => {
972 self.messages_metadata.contains_key(message_id)
973 }
974 ContextOperation::UpdateSummary { .. } => true,
975 ContextOperation::SlashCommandFinished {
976 output_range,
977 sections,
978 ..
979 } => {
980 let version = &self.buffer.read(cx).version;
981 sections
982 .iter()
                    .map(|section| &section.range)
984 .chain([output_range])
985 .all(|range| {
986 let observed_start = range.start == language::Anchor::MIN
987 || range.start == language::Anchor::MAX
988 || version.observed(range.start.timestamp);
989 let observed_end = range.end == language::Anchor::MIN
990 || range.end == language::Anchor::MAX
991 || version.observed(range.end.timestamp);
992 observed_start && observed_end
993 })
994 }
995 ContextOperation::BufferOperation(_) => {
996 panic!("buffer operations should always be applied")
997 }
998 }
999 }
1000
1001 fn push_op(&mut self, op: ContextOperation, cx: &mut ModelContext<Self>) {
1002 self.operations.push(op.clone());
1003 cx.emit(ContextEvent::Operation(op));
1004 }
1005
1006 pub fn buffer(&self) -> &Model<Buffer> {
1007 &self.buffer
1008 }
1009
1010 pub fn path(&self) -> Option<&Path> {
1011 self.path.as_deref()
1012 }
1013
1014 pub fn summary(&self) -> Option<&ContextSummary> {
1015 self.summary.as_ref()
1016 }
1017
1018 pub fn workflow_steps(&self) -> &[WorkflowStep] {
1019 &self.workflow_steps
1020 }
1021
1022 pub fn workflow_step_for_range(&self, range: Range<language::Anchor>) -> Option<&WorkflowStep> {
1023 self.workflow_steps
1024 .iter()
1025 .find(|step| step.tagged_range == range)
1026 }
1027
1028 pub fn pending_slash_commands(&self) -> &[PendingSlashCommand] {
1029 &self.pending_slash_commands
1030 }
1031
1032 pub fn slash_command_output_sections(&self) -> &[SlashCommandOutputSection<language::Anchor>] {
1033 &self.slash_command_output_sections
1034 }
1035
1036 fn set_language(&mut self, cx: &mut ModelContext<Self>) {
1037 let markdown = self.language_registry.language_for_name("Markdown");
1038 cx.spawn(|this, mut cx| async move {
1039 let markdown = markdown.await?;
1040 this.update(&mut cx, |this, cx| {
1041 this.buffer
1042 .update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx));
1043 })
1044 })
1045 .detach_and_log_err(cx);
1046 }
1047
1048 fn handle_buffer_event(
1049 &mut self,
1050 _: Model<Buffer>,
1051 event: &language::Event,
1052 cx: &mut ModelContext<Self>,
1053 ) {
1054 match event {
1055 language::Event::Operation(operation) => cx.emit(ContextEvent::Operation(
1056 ContextOperation::BufferOperation(operation.clone()),
1057 )),
1058 language::Event::Edited => {
1059 self.count_remaining_tokens(cx);
1060 self.reparse_slash_commands(cx);
1061 self.prune_invalid_workflow_steps(cx);
1062 cx.emit(ContextEvent::MessagesEdited);
1063 }
1064 _ => {}
1065 }
1066 }
1067
1068 pub(crate) fn token_count(&self) -> Option<usize> {
1069 self.token_count
1070 }
1071
1072 pub(crate) fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
1073 let request = self.to_completion_request(cx);
1074 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
1075 return;
1076 };
1077 self.pending_token_count = cx.spawn(|this, mut cx| {
1078 async move {
1079 cx.background_executor()
1080 .timer(Duration::from_millis(200))
1081 .await;
1082
1083 let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
1084 this.update(&mut cx, |this, cx| {
1085 this.token_count = Some(token_count);
1086 cx.notify()
1087 })
1088 }
1089 .log_err()
1090 });
1091 }
1092
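    /// Re-parses slash commands, but only on the rows touched since the last parse: edited
    /// offset ranges are widened to whole lines, overlapping row ranges are coalesced, and
    /// the freshly parsed commands are spliced over the stale entries in
    /// `pending_slash_commands`.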
1093 pub fn reparse_slash_commands(&mut self, cx: &mut ModelContext<Self>) {
1094 let buffer = self.buffer.read(cx);
1095 let mut row_ranges = self
1096 .edits_since_last_slash_command_parse
1097 .consume()
1098 .into_iter()
1099 .map(|edit| {
1100 let start_row = buffer.offset_to_point(edit.new.start).row;
1101 let end_row = buffer.offset_to_point(edit.new.end).row + 1;
1102 start_row..end_row
1103 })
1104 .peekable();
1105
1106 let mut removed = Vec::new();
1107 let mut updated = Vec::new();
1108 while let Some(mut row_range) = row_ranges.next() {
1109 while let Some(next_row_range) = row_ranges.peek() {
1110 if row_range.end >= next_row_range.start {
1111 row_range.end = next_row_range.end;
1112 row_ranges.next();
1113 } else {
1114 break;
1115 }
1116 }
1117
1118 let start = buffer.anchor_before(Point::new(row_range.start, 0));
1119 let end = buffer.anchor_after(Point::new(
1120 row_range.end - 1,
1121 buffer.line_len(row_range.end - 1),
1122 ));
1123
1124 let old_range = self.pending_command_indices_for_range(start..end, cx);
1125
1126 let mut new_commands = Vec::new();
1127 let mut lines = buffer.text_for_range(start..end).lines();
1128 let mut offset = lines.offset();
1129 while let Some(line) = lines.next() {
1130 if let Some(command_line) = SlashCommandLine::parse(line) {
1131 let name = &line[command_line.name.clone()];
1132 let argument = command_line.argument.as_ref().and_then(|argument| {
1133 (!argument.is_empty()).then_some(&line[argument.clone()])
1134 });
1135 if let Some(command) = SlashCommandRegistry::global(cx).command(name) {
1136 if !command.requires_argument() || argument.is_some() {
1137 let start_ix = offset + command_line.name.start - 1;
1138 let end_ix = offset
1139 + command_line
1140 .argument
1141 .map_or(command_line.name.end, |argument| argument.end);
1142 let source_range =
1143 buffer.anchor_after(start_ix)..buffer.anchor_after(end_ix);
1144 let pending_command = PendingSlashCommand {
1145 name: name.to_string(),
1146 argument: argument.map(ToString::to_string),
1147 source_range,
1148 status: PendingSlashCommandStatus::Idle,
1149 };
1150 updated.push(pending_command.clone());
1151 new_commands.push(pending_command);
1152 }
1153 }
1154 }
1155
1156 offset = lines.offset();
1157 }
1158
1159 let removed_commands = self.pending_slash_commands.splice(old_range, new_commands);
1160 removed.extend(removed_commands.map(|command| command.source_range));
1161 }
1162
1163 if !updated.is_empty() || !removed.is_empty() {
1164 cx.emit(ContextEvent::PendingSlashCommandsUpdated { removed, updated });
1165 }
1166 }
1167
1168 fn prune_invalid_workflow_steps(&mut self, cx: &mut ModelContext<Self>) {
1169 let buffer = self.buffer.read(cx);
1170 let prev_len = self.workflow_steps.len();
1171 let mut removed = Vec::new();
1172 self.workflow_steps.retain(|step| {
1173 if step.tagged_range.start.is_valid(buffer) && step.tagged_range.end.is_valid(buffer) {
1174 true
1175 } else {
1176 removed.push(step.tagged_range.clone());
1177 false
1178 }
1179 });
1180 if self.workflow_steps.len() != prev_len {
1181 cx.emit(ContextEvent::WorkflowStepsRemoved(removed));
1182 cx.notify();
1183 }
1184 }
1185
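    /// Scans newly streamed assistant output for `<step>`...`</step>` tags, deletes the tags
    /// from the buffer, and records each tagged range as a pending `WorkflowStep` that is
    /// then resolved against the project.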
1186 fn parse_workflow_steps_in_range(
1187 &mut self,
1188 range: Range<usize>,
1189 project: Model<Project>,
1190 cx: &mut ModelContext<Self>,
1191 ) {
1192 let mut new_edit_steps = Vec::new();
1193 let mut edits = Vec::new();
1194
1195 let buffer = self.buffer.read(cx).snapshot();
1196 let mut message_lines = buffer.as_rope().chunks_in_range(range).lines();
1197 let mut in_step = false;
1198 let mut step_open_tag_start_ix = 0;
1199 let mut line_start_offset = message_lines.offset();
1200
1201 while let Some(line) = message_lines.next() {
1202 if let Some(step_start_index) = line.find("<step>") {
1203 if !in_step {
1204 in_step = true;
1205 step_open_tag_start_ix = line_start_offset + step_start_index;
1206 }
1207 }
1208
1209 if let Some(step_end_index) = line.find("</step>") {
1210 if in_step {
1211 let step_open_tag_end_ix = step_open_tag_start_ix + "<step>".len();
1212 let mut step_end_tag_start_ix = line_start_offset + step_end_index;
1213 let step_end_tag_end_ix = step_end_tag_start_ix + "</step>".len();
1214 if buffer.reversed_chars_at(step_end_tag_start_ix).next() == Some('\n') {
1215 step_end_tag_start_ix -= 1;
1216 }
1217 edits.push((step_open_tag_start_ix..step_open_tag_end_ix, ""));
1218 edits.push((step_end_tag_start_ix..step_end_tag_end_ix, ""));
1219 let tagged_range = buffer.anchor_after(step_open_tag_end_ix)
1220 ..buffer.anchor_before(step_end_tag_start_ix);
1221
1222 // Check if a step with the same range already exists
1223 let existing_step_index = self
1224 .workflow_steps
1225 .binary_search_by(|probe| probe.tagged_range.cmp(&tagged_range, &buffer));
1226
1227 if let Err(ix) = existing_step_index {
1228 new_edit_steps.push((
1229 ix,
1230 WorkflowStep {
1231 tagged_range,
1232 status: WorkflowStepStatus::Pending(Task::ready(None)),
1233 },
1234 ));
1235 }
1236
1237 in_step = false;
1238 }
1239 }
1240
1241 line_start_offset = message_lines.offset();
1242 }
1243
1244 let mut updated = Vec::new();
1245 for (index, step) in new_edit_steps.into_iter().rev() {
1246 let step_range = step.tagged_range.clone();
1247 updated.push(step_range.clone());
1248 self.workflow_steps.insert(index, step);
1249 self.resolve_workflow_step(step_range, project.clone(), cx);
1250 }
1251 self.buffer
1252 .update(cx, |buffer, cx| buffer.edit(edits, None, cx));
1253 }
1254
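    /// Resolves a workflow step by asking the active model (via the `WorkflowStepResolution`
    /// tool) for edit suggestions, then anchoring those suggestions to project buffers and
    /// grouping suggestions whose surrounding context ranges overlap.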
1255 pub fn resolve_workflow_step(
1256 &mut self,
1257 tagged_range: Range<language::Anchor>,
1258 project: Model<Project>,
1259 cx: &mut ModelContext<Self>,
1260 ) {
1261 let Ok(step_index) = self
1262 .workflow_steps
1263 .binary_search_by(|step| step.tagged_range.cmp(&tagged_range, self.buffer.read(cx)))
1264 else {
1265 return;
1266 };
1267
1268 let mut request = self.to_completion_request(cx);
1269 let Some(edit_step) = self.workflow_steps.get_mut(step_index) else {
1270 return;
1271 };
1272
1273 if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() {
1274 let step_text = self
1275 .buffer
1276 .read(cx)
1277 .text_for_range(tagged_range.clone())
1278 .collect::<String>();
1279
1280 let tagged_range = tagged_range.clone();
1281 edit_step.status = WorkflowStepStatus::Pending(cx.spawn(|this, mut cx| {
1282 async move {
1283 let result = async {
1284 let mut prompt = this.update(&mut cx, |this, _| {
1285 this.prompt_builder.generate_step_resolution_prompt()
1286 })??;
1287 prompt.push_str(&step_text);
1288
1289 request.messages.push(LanguageModelRequestMessage {
1290 role: Role::User,
1291 content: prompt,
1292 });
1293
1294 // Invoke the model to get its edit suggestions for this workflow step.
1295 let resolution = model
1296 .use_tool::<tool::WorkflowStepResolution>(request, &cx)
1297 .await?;
1298
1299 // Translate the parsed suggestions to our internal types, which anchor the suggestions to locations in the code.
1300 let suggestion_tasks: Vec<_> = resolution
1301 .suggestions
1302 .iter()
1303 .map(|suggestion| suggestion.resolve(project.clone(), cx.clone()))
1304 .collect();
1305
1306 // Expand the context ranges of each suggestion and group suggestions with overlapping context ranges.
1307 let suggestions = future::join_all(suggestion_tasks)
1308 .await
1309 .into_iter()
1310 .filter_map(|task| task.log_err())
1311 .collect::<Vec<_>>();
1312
1313 let mut suggestions_by_buffer = HashMap::default();
1314 for (buffer, suggestion) in suggestions {
1315 suggestions_by_buffer
1316 .entry(buffer)
1317 .or_insert_with(Vec::new)
1318 .push(suggestion);
1319 }
1320
1321 let mut suggestion_groups_by_buffer = HashMap::default();
1322 for (buffer, mut suggestions) in suggestions_by_buffer {
1323 let mut suggestion_groups = Vec::<WorkflowSuggestionGroup>::new();
1324 let snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot())?;
1325 // Sort suggestions by their range so that earlier, larger ranges come first
1326 suggestions.sort_by(|a, b| a.range().cmp(&b.range(), &snapshot));
1327
1328 // Merge overlapping suggestions
1329 suggestions.dedup_by(|a, b| b.try_merge(&a, &snapshot));
1330
1331 // Create context ranges for each suggestion
1332 for suggestion in suggestions {
1333 let context_range = {
1334 let suggestion_point_range =
1335 suggestion.range().to_point(&snapshot);
1336 let start_row =
1337 suggestion_point_range.start.row.saturating_sub(5);
1338 let end_row = cmp::min(
1339 suggestion_point_range.end.row + 5,
1340 snapshot.max_point().row,
1341 );
1342 let start = snapshot.anchor_before(Point::new(start_row, 0));
1343 let end = snapshot.anchor_after(Point::new(
1344 end_row,
1345 snapshot.line_len(end_row),
1346 ));
1347 start..end
1348 };
1349
1350 if let Some(last_group) = suggestion_groups.last_mut() {
1351 if last_group
1352 .context_range
1353 .end
1354 .cmp(&context_range.start, &snapshot)
1355 .is_ge()
1356 {
1357 // Merge with the previous group if context ranges overlap
1358 last_group.context_range.end = context_range.end;
1359 last_group.suggestions.push(suggestion);
1360 } else {
1361 // Create a new group
1362 suggestion_groups.push(WorkflowSuggestionGroup {
1363 context_range,
1364 suggestions: vec![suggestion],
1365 });
1366 }
1367 } else {
1368 // Create the first group
1369 suggestion_groups.push(WorkflowSuggestionGroup {
1370 context_range,
1371 suggestions: vec![suggestion],
1372 });
1373 }
1374 }
1375
1376 suggestion_groups_by_buffer.insert(buffer, suggestion_groups);
1377 }
1378
1379 Ok((resolution.step_title, suggestion_groups_by_buffer))
1380 };
1381
1382 let result = result.await;
1383 this.update(&mut cx, |this, cx| {
1384 let step_index = this
1385 .workflow_steps
1386 .binary_search_by(|step| {
1387 step.tagged_range.cmp(&tagged_range, this.buffer.read(cx))
1388 })
1389 .map_err(|_| anyhow!("edit step not found"))?;
1390 if let Some(edit_step) = this.workflow_steps.get_mut(step_index) {
1391 edit_step.status = match result {
1392 Ok((title, suggestions)) => {
1393 WorkflowStepStatus::Resolved(ResolvedWorkflowStep {
1394 title,
1395 suggestions,
1396 })
1397 }
1398 Err(error) => WorkflowStepStatus::Error(Arc::new(error)),
1399 };
1400 cx.emit(ContextEvent::WorkflowStepUpdated(tagged_range));
1401 cx.notify();
1402 }
1403 anyhow::Ok(())
1404 })?
1405 }
1406 .log_err()
1407 }));
1408 } else {
1409 edit_step.status = WorkflowStepStatus::Error(Arc::new(anyhow!("no active model")));
1410 }
1411
1412 cx.emit(ContextEvent::WorkflowStepUpdated(tagged_range));
1413 cx.notify();
1414 }
1415
1416 pub fn pending_command_for_position(
1417 &mut self,
1418 position: language::Anchor,
1419 cx: &mut ModelContext<Self>,
1420 ) -> Option<&mut PendingSlashCommand> {
1421 let buffer = self.buffer.read(cx);
1422 match self
1423 .pending_slash_commands
1424 .binary_search_by(|probe| probe.source_range.end.cmp(&position, buffer))
1425 {
1426 Ok(ix) => Some(&mut self.pending_slash_commands[ix]),
1427 Err(ix) => {
1428 let cmd = self.pending_slash_commands.get_mut(ix)?;
1429 if position.cmp(&cmd.source_range.start, buffer).is_ge()
1430 && position.cmp(&cmd.source_range.end, buffer).is_le()
1431 {
1432 Some(cmd)
1433 } else {
1434 None
1435 }
1436 }
1437 }
1438 }
1439
1440 pub fn pending_commands_for_range(
1441 &self,
1442 range: Range<language::Anchor>,
1443 cx: &AppContext,
1444 ) -> &[PendingSlashCommand] {
1445 let range = self.pending_command_indices_for_range(range, cx);
1446 &self.pending_slash_commands[range]
1447 }
1448
1449 fn pending_command_indices_for_range(
1450 &self,
1451 range: Range<language::Anchor>,
1452 cx: &AppContext,
1453 ) -> Range<usize> {
1454 let buffer = self.buffer.read(cx);
1455 let start_ix = match self
1456 .pending_slash_commands
1457 .binary_search_by(|probe| probe.source_range.end.cmp(&range.start, &buffer))
1458 {
1459 Ok(ix) | Err(ix) => ix,
1460 };
1461 let end_ix = match self
1462 .pending_slash_commands
1463 .binary_search_by(|probe| probe.source_range.start.cmp(&range.end, &buffer))
1464 {
1465 Ok(ix) => ix + 1,
1466 Err(ix) => ix,
1467 };
1468 start_ix..end_ix
1469 }
1470
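    /// Replaces a slash command's source range with the command's output once it completes,
    /// records the resulting output sections, and emits/replicates `SlashCommandFinished`.
    /// While the task runs the command is marked as running; on failure it is marked with an
    /// error status instead of editing the buffer.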
1471 pub fn insert_command_output(
1472 &mut self,
1473 command_range: Range<language::Anchor>,
1474 output: Task<Result<SlashCommandOutput>>,
1475 insert_trailing_newline: bool,
1476 cx: &mut ModelContext<Self>,
1477 ) {
1478 self.reparse_slash_commands(cx);
1479
1480 let insert_output_task = cx.spawn(|this, mut cx| {
1481 let command_range = command_range.clone();
1482 async move {
1483 let output = output.await;
1484 this.update(&mut cx, |this, cx| match output {
1485 Ok(mut output) => {
1486 if insert_trailing_newline {
1487 output.text.push('\n');
1488 }
1489
1490 let version = this.version.clone();
1491 let command_id = SlashCommandId(this.next_timestamp());
1492 let (operation, event) = this.buffer.update(cx, |buffer, cx| {
1493 let start = command_range.start.to_offset(buffer);
1494 let old_end = command_range.end.to_offset(buffer);
1495 let new_end = start + output.text.len();
1496 buffer.edit([(start..old_end, output.text)], None, cx);
1497
1498 let mut sections = output
1499 .sections
1500 .into_iter()
1501 .map(|section| SlashCommandOutputSection {
1502 range: buffer.anchor_after(start + section.range.start)
1503 ..buffer.anchor_before(start + section.range.end),
1504 icon: section.icon,
1505 label: section.label,
1506 })
1507 .collect::<Vec<_>>();
1508 sections.sort_by(|a, b| a.range.cmp(&b.range, buffer));
1509
1510 this.slash_command_output_sections
1511 .extend(sections.iter().cloned());
1512 this.slash_command_output_sections
1513 .sort_by(|a, b| a.range.cmp(&b.range, buffer));
1514
1515 let output_range =
1516 buffer.anchor_after(start)..buffer.anchor_before(new_end);
1517 this.finished_slash_commands.insert(command_id);
1518
1519 (
1520 ContextOperation::SlashCommandFinished {
1521 id: command_id,
1522 output_range: output_range.clone(),
1523 sections: sections.clone(),
1524 version,
1525 },
1526 ContextEvent::SlashCommandFinished {
1527 output_range,
1528 sections,
1529 run_commands_in_output: output.run_commands_in_text,
1530 },
1531 )
1532 });
1533
1534 this.push_op(operation, cx);
1535 cx.emit(event);
1536 }
1537 Err(error) => {
1538 if let Some(pending_command) =
1539 this.pending_command_for_position(command_range.start, cx)
1540 {
1541 pending_command.status =
1542 PendingSlashCommandStatus::Error(error.to_string());
1543 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1544 removed: vec![pending_command.source_range.clone()],
1545 updated: vec![pending_command.clone()],
1546 });
1547 }
1548 }
1549 })
1550 .ok();
1551 }
1552 });
1553
1554 if let Some(pending_command) = self.pending_command_for_position(command_range.start, cx) {
1555 pending_command.status = PendingSlashCommandStatus::Running {
1556 _task: insert_output_task.shared(),
1557 };
1558 cx.emit(ContextEvent::PendingSlashCommandsUpdated {
1559 removed: vec![pending_command.source_range.clone()],
1560 updated: vec![pending_command.clone()],
1561 });
1562 }
1563 }
1564
1565 pub fn completion_provider_changed(&mut self, cx: &mut ModelContext<Self>) {
1566 self.count_remaining_tokens(cx);
1567 }
1568
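    /// Starts a completion for the current conversation: appends a pending assistant message
    /// plus an empty user message for the next reply, streams the model's chunks into the
    /// assistant message (parsing `<step>` tags as text arrives when a project is attached),
    /// and finally updates the message status, telemetry, and any error state. Returns the
    /// anchor of the queued user message, or `None` when there is no authenticated model or
    /// no valid message to respond to.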
1569 pub fn assist(&mut self, cx: &mut ModelContext<Self>) -> Option<MessageAnchor> {
1570 let provider = LanguageModelRegistry::read_global(cx).active_provider()?;
1571 let model = LanguageModelRegistry::read_global(cx).active_model()?;
1572 let last_message_id = self.message_anchors.iter().rev().find_map(|message| {
1573 message
1574 .start
1575 .is_valid(self.buffer.read(cx))
1576 .then_some(message.id)
1577 })?;
1578
1579 if !provider.is_authenticated(cx) {
1580 log::info!("completion provider has no credentials");
1581 return None;
1582 }
1583
1584 let request = self.to_completion_request(cx);
1585 let assistant_message = self
1586 .insert_message_after(last_message_id, Role::Assistant, MessageStatus::Pending, cx)
1587 .unwrap();
1588
1589 // Queue up the user's next reply.
1590 let user_message = self
1591 .insert_message_after(assistant_message.id, Role::User, MessageStatus::Done, cx)
1592 .unwrap();
1593
1594 let task = cx.spawn({
1595 |this, mut cx| async move {
1596 let stream = model.stream_completion(request, &cx);
1597 let assistant_message_id = assistant_message.id;
1598 let mut response_latency = None;
1599 let stream_completion = async {
1600 let request_start = Instant::now();
1601 let mut chunks = stream.await?;
1602
1603 while let Some(chunk) = chunks.next().await {
1604 if response_latency.is_none() {
1605 response_latency = Some(request_start.elapsed());
1606 }
1607 let chunk = chunk?;
1608
1609 this.update(&mut cx, |this, cx| {
1610 let message_ix = this
1611 .message_anchors
1612 .iter()
1613 .position(|message| message.id == assistant_message_id)?;
1614 let message_range = this.buffer.update(cx, |buffer, cx| {
1615 let message_start_offset =
1616 this.message_anchors[message_ix].start.to_offset(buffer);
1617 let message_old_end_offset = this.message_anchors[message_ix + 1..]
1618 .iter()
1619 .find(|message| message.start.is_valid(buffer))
1620 .map_or(buffer.len(), |message| {
1621 message.start.to_offset(buffer).saturating_sub(1)
1622 });
1623 let message_new_end_offset = message_old_end_offset + chunk.len();
1624 buffer.edit(
1625 [(message_old_end_offset..message_old_end_offset, chunk)],
1626 None,
1627 cx,
1628 );
1629 message_start_offset..message_new_end_offset
1630 });
1631 if let Some(project) = this.project.clone() {
1632 this.parse_workflow_steps_in_range(message_range, project, cx);
1633 }
1634 cx.emit(ContextEvent::StreamedCompletion);
1635
1636 Some(())
1637 })?;
1638 smol::future::yield_now().await;
1639 }
1640
1641 this.update(&mut cx, |this, cx| {
1642 this.pending_completions
1643 .retain(|completion| completion.id != this.completion_count);
1644 this.summarize(false, cx);
1645 })?;
1646
1647 anyhow::Ok(())
1648 };
1649
1650 let result = stream_completion.await;
1651
1652 this.update(&mut cx, |this, cx| {
1653 let error_message = result
1654 .err()
1655 .map(|error| error.to_string().trim().to_string());
1656
1657 if let Some(error_message) = error_message.as_ref() {
1658 cx.emit(ContextEvent::AssistError(error_message.to_string()));
1659 }
1660
1661 this.update_metadata(assistant_message_id, cx, |metadata| {
1662 if let Some(error_message) = error_message.as_ref() {
1663 metadata.status =
1664 MessageStatus::Error(SharedString::from(error_message.clone()));
1665 } else {
1666 metadata.status = MessageStatus::Done;
1667 }
1668 });
1669
1670 if let Some(telemetry) = this.telemetry.as_ref() {
1671 telemetry.report_assistant_event(
1672 Some(this.id.0.clone()),
1673 AssistantKind::Panel,
1674 model.telemetry_id(),
1675 response_latency,
1676 error_message,
1677 );
1678 }
1679 })
1680 .ok();
1681 }
1682 });
1683
1684 self.pending_completions.push(PendingCompletion {
1685 id: post_inc(&mut self.completion_count),
1686 _task: task,
1687 });
1688
1689 Some(user_message)
1690 }
1691
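    /// Builds the `LanguageModelRequest` for this context, including only messages whose
    /// status is `Done` (pending or errored messages are skipped).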
1692 pub fn to_completion_request(&self, cx: &AppContext) -> LanguageModelRequest {
1693 let messages = self
1694 .messages(cx)
1695 .filter(|message| matches!(message.status, MessageStatus::Done))
1696 .map(|message| message.to_request_message(self.buffer.read(cx)));
1697
1698 LanguageModelRequest {
1699 messages: messages.collect(),
1700 stop: vec![],
1701 temperature: 1.0,
1702 }
1703 }
1704
1705 pub fn cancel_last_assist(&mut self) -> bool {
1706 self.pending_completions.pop().is_some()
1707 }
1708
1709 pub fn cycle_message_roles(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
1710 for id in ids {
1711 if let Some(metadata) = self.messages_metadata.get(&id) {
1712 let role = metadata.role.cycle();
1713 self.update_metadata(id, cx, |metadata| metadata.role = role);
1714 }
1715 }
1716 }
1717
1718 pub fn update_metadata(
1719 &mut self,
1720 id: MessageId,
1721 cx: &mut ModelContext<Self>,
1722 f: impl FnOnce(&mut MessageMetadata),
1723 ) {
1724 let version = self.version.clone();
1725 let timestamp = self.next_timestamp();
1726 if let Some(metadata) = self.messages_metadata.get_mut(&id) {
1727 f(metadata);
1728 metadata.timestamp = timestamp;
1729 let operation = ContextOperation::UpdateMessage {
1730 message_id: id,
1731 metadata: metadata.clone(),
1732 version,
1733 };
1734 self.push_op(operation, cx);
1735 cx.emit(ContextEvent::MessagesEdited);
1736 cx.notify();
1737 }
1738 }
1739
1740 fn insert_message_after(
1741 &mut self,
1742 message_id: MessageId,
1743 role: Role,
1744 status: MessageStatus,
1745 cx: &mut ModelContext<Self>,
1746 ) -> Option<MessageAnchor> {
1747 if let Some(prev_message_ix) = self
1748 .message_anchors
1749 .iter()
1750 .position(|message| message.id == message_id)
1751 {
1752 // Find the next valid message after the one we were given.
1753 let mut next_message_ix = prev_message_ix + 1;
1754 while let Some(next_message) = self.message_anchors.get(next_message_ix) {
1755 if next_message.start.is_valid(self.buffer.read(cx)) {
1756 break;
1757 }
1758 next_message_ix += 1;
1759 }
1760
1761 let start = self.buffer.update(cx, |buffer, cx| {
1762 let offset = self
1763 .message_anchors
1764 .get(next_message_ix)
1765 .map_or(buffer.len(), |message| {
1766 buffer.clip_offset(message.start.to_offset(buffer) - 1, Bias::Left)
1767 });
1768 buffer.edit([(offset..offset, "\n")], None, cx);
1769 buffer.anchor_before(offset + 1)
1770 });
1771
1772 let version = self.version.clone();
1773 let anchor = MessageAnchor {
1774 id: MessageId(self.next_timestamp()),
1775 start,
1776 };
1777 let metadata = MessageMetadata {
1778 role,
1779 status,
1780 timestamp: anchor.id.0,
1781 };
1782 self.insert_message(anchor.clone(), metadata.clone(), cx);
1783 self.push_op(
1784 ContextOperation::InsertMessage {
1785 anchor: anchor.clone(),
1786 metadata,
1787 version,
1788 },
1789 cx,
1790 );
1791 Some(anchor)
1792 } else {
1793 None
1794 }
1795 }
1796
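    /// Splits the message containing `range` at its boundaries, inserting newlines when the
    /// split points don't already fall on line breaks. Returns the newly created anchors:
    /// the message covering the selection (when the range doesn't start at the message
    /// boundary) and the suffix that follows it; `(None, None)` if the range spans multiple
    /// messages.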
1797 pub fn split_message(
1798 &mut self,
1799 range: Range<usize>,
1800 cx: &mut ModelContext<Self>,
1801 ) -> (Option<MessageAnchor>, Option<MessageAnchor>) {
1802 let start_message = self.message_for_offset(range.start, cx);
1803 let end_message = self.message_for_offset(range.end, cx);
1804 if let Some((start_message, end_message)) = start_message.zip(end_message) {
1805 // Prevent splitting when range spans multiple messages.
1806 if start_message.id != end_message.id {
1807 return (None, None);
1808 }
1809
1810 let message = start_message;
1811 let role = message.role;
1812 let mut edited_buffer = false;
1813
1814 let mut suffix_start = None;
1815 if range.start > message.offset_range.start && range.end < message.offset_range.end - 1
1816 {
1817 if self.buffer.read(cx).chars_at(range.end).next() == Some('\n') {
1818 suffix_start = Some(range.end + 1);
1819 } else if self.buffer.read(cx).reversed_chars_at(range.end).next() == Some('\n') {
1820 suffix_start = Some(range.end);
1821 }
1822 }
1823
1824 let version = self.version.clone();
1825 let suffix = if let Some(suffix_start) = suffix_start {
1826 MessageAnchor {
1827 id: MessageId(self.next_timestamp()),
1828 start: self.buffer.read(cx).anchor_before(suffix_start),
1829 }
1830 } else {
1831 self.buffer.update(cx, |buffer, cx| {
1832 buffer.edit([(range.end..range.end, "\n")], None, cx);
1833 });
1834 edited_buffer = true;
1835 MessageAnchor {
1836 id: MessageId(self.next_timestamp()),
1837 start: self.buffer.read(cx).anchor_before(range.end + 1),
1838 }
1839 };
1840
1841 let suffix_metadata = MessageMetadata {
1842 role,
1843 status: MessageStatus::Done,
1844 timestamp: suffix.id.0,
1845 };
1846 self.insert_message(suffix.clone(), suffix_metadata.clone(), cx);
1847 self.push_op(
1848 ContextOperation::InsertMessage {
1849 anchor: suffix.clone(),
1850 metadata: suffix_metadata,
1851 version,
1852 },
1853 cx,
1854 );
1855
1856 let new_messages =
1857 if range.start == range.end || range.start == message.offset_range.start {
1858 (None, Some(suffix))
1859 } else {
1860 let mut prefix_end = None;
1861 if range.start > message.offset_range.start
1862 && range.end < message.offset_range.end - 1
1863 {
1864 if self.buffer.read(cx).chars_at(range.start).next() == Some('\n') {
1865 prefix_end = Some(range.start + 1);
1866 } else if self.buffer.read(cx).reversed_chars_at(range.start).next()
1867 == Some('\n')
1868 {
1869 prefix_end = Some(range.start);
1870 }
1871 }
1872
1873 let version = self.version.clone();
1874 let selection = if let Some(prefix_end) = prefix_end {
1875 MessageAnchor {
1876 id: MessageId(self.next_timestamp()),
1877 start: self.buffer.read(cx).anchor_before(prefix_end),
1878 }
1879 } else {
1880 self.buffer.update(cx, |buffer, cx| {
1881 buffer.edit([(range.start..range.start, "\n")], None, cx)
1882 });
1883 edited_buffer = true;
1884 MessageAnchor {
1885 id: MessageId(self.next_timestamp()),
1886 start: self.buffer.read(cx).anchor_before(range.end + 1),
1887 }
1888 };
1889
1890 let selection_metadata = MessageMetadata {
1891 role,
1892 status: MessageStatus::Done,
1893 timestamp: selection.id.0,
1894 };
1895 self.insert_message(selection.clone(), selection_metadata.clone(), cx);
1896 self.push_op(
1897 ContextOperation::InsertMessage {
1898 anchor: selection.clone(),
1899 metadata: selection_metadata,
1900 version,
1901 },
1902 cx,
1903 );
1904
1905 (Some(selection), Some(suffix))
1906 };
1907
1908 if !edited_buffer {
1909 cx.emit(ContextEvent::MessagesEdited);
1910 }
1911 new_messages
1912 } else {
1913 (None, None)
1914 }
1915 }
1916
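    /// Records the metadata for a new message and inserts its anchor into
    /// `message_anchors`, keeping the list ordered by buffer position and, for anchors
    /// at the same position, by descending message id.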
1917 fn insert_message(
1918 &mut self,
1919 new_anchor: MessageAnchor,
1920 new_metadata: MessageMetadata,
1921 cx: &mut ModelContext<Self>,
1922 ) {
1923 cx.emit(ContextEvent::MessagesEdited);
1924
1925 self.messages_metadata.insert(new_anchor.id, new_metadata);
1926
1927 let buffer = self.buffer.read(cx);
1928 let insertion_ix = self
1929 .message_anchors
1930 .iter()
1931 .position(|anchor| {
1932 let comparison = new_anchor.start.cmp(&anchor.start, buffer);
1933 comparison.is_lt() || (comparison.is_eq() && new_anchor.id > anchor.id)
1934 })
1935 .unwrap_or(self.message_anchors.len());
1936 self.message_anchors.insert(insertion_ix, new_anchor);
1937 }
1938
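    /// Asks the active language model for a one-line title for this context and streams
    /// it into `self.summary`, emitting `SummaryChanged` as text arrives. Runs only when
    /// there are at least two messages and no summary yet, unless `replace_old` forces
    /// regeneration of an existing summary.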
1939 pub(super) fn summarize(&mut self, replace_old: bool, cx: &mut ModelContext<Self>) {
1940 let Some(provider) = LanguageModelRegistry::read_global(cx).active_provider() else {
1941 return;
1942 };
1943 let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
1944 return;
1945 };
1946
1947 if replace_old || (self.message_anchors.len() >= 2 && self.summary.is_none()) {
1948 if !provider.is_authenticated(cx) {
1949 return;
1950 }
1951
1952 let messages = self
1953 .messages(cx)
1954 .map(|message| message.to_request_message(self.buffer.read(cx)))
1955 .chain(Some(LanguageModelRequestMessage {
1956 role: Role::User,
1957 content: "Summarize the context into a short title without punctuation.".into(),
1958 }));
1959 let request = LanguageModelRequest {
1960 messages: messages.collect(),
1961 stop: vec![],
1962 temperature: 1.0,
1963 };
1964
1965 self.pending_summary = cx.spawn(|this, mut cx| {
1966 async move {
1967 let stream = model.stream_completion(request, &cx);
1968 let mut messages = stream.await?;
1969
1970 let mut replaced = !replace_old;
1971 while let Some(message) = messages.next().await {
1972 let text = message?;
1973 let mut lines = text.lines();
1974 this.update(&mut cx, |this, cx| {
1975 let version = this.version.clone();
1976 let timestamp = this.next_timestamp();
1977 let summary = this.summary.get_or_insert(ContextSummary::default());
1978 if !replaced && replace_old {
1979 summary.text.clear();
1980 replaced = true;
1981 }
1982 summary.text.extend(lines.next());
1983 summary.timestamp = timestamp;
1984 let operation = ContextOperation::UpdateSummary {
1985 summary: summary.clone(),
1986 version,
1987 };
1988 this.push_op(operation, cx);
1989 cx.emit(ContextEvent::SummaryChanged);
1990 })?;
1991
1992 // Stop if the LLM generated multiple lines.
1993 if lines.next().is_some() {
1994 break;
1995 }
1996 }
1997
1998 this.update(&mut cx, |this, cx| {
1999 let version = this.version.clone();
2000 let timestamp = this.next_timestamp();
2001 if let Some(summary) = this.summary.as_mut() {
2002 summary.done = true;
2003 summary.timestamp = timestamp;
2004 let operation = ContextOperation::UpdateSummary {
2005 summary: summary.clone(),
2006 version,
2007 };
2008 this.push_op(operation, cx);
2009 cx.emit(ContextEvent::SummaryChanged);
2010 }
2011 })?;
2012
2013 anyhow::Ok(())
2014 }
2015 .log_err()
2016 });
2017 }
2018 }
2019
2020 fn message_for_offset(&self, offset: usize, cx: &AppContext) -> Option<Message> {
2021 self.messages_for_offsets([offset], cx).pop()
2022 }
2023
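    /// Returns the message containing each of the given offsets, deduplicating offsets
    /// that fall within the same message. Offsets and messages are traversed in a single
    /// forward pass, so the offsets are expected to be in ascending order.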
2024 pub fn messages_for_offsets(
2025 &self,
2026 offsets: impl IntoIterator<Item = usize>,
2027 cx: &AppContext,
2028 ) -> Vec<Message> {
2029 let mut result = Vec::new();
2030
2031 let mut messages = self.messages(cx).peekable();
2032 let mut offsets = offsets.into_iter().peekable();
2033 let mut current_message = messages.next();
2034 while let Some(offset) = offsets.next() {
2035 // Locate the message that contains the offset.
2036 while current_message.as_ref().map_or(false, |message| {
2037 !message.offset_range.contains(&offset) && messages.peek().is_some()
2038 }) {
2039 current_message = messages.next();
2040 }
2041 let Some(message) = current_message.as_ref() else {
2042 break;
2043 };
2044
2045             // Skip offsets that fall within the same message, or all remaining offsets once we're on the last message.
2046 while offsets.peek().map_or(false, |offset| {
2047 message.offset_range.contains(offset) || messages.peek().is_none()
2048 }) {
2049 offsets.next();
2050 }
2051
2052 result.push(message.clone());
2053 }
2054 result
2055 }
2056
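    /// Iterates over the messages of this context in buffer order. Anchors whose start
    /// is no longer valid (e.g. after the surrounding text was deleted) are folded into
    /// the preceding message; each message spans from its anchor to the start of the
    /// next valid anchor, or to the end of the buffer.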
2057 pub fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Message> {
2058 let buffer = self.buffer.read(cx);
2059 let mut message_anchors = self.message_anchors.iter().enumerate().peekable();
2060 iter::from_fn(move || {
2061 if let Some((start_ix, message_anchor)) = message_anchors.next() {
2062 let metadata = self.messages_metadata.get(&message_anchor.id)?;
2063 let message_start = message_anchor.start.to_offset(buffer);
2064 let mut message_end = None;
2065 let mut end_ix = start_ix;
2066 while let Some((_, next_message)) = message_anchors.peek() {
2067 if next_message.start.is_valid(buffer) {
2068 message_end = Some(next_message.start);
2069 break;
2070 } else {
2071 end_ix += 1;
2072 message_anchors.next();
2073 }
2074 }
2075 let message_end = message_end
2076 .unwrap_or(language::Anchor::MAX)
2077 .to_offset(buffer);
2078
2079 return Some(Message {
2080 index_range: start_ix..end_ix,
2081 offset_range: message_start..message_end,
2082 id: message_anchor.id,
2083 anchor: message_anchor.start,
2084 role: metadata.role,
2085 status: metadata.status.clone(),
2086 });
2087 }
2088 None
2089 })
2090 }
2091
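    /// Serializes this context into `contexts_dir()` as `"<summary> - <n>.zed.json"`,
    /// optionally after a debounce delay, and removes the previously saved file if the
    /// path changed. Remote (non-host) replicas never save.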
2092 pub fn save(
2093 &mut self,
2094 debounce: Option<Duration>,
2095 fs: Arc<dyn Fs>,
2096 cx: &mut ModelContext<Context>,
2097 ) {
2098 if self.replica_id() != ReplicaId::default() {
2099 // Prevent saving a remote context for now.
2100 return;
2101 }
2102
2103 self.pending_save = cx.spawn(|this, mut cx| async move {
2104 if let Some(debounce) = debounce {
2105 cx.background_executor().timer(debounce).await;
2106 }
2107
2108 let (old_path, summary) = this.read_with(&cx, |this, _| {
2109 let path = this.path.clone();
2110 let summary = if let Some(summary) = this.summary.as_ref() {
2111 if summary.done {
2112 Some(summary.text.clone())
2113 } else {
2114 None
2115 }
2116 } else {
2117 None
2118 };
2119 (path, summary)
2120 })?;
2121
2122 if let Some(summary) = summary {
2123 let context = this.read_with(&cx, |this, cx| this.serialize(cx))?;
2124 let mut discriminant = 1;
2125 let mut new_path;
2126 loop {
2127 new_path = contexts_dir().join(&format!(
2128 "{} - {}.zed.json",
2129 summary.trim(),
2130 discriminant
2131 ));
2132 if fs.is_file(&new_path).await {
2133 discriminant += 1;
2134 } else {
2135 break;
2136 }
2137 }
2138
2139 fs.create_dir(contexts_dir().as_ref()).await?;
2140 fs.atomic_write(new_path.clone(), serde_json::to_string(&context).unwrap())
2141 .await?;
2142 if let Some(old_path) = old_path {
2143 if new_path != old_path {
2144 fs.remove_file(
2145 &old_path,
2146 RemoveOptions {
2147 recursive: false,
2148 ignore_if_not_exists: true,
2149 },
2150 )
2151 .await?;
2152 }
2153 }
2154
2155 this.update(&mut cx, |this, _| this.path = Some(new_path))?;
2156 }
2157
2158 Ok(())
2159 });
2160 }
2161
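    /// Replaces the summary with a user-provided title and marks it as done.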
2162 pub(crate) fn custom_summary(&mut self, custom_summary: String, cx: &mut ModelContext<Self>) {
2163 let timestamp = self.next_timestamp();
2164 let summary = self.summary.get_or_insert(ContextSummary::default());
2165 summary.timestamp = timestamp;
2166 summary.done = true;
2167 summary.text = custom_summary;
2168 cx.emit(ContextEvent::SummaryChanged);
2169 }
2170}
2171
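/// The version of a context and of its underlying buffer, used when synchronizing with
/// collaborators to determine which operations a peer is missing.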
2172#[derive(Debug, Default)]
2173pub struct ContextVersion {
2174 context: clock::Global,
2175 buffer: clock::Global,
2176}
2177
2178impl ContextVersion {
2179 pub fn from_proto(proto: &proto::ContextVersion) -> Self {
2180 Self {
2181 context: language::proto::deserialize_version(&proto.context_version),
2182 buffer: language::proto::deserialize_version(&proto.buffer_version),
2183 }
2184 }
2185
2186 pub fn to_proto(&self, context_id: ContextId) -> proto::ContextVersion {
2187 proto::ContextVersion {
2188 context_id: context_id.to_proto(),
2189 context_version: language::proto::serialize_version(&self.context),
2190 buffer_version: language::proto::serialize_version(&self.buffer),
2191 }
2192 }
2193}
2194
2195#[derive(Debug, Clone)]
2196pub struct PendingSlashCommand {
2197 pub name: String,
2198 pub argument: Option<String>,
2199 pub status: PendingSlashCommandStatus,
2200 pub source_range: Range<language::Anchor>,
2201}
2202
2203#[derive(Debug, Clone)]
2204pub enum PendingSlashCommandStatus {
2205 Idle,
2206 Running { _task: Shared<Task<()>> },
2207 Error(String),
2208}
2209
2210#[derive(Serialize, Deserialize)]
2211pub struct SavedMessage {
2212 pub id: MessageId,
2213 pub start: usize,
2214 pub metadata: MessageMetadata,
2215}
2216
2217#[derive(Serialize, Deserialize)]
2218pub struct SavedContext {
2219 pub id: Option<ContextId>,
2220 pub zed: String,
2221 pub version: String,
2222 pub text: String,
2223 pub messages: Vec<SavedMessage>,
2224 pub summary: String,
2225 pub slash_command_output_sections:
2226 Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
2227}
2228
2229impl SavedContext {
2230 pub const VERSION: &'static str = "0.4.0";
2231
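    /// Deserializes a saved context, upgrading older on-disk formats step by step
    /// (0.1.0 -> 0.2.0 -> 0.3.0 -> 0.4.0) to the current `SavedContext::VERSION`.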
2232 pub fn from_json(json: &str) -> Result<Self> {
2233 let saved_context_json = serde_json::from_str::<serde_json::Value>(json)?;
2234 match saved_context_json
2235 .get("version")
2236 .ok_or_else(|| anyhow!("version not found"))?
2237 {
2238 serde_json::Value::String(version) => match version.as_str() {
2239 SavedContext::VERSION => {
2240 Ok(serde_json::from_value::<SavedContext>(saved_context_json)?)
2241 }
2242 SavedContextV0_3_0::VERSION => {
2243 let saved_context =
2244 serde_json::from_value::<SavedContextV0_3_0>(saved_context_json)?;
2245 Ok(saved_context.upgrade())
2246 }
2247 SavedContextV0_2_0::VERSION => {
2248 let saved_context =
2249 serde_json::from_value::<SavedContextV0_2_0>(saved_context_json)?;
2250 Ok(saved_context.upgrade())
2251 }
2252 SavedContextV0_1_0::VERSION => {
2253 let saved_context =
2254 serde_json::from_value::<SavedContextV0_1_0>(saved_context_json)?;
2255 Ok(saved_context.upgrade())
2256 }
2257 _ => Err(anyhow!("unrecognized saved context version: {}", version)),
2258 },
2259 _ => Err(anyhow!("version not found on saved context")),
2260 }
2261 }
2262
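    /// Converts a saved context into the sequence of operations needed to rebuild it on
    /// a fresh `Context`: one `InsertMessage` per message (except the initial message,
    /// which already exists and is updated in place), followed by the restored slash
    /// command output sections and the saved summary.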
2263 fn into_ops(
2264 self,
2265 buffer: &Model<Buffer>,
2266 cx: &mut ModelContext<Context>,
2267 ) -> Vec<ContextOperation> {
2268 let mut operations = Vec::new();
2269 let mut version = clock::Global::new();
2270 let mut next_timestamp = clock::Lamport::new(ReplicaId::default());
2271
2272 let mut first_message_metadata = None;
2273 for message in self.messages {
2274 if message.id == MessageId(clock::Lamport::default()) {
2275 first_message_metadata = Some(message.metadata);
2276 } else {
2277 operations.push(ContextOperation::InsertMessage {
2278 anchor: MessageAnchor {
2279 id: message.id,
2280 start: buffer.read(cx).anchor_before(message.start),
2281 },
2282 metadata: MessageMetadata {
2283 role: message.metadata.role,
2284 status: message.metadata.status,
2285 timestamp: message.metadata.timestamp,
2286 },
2287 version: version.clone(),
2288 });
2289 version.observe(message.id.0);
2290 next_timestamp.observe(message.id.0);
2291 }
2292 }
2293
2294 if let Some(metadata) = first_message_metadata {
2295 let timestamp = next_timestamp.tick();
2296 operations.push(ContextOperation::UpdateMessage {
2297 message_id: MessageId(clock::Lamport::default()),
2298 metadata: MessageMetadata {
2299 role: metadata.role,
2300 status: metadata.status,
2301 timestamp,
2302 },
2303 version: version.clone(),
2304 });
2305 version.observe(timestamp);
2306 }
2307
2308 let timestamp = next_timestamp.tick();
2309 operations.push(ContextOperation::SlashCommandFinished {
2310 id: SlashCommandId(timestamp),
2311 output_range: language::Anchor::MIN..language::Anchor::MAX,
2312 sections: self
2313 .slash_command_output_sections
2314 .into_iter()
2315 .map(|section| {
2316 let buffer = buffer.read(cx);
2317 SlashCommandOutputSection {
2318 range: buffer.anchor_after(section.range.start)
2319 ..buffer.anchor_before(section.range.end),
2320 icon: section.icon,
2321 label: section.label,
2322 }
2323 })
2324 .collect(),
2325 version: version.clone(),
2326 });
2327 version.observe(timestamp);
2328
2329 let timestamp = next_timestamp.tick();
2330 operations.push(ContextOperation::UpdateSummary {
2331 summary: ContextSummary {
2332 text: self.summary,
2333 done: true,
2334 timestamp,
2335 },
2336 version: version.clone(),
2337 });
2338 version.observe(timestamp);
2339
2340 operations
2341 }
2342}
2343
2344#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
2345struct SavedMessageIdPreV0_4_0(usize);
2346
2347#[derive(Serialize, Deserialize)]
2348struct SavedMessagePreV0_4_0 {
2349 id: SavedMessageIdPreV0_4_0,
2350 start: usize,
2351}
2352
2353#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
2354struct SavedMessageMetadataPreV0_4_0 {
2355 role: Role,
2356 status: MessageStatus,
2357}
2358
2359#[derive(Serialize, Deserialize)]
2360struct SavedContextV0_3_0 {
2361 id: Option<ContextId>,
2362 zed: String,
2363 version: String,
2364 text: String,
2365 messages: Vec<SavedMessagePreV0_4_0>,
2366 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
2367 summary: String,
2368 slash_command_output_sections: Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
2369}
2370
2371impl SavedContextV0_3_0 {
2372 const VERSION: &'static str = "0.3.0";
2373
2374 fn upgrade(self) -> SavedContext {
2375 SavedContext {
2376 id: self.id,
2377 zed: self.zed,
2378 version: SavedContext::VERSION.into(),
2379 text: self.text,
2380 messages: self
2381 .messages
2382 .into_iter()
2383 .filter_map(|message| {
2384 let metadata = self.message_metadata.get(&message.id)?;
2385 let timestamp = clock::Lamport {
2386 replica_id: ReplicaId::default(),
2387 value: message.id.0 as u32,
2388 };
2389 Some(SavedMessage {
2390 id: MessageId(timestamp),
2391 start: message.start,
2392 metadata: MessageMetadata {
2393 role: metadata.role,
2394 status: metadata.status.clone(),
2395 timestamp,
2396 },
2397 })
2398 })
2399 .collect(),
2400 summary: self.summary,
2401 slash_command_output_sections: self.slash_command_output_sections,
2402 }
2403 }
2404}
2405
2406#[derive(Serialize, Deserialize)]
2407struct SavedContextV0_2_0 {
2408 id: Option<ContextId>,
2409 zed: String,
2410 version: String,
2411 text: String,
2412 messages: Vec<SavedMessagePreV0_4_0>,
2413 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
2414 summary: String,
2415}
2416
2417impl SavedContextV0_2_0 {
2418 const VERSION: &'static str = "0.2.0";
2419
2420 fn upgrade(self) -> SavedContext {
2421 SavedContextV0_3_0 {
2422 id: self.id,
2423 zed: self.zed,
2424 version: SavedContextV0_3_0::VERSION.to_string(),
2425 text: self.text,
2426 messages: self.messages,
2427 message_metadata: self.message_metadata,
2428 summary: self.summary,
2429 slash_command_output_sections: Vec::new(),
2430 }
2431 .upgrade()
2432 }
2433}
2434
2435#[derive(Serialize, Deserialize)]
2436struct SavedContextV0_1_0 {
2437 id: Option<ContextId>,
2438 zed: String,
2439 version: String,
2440 text: String,
2441 messages: Vec<SavedMessagePreV0_4_0>,
2442 message_metadata: HashMap<SavedMessageIdPreV0_4_0, SavedMessageMetadataPreV0_4_0>,
2443 summary: String,
2444 api_url: Option<String>,
2445 model: OpenAiModel,
2446}
2447
2448impl SavedContextV0_1_0 {
2449 const VERSION: &'static str = "0.1.0";
2450
2451 fn upgrade(self) -> SavedContext {
2452 SavedContextV0_2_0 {
2453 id: self.id,
2454 zed: self.zed,
2455 version: SavedContextV0_2_0::VERSION.to_string(),
2456 text: self.text,
2457 messages: self.messages,
2458 message_metadata: self.message_metadata,
2459 summary: self.summary,
2460 }
2461 .upgrade()
2462 }
2463}
2464
2465#[derive(Clone)]
2466pub struct SavedContextMetadata {
2467 pub title: String,
2468 pub path: PathBuf,
2469 pub mtime: chrono::DateTime<chrono::Local>,
2470}
2471
2472#[cfg(test)]
2473mod tests {
2474 use super::*;
2475 use crate::{
2476 assistant_panel, prompt_library,
2477 slash_command::{active_command, file_command},
2478 MessageId,
2479 };
2480 use assistant_slash_command::{ArgumentCompletion, SlashCommand};
2481 use fs::FakeFs;
2482 use gpui::{AppContext, TestAppContext, WeakView};
2483 use indoc::indoc;
2484 use language::LspAdapterDelegate;
2485 use parking_lot::Mutex;
2486 use project::Project;
2487 use rand::prelude::*;
2488 use serde_json::json;
2489 use settings::SettingsStore;
2490 use std::{cell::RefCell, env, rc::Rc, sync::atomic::AtomicBool};
2491 use text::{network::Network, ToPoint};
2492 use ui::WindowContext;
2493 use unindent::Unindent;
2494 use util::{test::marked_text_ranges, RandomCharIter};
2495 use workspace::Workspace;
2496
2497 #[gpui::test]
2498 fn test_inserting_and_removing_messages(cx: &mut AppContext) {
2499 let settings_store = SettingsStore::test(cx);
2500 LanguageModelRegistry::test(cx);
2501 cx.set_global(settings_store);
2502 assistant_panel::init(cx);
2503 let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone()));
2504 let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap());
2505 let context =
2506 cx.new_model(|cx| Context::local(registry, None, None, prompt_builder.clone(), cx));
2507 let buffer = context.read(cx).buffer.clone();
2508
2509 let message_1 = context.read(cx).message_anchors[0].clone();
2510 assert_eq!(
2511 messages(&context, cx),
2512 vec![(message_1.id, Role::User, 0..0)]
2513 );
2514
2515 let message_2 = context.update(cx, |context, cx| {
2516 context
2517 .insert_message_after(message_1.id, Role::Assistant, MessageStatus::Done, cx)
2518 .unwrap()
2519 });
2520 assert_eq!(
2521 messages(&context, cx),
2522 vec![
2523 (message_1.id, Role::User, 0..1),
2524 (message_2.id, Role::Assistant, 1..1)
2525 ]
2526 );
2527
2528 buffer.update(cx, |buffer, cx| {
2529 buffer.edit([(0..0, "1"), (1..1, "2")], None, cx)
2530 });
2531 assert_eq!(
2532 messages(&context, cx),
2533 vec![
2534 (message_1.id, Role::User, 0..2),
2535 (message_2.id, Role::Assistant, 2..3)
2536 ]
2537 );
2538
2539 let message_3 = context.update(cx, |context, cx| {
2540 context
2541 .insert_message_after(message_2.id, Role::User, MessageStatus::Done, cx)
2542 .unwrap()
2543 });
2544 assert_eq!(
2545 messages(&context, cx),
2546 vec![
2547 (message_1.id, Role::User, 0..2),
2548 (message_2.id, Role::Assistant, 2..4),
2549 (message_3.id, Role::User, 4..4)
2550 ]
2551 );
2552
2553 let message_4 = context.update(cx, |context, cx| {
2554 context
2555 .insert_message_after(message_2.id, Role::User, MessageStatus::Done, cx)
2556 .unwrap()
2557 });
2558 assert_eq!(
2559 messages(&context, cx),
2560 vec![
2561 (message_1.id, Role::User, 0..2),
2562 (message_2.id, Role::Assistant, 2..4),
2563 (message_4.id, Role::User, 4..5),
2564 (message_3.id, Role::User, 5..5),
2565 ]
2566 );
2567
2568 buffer.update(cx, |buffer, cx| {
2569 buffer.edit([(4..4, "C"), (5..5, "D")], None, cx)
2570 });
2571 assert_eq!(
2572 messages(&context, cx),
2573 vec![
2574 (message_1.id, Role::User, 0..2),
2575 (message_2.id, Role::Assistant, 2..4),
2576 (message_4.id, Role::User, 4..6),
2577 (message_3.id, Role::User, 6..7),
2578 ]
2579 );
2580
2581 // Deleting across message boundaries merges the messages.
2582 buffer.update(cx, |buffer, cx| buffer.edit([(1..4, "")], None, cx));
2583 assert_eq!(
2584 messages(&context, cx),
2585 vec![
2586 (message_1.id, Role::User, 0..3),
2587 (message_3.id, Role::User, 3..4),
2588 ]
2589 );
2590
2591 // Undoing the deletion should also undo the merge.
2592 buffer.update(cx, |buffer, cx| buffer.undo(cx));
2593 assert_eq!(
2594 messages(&context, cx),
2595 vec![
2596 (message_1.id, Role::User, 0..2),
2597 (message_2.id, Role::Assistant, 2..4),
2598 (message_4.id, Role::User, 4..6),
2599 (message_3.id, Role::User, 6..7),
2600 ]
2601 );
2602
2603 // Redoing the deletion should also redo the merge.
2604 buffer.update(cx, |buffer, cx| buffer.redo(cx));
2605 assert_eq!(
2606 messages(&context, cx),
2607 vec![
2608 (message_1.id, Role::User, 0..3),
2609 (message_3.id, Role::User, 3..4),
2610 ]
2611 );
2612
2613 // Ensure we can still insert after a merged message.
2614 let message_5 = context.update(cx, |context, cx| {
2615 context
2616 .insert_message_after(message_1.id, Role::System, MessageStatus::Done, cx)
2617 .unwrap()
2618 });
2619 assert_eq!(
2620 messages(&context, cx),
2621 vec![
2622 (message_1.id, Role::User, 0..3),
2623 (message_5.id, Role::System, 3..4),
2624 (message_3.id, Role::User, 4..5)
2625 ]
2626 );
2627 }
2628
2629 #[gpui::test]
2630 fn test_message_splitting(cx: &mut AppContext) {
2631 let settings_store = SettingsStore::test(cx);
2632 cx.set_global(settings_store);
2633 LanguageModelRegistry::test(cx);
2634 assistant_panel::init(cx);
2635 let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone()));
2636
2637 let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap());
2638 let context =
2639 cx.new_model(|cx| Context::local(registry, None, None, prompt_builder.clone(), cx));
2640 let buffer = context.read(cx).buffer.clone();
2641
2642 let message_1 = context.read(cx).message_anchors[0].clone();
2643 assert_eq!(
2644 messages(&context, cx),
2645 vec![(message_1.id, Role::User, 0..0)]
2646 );
2647
2648 buffer.update(cx, |buffer, cx| {
2649 buffer.edit([(0..0, "aaa\nbbb\nccc\nddd\n")], None, cx)
2650 });
2651
2652 let (_, message_2) = context.update(cx, |context, cx| context.split_message(3..3, cx));
2653 let message_2 = message_2.unwrap();
2654
2655 // We recycle newlines in the middle of a split message
2656 assert_eq!(buffer.read(cx).text(), "aaa\nbbb\nccc\nddd\n");
2657 assert_eq!(
2658 messages(&context, cx),
2659 vec![
2660 (message_1.id, Role::User, 0..4),
2661 (message_2.id, Role::User, 4..16),
2662 ]
2663 );
2664
2665 let (_, message_3) = context.update(cx, |context, cx| context.split_message(3..3, cx));
2666 let message_3 = message_3.unwrap();
2667
2668 // We don't recycle newlines at the end of a split message
2669 assert_eq!(buffer.read(cx).text(), "aaa\n\nbbb\nccc\nddd\n");
2670 assert_eq!(
2671 messages(&context, cx),
2672 vec![
2673 (message_1.id, Role::User, 0..4),
2674 (message_3.id, Role::User, 4..5),
2675 (message_2.id, Role::User, 5..17),
2676 ]
2677 );
2678
2679 let (_, message_4) = context.update(cx, |context, cx| context.split_message(9..9, cx));
2680 let message_4 = message_4.unwrap();
2681 assert_eq!(buffer.read(cx).text(), "aaa\n\nbbb\nccc\nddd\n");
2682 assert_eq!(
2683 messages(&context, cx),
2684 vec![
2685 (message_1.id, Role::User, 0..4),
2686 (message_3.id, Role::User, 4..5),
2687 (message_2.id, Role::User, 5..9),
2688 (message_4.id, Role::User, 9..17),
2689 ]
2690 );
2691
2692 let (_, message_5) = context.update(cx, |context, cx| context.split_message(9..9, cx));
2693 let message_5 = message_5.unwrap();
2694 assert_eq!(buffer.read(cx).text(), "aaa\n\nbbb\n\nccc\nddd\n");
2695 assert_eq!(
2696 messages(&context, cx),
2697 vec![
2698 (message_1.id, Role::User, 0..4),
2699 (message_3.id, Role::User, 4..5),
2700 (message_2.id, Role::User, 5..9),
2701 (message_4.id, Role::User, 9..10),
2702 (message_5.id, Role::User, 10..18),
2703 ]
2704 );
2705
2706 let (message_6, message_7) =
2707 context.update(cx, |context, cx| context.split_message(14..16, cx));
2708 let message_6 = message_6.unwrap();
2709 let message_7 = message_7.unwrap();
2710 assert_eq!(buffer.read(cx).text(), "aaa\n\nbbb\n\nccc\ndd\nd\n");
2711 assert_eq!(
2712 messages(&context, cx),
2713 vec![
2714 (message_1.id, Role::User, 0..4),
2715 (message_3.id, Role::User, 4..5),
2716 (message_2.id, Role::User, 5..9),
2717 (message_4.id, Role::User, 9..10),
2718 (message_5.id, Role::User, 10..14),
2719 (message_6.id, Role::User, 14..17),
2720 (message_7.id, Role::User, 17..19),
2721 ]
2722 );
2723 }
2724
2725 #[gpui::test]
2726 fn test_messages_for_offsets(cx: &mut AppContext) {
2727 let settings_store = SettingsStore::test(cx);
2728 LanguageModelRegistry::test(cx);
2729 cx.set_global(settings_store);
2730 assistant_panel::init(cx);
2731 let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone()));
2732 let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap());
2733 let context =
2734 cx.new_model(|cx| Context::local(registry, None, None, prompt_builder.clone(), cx));
2735 let buffer = context.read(cx).buffer.clone();
2736
2737 let message_1 = context.read(cx).message_anchors[0].clone();
2738 assert_eq!(
2739 messages(&context, cx),
2740 vec![(message_1.id, Role::User, 0..0)]
2741 );
2742
2743 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "aaa")], None, cx));
2744 let message_2 = context
2745 .update(cx, |context, cx| {
2746 context.insert_message_after(message_1.id, Role::User, MessageStatus::Done, cx)
2747 })
2748 .unwrap();
2749 buffer.update(cx, |buffer, cx| buffer.edit([(4..4, "bbb")], None, cx));
2750
2751 let message_3 = context
2752 .update(cx, |context, cx| {
2753 context.insert_message_after(message_2.id, Role::User, MessageStatus::Done, cx)
2754 })
2755 .unwrap();
2756 buffer.update(cx, |buffer, cx| buffer.edit([(8..8, "ccc")], None, cx));
2757
2758 assert_eq!(buffer.read(cx).text(), "aaa\nbbb\nccc");
2759 assert_eq!(
2760 messages(&context, cx),
2761 vec![
2762 (message_1.id, Role::User, 0..4),
2763 (message_2.id, Role::User, 4..8),
2764 (message_3.id, Role::User, 8..11)
2765 ]
2766 );
2767
2768 assert_eq!(
2769 message_ids_for_offsets(&context, &[0, 4, 9], cx),
2770 [message_1.id, message_2.id, message_3.id]
2771 );
2772 assert_eq!(
2773 message_ids_for_offsets(&context, &[0, 1, 11], cx),
2774 [message_1.id, message_3.id]
2775 );
2776
2777 let message_4 = context
2778 .update(cx, |context, cx| {
2779 context.insert_message_after(message_3.id, Role::User, MessageStatus::Done, cx)
2780 })
2781 .unwrap();
2782 assert_eq!(buffer.read(cx).text(), "aaa\nbbb\nccc\n");
2783 assert_eq!(
2784 messages(&context, cx),
2785 vec![
2786 (message_1.id, Role::User, 0..4),
2787 (message_2.id, Role::User, 4..8),
2788 (message_3.id, Role::User, 8..12),
2789 (message_4.id, Role::User, 12..12)
2790 ]
2791 );
2792 assert_eq!(
2793 message_ids_for_offsets(&context, &[0, 4, 8, 12], cx),
2794 [message_1.id, message_2.id, message_3.id, message_4.id]
2795 );
2796
2797 fn message_ids_for_offsets(
2798 context: &Model<Context>,
2799 offsets: &[usize],
2800 cx: &AppContext,
2801 ) -> Vec<MessageId> {
2802 context
2803 .read(cx)
2804 .messages_for_offsets(offsets.iter().copied(), cx)
2805 .into_iter()
2806 .map(|message| message.id)
2807 .collect()
2808 }
2809 }
2810
2811 #[gpui::test]
2812 async fn test_slash_commands(cx: &mut TestAppContext) {
2813 let settings_store = cx.update(SettingsStore::test);
2814 cx.set_global(settings_store);
2815 cx.update(LanguageModelRegistry::test);
2816 cx.update(Project::init_settings);
2817 cx.update(assistant_panel::init);
2818 let fs = FakeFs::new(cx.background_executor.clone());
2819
2820 fs.insert_tree(
2821 "/test",
2822 json!({
2823 "src": {
2824 "lib.rs": "fn one() -> usize { 1 }",
2825 "main.rs": "
2826 use crate::one;
2827 fn main() { one(); }
2828 ".unindent(),
2829 }
2830 }),
2831 )
2832 .await;
2833
2834 let slash_command_registry = cx.update(SlashCommandRegistry::default_global);
2835 slash_command_registry.register_command(file_command::FileSlashCommand, false);
2836 slash_command_registry.register_command(active_command::ActiveSlashCommand, false);
2837
2838 let registry = Arc::new(LanguageRegistry::test(cx.executor()));
2839 let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap());
2840 let context = cx.new_model(|cx| {
2841 Context::local(registry.clone(), None, None, prompt_builder.clone(), cx)
2842 });
2843
2844 let output_ranges = Rc::new(RefCell::new(HashSet::default()));
2845 context.update(cx, |_, cx| {
2846 cx.subscribe(&context, {
2847 let ranges = output_ranges.clone();
2848 move |_, _, event, _| match event {
2849 ContextEvent::PendingSlashCommandsUpdated { removed, updated } => {
2850 for range in removed {
2851 ranges.borrow_mut().remove(range);
2852 }
2853 for command in updated {
2854 ranges.borrow_mut().insert(command.source_range.clone());
2855 }
2856 }
2857 _ => {}
2858 }
2859 })
2860 .detach();
2861 });
2862
2863 let buffer = context.read_with(cx, |context, _| context.buffer.clone());
2864
2865 // Insert a slash command
2866 buffer.update(cx, |buffer, cx| {
2867 buffer.edit([(0..0, "/file src/lib.rs")], None, cx);
2868 });
2869 assert_text_and_output_ranges(
2870 &buffer,
2871 &output_ranges.borrow(),
2872 "
2873 «/file src/lib.rs»
2874 "
2875 .unindent()
2876 .trim_end(),
2877 cx,
2878 );
2879
2880 // Edit the argument of the slash command.
2881 buffer.update(cx, |buffer, cx| {
2882 let edit_offset = buffer.text().find("lib.rs").unwrap();
2883 buffer.edit([(edit_offset..edit_offset + "lib".len(), "main")], None, cx);
2884 });
2885 assert_text_and_output_ranges(
2886 &buffer,
2887 &output_ranges.borrow(),
2888 "
2889 «/file src/main.rs»
2890 "
2891 .unindent()
2892 .trim_end(),
2893 cx,
2894 );
2895
2896 // Edit the name of the slash command, using one that doesn't exist.
2897 buffer.update(cx, |buffer, cx| {
2898 let edit_offset = buffer.text().find("/file").unwrap();
2899 buffer.edit(
2900 [(edit_offset..edit_offset + "/file".len(), "/unknown")],
2901 None,
2902 cx,
2903 );
2904 });
2905 assert_text_and_output_ranges(
2906 &buffer,
2907 &output_ranges.borrow(),
2908 "
2909 /unknown src/main.rs
2910 "
2911 .unindent()
2912 .trim_end(),
2913 cx,
2914 );
2915
2916 #[track_caller]
2917 fn assert_text_and_output_ranges(
2918 buffer: &Model<Buffer>,
2919 ranges: &HashSet<Range<language::Anchor>>,
2920 expected_marked_text: &str,
2921 cx: &mut TestAppContext,
2922 ) {
2923 let (expected_text, expected_ranges) = marked_text_ranges(expected_marked_text, false);
2924 let (actual_text, actual_ranges) = buffer.update(cx, |buffer, _| {
2925 let mut ranges = ranges
2926 .iter()
2927 .map(|range| range.to_offset(buffer))
2928 .collect::<Vec<_>>();
2929 ranges.sort_by_key(|a| a.start);
2930 (buffer.text(), ranges)
2931 });
2932
2933 assert_eq!(actual_text, expected_text);
2934 assert_eq!(actual_ranges, expected_ranges);
2935 }
2936 }
2937
2938 #[gpui::test]
2939 async fn test_edit_step_parsing(cx: &mut TestAppContext) {
2940 cx.update(prompt_library::init);
2941 let settings_store = cx.update(SettingsStore::test);
2942 cx.set_global(settings_store);
2943 cx.update(Project::init_settings);
2944 let fs = FakeFs::new(cx.executor());
2945 fs.as_fake()
2946 .insert_tree(
2947 "/root",
2948 json!({
2949 "hello.rs": r#"
2950 fn hello() {
2951 println!("Hello, World!");
2952 }
2953 "#.unindent()
2954 }),
2955 )
2956 .await;
2957 let project = Project::test(fs, [Path::new("/root")], cx).await;
2958 cx.update(LanguageModelRegistry::test);
2959
2960 let model = cx.read(|cx| {
2961 LanguageModelRegistry::read_global(cx)
2962 .active_model()
2963 .unwrap()
2964 });
2965 cx.update(assistant_panel::init);
2966 let registry = Arc::new(LanguageRegistry::test(cx.executor()));
2967
2968 // Create a new context
2969 let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap());
2970 let context = cx.new_model(|cx| {
2971 Context::local(
2972 registry.clone(),
2973 Some(project),
2974 None,
2975 prompt_builder.clone(),
2976 cx,
2977 )
2978 });
2979 let buffer = context.read_with(cx, |context, _| context.buffer.clone());
2980
2981 // Simulate user input
2982 let user_message = indoc! {r#"
2983 Please add unnecessary complexity to this code:
2984
2985 ```hello.rs
2986 fn main() {
2987 println!("Hello, World!");
2988 }
2989 ```
2990 "#};
2991 buffer.update(cx, |buffer, cx| {
2992 buffer.edit([(0..0, user_message)], None, cx);
2993 });
2994
2995 // Simulate LLM response with edit steps
2996 let llm_response = indoc! {r#"
2997 Sure, I can help you with that. Here's a step-by-step process:
2998
2999 <step>
3000 First, let's extract the greeting into a separate function:
3001
3002 ```rust
3003 fn greet() {
3004 println!("Hello, World!");
3005 }
3006
3007 fn main() {
3008 greet();
3009 }
3010 ```
3011 </step>
3012
3013 <step>
3014 Now, let's make the greeting customizable:
3015
3016 ```rust
3017 fn greet(name: &str) {
3018 println!("Hello, {}!", name);
3019 }
3020
3021 fn main() {
3022 greet("World");
3023 }
3024 ```
3025 </step>
3026
3027 These changes make the code more modular and flexible.
3028 "#};
3029
3030 // Simulate the assist method to trigger the LLM response
3031 context.update(cx, |context, cx| context.assist(cx));
3032 cx.run_until_parked();
3033
3034 // Retrieve the assistant response message's start from the context
3035 let response_start_row = context.read_with(cx, |context, cx| {
3036 let buffer = context.buffer.read(cx);
3037 context.message_anchors[1].start.to_point(buffer).row
3038 });
3039
3040 // Simulate the LLM completion
3041 model
3042 .as_fake()
3043 .stream_last_completion_response(llm_response.to_string());
3044 model.as_fake().end_last_completion_stream();
3045
3046 // Wait for the completion to be processed
3047 cx.run_until_parked();
3048
3049 // Verify that the edit steps were parsed correctly
3050 context.read_with(cx, |context, cx| {
3051 assert_eq!(
3052 workflow_steps(context, cx),
3053 vec![
3054 (
3055 Point::new(response_start_row + 2, 0)
3056 ..Point::new(response_start_row + 13, 3),
3057 WorkflowStepTestStatus::Pending
3058 ),
3059 (
3060 Point::new(response_start_row + 15, 0)
3061 ..Point::new(response_start_row + 26, 3),
3062 WorkflowStepTestStatus::Pending
3063 ),
3064 ]
3065 );
3066 });
3067
3068 model
3069 .as_fake()
3070 .respond_to_last_tool_use(Ok(serde_json::to_value(tool::WorkflowStepResolution {
3071 step_title: "Title".into(),
3072 suggestions: vec![tool::WorkflowSuggestion {
3073 path: "/root/hello.rs".into(),
3074 // Simulate a symbol name that's slightly different than our outline query
3075 kind: tool::WorkflowSuggestionKind::Update {
3076 symbol: "fn main()".into(),
3077 description: "Extract a greeting function".into(),
3078 },
3079 }],
3080 })
3081 .unwrap()));
3082
3083 // Wait for tool use to be processed.
3084 cx.run_until_parked();
3085
3086 // Verify that the first edit step is not pending anymore.
3087 context.read_with(cx, |context, cx| {
3088 assert_eq!(
3089 workflow_steps(context, cx),
3090 vec![
3091 (
3092 Point::new(response_start_row + 2, 0)
3093 ..Point::new(response_start_row + 13, 3),
3094 WorkflowStepTestStatus::Resolved
3095 ),
3096 (
3097 Point::new(response_start_row + 15, 0)
3098 ..Point::new(response_start_row + 26, 3),
3099 WorkflowStepTestStatus::Pending
3100 ),
3101 ]
3102 );
3103 });
3104
3105 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
3106 enum WorkflowStepTestStatus {
3107 Pending,
3108 Resolved,
3109 Error,
3110 }
3111
3112 fn workflow_steps(
3113 context: &Context,
3114 cx: &AppContext,
3115 ) -> Vec<(Range<Point>, WorkflowStepTestStatus)> {
3116 context
3117 .workflow_steps
3118 .iter()
3119 .map(|step| {
3120 let buffer = context.buffer.read(cx);
3121 let status = match &step.status {
3122 WorkflowStepStatus::Pending(_) => WorkflowStepTestStatus::Pending,
3123 WorkflowStepStatus::Resolved { .. } => WorkflowStepTestStatus::Resolved,
3124 WorkflowStepStatus::Error(_) => WorkflowStepTestStatus::Error,
3125 };
3126 (step.tagged_range.to_point(buffer), status)
3127 })
3128 .collect()
3129 }
3130 }
3131
3132 #[gpui::test]
3133 async fn test_serialization(cx: &mut TestAppContext) {
3134 let settings_store = cx.update(SettingsStore::test);
3135 cx.set_global(settings_store);
3136 cx.update(LanguageModelRegistry::test);
3137 cx.update(assistant_panel::init);
3138 let registry = Arc::new(LanguageRegistry::test(cx.executor()));
3139 let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap());
3140 let context = cx.new_model(|cx| {
3141 Context::local(registry.clone(), None, None, prompt_builder.clone(), cx)
3142 });
3143 let buffer = context.read_with(cx, |context, _| context.buffer.clone());
3144 let message_0 = context.read_with(cx, |context, _| context.message_anchors[0].id);
3145 let message_1 = context.update(cx, |context, cx| {
3146 context
3147 .insert_message_after(message_0, Role::Assistant, MessageStatus::Done, cx)
3148 .unwrap()
3149 });
3150 let message_2 = context.update(cx, |context, cx| {
3151 context
3152 .insert_message_after(message_1.id, Role::System, MessageStatus::Done, cx)
3153 .unwrap()
3154 });
3155 buffer.update(cx, |buffer, cx| {
3156 buffer.edit([(0..0, "a"), (1..1, "b\nc")], None, cx);
3157 buffer.finalize_last_transaction();
3158 });
3159 let _message_3 = context.update(cx, |context, cx| {
3160 context
3161 .insert_message_after(message_2.id, Role::System, MessageStatus::Done, cx)
3162 .unwrap()
3163 });
3164 buffer.update(cx, |buffer, cx| buffer.undo(cx));
3165 assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "a\nb\nc\n");
3166 assert_eq!(
3167 cx.read(|cx| messages(&context, cx)),
3168 [
3169 (message_0, Role::User, 0..2),
3170 (message_1.id, Role::Assistant, 2..6),
3171 (message_2.id, Role::System, 6..6),
3172 ]
3173 );
3174
3175 let serialized_context = context.read_with(cx, |context, cx| context.serialize(cx));
3176 let deserialized_context = cx.new_model(|cx| {
3177 Context::deserialize(
3178 serialized_context,
3179 Default::default(),
3180 registry.clone(),
3181 prompt_builder.clone(),
3182 None,
3183 None,
3184 cx,
3185 )
3186 });
3187 let deserialized_buffer =
3188 deserialized_context.read_with(cx, |context, _| context.buffer.clone());
3189 assert_eq!(
3190 deserialized_buffer.read_with(cx, |buffer, _| buffer.text()),
3191 "a\nb\nc\n"
3192 );
3193 assert_eq!(
3194 cx.read(|cx| messages(&deserialized_context, cx)),
3195 [
3196 (message_0, Role::User, 0..2),
3197 (message_1.id, Role::Assistant, 2..6),
3198 (message_2.id, Role::System, 6..6),
3199 ]
3200 );
3201 }
3202
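    // Randomized collaboration test: several replicas concurrently edit, split, and
    // annotate a shared context while the simulated network drops and reorders
    // operations; at the end every replica must converge to the same buffer text,
    // message anchors, metadata, and slash command output sections.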
3203 #[gpui::test(iterations = 100)]
3204 async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: StdRng) {
3205 let min_peers = env::var("MIN_PEERS")
3206 .map(|i| i.parse().expect("invalid `MIN_PEERS` variable"))
3207 .unwrap_or(2);
3208 let max_peers = env::var("MAX_PEERS")
3209 .map(|i| i.parse().expect("invalid `MAX_PEERS` variable"))
3210 .unwrap_or(5);
3211 let operations = env::var("OPERATIONS")
3212 .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
3213 .unwrap_or(50);
3214
3215 let settings_store = cx.update(SettingsStore::test);
3216 cx.set_global(settings_store);
3217 cx.update(LanguageModelRegistry::test);
3218
3219 cx.update(assistant_panel::init);
3220 let slash_commands = cx.update(SlashCommandRegistry::default_global);
3221 slash_commands.register_command(FakeSlashCommand("cmd-1".into()), false);
3222 slash_commands.register_command(FakeSlashCommand("cmd-2".into()), false);
3223 slash_commands.register_command(FakeSlashCommand("cmd-3".into()), false);
3224
3225 let registry = Arc::new(LanguageRegistry::test(cx.background_executor.clone()));
3226 let network = Arc::new(Mutex::new(Network::new(rng.clone())));
3227 let mut contexts = Vec::new();
3228
3229 let num_peers = rng.gen_range(min_peers..=max_peers);
3230 let context_id = ContextId::new();
3231 let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap());
3232 for i in 0..num_peers {
3233 let context = cx.new_model(|cx| {
3234 Context::new(
3235 context_id.clone(),
3236 i as ReplicaId,
3237 language::Capability::ReadWrite,
3238 registry.clone(),
3239 prompt_builder.clone(),
3240 None,
3241 None,
3242 cx,
3243 )
3244 });
3245
3246 cx.update(|cx| {
3247 cx.subscribe(&context, {
3248 let network = network.clone();
3249 move |_, event, _| {
3250 if let ContextEvent::Operation(op) = event {
3251 network
3252 .lock()
3253 .broadcast(i as ReplicaId, vec![op.to_proto()]);
3254 }
3255 }
3256 })
3257 .detach();
3258 });
3259
3260 contexts.push(context);
3261 network.lock().add_peer(i as ReplicaId);
3262 }
3263
3264 let mut mutation_count = operations;
3265
3266 while mutation_count > 0
3267 || !network.lock().is_idle()
3268 || network.lock().contains_disconnected_peers()
3269 {
3270 let context_index = rng.gen_range(0..contexts.len());
3271 let context = &contexts[context_index];
3272
3273 match rng.gen_range(0..100) {
3274 0..=29 if mutation_count > 0 => {
3275 log::info!("Context {}: edit buffer", context_index);
3276 context.update(cx, |context, cx| {
3277 context
3278 .buffer
3279 .update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
3280 });
3281 mutation_count -= 1;
3282 }
3283 30..=44 if mutation_count > 0 => {
3284 context.update(cx, |context, cx| {
3285 let range = context.buffer.read(cx).random_byte_range(0, &mut rng);
3286 log::info!("Context {}: split message at {:?}", context_index, range);
3287 context.split_message(range, cx);
3288 });
3289 mutation_count -= 1;
3290 }
3291 45..=59 if mutation_count > 0 => {
3292 context.update(cx, |context, cx| {
3293 if let Some(message) = context.messages(cx).choose(&mut rng) {
3294 let role = *[Role::User, Role::Assistant, Role::System]
3295 .choose(&mut rng)
3296 .unwrap();
3297 log::info!(
3298 "Context {}: insert message after {:?} with {:?}",
3299 context_index,
3300 message.id,
3301 role
3302 );
3303 context.insert_message_after(message.id, role, MessageStatus::Done, cx);
3304 }
3305 });
3306 mutation_count -= 1;
3307 }
3308 60..=74 if mutation_count > 0 => {
3309 context.update(cx, |context, cx| {
3310 let command_text = "/".to_string()
3311 + slash_commands
3312 .command_names()
3313 .choose(&mut rng)
3314 .unwrap()
3315 .clone()
3316 .as_ref();
3317
3318 let command_range = context.buffer.update(cx, |buffer, cx| {
3319 let offset = buffer.random_byte_range(0, &mut rng).start;
3320 buffer.edit(
3321 [(offset..offset, format!("\n{}\n", command_text))],
3322 None,
3323 cx,
3324 );
3325 offset + 1..offset + 1 + command_text.len()
3326 });
3327
3328 let output_len = rng.gen_range(1..=10);
3329 let output_text = RandomCharIter::new(&mut rng)
3330 .filter(|c| *c != '\r')
3331 .take(output_len)
3332 .collect::<String>();
3333
3334 let num_sections = rng.gen_range(0..=3);
3335 let mut sections = Vec::with_capacity(num_sections);
3336 for _ in 0..num_sections {
3337 let section_start = rng.gen_range(0..output_len);
3338 let section_end = rng.gen_range(section_start..=output_len);
3339 sections.push(SlashCommandOutputSection {
3340 range: section_start..section_end,
3341 icon: ui::IconName::Ai,
3342 label: "section".into(),
3343 });
3344 }
3345
3346 log::info!(
3347 "Context {}: insert slash command output at {:?} with {:?}",
3348 context_index,
3349 command_range,
3350 sections
3351 );
3352
3353 let command_range =
3354 context.buffer.read(cx).anchor_after(command_range.start)
3355 ..context.buffer.read(cx).anchor_after(command_range.end);
3356 context.insert_command_output(
3357 command_range,
3358 Task::ready(Ok(SlashCommandOutput {
3359 text: output_text,
3360 sections,
3361 run_commands_in_text: false,
3362 })),
3363 true,
3364 cx,
3365 );
3366 });
3367 cx.run_until_parked();
3368 mutation_count -= 1;
3369 }
3370 75..=84 if mutation_count > 0 => {
3371 context.update(cx, |context, cx| {
3372 if let Some(message) = context.messages(cx).choose(&mut rng) {
3373 let new_status = match rng.gen_range(0..3) {
3374 0 => MessageStatus::Done,
3375 1 => MessageStatus::Pending,
3376 _ => MessageStatus::Error(SharedString::from("Random error")),
3377 };
3378 log::info!(
3379 "Context {}: update message {:?} status to {:?}",
3380 context_index,
3381 message.id,
3382 new_status
3383 );
3384 context.update_metadata(message.id, cx, |metadata| {
3385 metadata.status = new_status;
3386 });
3387 }
3388 });
3389 mutation_count -= 1;
3390 }
3391 _ => {
3392 let replica_id = context_index as ReplicaId;
3393 if network.lock().is_disconnected(replica_id) {
3394 network.lock().reconnect_peer(replica_id, 0);
3395
3396 let (ops_to_send, ops_to_receive) = cx.read(|cx| {
3397 let host_context = &contexts[0].read(cx);
3398 let guest_context = context.read(cx);
3399 (
3400 guest_context.serialize_ops(&host_context.version(cx), cx),
3401 host_context.serialize_ops(&guest_context.version(cx), cx),
3402 )
3403 });
3404 let ops_to_send = ops_to_send.await;
3405 let ops_to_receive = ops_to_receive
3406 .await
3407 .into_iter()
3408 .map(ContextOperation::from_proto)
3409 .collect::<Result<Vec<_>>>()
3410 .unwrap();
3411 log::info!(
3412 "Context {}: reconnecting. Sent {} operations, received {} operations",
3413 context_index,
3414 ops_to_send.len(),
3415 ops_to_receive.len()
3416 );
3417
3418 network.lock().broadcast(replica_id, ops_to_send);
3419 context
3420 .update(cx, |context, cx| context.apply_ops(ops_to_receive, cx))
3421 .unwrap();
3422 } else if rng.gen_bool(0.1) && replica_id != 0 {
3423 log::info!("Context {}: disconnecting", context_index);
3424 network.lock().disconnect_peer(replica_id);
3425 } else if network.lock().has_unreceived(replica_id) {
3426 log::info!("Context {}: applying operations", context_index);
3427 let ops = network.lock().receive(replica_id);
3428 let ops = ops
3429 .into_iter()
3430 .map(ContextOperation::from_proto)
3431 .collect::<Result<Vec<_>>>()
3432 .unwrap();
3433 context
3434 .update(cx, |context, cx| context.apply_ops(ops, cx))
3435 .unwrap();
3436 }
3437 }
3438 }
3439 }
3440
3441 cx.read(|cx| {
3442 let first_context = contexts[0].read(cx);
3443 for context in &contexts[1..] {
3444 let context = context.read(cx);
3445 assert!(context.pending_ops.is_empty());
3446 assert_eq!(
3447 context.buffer.read(cx).text(),
3448 first_context.buffer.read(cx).text(),
3449 "Context {} text != Context 0 text",
3450 context.buffer.read(cx).replica_id()
3451 );
3452 assert_eq!(
3453 context.message_anchors,
3454 first_context.message_anchors,
3455 "Context {} messages != Context 0 messages",
3456 context.buffer.read(cx).replica_id()
3457 );
3458 assert_eq!(
3459 context.messages_metadata,
3460 first_context.messages_metadata,
3461 "Context {} message metadata != Context 0 message metadata",
3462 context.buffer.read(cx).replica_id()
3463 );
3464 assert_eq!(
3465 context.slash_command_output_sections,
3466 first_context.slash_command_output_sections,
3467 "Context {} slash command output sections != Context 0 slash command output sections",
3468 context.buffer.read(cx).replica_id()
3469 );
3470 }
3471 });
3472 }
3473
3474 fn messages(context: &Model<Context>, cx: &AppContext) -> Vec<(MessageId, Role, Range<usize>)> {
3475 context
3476 .read(cx)
3477 .messages(cx)
3478 .map(|message| (message.id, message.role, message.offset_range))
3479 .collect()
3480 }
3481
3482 #[derive(Clone)]
3483 struct FakeSlashCommand(String);
3484
3485 impl SlashCommand for FakeSlashCommand {
3486 fn name(&self) -> String {
3487 self.0.clone()
3488 }
3489
3490 fn description(&self) -> String {
3491 format!("Fake slash command: {}", self.0)
3492 }
3493
3494 fn menu_text(&self) -> String {
3495 format!("Run fake command: {}", self.0)
3496 }
3497
3498 fn complete_argument(
3499 self: Arc<Self>,
3500 _query: String,
3501 _cancel: Arc<AtomicBool>,
3502 _workspace: Option<WeakView<Workspace>>,
3503 _cx: &mut AppContext,
3504 ) -> Task<Result<Vec<ArgumentCompletion>>> {
3505 Task::ready(Ok(vec![]))
3506 }
3507
3508 fn requires_argument(&self) -> bool {
3509 false
3510 }
3511
3512 fn run(
3513 self: Arc<Self>,
3514 _argument: Option<&str>,
3515 _workspace: WeakView<Workspace>,
3516 _delegate: Option<Arc<dyn LspAdapterDelegate>>,
3517 _cx: &mut WindowContext,
3518 ) -> Task<Result<SlashCommandOutput>> {
3519 Task::ready(Ok(SlashCommandOutput {
3520 text: format!("Executed fake command: {}", self.0),
3521 sections: vec![],
3522 run_commands_in_text: false,
3523 }))
3524 }
3525 }
3526}
3527
3528mod tool {
3529 use gpui::AsyncAppContext;
3530
3531 use super::*;
3532
3533 #[derive(Debug, Serialize, Deserialize, JsonSchema)]
3534 pub struct WorkflowStepResolution {
3535 /// An extremely short title for the edit step represented by these operations.
3536 pub step_title: String,
3537 /// A sequence of operations to apply to the codebase.
3538 /// When multiple operations are required for a step, be sure to include multiple operations in this list.
3539 pub suggestions: Vec<WorkflowSuggestion>,
3540 }
3541
3542 impl LanguageModelTool for WorkflowStepResolution {
3543 fn name() -> String {
3544 "edit".into()
3545 }
3546
3547 fn description() -> String {
3548 "suggest edits to one or more locations in the codebase".into()
3549 }
3550 }
3551
3552 /// A description of an operation to apply to one location in the codebase.
3553 ///
3554 /// This object represents a single edit operation that can be performed on a specific file
3555 /// in the codebase. It encapsulates both the location (file path) and the nature of the
3556 /// edit to be made.
3557 ///
3558 /// # Fields
3559 ///
3560 /// * `path`: A string representing the file path where the edit operation should be applied.
3561 /// This path is relative to the root of the project or repository.
3562 ///
3563 /// * `kind`: An enum representing the specific type of edit operation to be performed.
3564 ///
3565 /// # Usage
3566 ///
3567     /// `WorkflowSuggestion` is used within a code editor to represent and apply
3568 /// programmatic changes to source code. It provides a structured way to describe
3569 /// edits for features like refactoring tools or AI-assisted coding suggestions.
3570 #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
3571 pub struct WorkflowSuggestion {
3572 /// The path to the file containing the relevant operation
3573 pub path: String,
3574 #[serde(flatten)]
3575 pub kind: WorkflowSuggestionKind,
3576 }
3577
3578 impl WorkflowSuggestion {
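        /// Opens the buffer for `self.path`, waits for it to finish parsing, and maps
        /// this suggestion onto concrete anchors in that buffer by looking up the
        /// referenced symbol in the buffer's outline (via a most-similar match, so the
        /// model's symbol name doesn't have to be exact).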
3579 pub(super) async fn resolve(
3580 &self,
3581 project: Model<Project>,
3582 mut cx: AsyncAppContext,
3583 ) -> Result<(Model<Buffer>, super::WorkflowSuggestion)> {
3584 let path = self.path.clone();
3585 let kind = self.kind.clone();
3586 let buffer = project
3587 .update(&mut cx, |project, cx| {
3588 let project_path = project
3589 .find_project_path(Path::new(&path), cx)
3590 .with_context(|| format!("worktree not found for {:?}", path))?;
3591 anyhow::Ok(project.open_buffer(project_path, cx))
3592 })??
3593 .await?;
3594
3595 let mut parse_status = buffer.read_with(&cx, |buffer, _cx| buffer.parse_status())?;
3596 while *parse_status.borrow() != ParseStatus::Idle {
3597 parse_status.changed().await?;
3598 }
3599
3600 let snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot())?;
3601 let outline = snapshot.outline(None).context("no outline for buffer")?;
3602
3603 let suggestion;
3604 match kind {
3605 WorkflowSuggestionKind::Update {
3606 symbol,
3607 description,
3608 } => {
3609 let symbol = outline
3610 .find_most_similar(&symbol)
3611 .with_context(|| format!("symbol not found: {:?}", symbol))?
3612 .to_point(&snapshot);
3613 let start = symbol
3614 .annotation_range
3615 .map_or(symbol.range.start, |range| range.start);
3616 let start = Point::new(start.row, 0);
3617 let end = Point::new(
3618 symbol.range.end.row,
3619 snapshot.line_len(symbol.range.end.row),
3620 );
3621 let range = snapshot.anchor_before(start)..snapshot.anchor_after(end);
3622 suggestion = super::WorkflowSuggestion::Update { range, description };
3623 }
3624 WorkflowSuggestionKind::Create { description } => {
3625 suggestion = super::WorkflowSuggestion::CreateFile { description };
3626 }
3627 WorkflowSuggestionKind::InsertSiblingBefore {
3628 symbol,
3629 description,
3630 } => {
3631 let symbol = outline
3632 .find_most_similar(&symbol)
3633 .with_context(|| format!("symbol not found: {:?}", symbol))?
3634 .to_point(&snapshot);
3635 let position = snapshot.anchor_before(
3636 symbol
3637 .annotation_range
3638 .map_or(symbol.range.start, |annotation_range| {
3639 annotation_range.start
3640 }),
3641 );
3642 suggestion = super::WorkflowSuggestion::InsertSiblingBefore {
3643 position,
3644 description,
3645 };
3646 }
3647 WorkflowSuggestionKind::InsertSiblingAfter {
3648 symbol,
3649 description,
3650 } => {
3651 let symbol = outline
3652 .find_most_similar(&symbol)
3653 .with_context(|| format!("symbol not found: {:?}", symbol))?
3654 .to_point(&snapshot);
3655 let position = snapshot.anchor_after(symbol.range.end);
3656 suggestion = super::WorkflowSuggestion::InsertSiblingAfter {
3657 position,
3658 description,
3659 };
3660 }
3661 WorkflowSuggestionKind::PrependChild {
3662 symbol,
3663 description,
3664 } => {
3665 if let Some(symbol) = symbol {
3666 let symbol = outline
3667 .find_most_similar(&symbol)
3668 .with_context(|| format!("symbol not found: {:?}", symbol))?
3669 .to_point(&snapshot);
3670
3671 let position = snapshot.anchor_after(
3672 symbol
3673 .body_range
3674 .map_or(symbol.range.start, |body_range| body_range.start),
3675 );
3676 suggestion = super::WorkflowSuggestion::PrependChild {
3677 position,
3678 description,
3679 };
3680 } else {
3681 suggestion = super::WorkflowSuggestion::PrependChild {
3682 position: language::Anchor::MIN,
3683 description,
3684 };
3685 }
3686 }
3687 WorkflowSuggestionKind::AppendChild {
3688 symbol,
3689 description,
3690 } => {
3691 if let Some(symbol) = symbol {
3692 let symbol = outline
3693 .find_most_similar(&symbol)
3694 .with_context(|| format!("symbol not found: {:?}", symbol))?
3695 .to_point(&snapshot);
3696
3697 let position = snapshot.anchor_before(
3698 symbol
3699 .body_range
3700 .map_or(symbol.range.end, |body_range| body_range.end),
3701 );
3702 suggestion = super::WorkflowSuggestion::AppendChild {
3703 position,
3704 description,
3705 };
3706 } else {
3707                         suggestion = super::WorkflowSuggestion::AppendChild {
3708 position: language::Anchor::MAX,
3709 description,
3710 };
3711 }
3712 }
3713 WorkflowSuggestionKind::Delete { symbol } => {
3714 let symbol = outline
3715 .find_most_similar(&symbol)
3716 .with_context(|| format!("symbol not found: {:?}", symbol))?
3717 .to_point(&snapshot);
3718 let start = symbol
3719 .annotation_range
3720 .map_or(symbol.range.start, |range| range.start);
3721 let start = Point::new(start.row, 0);
3722 let end = Point::new(
3723 symbol.range.end.row,
3724 snapshot.line_len(symbol.range.end.row),
3725 );
3726 let range = snapshot.anchor_before(start)..snapshot.anchor_after(end);
3727 suggestion = super::WorkflowSuggestion::Delete { range };
3728 }
3729 }
3730
3731 Ok((buffer, suggestion))
3732 }
3733 }
3734
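    // With `#[serde(tag = "kind")]` here and `#[serde(flatten)]` on
    // `WorkflowSuggestion::kind`, a suggestion serializes to flat JSON, e.g.
    // (illustrative values only):
    // `{"path": "src/lib.rs", "kind": "Update", "symbol": "fn main()", "description": "..."}`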
3735 #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
3736 #[serde(tag = "kind")]
3737 pub enum WorkflowSuggestionKind {
3738 /// Rewrites the specified symbol entirely based on the given description.
3739 /// This operation completely replaces the existing symbol with new content.
3740 Update {
3741 /// A fully-qualified reference to the symbol, e.g. `mod foo impl Bar pub fn baz` instead of just `fn baz`.
3742 /// The path should uniquely identify the symbol within the containing file.
3743 symbol: String,
3744 /// A brief description of the transformation to apply to the symbol.
3745 description: String,
3746 },
3747 /// Creates a new file with the given path based on the provided description.
3748 /// This operation adds a new file to the codebase.
3749 Create {
3750 /// A brief description of the file to be created.
3751 description: String,
3752 },
3753 /// Inserts a new symbol based on the given description before the specified symbol.
3754 /// This operation adds new content immediately preceding an existing symbol.
3755 InsertSiblingBefore {
3756 /// A fully-qualified reference to the symbol, e.g. `mod foo impl Bar pub fn baz` instead of just `fn baz`.
3757 /// The new content will be inserted immediately before this symbol.
3758 symbol: String,
3759 /// A brief description of the new symbol to be inserted.
3760 description: String,
3761 },
3762 /// Inserts a new symbol based on the given description after the specified symbol.
3763 /// This operation adds new content immediately following an existing symbol.
3764 InsertSiblingAfter {
3765 /// A fully-qualified reference to the symbol, e.g. `mod foo impl Bar pub fn baz` instead of just `fn baz`.
3766 /// The new content will be inserted immediately after this symbol.
3767 symbol: String,
3768 /// A brief description of the new symbol to be inserted.
3769 description: String,
3770 },
3771 /// Inserts a new symbol as a child of the specified symbol at the start.
3772 /// This operation adds new content as the first child of an existing symbol (or file if no symbol is provided).
3773 PrependChild {
3774 /// An optional fully-qualified reference to the symbol after the code you want to insert, e.g. `mod foo impl Bar pub fn baz` instead of just `fn baz`.
3775 /// If provided, the new content will be inserted as the first child of this symbol.
3776 /// If not provided, the new content will be inserted at the top of the file.
3777 symbol: Option<String>,
3778 /// A brief description of the new symbol to be inserted.
3779 description: String,
3780 },
3781 /// Inserts a new symbol as a child of the specified symbol at the end.
3782 /// This operation adds new content as the last child of an existing symbol (or file if no symbol is provided).
3783 AppendChild {
3784 /// An optional fully-qualified reference to the symbol before the code you want to insert, e.g. `mod foo impl Bar pub fn baz` instead of just `fn baz`.
3785 /// If provided, the new content will be inserted as the last child of this symbol.
3786             /// If not provided, the new content will be inserted at the bottom of the file.
3787 symbol: Option<String>,
3788 /// A brief description of the new symbol to be inserted.
3789 description: String,
3790 },
3791 /// Deletes the specified symbol from the containing file.
3792 Delete {
3793             /// A fully-qualified reference to the symbol to be deleted, e.g. `mod foo impl Bar pub fn baz` instead of just `fn baz`.
3794 symbol: String,
3795 },
3796 }
3797
3798 impl WorkflowSuggestionKind {
3799 pub fn symbol(&self) -> Option<&str> {
3800 match self {
3801 Self::Update { symbol, .. } => Some(symbol),
3802 Self::InsertSiblingBefore { symbol, .. } => Some(symbol),
3803 Self::InsertSiblingAfter { symbol, .. } => Some(symbol),
3804 Self::PrependChild { symbol, .. } => symbol.as_deref(),
3805 Self::AppendChild { symbol, .. } => symbol.as_deref(),
3806 Self::Delete { symbol } => Some(symbol),
3807 Self::Create { .. } => None,
3808 }
3809 }
3810
3811 pub fn description(&self) -> Option<&str> {
3812 match self {
3813 Self::Update { description, .. } => Some(description),
3814 Self::Create { description } => Some(description),
3815 Self::InsertSiblingBefore { description, .. } => Some(description),
3816 Self::InsertSiblingAfter { description, .. } => Some(description),
3817 Self::PrependChild { description, .. } => Some(description),
3818 Self::AppendChild { description, .. } => Some(description),
3819 Self::Delete { .. } => None,
3820 }
3821 }
3822
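        /// For insertions relative to an existing symbol, returns which side needs a
        /// newline so the generated code doesn't run into its neighbor; updates,
        /// deletions, and file creation need no initial insertion.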
3823 pub fn initial_insertion(&self) -> Option<InitialInsertion> {
3824 match self {
3825 WorkflowSuggestionKind::InsertSiblingBefore { .. } => {
3826 Some(InitialInsertion::NewlineAfter)
3827 }
3828 WorkflowSuggestionKind::InsertSiblingAfter { .. } => {
3829 Some(InitialInsertion::NewlineBefore)
3830 }
3831 WorkflowSuggestionKind::PrependChild { .. } => Some(InitialInsertion::NewlineAfter),
3832 WorkflowSuggestionKind::AppendChild { .. } => Some(InitialInsertion::NewlineBefore),
3833 _ => None,
3834 }
3835 }
3836 }
3837}