mod edit_parser;
#[cfg(test)]
mod evals;

use crate::{Template, Templates};
use aho_corasick::AhoCorasick;
use anyhow::Result;
use assistant_tool::ActionLog;
use edit_parser::{EditParser, EditParserEvent, EditParserMetrics};
use futures::{
    Stream, StreamExt,
    channel::mpsc::{self, UnboundedReceiver},
    pin_mut,
    stream::BoxStream,
};
use gpui::{AppContext, AsyncApp, Entity, SharedString, Task};
use language::{Bias, Buffer, BufferSnapshot, LineIndent, Point};
use language_model::{
    LanguageModel, LanguageModelCompletionError, LanguageModelRequest, LanguageModelRequestMessage,
    LanguageModelToolChoice, MessageContent, Role,
};
use project::{AgentLocation, Project};
use serde::Serialize;
use std::{cmp, iter, mem, ops::Range, path::PathBuf, sync::Arc, task::Poll};
use streaming_diff::{CharOperation, StreamingDiff};

#[derive(Serialize)]
struct CreateFilePromptTemplate {
    path: Option<PathBuf>,
    edit_description: String,
}

impl Template for CreateFilePromptTemplate {
    const TEMPLATE_NAME: &'static str = "create_file_prompt.hbs";
}

#[derive(Serialize)]
struct EditFilePromptTemplate {
    path: Option<PathBuf>,
    edit_description: String,
}

impl Template for EditFilePromptTemplate {
    const TEMPLATE_NAME: &'static str = "edit_file_prompt.hbs";
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum EditAgentOutputEvent {
    Edited,
    OldTextNotFound(SharedString),
}

#[derive(Clone, Debug)]
pub struct EditAgentOutput {
    pub _raw_edits: String,
    pub _parser_metrics: EditParserMetrics,
}

#[derive(Clone)]
pub struct EditAgent {
    model: Arc<dyn LanguageModel>,
    action_log: Entity<ActionLog>,
    project: Entity<Project>,
    templates: Arc<Templates>,
}

impl EditAgent {
    pub fn new(
        model: Arc<dyn LanguageModel>,
        project: Entity<Project>,
        action_log: Entity<ActionLog>,
        templates: Arc<Templates>,
    ) -> Self {
        EditAgent {
            model,
            project,
            action_log,
            templates,
        }
    }

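    /// Streams a brand-new version of `buffer` from the model: the buffer is
    /// cleared and then filled chunk by chunk as the completion arrives.
    /// Returns a task resolving to the raw model output and a receiver of
    /// progress events.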
    pub fn overwrite(
        &self,
        buffer: Entity<Buffer>,
        edit_description: String,
        conversation: &LanguageModelRequest,
        cx: &mut AsyncApp,
    ) -> (
        Task<Result<EditAgentOutput>>,
        mpsc::UnboundedReceiver<EditAgentOutputEvent>,
    ) {
        let this = self.clone();
        let (events_tx, events_rx) = mpsc::unbounded();
        let conversation = conversation.clone();
        let output = cx.spawn(async move |cx| {
            let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
            let path = cx.update(|cx| snapshot.resolve_file_path(cx, true))?;
            let prompt = CreateFilePromptTemplate {
                path,
                edit_description,
            }
            .render(&this.templates)?;
            let new_chunks = this.request(conversation, prompt, cx).await?;

            let (output, mut inner_events) = this.overwrite_with_chunks(buffer, new_chunks, cx);
            while let Some(event) = inner_events.next().await {
                events_tx.unbounded_send(event).ok();
            }
            output.await
        });
        (output, events_rx)
    }

    fn overwrite_with_chunks(
        &self,
        buffer: Entity<Buffer>,
        edit_chunks: impl 'static + Send + Stream<Item = Result<String, LanguageModelCompletionError>>,
        cx: &mut AsyncApp,
    ) -> (
        Task<Result<EditAgentOutput>>,
        mpsc::UnboundedReceiver<EditAgentOutputEvent>,
    ) {
        let (output_events_tx, output_events_rx) = mpsc::unbounded();
        let this = self.clone();
        let task = cx.spawn(async move |cx| {
            this.action_log
                .update(cx, |log, cx| log.buffer_created(buffer.clone(), cx))?;
            let output = this
                .overwrite_with_chunks_internal(buffer, edit_chunks, output_events_tx, cx)
                .await;
            this.project
                .update(cx, |project, cx| project.set_agent_location(None, cx))?;
            output
        });
        (task, output_events_rx)
    }

    async fn overwrite_with_chunks_internal(
        &self,
        buffer: Entity<Buffer>,
        edit_chunks: impl 'static + Send + Stream<Item = Result<String, LanguageModelCompletionError>>,
        output_events_tx: mpsc::UnboundedSender<EditAgentOutputEvent>,
        cx: &mut AsyncApp,
    ) -> Result<EditAgentOutput> {
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
            self.action_log.update(cx, |log, cx| {
                log.buffer_edited(buffer.clone(), cx);
            });
            self.project.update(cx, |project, cx| {
                project.set_agent_location(
                    Some(AgentLocation {
                        buffer: buffer.downgrade(),
                        position: language::Anchor::MAX,
                    }),
                    cx,
                )
            });
            output_events_tx
                .unbounded_send(EditAgentOutputEvent::Edited)
                .ok();
        })?;

        let mut raw_edits = String::new();
        pin_mut!(edit_chunks);
        while let Some(chunk) = edit_chunks.next().await {
            let chunk = chunk?;
            raw_edits.push_str(&chunk);
            cx.update(|cx| {
                buffer.update(cx, |buffer, cx| buffer.append(chunk, cx));
                self.action_log
                    .update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                self.project.update(cx, |project, cx| {
                    project.set_agent_location(
                        Some(AgentLocation {
                            buffer: buffer.downgrade(),
                            position: language::Anchor::MAX,
                        }),
                        cx,
                    )
                });
            })?;
            output_events_tx
                .unbounded_send(EditAgentOutputEvent::Edited)
                .ok();
        }

        Ok(EditAgentOutput {
            _raw_edits: raw_edits,
            _parser_metrics: EditParserMetrics::default(),
        })
    }

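    /// Requests a set of `<old_text>`/`<new_text>` edits from the model and
    /// applies them to `buffer` as they stream in, reporting progress on the
    /// returned event channel.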
    pub fn edit(
        &self,
        buffer: Entity<Buffer>,
        edit_description: String,
        conversation: &LanguageModelRequest,
        cx: &mut AsyncApp,
    ) -> (
        Task<Result<EditAgentOutput>>,
        mpsc::UnboundedReceiver<EditAgentOutputEvent>,
    ) {
        self.project
            .update(cx, |project, cx| {
                project.set_agent_location(
                    Some(AgentLocation {
                        buffer: buffer.downgrade(),
                        position: language::Anchor::MIN,
                    }),
                    cx,
                );
            })
            .ok();

        let this = self.clone();
        let (events_tx, events_rx) = mpsc::unbounded();
        let conversation = conversation.clone();
        let output = cx.spawn(async move |cx| {
            let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
            let path = cx.update(|cx| snapshot.resolve_file_path(cx, true))?;
            let prompt = EditFilePromptTemplate {
                path,
                edit_description,
            }
            .render(&this.templates)?;
            let edit_chunks = this.request(conversation, prompt, cx).await?;

            let (output, mut inner_events) = this.apply_edit_chunks(buffer, edit_chunks, cx);
            while let Some(event) = inner_events.next().await {
                events_tx.unbounded_send(event).ok();
            }
            output.await
        });
        (output, events_rx)
    }

    fn apply_edit_chunks(
        &self,
        buffer: Entity<Buffer>,
        edit_chunks: impl 'static + Send + Stream<Item = Result<String, LanguageModelCompletionError>>,
        cx: &mut AsyncApp,
    ) -> (
        Task<Result<EditAgentOutput>>,
        mpsc::UnboundedReceiver<EditAgentOutputEvent>,
    ) {
        let (output_events_tx, output_events_rx) = mpsc::unbounded();
        let this = self.clone();
        let task = cx.spawn(async move |mut cx| {
            this.action_log
                .update(cx, |log, cx| log.buffer_read(buffer.clone(), cx))?;
            let output = this
                .apply_edit_chunks_internal(buffer, edit_chunks, output_events_tx, &mut cx)
                .await;
            this.project
                .update(cx, |project, cx| project.set_agent_location(None, cx))?;
            output
        });
        (task, output_events_rx)
    }

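    /// Core loop for streamed edits: for each parsed `<old_text>` block,
    /// resolve its location in the buffer (exactly or fuzzily), re-indent the
    /// corresponding `<new_text>`, and apply the resulting character-level
    /// diff incrementally.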
    async fn apply_edit_chunks_internal(
        &self,
        buffer: Entity<Buffer>,
        edit_chunks: impl 'static + Send + Stream<Item = Result<String, LanguageModelCompletionError>>,
        output_events: mpsc::UnboundedSender<EditAgentOutputEvent>,
        cx: &mut AsyncApp,
    ) -> Result<EditAgentOutput> {
        let (output, mut edit_events) = Self::parse_edit_chunks(edit_chunks, cx);
        while let Some(edit_event) = edit_events.next().await {
            let EditParserEvent::OldText(old_text_query) = edit_event? else {
                continue;
            };

            // Skip edits with an empty old text.
            if old_text_query.is_empty() {
                continue;
            }

            let old_text_query = SharedString::from(old_text_query);

            let (edits_tx, edits_rx) = mpsc::unbounded();
            let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
            let old_range = cx
                .background_spawn({
                    let snapshot = snapshot.clone();
                    let old_text_query = old_text_query.clone();
                    async move { Self::resolve_location(&snapshot, &old_text_query) }
                })
                .await;
            let Some(old_range) = old_range else {
                // We couldn't find the old text in the buffer. Report the error.
                output_events
                    .unbounded_send(EditAgentOutputEvent::OldTextNotFound(old_text_query))
                    .ok();
                continue;
            };

            let compute_edits = cx.background_spawn(async move {
                let buffer_start_indent =
                    snapshot.line_indent_for_row(snapshot.offset_to_point(old_range.start).row);
                let old_text_start_indent = old_text_query
                    .lines()
                    .next()
                    .map_or(buffer_start_indent, |line| {
                        LineIndent::from_iter(line.chars())
                    });
                let indent_delta = if buffer_start_indent.tabs > 0 {
                    IndentDelta::Tabs(
                        buffer_start_indent.tabs as isize - old_text_start_indent.tabs as isize,
                    )
                } else {
                    IndentDelta::Spaces(
                        buffer_start_indent.spaces as isize - old_text_start_indent.spaces as isize,
                    )
                };

                let old_text = snapshot
                    .text_for_range(old_range.clone())
                    .collect::<String>();
                let mut diff = StreamingDiff::new(old_text);
                let mut edit_start = old_range.start;
                let mut new_text_chunks =
                    Self::reindent_new_text_chunks(indent_delta, &mut edit_events);
                let mut done = false;
                while !done {
                    let char_operations = if let Some(new_text_chunk) = new_text_chunks.next().await
                    {
                        diff.push_new(&new_text_chunk?)
                    } else {
                        done = true;
                        mem::take(&mut diff).finish()
                    };

                    for op in char_operations {
                        match op {
                            CharOperation::Insert { text } => {
                                let edit_start = snapshot.anchor_after(edit_start);
                                edits_tx
                                    .unbounded_send((edit_start..edit_start, Arc::from(text)))?;
                            }
                            CharOperation::Delete { bytes } => {
                                let edit_end = edit_start + bytes;
                                let edit_range = snapshot.anchor_after(edit_start)
                                    ..snapshot.anchor_before(edit_end);
                                edit_start = edit_end;
                                edits_tx.unbounded_send((edit_range, Arc::from("")))?;
                            }
                            CharOperation::Keep { bytes } => edit_start += bytes,
                        }
                    }
                }

                drop(new_text_chunks);
                anyhow::Ok(edit_events)
            });

            // TODO: group all edits into one transaction
            let mut edits_rx = edits_rx.ready_chunks(32);
            while let Some(edits) = edits_rx.next().await {
                if edits.is_empty() {
                    continue;
                }

                // Edit the buffer and report edits to the action log as part of the
                // same effect cycle; otherwise the edit will be reported as if the
                // user made it.
                cx.update(|cx| {
                    let max_edit_end = buffer.update(cx, |buffer, cx| {
                        buffer.edit(edits.iter().cloned(), None, cx);
                        let max_edit_end = buffer
                            .summaries_for_anchors::<Point, _>(
                                edits.iter().map(|(range, _)| &range.end),
                            )
                            .max()
                            .unwrap();
                        buffer.anchor_before(max_edit_end)
                    });
                    self.action_log
                        .update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                    self.project.update(cx, |project, cx| {
                        project.set_agent_location(
                            Some(AgentLocation {
                                buffer: buffer.downgrade(),
                                position: max_edit_end,
                            }),
                            cx,
                        );
                    });
                })?;
                output_events
                    .unbounded_send(EditAgentOutputEvent::Edited)
                    .ok();
            }

            edit_events = compute_edits.await?;
        }

        output.await
    }

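    /// Feeds the raw completion chunks through `EditParser` on a background
    /// task, forwarding parsed events over a channel and returning the raw
    /// text plus parser metrics once the stream ends.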
    fn parse_edit_chunks(
        chunks: impl 'static + Send + Stream<Item = Result<String, LanguageModelCompletionError>>,
        cx: &mut AsyncApp,
    ) -> (
        Task<Result<EditAgentOutput>>,
        UnboundedReceiver<Result<EditParserEvent>>,
    ) {
        let (tx, rx) = mpsc::unbounded();
        let output = cx.background_spawn(async move {
            pin_mut!(chunks);

            let mut parser = EditParser::new();
            let mut raw_edits = String::new();
            while let Some(chunk) = chunks.next().await {
                match chunk {
                    Ok(chunk) => {
                        raw_edits.push_str(&chunk);
                        for event in parser.push(&chunk) {
                            tx.unbounded_send(Ok(event))?;
                        }
                    }
                    Err(error) => {
                        tx.unbounded_send(Err(error.into()))?;
                    }
                }
            }
            Ok(EditAgentOutput {
                _raw_edits: raw_edits,
                _parser_metrics: parser.finish(),
            })
        });
        (output, rx)
    }

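    /// Adjusts the leading indentation of each streamed new-text line by
    /// `delta`, buffering partial lines until a full line (or the final chunk)
    /// has arrived.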
    fn reindent_new_text_chunks(
        delta: IndentDelta,
        mut stream: impl Unpin + Stream<Item = Result<EditParserEvent>>,
    ) -> impl Stream<Item = Result<String>> {
        let mut buffer = String::new();
        let mut in_leading_whitespace = true;
        let mut done = false;
        futures::stream::poll_fn(move |cx| {
            while !done {
                let (chunk, is_last_chunk) = match stream.poll_next_unpin(cx) {
                    Poll::Ready(Some(Ok(EditParserEvent::NewTextChunk { chunk, done }))) => {
                        (chunk, done)
                    }
                    Poll::Ready(Some(Err(err))) => return Poll::Ready(Some(Err(err))),
                    Poll::Pending => return Poll::Pending,
                    _ => return Poll::Ready(None),
                };

                buffer.push_str(&chunk);

                let mut indented_new_text = String::new();
                let mut start_ix = 0;
                let mut newlines = buffer.match_indices('\n').peekable();
                loop {
                    let (line_end, is_pending_line) = match newlines.next() {
                        Some((ix, _)) => (ix, false),
                        None => (buffer.len(), true),
                    };
                    let line = &buffer[start_ix..line_end];

                    if in_leading_whitespace {
                        if let Some(non_whitespace_ix) = line.find(|c| delta.character() != c) {
                            // We found a non-whitespace character; adjust
                            // indentation based on the delta.
                            let new_indent_len =
                                cmp::max(0, non_whitespace_ix as isize + delta.len()) as usize;
                            indented_new_text
                                .extend(iter::repeat(delta.character()).take(new_indent_len));
                            indented_new_text.push_str(&line[non_whitespace_ix..]);
                            in_leading_whitespace = false;
                        } else if is_pending_line {
                            // We're still in leading whitespace and this line is incomplete.
                            // Stop processing until we receive more input.
                            break;
                        } else {
                            // This line is entirely whitespace. Push it without indentation.
                            indented_new_text.push_str(line);
                        }
                    } else {
                        indented_new_text.push_str(line);
                    }

                    if is_pending_line {
                        start_ix = line_end;
                        break;
                    } else {
                        in_leading_whitespace = true;
                        indented_new_text.push('\n');
                        start_ix = line_end + 1;
                    }
                }
                buffer.replace_range(..start_ix, "");

                // This was the last chunk, push all the buffered content as-is.
                if is_last_chunk {
                    indented_new_text.push_str(&buffer);
                    buffer.clear();
                    done = true;
                }

                if !indented_new_text.is_empty() {
                    return Poll::Ready(Some(Ok(indented_new_text)));
                }
            }

            Poll::Ready(None)
        })
    }

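    /// Sends the edit prompt to the model as a new user message appended to
    /// the existing conversation, dropping any pending tool uses from the last
    /// assistant message, and returns the streaming text completion.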
    async fn request(
        &self,
        mut conversation: LanguageModelRequest,
        prompt: String,
        cx: &mut AsyncApp,
    ) -> Result<BoxStream<'static, Result<String, LanguageModelCompletionError>>> {
        let mut messages_iter = conversation.messages.iter_mut();
        if let Some(last_message) = messages_iter.next_back() {
            if last_message.role == Role::Assistant {
                let old_content_len = last_message.content.len();
                last_message
                    .content
                    .retain(|content| !matches!(content, MessageContent::ToolUse(_)));
                let new_content_len = last_message.content.len();

                // We just removed pending tool uses from the content of the
                // last message, so it doesn't make sense to cache it anymore
                // (e.g., the message will look very different on the next
                // request). Thus, we move the flag to the message prior to it,
                // as it will still be a valid prefix of the conversation.
                if old_content_len != new_content_len && last_message.cache {
                    if let Some(prev_message) = messages_iter.next_back() {
                        last_message.cache = false;
                        prev_message.cache = true;
                    }
                }

                if last_message.content.is_empty() {
                    conversation.messages.pop();
                }
            }
        }

        conversation.messages.push(LanguageModelRequestMessage {
            role: Role::User,
            content: vec![MessageContent::Text(prompt)],
            cache: false,
        });

        // Include tools in the request so that we can take advantage of
        // caching when ToolChoice::None is supported.
        let mut tool_choice = None;
        let mut tools = Vec::new();
        if !conversation.tools.is_empty()
            && self
                .model
                .supports_tool_choice(LanguageModelToolChoice::None)
        {
            tool_choice = Some(LanguageModelToolChoice::None);
            tools = conversation.tools.clone();
        }

        let request = LanguageModelRequest {
            thread_id: conversation.thread_id,
            prompt_id: conversation.prompt_id,
            mode: conversation.mode,
            messages: conversation.messages,
            tool_choice,
            tools,
            stop: Vec::new(),
            temperature: None,
        };

        Ok(self.model.stream_completion_text(request, cx).await?.stream)
    }

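    /// Locates `search_query` in the buffer, preferring an exact match and
    /// falling back to a line-based fuzzy search, then expands the result to
    /// whole lines.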
    fn resolve_location(buffer: &BufferSnapshot, search_query: &str) -> Option<Range<usize>> {
        let range = Self::resolve_location_exact(buffer, search_query)
            .or_else(|| Self::resolve_location_fuzzy(buffer, search_query))?;

        // Expand the range to include entire lines.
        let mut start = buffer.offset_to_point(buffer.clip_offset(range.start, Bias::Left));
        start.column = 0;
        let mut end = buffer.offset_to_point(buffer.clip_offset(range.end, Bias::Right));
        if end.column > 0 {
            end.column = buffer.line_len(end.row);
        }

        Some(buffer.point_to_offset(start)..buffer.point_to_offset(end))
    }

    fn resolve_location_exact(buffer: &BufferSnapshot, search_query: &str) -> Option<Range<usize>> {
        let search = AhoCorasick::new([search_query]).ok()?;
        let mat = search
            .stream_find_iter(buffer.bytes_in_range(0..buffer.len()))
            .next()?
            .expect("buffer can't error");
        Some(mat.range())
    }

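    /// Line-oriented fuzzy search: a dynamic program over (query line, buffer
    /// line) pairs where skipping a buffer line costs `INSERTION_COST`,
    /// skipping a query line costs `DELETION_COST`, and lines are compared
    /// with `fuzzy_eq`. The best alignment is accepted only if at least 80% of
    /// its lines match.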
    fn resolve_location_fuzzy(buffer: &BufferSnapshot, search_query: &str) -> Option<Range<usize>> {
        const INSERTION_COST: u32 = 3;
        const DELETION_COST: u32 = 10;

        let buffer_line_count = buffer.max_point().row as usize + 1;
        let query_line_count = search_query.lines().count();
        let mut matrix = SearchMatrix::new(query_line_count + 1, buffer_line_count + 1);
        let mut leading_deletion_cost = 0_u32;
        for (row, query_line) in search_query.lines().enumerate() {
            let query_line = query_line.trim();
            leading_deletion_cost = leading_deletion_cost.saturating_add(DELETION_COST);
            matrix.set(
                row + 1,
                0,
                SearchState::new(leading_deletion_cost, SearchDirection::Diagonal),
            );

            let mut buffer_lines = buffer.as_rope().chunks().lines();
            let mut col = 0;
            while let Some(buffer_line) = buffer_lines.next() {
                let buffer_line = buffer_line.trim();
                let up = SearchState::new(
                    matrix.get(row, col + 1).cost.saturating_add(DELETION_COST),
                    SearchDirection::Up,
                );
                let left = SearchState::new(
                    matrix.get(row + 1, col).cost.saturating_add(INSERTION_COST),
                    SearchDirection::Left,
                );
                let diagonal = SearchState::new(
                    if fuzzy_eq(query_line, buffer_line) {
                        matrix.get(row, col).cost
                    } else {
                        matrix
                            .get(row, col)
                            .cost
                            .saturating_add(DELETION_COST + INSERTION_COST)
                    },
                    SearchDirection::Diagonal,
                );
                matrix.set(row + 1, col + 1, up.min(left).min(diagonal));
                col += 1;
            }
        }

        // Traceback to find the best match
        let mut buffer_row_end = buffer_line_count as u32;
        let mut best_cost = u32::MAX;
        for col in 1..=buffer_line_count {
            let cost = matrix.get(query_line_count, col).cost;
            if cost < best_cost {
                best_cost = cost;
                buffer_row_end = col as u32;
            }
        }

        let mut matched_lines = 0;
        let mut query_row = query_line_count;
        let mut buffer_row_start = buffer_row_end;
        while query_row > 0 && buffer_row_start > 0 {
            let current = matrix.get(query_row, buffer_row_start as usize);
            match current.direction {
                SearchDirection::Diagonal => {
                    query_row -= 1;
                    buffer_row_start -= 1;
                    matched_lines += 1;
                }
                SearchDirection::Up => {
                    query_row -= 1;
                }
                SearchDirection::Left => {
                    buffer_row_start -= 1;
                }
            }
        }

        let matched_buffer_row_count = buffer_row_end - buffer_row_start;
        let matched_ratio =
            matched_lines as f32 / (matched_buffer_row_count as f32).max(query_line_count as f32);
        if matched_ratio >= 0.8 {
            let buffer_start_ix = buffer.point_to_offset(Point::new(buffer_row_start, 0));
            let buffer_end_ix = buffer.point_to_offset(Point::new(
                buffer_row_end - 1,
                buffer.line_len(buffer_row_end - 1),
            ));
            Some(buffer_start_ix..buffer_end_ix)
        } else {
            None
        }
    }
}

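/// Considers two lines equal when their normalized Levenshtein similarity is
/// at least 0.8. The length-difference bound gives the maximum possible
/// similarity, letting obviously different lines short-circuit before the
/// full distance is computed.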
fn fuzzy_eq(left: &str, right: &str) -> bool {
    const THRESHOLD: f64 = 0.8;

    let min_levenshtein = left.len().abs_diff(right.len());
    let min_normalized_levenshtein =
        1. - (min_levenshtein as f64 / cmp::max(left.len(), right.len()) as f64);
    if min_normalized_levenshtein < THRESHOLD {
        return false;
    }

    strsim::normalized_levenshtein(left, right) >= THRESHOLD
}

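/// The signed change in indentation (in spaces or tabs) applied to streamed
/// new text so that it lines up with the text it replaces in the buffer.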
#[derive(Copy, Clone, Debug)]
enum IndentDelta {
    Spaces(isize),
    Tabs(isize),
}

impl IndentDelta {
    fn character(&self) -> char {
        match self {
            IndentDelta::Spaces(_) => ' ',
            IndentDelta::Tabs(_) => '\t',
        }
    }

    fn len(&self) -> isize {
        match self {
            IndentDelta::Spaces(n) => *n,
            IndentDelta::Tabs(n) => *n,
        }
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum SearchDirection {
    Up,
    Left,
    Diagonal,
}

#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
struct SearchState {
    cost: u32,
    direction: SearchDirection,
}

impl SearchState {
    fn new(cost: u32, direction: SearchDirection) -> Self {
        Self { cost, direction }
    }
}

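/// Row-major cost/direction matrix backing the fuzzy location search.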
struct SearchMatrix {
    cols: usize,
    data: Vec<SearchState>,
}

impl SearchMatrix {
    fn new(rows: usize, cols: usize) -> Self {
        SearchMatrix {
            cols,
            data: vec![SearchState::new(0, SearchDirection::Diagonal); rows * cols],
        }
    }

    fn get(&self, row: usize, col: usize) -> SearchState {
        self.data[row * self.cols + col]
    }

    fn set(&mut self, row: usize, col: usize, cost: SearchState) {
        self.data[row * self.cols + col] = cost;
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use fs::FakeFs;
    use futures::stream;
    use gpui::{App, AppContext, TestAppContext};
    use indoc::indoc;
    use language_model::fake_provider::FakeLanguageModel;
    use project::{AgentLocation, Project};
    use rand::prelude::*;
    use rand::rngs::StdRng;
    use std::cmp;
    use unindent::Unindent;
    use util::test::{generate_marked_text, marked_text_ranges};

    #[gpui::test(iterations = 100)]
    async fn test_empty_old_text(cx: &mut TestAppContext, mut rng: StdRng) {
        let agent = init_test(cx).await;
        let buffer = cx.new(|cx| {
            Buffer::local(
                indoc! {"
                    abc
                    def
                    ghi
                "},
                cx,
            )
        });
        let raw_edits = simulate_llm_output(
            indoc! {"
                <old_text></old_text>
                <new_text>jkl</new_text>
                <old_text>def</old_text>
                <new_text>DEF</new_text>
            "},
            &mut rng,
            cx,
        );
        let (apply, _events) =
            agent.apply_edit_chunks(buffer.clone(), raw_edits, &mut cx.to_async());
        apply.await.unwrap();
        pretty_assertions::assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
            indoc! {"
                abc
                DEF
                ghi
            "}
        );
    }

    #[gpui::test(iterations = 100)]
    async fn test_indentation(cx: &mut TestAppContext, mut rng: StdRng) {
        let agent = init_test(cx).await;
        let buffer = cx.new(|cx| {
            Buffer::local(
                indoc! {"
                    lorem
                    ipsum
                    dolor
                    sit
                "},
                cx,
            )
        });
        let raw_edits = simulate_llm_output(
            indoc! {"
                <old_text>
                ipsum
                dolor
                sit
                </old_text>
                <new_text>
                ipsum
                dolor
                sit
                amet
                </new_text>
            "},
            &mut rng,
            cx,
        );
        let (apply, _events) =
            agent.apply_edit_chunks(buffer.clone(), raw_edits, &mut cx.to_async());
        apply.await.unwrap();
        pretty_assertions::assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
            indoc! {"
                lorem
                ipsum
                dolor
                sit
                amet
            "}
        );
    }

    #[gpui::test(iterations = 100)]
    async fn test_dependent_edits(cx: &mut TestAppContext, mut rng: StdRng) {
        let agent = init_test(cx).await;
        let buffer = cx.new(|cx| Buffer::local("abc\ndef\nghi", cx));
        let raw_edits = simulate_llm_output(
            indoc! {"
                <old_text>
                def
                </old_text>
                <new_text>
                DEF
                </new_text>

                <old_text>
                DEF
                </old_text>
                <new_text>
                DeF
                </new_text>
            "},
            &mut rng,
            cx,
        );
        let (apply, _events) =
            agent.apply_edit_chunks(buffer.clone(), raw_edits, &mut cx.to_async());
        apply.await.unwrap();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
            "abc\nDeF\nghi"
        );
    }

    #[gpui::test(iterations = 100)]
    async fn test_old_text_hallucination(cx: &mut TestAppContext, mut rng: StdRng) {
        let agent = init_test(cx).await;
        let buffer = cx.new(|cx| Buffer::local("abc\ndef\nghi", cx));
        let raw_edits = simulate_llm_output(
            indoc! {"
                <old_text>
                jkl
                </old_text>
                <new_text>
                mno
                </new_text>

                <old_text>
                abc
                </old_text>
                <new_text>
                ABC
                </new_text>
            "},
            &mut rng,
            cx,
        );
        let (apply, _events) =
            agent.apply_edit_chunks(buffer.clone(), raw_edits, &mut cx.to_async());
        apply.await.unwrap();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
            "ABC\ndef\nghi"
        );
    }

    #[gpui::test]
    async fn test_edit_events(cx: &mut TestAppContext) {
        let agent = init_test(cx).await;
        let project = agent
            .action_log
            .read_with(cx, |log, _| log.project().clone());
        let buffer = cx.new(|cx| Buffer::local("abc\ndef\nghi", cx));
        let (chunks_tx, chunks_rx) = mpsc::unbounded();
        let (apply, mut events) = agent.apply_edit_chunks(
            buffer.clone(),
            chunks_rx.map(|chunk: &str| Ok(chunk.to_string())),
            &mut cx.to_async(),
        );

        chunks_tx.unbounded_send("<old_text>a").unwrap();
        cx.run_until_parked();
        assert_eq!(drain_events(&mut events), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
            "abc\ndef\nghi"
        );
        assert_eq!(
            project.read_with(cx, |project, _| project.agent_location()),
            None
        );

        chunks_tx.unbounded_send("bc</old_text>").unwrap();
        cx.run_until_parked();
        assert_eq!(drain_events(&mut events), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
            "abc\ndef\nghi"
        );
        assert_eq!(
            project.read_with(cx, |project, _| project.agent_location()),
            None
        );

        chunks_tx.unbounded_send("<new_text>abX").unwrap();
        cx.run_until_parked();
        assert_eq!(drain_events(&mut events), [EditAgentOutputEvent::Edited]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
            "abXc\ndef\nghi"
        );
        assert_eq!(
            project.read_with(cx, |project, _| project.agent_location()),
            Some(AgentLocation {
                buffer: buffer.downgrade(),
                position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(0, 3)))
            })
        );

        chunks_tx.unbounded_send("cY").unwrap();
        cx.run_until_parked();
        assert_eq!(drain_events(&mut events), [EditAgentOutputEvent::Edited]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
            "abXcY\ndef\nghi"
        );
        assert_eq!(
            project.read_with(cx, |project, _| project.agent_location()),
            Some(AgentLocation {
                buffer: buffer.downgrade(),
                position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(0, 5)))
            })
        );

        chunks_tx.unbounded_send("</new_text>").unwrap();
        chunks_tx.unbounded_send("<old_text>hall").unwrap();
        cx.run_until_parked();
        assert_eq!(drain_events(&mut events), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
            "abXcY\ndef\nghi"
        );
        assert_eq!(
            project.read_with(cx, |project, _| project.agent_location()),
            Some(AgentLocation {
                buffer: buffer.downgrade(),
                position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(0, 5)))
            })
        );

        chunks_tx.unbounded_send("ucinated old</old_text>").unwrap();
        chunks_tx.unbounded_send("<new_text>").unwrap();
        cx.run_until_parked();
        assert_eq!(
            drain_events(&mut events),
            vec![EditAgentOutputEvent::OldTextNotFound(
                "hallucinated old".into()
            )]
        );
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
            "abXcY\ndef\nghi"
        );
        assert_eq!(
            project.read_with(cx, |project, _| project.agent_location()),
            Some(AgentLocation {
                buffer: buffer.downgrade(),
                position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(0, 5)))
            })
        );

        chunks_tx.unbounded_send("hallucinated new</new_").unwrap();
        chunks_tx.unbounded_send("text>").unwrap();
        cx.run_until_parked();
        assert_eq!(drain_events(&mut events), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
            "abXcY\ndef\nghi"
        );
        assert_eq!(
            project.read_with(cx, |project, _| project.agent_location()),
            Some(AgentLocation {
                buffer: buffer.downgrade(),
                position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(0, 5)))
            })
        );

        chunks_tx.unbounded_send("<old_text>gh").unwrap();
        chunks_tx.unbounded_send("i</old_text>").unwrap();
        chunks_tx.unbounded_send("<new_text>").unwrap();
        cx.run_until_parked();
        assert_eq!(drain_events(&mut events), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
            "abXcY\ndef\nghi"
        );
        assert_eq!(
            project.read_with(cx, |project, _| project.agent_location()),
            Some(AgentLocation {
                buffer: buffer.downgrade(),
                position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(0, 5)))
            })
        );

        chunks_tx.unbounded_send("GHI</new_text>").unwrap();
        cx.run_until_parked();
        assert_eq!(
            drain_events(&mut events),
            vec![EditAgentOutputEvent::Edited]
        );
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
            "abXcY\ndef\nGHI"
        );
        assert_eq!(
            project.read_with(cx, |project, _| project.agent_location()),
            Some(AgentLocation {
                buffer: buffer.downgrade(),
                position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(2, 3)))
            })
        );

        drop(chunks_tx);
        apply.await.unwrap();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
            "abXcY\ndef\nGHI"
        );
        assert_eq!(drain_events(&mut events), vec![]);
        assert_eq!(
            project.read_with(cx, |project, _| project.agent_location()),
            None
        );
    }

    #[gpui::test]
    async fn test_overwrite_events(cx: &mut TestAppContext) {
        let agent = init_test(cx).await;
        let project = agent
            .action_log
            .read_with(cx, |log, _| log.project().clone());
        let buffer = cx.new(|cx| Buffer::local("abc\ndef\nghi", cx));
        let (chunks_tx, chunks_rx) = mpsc::unbounded();
        let (apply, mut events) = agent.overwrite_with_chunks(
            buffer.clone(),
            chunks_rx.map(|chunk: &str| Ok(chunk.to_string())),
            &mut cx.to_async(),
        );

        cx.run_until_parked();
        assert_eq!(
            drain_events(&mut events),
            vec![EditAgentOutputEvent::Edited]
        );
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
            ""
        );
        assert_eq!(
            project.read_with(cx, |project, _| project.agent_location()),
            Some(AgentLocation {
                buffer: buffer.downgrade(),
                position: language::Anchor::MAX
            })
        );

        chunks_tx.unbounded_send("jkl\n").unwrap();
        cx.run_until_parked();
        assert_eq!(
            drain_events(&mut events),
            vec![EditAgentOutputEvent::Edited]
        );
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
            "jkl\n"
        );
        assert_eq!(
            project.read_with(cx, |project, _| project.agent_location()),
            Some(AgentLocation {
                buffer: buffer.downgrade(),
                position: language::Anchor::MAX
            })
        );

        chunks_tx.unbounded_send("mno\n").unwrap();
        cx.run_until_parked();
        assert_eq!(
            drain_events(&mut events),
            vec![EditAgentOutputEvent::Edited]
        );
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
            "jkl\nmno\n"
        );
        assert_eq!(
            project.read_with(cx, |project, _| project.agent_location()),
            Some(AgentLocation {
                buffer: buffer.downgrade(),
                position: language::Anchor::MAX
            })
        );

        chunks_tx.unbounded_send("pqr").unwrap();
        cx.run_until_parked();
        assert_eq!(
            drain_events(&mut events),
            vec![EditAgentOutputEvent::Edited]
        );
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
            "jkl\nmno\npqr"
        );
        assert_eq!(
            project.read_with(cx, |project, _| project.agent_location()),
            Some(AgentLocation {
                buffer: buffer.downgrade(),
                position: language::Anchor::MAX
            })
        );

        drop(chunks_tx);
        apply.await.unwrap();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
            "jkl\nmno\npqr"
        );
        assert_eq!(drain_events(&mut events), vec![]);
        assert_eq!(
            project.read_with(cx, |project, _| project.agent_location()),
            None
        );
    }

    #[gpui::test]
    fn test_resolve_location(cx: &mut App) {
        assert_location_resolution(
            concat!(
                " Lorem\n",
                "« ipsum»\n",
                " dolor sit amet\n",
                " consecteur",
            ),
            "ipsum",
            cx,
        );

        assert_location_resolution(
            concat!(
                " Lorem\n",
                "« ipsum\n",
                " dolor sit amet»\n",
                " consecteur",
            ),
            "ipsum\ndolor sit amet",
            cx,
        );

        assert_location_resolution(
            &"
            «fn foo1(a: usize) -> usize {
            40
            }»

            fn foo2(b: usize) -> usize {
            42
            }
            "
            .unindent(),
            "fn foo1(a: usize) -> u32 {\n40\n}",
            cx,
        );

        assert_location_resolution(
            &"
            class Something {
            one() { return 1; }
            « two() { return 2222; }
            three() { return 333; }
            four() { return 4444; }
            five() { return 5555; }
            six() { return 6666; }»
            seven() { return 7; }
            eight() { return 8; }
            }
            "
            .unindent(),
            &"
            two() { return 2222; }
            four() { return 4444; }
            five() { return 5555; }
            six() { return 6666; }
            "
            .unindent(),
            cx,
        );

        assert_location_resolution(
            &"
            use std::ops::Range;
            use std::sync::Mutex;
            use std::{
            collections::HashMap,
            env,
            ffi::{OsStr, OsString},
            fs,
            io::{BufRead, BufReader},
            mem,
            path::{Path, PathBuf},
            process::Command,
            sync::LazyLock,
            time::SystemTime,
            };
            "
            .unindent(),
            &"
            use std::collections::{HashMap, HashSet};
            use std::ffi::{OsStr, OsString};
            use std::fmt::Write as _;
            use std::fs;
            use std::io::{BufReader, Read, Write};
            use std::mem;
            use std::path::{Path, PathBuf};
            use std::process::Command;
            use std::sync::Arc;
            "
            .unindent(),
            cx,
        );

        assert_location_resolution(
            indoc! {"
                impl Foo {
                fn new() -> Self {
                Self {
                subscriptions: vec![
                cx.observe_window_activation(window, |editor, window, cx| {
                let active = window.is_window_active();
                editor.blink_manager.update(cx, |blink_manager, cx| {
                if active {
                blink_manager.enable(cx);
                } else {
                blink_manager.disable(cx);
                }
                });
                }),
                ];
                }
                }
                }
            "},
            concat!(
                " editor.blink_manager.update(cx, |blink_manager, cx| {\n",
                " blink_manager.enable(cx);\n",
                " });",
            ),
            cx,
        );

        assert_location_resolution(
            indoc! {r#"
                let tool = cx
                .update(|cx| working_set.tool(&tool_name, cx))
                .map_err(|err| {
                anyhow!("Failed to look up tool '{}': {}", tool_name, err)
                })?;

                let Some(tool) = tool else {
                return Err(anyhow!("Tool '{}' not found", tool_name));
                };

                let project = project.clone();
                let action_log = action_log.clone();
                let messages = messages.clone();
                let tool_result = cx
                .update(|cx| tool.run(invocation.input, &messages, project, action_log, cx))
                .map_err(|err| anyhow!("Failed to start tool '{}': {}", tool_name, err))?;

                tasks.push(tool_result.output);
            "#},
            concat!(
                "let tool_result = cx\n",
                " .update(|cx| tool.run(invocation.input, &messages, project, action_log, cx))\n",
                " .output;",
            ),
            cx,
        );
    }

    #[gpui::test(iterations = 100)]
    async fn test_indent_new_text_chunks(mut rng: StdRng) {
        let chunks = to_random_chunks(&mut rng, " abc\n def\n ghi");
        let new_text_chunks = stream::iter(chunks.iter().enumerate().map(|(index, chunk)| {
            Ok(EditParserEvent::NewTextChunk {
                chunk: chunk.clone(),
                done: index == chunks.len() - 1,
            })
        }));
        let indented_chunks =
            EditAgent::reindent_new_text_chunks(IndentDelta::Spaces(2), new_text_chunks)
                .collect::<Vec<_>>()
                .await;
        let new_text = indented_chunks
            .into_iter()
            .collect::<Result<String>>()
            .unwrap();
        assert_eq!(new_text, "   abc\n   def\n   ghi");
    }

    #[gpui::test(iterations = 100)]
    async fn test_outdent_new_text_chunks(mut rng: StdRng) {
        let chunks = to_random_chunks(&mut rng, "\t\t\t\tabc\n\t\tdef\n\t\t\t\t\t\tghi");
        let new_text_chunks = stream::iter(chunks.iter().enumerate().map(|(index, chunk)| {
            Ok(EditParserEvent::NewTextChunk {
                chunk: chunk.clone(),
                done: index == chunks.len() - 1,
            })
        }));
        let indented_chunks =
            EditAgent::reindent_new_text_chunks(IndentDelta::Tabs(-2), new_text_chunks)
                .collect::<Vec<_>>()
                .await;
        let new_text = indented_chunks
            .into_iter()
            .collect::<Result<String>>()
            .unwrap();
        assert_eq!(new_text, "\t\tabc\ndef\n\t\t\t\tghi");
    }

    #[gpui::test(iterations = 100)]
    async fn test_random_indents(mut rng: StdRng) {
        let len = rng.gen_range(1..=100);
        let new_text = util::RandomCharIter::new(&mut rng)
            .with_simple_text()
            .take(len)
            .collect::<String>();
        let new_text = new_text
            .split('\n')
            .map(|line| format!("{}{}", " ".repeat(rng.gen_range(0..=8)), line))
            .collect::<Vec<_>>()
            .join("\n");
        let delta = IndentDelta::Spaces(rng.gen_range(-4..=4));

        let chunks = to_random_chunks(&mut rng, &new_text);
        let new_text_chunks = stream::iter(chunks.iter().enumerate().map(|(index, chunk)| {
            Ok(EditParserEvent::NewTextChunk {
                chunk: chunk.clone(),
                done: index == chunks.len() - 1,
            })
        }));
        let reindented_chunks = EditAgent::reindent_new_text_chunks(delta, new_text_chunks)
            .collect::<Vec<_>>()
            .await;
        let actual_reindented_text = reindented_chunks
            .into_iter()
            .collect::<Result<String>>()
            .unwrap();
        let expected_reindented_text = new_text
            .split('\n')
            .map(|line| {
                if let Some(ix) = line.find(|c| c != ' ') {
                    let new_indent = cmp::max(0, ix as isize + delta.len()) as usize;
                    format!("{}{}", " ".repeat(new_indent), &line[ix..])
                } else {
                    line.to_string()
                }
            })
            .collect::<Vec<_>>()
            .join("\n");
        assert_eq!(actual_reindented_text, expected_reindented_text);
    }

    #[track_caller]
    fn assert_location_resolution(text_with_expected_range: &str, query: &str, cx: &mut App) {
        let (text, _) = marked_text_ranges(text_with_expected_range, false);
        let buffer = cx.new(|cx| Buffer::local(text.clone(), cx));
        let snapshot = buffer.read(cx).snapshot();
        let mut ranges = Vec::new();
        ranges.extend(EditAgent::resolve_location(&snapshot, query));
        let text_with_actual_range = generate_marked_text(&text, &ranges, false);
        pretty_assertions::assert_eq!(text_with_actual_range, text_with_expected_range);
    }

    fn to_random_chunks(rng: &mut StdRng, input: &str) -> Vec<String> {
        let chunk_count = rng.gen_range(1..=cmp::min(input.len(), 50));
        let mut chunk_indices = (0..input.len()).choose_multiple(rng, chunk_count);
        chunk_indices.sort();
        chunk_indices.push(input.len());

        let mut chunks = Vec::new();
        let mut last_ix = 0;
        for chunk_ix in chunk_indices {
            chunks.push(input[last_ix..chunk_ix].to_string());
            last_ix = chunk_ix;
        }
        chunks
    }

    fn simulate_llm_output(
        output: &str,
        rng: &mut StdRng,
        cx: &mut TestAppContext,
    ) -> impl 'static + Send + Stream<Item = Result<String, LanguageModelCompletionError>> {
        let executor = cx.executor();
        stream::iter(to_random_chunks(rng, output).into_iter().map(Ok)).then(move |chunk| {
            let executor = executor.clone();
            async move {
                executor.simulate_random_delay().await;
                chunk
            }
        })
    }

    async fn init_test(cx: &mut TestAppContext) -> EditAgent {
        cx.update(settings::init);
        cx.update(Project::init_settings);
        let project = Project::test(FakeFs::new(cx.executor()), [], cx).await;
        let model = Arc::new(FakeLanguageModel::default());
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        EditAgent::new(model, project, action_log, Templates::new())
    }

    fn drain_events(
        stream: &mut UnboundedReceiver<EditAgentOutputEvent>,
    ) -> Vec<EditAgentOutputEvent> {
        let mut events = Vec::new();
        while let Ok(Some(event)) = stream.try_next() {
            events.push(event);
        }
        events
    }
}