1mod edit_parser;
2#[cfg(test)]
3mod evals;
4
5use crate::{Template, Templates};
6use aho_corasick::AhoCorasick;
7use anyhow::Result;
8use assistant_tool::ActionLog;
9use edit_parser::{EditParser, EditParserEvent, EditParserMetrics};
10use futures::{
11 Stream, StreamExt,
12 channel::mpsc::{self, UnboundedReceiver},
13 pin_mut,
14 stream::BoxStream,
15};
16use gpui::{AppContext, AsyncApp, Entity, SharedString, Task};
17use language::{Bias, Buffer, BufferSnapshot, LineIndent, Point};
18use language_model::{
19 LanguageModel, LanguageModelCompletionError, LanguageModelRequest, LanguageModelRequestMessage,
20 LanguageModelToolChoice, MessageContent, Role,
21};
22use project::{AgentLocation, Project};
23use schemars::JsonSchema;
24use serde::{Deserialize, Serialize};
25use std::{cmp, iter, mem, ops::Range, path::PathBuf, sync::Arc, task::Poll};
26use streaming_diff::{CharOperation, StreamingDiff};
27use util::debug_panic;
28use zed_llm_client::CompletionIntent;
29
30#[derive(Serialize)]
31struct CreateFilePromptTemplate {
32 path: Option<PathBuf>,
33 edit_description: String,
34}
35
36impl Template for CreateFilePromptTemplate {
37 const TEMPLATE_NAME: &'static str = "create_file_prompt.hbs";
38}
39
40#[derive(Serialize)]
41struct EditFilePromptTemplate {
42 path: Option<PathBuf>,
43 edit_description: String,
44}
45
46impl Template for EditFilePromptTemplate {
47 const TEMPLATE_NAME: &'static str = "edit_file_prompt.hbs";
48}
49
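/// Progress events emitted while an edit is being streamed into a buffer.
///
/// `Edited` is sent each time a batch of changes lands in the buffer, and
/// `OldTextNotFound` is sent when an `<old_text>` block produced by the model
/// cannot be located in the buffer, carrying the text that failed to resolve.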
50#[derive(Clone, Debug, PartialEq, Eq)]
51pub enum EditAgentOutputEvent {
52 Edited,
53 OldTextNotFound(SharedString),
54}
55
56#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
57pub struct EditAgentOutput {
58 pub raw_edits: String,
59 pub parser_metrics: EditParserMetrics,
60}
61
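/// Streams edits produced by a language model into project buffers.
///
/// The agent renders a prompt from the given templates, requests a completion
/// from `model`, and applies the streamed output to a buffer while reporting
/// changes to the `ActionLog` and keeping the project's agent location in sync.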
62#[derive(Clone)]
63pub struct EditAgent {
64 model: Arc<dyn LanguageModel>,
65 action_log: Entity<ActionLog>,
66 project: Entity<Project>,
67 templates: Arc<Templates>,
68}
69
70impl EditAgent {
71 pub fn new(
72 model: Arc<dyn LanguageModel>,
73 project: Entity<Project>,
74 action_log: Entity<ActionLog>,
75 templates: Arc<Templates>,
76 ) -> Self {
77 EditAgent {
78 model,
79 project,
80 action_log,
81 templates,
82 }
83 }
84
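    /// Replaces the entire contents of `buffer` with text streamed from the model.
    ///
    /// Renders the create-file prompt, appends it to `conversation`, and writes the
    /// raw completion into the buffer chunk by chunk. Returns a task that resolves
    /// to the `EditAgentOutput` plus a receiver of progress events.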
85 pub fn overwrite(
86 &self,
87 buffer: Entity<Buffer>,
88 edit_description: String,
89 conversation: &LanguageModelRequest,
90 cx: &mut AsyncApp,
91 ) -> (
92 Task<Result<EditAgentOutput>>,
93 mpsc::UnboundedReceiver<EditAgentOutputEvent>,
94 ) {
95 let this = self.clone();
96 let (events_tx, events_rx) = mpsc::unbounded();
97 let conversation = conversation.clone();
98 let output = cx.spawn(async move |cx| {
99 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
100 let path = cx.update(|cx| snapshot.resolve_file_path(cx, true))?;
101 let prompt = CreateFilePromptTemplate {
102 path,
103 edit_description,
104 }
105 .render(&this.templates)?;
106 let new_chunks = this
107 .request(conversation, CompletionIntent::CreateFile, prompt, cx)
108 .await?;
109
110 let (output, mut inner_events) = this.overwrite_with_chunks(buffer, new_chunks, cx);
111 while let Some(event) = inner_events.next().await {
112 events_tx.unbounded_send(event).ok();
113 }
114 output.await
115 });
116 (output, events_rx)
117 }
118
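    /// Clears the buffer and appends each streamed chunk to it, marking the buffer
    /// as created in the action log and moving the agent location to the end of the
    /// buffer as content arrives.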
119 fn overwrite_with_chunks(
120 &self,
121 buffer: Entity<Buffer>,
122 edit_chunks: impl 'static + Send + Stream<Item = Result<String, LanguageModelCompletionError>>,
123 cx: &mut AsyncApp,
124 ) -> (
125 Task<Result<EditAgentOutput>>,
126 mpsc::UnboundedReceiver<EditAgentOutputEvent>,
127 ) {
128 let (output_events_tx, output_events_rx) = mpsc::unbounded();
129 let this = self.clone();
130 let task = cx.spawn(async move |cx| {
131 this.action_log
132 .update(cx, |log, cx| log.buffer_created(buffer.clone(), cx))?;
133 let output = this
134 .overwrite_with_chunks_internal(buffer, edit_chunks, output_events_tx, cx)
135 .await;
136 this.project
137 .update(cx, |project, cx| project.set_agent_location(None, cx))?;
138 output
139 });
140 (task, output_events_rx)
141 }
142
143 async fn overwrite_with_chunks_internal(
144 &self,
145 buffer: Entity<Buffer>,
146 edit_chunks: impl 'static + Send + Stream<Item = Result<String, LanguageModelCompletionError>>,
147 output_events_tx: mpsc::UnboundedSender<EditAgentOutputEvent>,
148 cx: &mut AsyncApp,
149 ) -> Result<EditAgentOutput> {
150 cx.update(|cx| {
151 buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
152 self.action_log.update(cx, |log, cx| {
153 log.buffer_edited(buffer.clone(), cx);
154 });
155 self.project.update(cx, |project, cx| {
156 project.set_agent_location(
157 Some(AgentLocation {
158 buffer: buffer.downgrade(),
159 position: language::Anchor::MAX,
160 }),
161 cx,
162 )
163 });
164 output_events_tx
165 .unbounded_send(EditAgentOutputEvent::Edited)
166 .ok();
167 })?;
168
169 let mut raw_edits = String::new();
170 pin_mut!(edit_chunks);
171 while let Some(chunk) = edit_chunks.next().await {
172 let chunk = chunk?;
173 raw_edits.push_str(&chunk);
174 cx.update(|cx| {
175 buffer.update(cx, |buffer, cx| buffer.append(chunk, cx));
176 self.action_log
177 .update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
178 self.project.update(cx, |project, cx| {
179 project.set_agent_location(
180 Some(AgentLocation {
181 buffer: buffer.downgrade(),
182 position: language::Anchor::MAX,
183 }),
184 cx,
185 )
186 });
187 })?;
188 output_events_tx
189 .unbounded_send(EditAgentOutputEvent::Edited)
190 .ok();
191 }
192
193 Ok(EditAgentOutput {
194 raw_edits,
195 parser_metrics: EditParserMetrics::default(),
196 })
197 }
198
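    /// Applies model-generated edits to `buffer` without rewriting the whole file.
    ///
    /// Renders the edit-file prompt, appends it to `conversation`, and streams the
    /// completion through the edit parser, applying each `<old_text>`/`<new_text>`
    /// pair as it is received. Returns a task that resolves to the `EditAgentOutput`
    /// plus a receiver of progress events.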
199 pub fn edit(
200 &self,
201 buffer: Entity<Buffer>,
202 edit_description: String,
203 conversation: &LanguageModelRequest,
204 cx: &mut AsyncApp,
205 ) -> (
206 Task<Result<EditAgentOutput>>,
207 mpsc::UnboundedReceiver<EditAgentOutputEvent>,
208 ) {
209 self.project
210 .update(cx, |project, cx| {
211 project.set_agent_location(
212 Some(AgentLocation {
213 buffer: buffer.downgrade(),
214 position: language::Anchor::MIN,
215 }),
216 cx,
217 );
218 })
219 .ok();
220
221 let this = self.clone();
222 let (events_tx, events_rx) = mpsc::unbounded();
223 let conversation = conversation.clone();
224 let output = cx.spawn(async move |cx| {
225 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
226 let path = cx.update(|cx| snapshot.resolve_file_path(cx, true))?;
227 let prompt = EditFilePromptTemplate {
228 path,
229 edit_description,
230 }
231 .render(&this.templates)?;
232 let edit_chunks = this
233 .request(conversation, CompletionIntent::EditFile, prompt, cx)
234 .await?;
235
236 let (output, mut inner_events) = this.apply_edit_chunks(buffer, edit_chunks, cx);
237 while let Some(event) = inner_events.next().await {
238 events_tx.unbounded_send(event).ok();
239 }
240 output.await
241 });
242 (output, events_rx)
243 }
244
245 fn apply_edit_chunks(
246 &self,
247 buffer: Entity<Buffer>,
248 edit_chunks: impl 'static + Send + Stream<Item = Result<String, LanguageModelCompletionError>>,
249 cx: &mut AsyncApp,
250 ) -> (
251 Task<Result<EditAgentOutput>>,
252 mpsc::UnboundedReceiver<EditAgentOutputEvent>,
253 ) {
254 let (output_events_tx, output_events_rx) = mpsc::unbounded();
255 let this = self.clone();
256 let task = cx.spawn(async move |mut cx| {
257 this.action_log
258 .update(cx, |log, cx| log.buffer_read(buffer.clone(), cx))?;
259 let output = this
260 .apply_edit_chunks_internal(buffer, edit_chunks, output_events_tx, &mut cx)
261 .await;
262 this.project
263 .update(cx, |project, cx| project.set_agent_location(None, cx))?;
264 output
265 });
266 (task, output_events_rx)
267 }
268
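    /// Drives the edit loop: for each parsed `<old_text>` block, locates the
    /// corresponding range in the buffer, re-indents the streamed `<new_text>` to
    /// match, diffs it against the old text, and applies the resulting edits in
    /// batches while reporting progress.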
269 async fn apply_edit_chunks_internal(
270 &self,
271 buffer: Entity<Buffer>,
272 edit_chunks: impl 'static + Send + Stream<Item = Result<String, LanguageModelCompletionError>>,
273 output_events: mpsc::UnboundedSender<EditAgentOutputEvent>,
274 cx: &mut AsyncApp,
275 ) -> Result<EditAgentOutput> {
276 let (output, mut edit_events) = Self::parse_edit_chunks(edit_chunks, cx);
277 while let Some(edit_event) = edit_events.next().await {
278 let EditParserEvent::OldText(old_text_query) = edit_event? else {
279 continue;
280 };
281
282 // Skip edits with an empty old text.
283 if old_text_query.is_empty() {
284 continue;
285 }
286
287 let old_text_query = SharedString::from(old_text_query);
288
289 let (edits_tx, edits_rx) = mpsc::unbounded();
290 let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
291 let old_range = cx
292 .background_spawn({
293 let snapshot = snapshot.clone();
294 let old_text_query = old_text_query.clone();
295 async move { Self::resolve_location(&snapshot, &old_text_query) }
296 })
297 .await;
298 let Some(old_range) = old_range else {
299 // We couldn't find the old text in the buffer. Report the error.
300 output_events
301 .unbounded_send(EditAgentOutputEvent::OldTextNotFound(old_text_query))
302 .ok();
303 continue;
304 };
305
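            // Compute the edits on a background thread: derive an indentation delta
            // from the first line of the match, re-indent the incoming new text, and
            // stream a character-level diff against the old text, sending anchored
            // edits over the channel as they are produced.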
306 let compute_edits = cx.background_spawn(async move {
307 let buffer_start_indent =
308 snapshot.line_indent_for_row(snapshot.offset_to_point(old_range.start).row);
309 let old_text_start_indent = old_text_query
310 .lines()
311 .next()
312 .map_or(buffer_start_indent, |line| {
313 LineIndent::from_iter(line.chars())
314 });
315 let indent_delta = if buffer_start_indent.tabs > 0 {
316 IndentDelta::Tabs(
317 buffer_start_indent.tabs as isize - old_text_start_indent.tabs as isize,
318 )
319 } else {
320 IndentDelta::Spaces(
321 buffer_start_indent.spaces as isize - old_text_start_indent.spaces as isize,
322 )
323 };
324
325 let old_text = snapshot
326 .text_for_range(old_range.clone())
327 .collect::<String>();
328 let mut diff = StreamingDiff::new(old_text);
329 let mut edit_start = old_range.start;
330 let mut new_text_chunks =
331 Self::reindent_new_text_chunks(indent_delta, &mut edit_events);
332 let mut done = false;
333 while !done {
334 let char_operations = if let Some(new_text_chunk) = new_text_chunks.next().await
335 {
336 diff.push_new(&new_text_chunk?)
337 } else {
338 done = true;
339 mem::take(&mut diff).finish()
340 };
341
342 for op in char_operations {
343 match op {
344 CharOperation::Insert { text } => {
345 let edit_start = snapshot.anchor_after(edit_start);
346 edits_tx
347 .unbounded_send((edit_start..edit_start, Arc::from(text)))?;
348 }
349 CharOperation::Delete { bytes } => {
350 let edit_end = edit_start + bytes;
351 let edit_range = snapshot.anchor_after(edit_start)
352 ..snapshot.anchor_before(edit_end);
353 edit_start = edit_end;
354 edits_tx.unbounded_send((edit_range, Arc::from("")))?;
355 }
356 CharOperation::Keep { bytes } => edit_start += bytes,
357 }
358 }
359 }
360
361 drop(new_text_chunks);
362 anyhow::Ok(edit_events)
363 });
364
365 // TODO: group all edits into one transaction
366 let mut edits_rx = edits_rx.ready_chunks(32);
367 while let Some(edits) = edits_rx.next().await {
368 if edits.is_empty() {
369 continue;
370 }
371
                // Edit the buffer and report the edits to the action log as part of
                // the same effect cycle; otherwise the edits would be attributed to
                // the user rather than the agent.
375 cx.update(|cx| {
376 let max_edit_end = buffer.update(cx, |buffer, cx| {
377 buffer.edit(edits.iter().cloned(), None, cx);
378 let max_edit_end = buffer
379 .summaries_for_anchors::<Point, _>(
380 edits.iter().map(|(range, _)| &range.end),
381 )
382 .max()
383 .unwrap();
384 buffer.anchor_before(max_edit_end)
385 });
386 self.action_log
387 .update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
388 self.project.update(cx, |project, cx| {
389 project.set_agent_location(
390 Some(AgentLocation {
391 buffer: buffer.downgrade(),
392 position: max_edit_end,
393 }),
394 cx,
395 );
396 });
397 })?;
398 output_events
399 .unbounded_send(EditAgentOutputEvent::Edited)
400 .ok();
401 }
402
403 edit_events = compute_edits.await?;
404 }
405
406 output.await
407 }
408
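    /// Runs the `EditParser` over the raw completion stream on a background thread,
    /// forwarding parsed events through the returned receiver and accumulating the
    /// raw text for the final `EditAgentOutput`.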
409 fn parse_edit_chunks(
410 chunks: impl 'static + Send + Stream<Item = Result<String, LanguageModelCompletionError>>,
411 cx: &mut AsyncApp,
412 ) -> (
413 Task<Result<EditAgentOutput>>,
414 UnboundedReceiver<Result<EditParserEvent>>,
415 ) {
416 let (tx, rx) = mpsc::unbounded();
417 let output = cx.background_spawn(async move {
418 pin_mut!(chunks);
419
420 let mut parser = EditParser::new();
421 let mut raw_edits = String::new();
422 while let Some(chunk) = chunks.next().await {
423 match chunk {
424 Ok(chunk) => {
425 raw_edits.push_str(&chunk);
426 for event in parser.push(&chunk) {
427 tx.unbounded_send(Ok(event))?;
428 }
429 }
430 Err(error) => {
431 tx.unbounded_send(Err(error.into()))?;
432 }
433 }
434 }
435 Ok(EditAgentOutput {
436 raw_edits,
437 parser_metrics: parser.finish(),
438 })
439 });
440 (output, rx)
441 }
442
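    /// Adjusts the leading whitespace of the streamed new text by `delta`.
    ///
    /// Lines are processed as soon as their leading whitespace is complete; a line
    /// whose indentation hasn't fully arrived yet is buffered until the next chunk
    /// (or the end of the stream) so the adjustment is applied only once per line.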
443 fn reindent_new_text_chunks(
444 delta: IndentDelta,
445 mut stream: impl Unpin + Stream<Item = Result<EditParserEvent>>,
446 ) -> impl Stream<Item = Result<String>> {
447 let mut buffer = String::new();
448 let mut in_leading_whitespace = true;
449 let mut done = false;
450 futures::stream::poll_fn(move |cx| {
451 while !done {
452 let (chunk, is_last_chunk) = match stream.poll_next_unpin(cx) {
453 Poll::Ready(Some(Ok(EditParserEvent::NewTextChunk { chunk, done }))) => {
454 (chunk, done)
455 }
456 Poll::Ready(Some(Err(err))) => return Poll::Ready(Some(Err(err))),
457 Poll::Pending => return Poll::Pending,
458 _ => return Poll::Ready(None),
459 };
460
461 buffer.push_str(&chunk);
462
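                // Walk the buffered text line by line: rewrite each complete line's
                // leading indentation according to the delta (whitespace-only lines
                // pass through unchanged), and keep any incomplete trailing line in
                // the buffer for the next poll.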
463 let mut indented_new_text = String::new();
464 let mut start_ix = 0;
465 let mut newlines = buffer.match_indices('\n').peekable();
466 loop {
467 let (line_end, is_pending_line) = match newlines.next() {
468 Some((ix, _)) => (ix, false),
469 None => (buffer.len(), true),
470 };
471 let line = &buffer[start_ix..line_end];
472
473 if in_leading_whitespace {
474 if let Some(non_whitespace_ix) = line.find(|c| delta.character() != c) {
                            // We found the first character that isn't part of the
                            // indentation; adjust the leading run by the delta.
477 let new_indent_len =
478 cmp::max(0, non_whitespace_ix as isize + delta.len()) as usize;
479 indented_new_text
480 .extend(iter::repeat(delta.character()).take(new_indent_len));
481 indented_new_text.push_str(&line[non_whitespace_ix..]);
482 in_leading_whitespace = false;
483 } else if is_pending_line {
484 // We're still in leading whitespace and this line is incomplete.
485 // Stop processing until we receive more input.
486 break;
487 } else {
                            // This line is entirely whitespace. Push it through unchanged.
489 indented_new_text.push_str(line);
490 }
491 } else {
492 indented_new_text.push_str(line);
493 }
494
495 if is_pending_line {
496 start_ix = line_end;
497 break;
498 } else {
499 in_leading_whitespace = true;
500 indented_new_text.push('\n');
501 start_ix = line_end + 1;
502 }
503 }
504 buffer.replace_range(..start_ix, "");
505
                // This was the last chunk; push any remaining buffered content as-is.
507 if is_last_chunk {
508 indented_new_text.push_str(&buffer);
509 buffer.clear();
510 done = true;
511 }
512
513 if !indented_new_text.is_empty() {
514 return Poll::Ready(Some(Ok(indented_new_text)));
515 }
516 }
517
518 Poll::Ready(None)
519 })
520 }
521
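    /// Builds the completion request for an edit: strips any pending tool uses from a
    /// trailing assistant message (moving its cache flag to the previous message when
    /// the content changed), appends the rendered prompt as a user message, and
    /// streams the resulting completion as text chunks.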
522 async fn request(
523 &self,
524 mut conversation: LanguageModelRequest,
525 intent: CompletionIntent,
526 prompt: String,
527 cx: &mut AsyncApp,
528 ) -> Result<BoxStream<'static, Result<String, LanguageModelCompletionError>>> {
529 let mut messages_iter = conversation.messages.iter_mut();
530 if let Some(last_message) = messages_iter.next_back() {
531 if last_message.role == Role::Assistant {
532 let old_content_len = last_message.content.len();
533 last_message
534 .content
535 .retain(|content| !matches!(content, MessageContent::ToolUse(_)));
536 let new_content_len = last_message.content.len();
537
                // We just removed pending tool uses from the content of the
                // last message, so it no longer makes sense to cache it (the
                // message will look very different on the next request).
                // Instead, move the cache flag to the previous message, which
                // is still a valid prefix of the conversation.
543 if old_content_len != new_content_len && last_message.cache {
544 if let Some(prev_message) = messages_iter.next_back() {
545 last_message.cache = false;
546 prev_message.cache = true;
547 }
548 }
549
550 if last_message.content.is_empty() {
551 conversation.messages.pop();
552 }
553 } else {
554 debug_panic!(
555 "Last message must be an Assistant tool calling! Got {:?}",
556 last_message.content
557 );
558 }
559 }
560
561 conversation.messages.push(LanguageModelRequestMessage {
562 role: Role::User,
563 content: vec![MessageContent::Text(prompt)],
564 cache: false,
565 });
566
        // Include the conversation's tools in the request so that we can still take
        // advantage of prompt caching, but only when the model supports
        // `LanguageModelToolChoice::None`, so it won't actually invoke them.
569 let mut tool_choice = None;
570 let mut tools = Vec::new();
571 if !conversation.tools.is_empty()
572 && self
573 .model
574 .supports_tool_choice(LanguageModelToolChoice::None)
575 {
576 tool_choice = Some(LanguageModelToolChoice::None);
577 tools = conversation.tools.clone();
578 }
579
580 let request = LanguageModelRequest {
581 thread_id: conversation.thread_id,
582 prompt_id: conversation.prompt_id,
583 intent: Some(intent),
584 mode: conversation.mode,
585 messages: conversation.messages,
586 tool_choice,
587 tools,
588 stop: Vec::new(),
589 temperature: None,
590 };
591
592 Ok(self.model.stream_completion_text(request, cx).await?.stream)
593 }
594
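    /// Locates `search_query` in the buffer, trying an exact match first and falling
    /// back to a fuzzy, line-based match. The returned range is expanded to cover
    /// entire lines.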
595 fn resolve_location(buffer: &BufferSnapshot, search_query: &str) -> Option<Range<usize>> {
596 let range = Self::resolve_location_exact(buffer, search_query)
597 .or_else(|| Self::resolve_location_fuzzy(buffer, search_query))?;
598
599 // Expand the range to include entire lines.
600 let mut start = buffer.offset_to_point(buffer.clip_offset(range.start, Bias::Left));
601 start.column = 0;
602 let mut end = buffer.offset_to_point(buffer.clip_offset(range.end, Bias::Right));
603 if end.column > 0 {
604 end.column = buffer.line_len(end.row);
605 }
606
607 Some(buffer.point_to_offset(start)..buffer.point_to_offset(end))
608 }
609
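    /// Finds the first exact occurrence of `search_query` in the buffer.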
610 fn resolve_location_exact(buffer: &BufferSnapshot, search_query: &str) -> Option<Range<usize>> {
611 let search = AhoCorasick::new([search_query]).ok()?;
612 let mat = search
613 .stream_find_iter(buffer.bytes_in_range(0..buffer.len()))
614 .next()?
615 .expect("buffer can't error");
616 Some(mat.range())
617 }
618
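    /// Fuzzy, line-oriented search using dynamic programming over (query line, buffer
    /// line) pairs. Skipping a buffer line costs `INSERTION_COST` while dropping a
    /// query line costs `DELETION_COST`, making it cheaper to step over extra buffer
    /// lines than to leave query lines unmatched. The cheapest alignment ending at
    /// the last query row is accepted only if at least 80% of its lines matched.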
619 fn resolve_location_fuzzy(buffer: &BufferSnapshot, search_query: &str) -> Option<Range<usize>> {
620 const INSERTION_COST: u32 = 3;
621 const DELETION_COST: u32 = 10;
622
623 let buffer_line_count = buffer.max_point().row as usize + 1;
624 let query_line_count = search_query.lines().count();
625 let mut matrix = SearchMatrix::new(query_line_count + 1, buffer_line_count + 1);
626 let mut leading_deletion_cost = 0_u32;
627 for (row, query_line) in search_query.lines().enumerate() {
628 let query_line = query_line.trim();
629 leading_deletion_cost = leading_deletion_cost.saturating_add(DELETION_COST);
630 matrix.set(
631 row + 1,
632 0,
633 SearchState::new(leading_deletion_cost, SearchDirection::Diagonal),
634 );
635
636 let mut buffer_lines = buffer.as_rope().chunks().lines();
637 let mut col = 0;
638 while let Some(buffer_line) = buffer_lines.next() {
639 let buffer_line = buffer_line.trim();
640 let up = SearchState::new(
641 matrix.get(row, col + 1).cost.saturating_add(DELETION_COST),
642 SearchDirection::Up,
643 );
644 let left = SearchState::new(
645 matrix.get(row + 1, col).cost.saturating_add(INSERTION_COST),
646 SearchDirection::Left,
647 );
648 let diagonal = SearchState::new(
649 if fuzzy_eq(query_line, buffer_line) {
650 matrix.get(row, col).cost
651 } else {
652 matrix
653 .get(row, col)
654 .cost
655 .saturating_add(DELETION_COST + INSERTION_COST)
656 },
657 SearchDirection::Diagonal,
658 );
659 matrix.set(row + 1, col + 1, up.min(left).min(diagonal));
660 col += 1;
661 }
662 }
663
        // Trace back through the matrix to find the buffer range that best matches the query.
665 let mut buffer_row_end = buffer_line_count as u32;
666 let mut best_cost = u32::MAX;
667 for col in 1..=buffer_line_count {
668 let cost = matrix.get(query_line_count, col).cost;
669 if cost < best_cost {
670 best_cost = cost;
671 buffer_row_end = col as u32;
672 }
673 }
674
675 let mut matched_lines = 0;
676 let mut query_row = query_line_count;
677 let mut buffer_row_start = buffer_row_end;
678 while query_row > 0 && buffer_row_start > 0 {
679 let current = matrix.get(query_row, buffer_row_start as usize);
680 match current.direction {
681 SearchDirection::Diagonal => {
682 query_row -= 1;
683 buffer_row_start -= 1;
684 matched_lines += 1;
685 }
686 SearchDirection::Up => {
687 query_row -= 1;
688 }
689 SearchDirection::Left => {
690 buffer_row_start -= 1;
691 }
692 }
693 }
694
695 let matched_buffer_row_count = buffer_row_end - buffer_row_start;
696 let matched_ratio =
697 matched_lines as f32 / (matched_buffer_row_count as f32).max(query_line_count as f32);
698 if matched_ratio >= 0.8 {
699 let buffer_start_ix = buffer.point_to_offset(Point::new(buffer_row_start, 0));
700 let buffer_end_ix = buffer.point_to_offset(Point::new(
701 buffer_row_end - 1,
702 buffer.line_len(buffer_row_end - 1),
703 ));
704 Some(buffer_start_ix..buffer_end_ix)
705 } else {
706 None
707 }
708 }
709}
710
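/// Returns whether two lines are similar enough to count as a fuzzy match.
///
/// The length difference gives a lower bound on the Levenshtein distance, so lines
/// that cannot possibly reach the similarity threshold are rejected before computing
/// the full normalized Levenshtein distance.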
711fn fuzzy_eq(left: &str, right: &str) -> bool {
712 const THRESHOLD: f64 = 0.8;
713
714 let min_levenshtein = left.len().abs_diff(right.len());
715 let min_normalized_levenshtein =
716 1. - (min_levenshtein as f64 / cmp::max(left.len(), right.len()) as f64);
717 if min_normalized_levenshtein < THRESHOLD {
718 return false;
719 }
720
721 strsim::normalized_levenshtein(left, right) >= THRESHOLD
722}
723
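/// A signed adjustment to apply to each line's leading whitespace, expressed in
/// either spaces or tabs depending on how the matched buffer text is indented.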
724#[derive(Copy, Clone, Debug)]
725enum IndentDelta {
726 Spaces(isize),
727 Tabs(isize),
728}
729
730impl IndentDelta {
731 fn character(&self) -> char {
732 match self {
733 IndentDelta::Spaces(_) => ' ',
734 IndentDelta::Tabs(_) => '\t',
735 }
736 }
737
738 fn len(&self) -> isize {
739 match self {
740 IndentDelta::Spaces(n) => *n,
741 IndentDelta::Tabs(n) => *n,
742 }
743 }
744}
745
746#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
747enum SearchDirection {
748 Up,
749 Left,
750 Diagonal,
751}
752
753#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
754struct SearchState {
755 cost: u32,
756 direction: SearchDirection,
757}
758
759impl SearchState {
760 fn new(cost: u32, direction: SearchDirection) -> Self {
761 Self { cost, direction }
762 }
763}
764
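/// Row-major cost matrix for the fuzzy search, storing the best cost of aligning a
/// prefix of the query with a prefix of the buffer along with the direction to
/// trace back from.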
765struct SearchMatrix {
766 cols: usize,
767 data: Vec<SearchState>,
768}
769
770impl SearchMatrix {
771 fn new(rows: usize, cols: usize) -> Self {
772 SearchMatrix {
773 cols,
774 data: vec![SearchState::new(0, SearchDirection::Diagonal); rows * cols],
775 }
776 }
777
778 fn get(&self, row: usize, col: usize) -> SearchState {
779 self.data[row * self.cols + col]
780 }
781
782 fn set(&mut self, row: usize, col: usize, cost: SearchState) {
783 self.data[row * self.cols + col] = cost;
784 }
785}
786
787#[cfg(test)]
788mod tests {
789 use super::*;
790 use fs::FakeFs;
791 use futures::stream;
792 use gpui::{App, AppContext, TestAppContext};
793 use indoc::indoc;
794 use language_model::fake_provider::FakeLanguageModel;
795 use project::{AgentLocation, Project};
796 use rand::prelude::*;
797 use rand::rngs::StdRng;
798 use std::cmp;
799 use unindent::Unindent;
800 use util::test::{generate_marked_text, marked_text_ranges};
801
802 #[gpui::test(iterations = 100)]
803 async fn test_empty_old_text(cx: &mut TestAppContext, mut rng: StdRng) {
804 let agent = init_test(cx).await;
805 let buffer = cx.new(|cx| {
806 Buffer::local(
807 indoc! {"
808 abc
809 def
810 ghi
811 "},
812 cx,
813 )
814 });
815 let raw_edits = simulate_llm_output(
816 indoc! {"
817 <old_text></old_text>
818 <new_text>jkl</new_text>
819 <old_text>def</old_text>
820 <new_text>DEF</new_text>
821 "},
822 &mut rng,
823 cx,
824 );
825 let (apply, _events) =
826 agent.apply_edit_chunks(buffer.clone(), raw_edits, &mut cx.to_async());
827 apply.await.unwrap();
828 pretty_assertions::assert_eq!(
829 buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
830 indoc! {"
831 abc
832 DEF
833 ghi
834 "}
835 );
836 }
837
838 #[gpui::test(iterations = 100)]
839 async fn test_indentation(cx: &mut TestAppContext, mut rng: StdRng) {
840 let agent = init_test(cx).await;
841 let buffer = cx.new(|cx| {
842 Buffer::local(
843 indoc! {"
844 lorem
845 ipsum
846 dolor
847 sit
848 "},
849 cx,
850 )
851 });
852 let raw_edits = simulate_llm_output(
853 indoc! {"
854 <old_text>
855 ipsum
856 dolor
857 sit
858 </old_text>
859 <new_text>
860 ipsum
861 dolor
862 sit
863 amet
864 </new_text>
865 "},
866 &mut rng,
867 cx,
868 );
869 let (apply, _events) =
870 agent.apply_edit_chunks(buffer.clone(), raw_edits, &mut cx.to_async());
871 apply.await.unwrap();
872 pretty_assertions::assert_eq!(
873 buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
874 indoc! {"
875 lorem
876 ipsum
877 dolor
878 sit
879 amet
880 "}
881 );
882 }
883
884 #[gpui::test(iterations = 100)]
885 async fn test_dependent_edits(cx: &mut TestAppContext, mut rng: StdRng) {
886 let agent = init_test(cx).await;
887 let buffer = cx.new(|cx| Buffer::local("abc\ndef\nghi", cx));
888 let raw_edits = simulate_llm_output(
889 indoc! {"
890 <old_text>
891 def
892 </old_text>
893 <new_text>
894 DEF
895 </new_text>
896
897 <old_text>
898 DEF
899 </old_text>
900 <new_text>
901 DeF
902 </new_text>
903 "},
904 &mut rng,
905 cx,
906 );
907 let (apply, _events) =
908 agent.apply_edit_chunks(buffer.clone(), raw_edits, &mut cx.to_async());
909 apply.await.unwrap();
910 assert_eq!(
911 buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
912 "abc\nDeF\nghi"
913 );
914 }
915
916 #[gpui::test(iterations = 100)]
917 async fn test_old_text_hallucination(cx: &mut TestAppContext, mut rng: StdRng) {
918 let agent = init_test(cx).await;
919 let buffer = cx.new(|cx| Buffer::local("abc\ndef\nghi", cx));
920 let raw_edits = simulate_llm_output(
921 indoc! {"
922 <old_text>
923 jkl
924 </old_text>
925 <new_text>
926 mno
927 </new_text>
928
929 <old_text>
930 abc
931 </old_text>
932 <new_text>
933 ABC
934 </new_text>
935 "},
936 &mut rng,
937 cx,
938 );
939 let (apply, _events) =
940 agent.apply_edit_chunks(buffer.clone(), raw_edits, &mut cx.to_async());
941 apply.await.unwrap();
942 assert_eq!(
943 buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
944 "ABC\ndef\nghi"
945 );
946 }
947
948 #[gpui::test]
949 async fn test_edit_events(cx: &mut TestAppContext) {
950 let agent = init_test(cx).await;
951 let project = agent
952 .action_log
953 .read_with(cx, |log, _| log.project().clone());
954 let buffer = cx.new(|cx| Buffer::local("abc\ndef\nghi", cx));
955 let (chunks_tx, chunks_rx) = mpsc::unbounded();
956 let (apply, mut events) = agent.apply_edit_chunks(
957 buffer.clone(),
958 chunks_rx.map(|chunk: &str| Ok(chunk.to_string())),
959 &mut cx.to_async(),
960 );
961
962 chunks_tx.unbounded_send("<old_text>a").unwrap();
963 cx.run_until_parked();
964 assert_eq!(drain_events(&mut events), vec![]);
965 assert_eq!(
966 buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
967 "abc\ndef\nghi"
968 );
969 assert_eq!(
970 project.read_with(cx, |project, _| project.agent_location()),
971 None
972 );
973
974 chunks_tx.unbounded_send("bc</old_text>").unwrap();
975 cx.run_until_parked();
976 assert_eq!(drain_events(&mut events), vec![]);
977 assert_eq!(
978 buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
979 "abc\ndef\nghi"
980 );
981 assert_eq!(
982 project.read_with(cx, |project, _| project.agent_location()),
983 None
984 );
985
986 chunks_tx.unbounded_send("<new_text>abX").unwrap();
987 cx.run_until_parked();
988 assert_eq!(drain_events(&mut events), [EditAgentOutputEvent::Edited]);
989 assert_eq!(
990 buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
991 "abXc\ndef\nghi"
992 );
993 assert_eq!(
994 project.read_with(cx, |project, _| project.agent_location()),
995 Some(AgentLocation {
996 buffer: buffer.downgrade(),
997 position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(0, 3)))
998 })
999 );
1000
1001 chunks_tx.unbounded_send("cY").unwrap();
1002 cx.run_until_parked();
1003 assert_eq!(drain_events(&mut events), [EditAgentOutputEvent::Edited]);
1004 assert_eq!(
1005 buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
1006 "abXcY\ndef\nghi"
1007 );
1008 assert_eq!(
1009 project.read_with(cx, |project, _| project.agent_location()),
1010 Some(AgentLocation {
1011 buffer: buffer.downgrade(),
1012 position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(0, 5)))
1013 })
1014 );
1015
1016 chunks_tx.unbounded_send("</new_text>").unwrap();
1017 chunks_tx.unbounded_send("<old_text>hall").unwrap();
1018 cx.run_until_parked();
1019 assert_eq!(drain_events(&mut events), vec![]);
1020 assert_eq!(
1021 buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
1022 "abXcY\ndef\nghi"
1023 );
1024 assert_eq!(
1025 project.read_with(cx, |project, _| project.agent_location()),
1026 Some(AgentLocation {
1027 buffer: buffer.downgrade(),
1028 position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(0, 5)))
1029 })
1030 );
1031
1032 chunks_tx.unbounded_send("ucinated old</old_text>").unwrap();
1033 chunks_tx.unbounded_send("<new_text>").unwrap();
1034 cx.run_until_parked();
1035 assert_eq!(
1036 drain_events(&mut events),
1037 vec![EditAgentOutputEvent::OldTextNotFound(
1038 "hallucinated old".into()
1039 )]
1040 );
1041 assert_eq!(
1042 buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
1043 "abXcY\ndef\nghi"
1044 );
1045 assert_eq!(
1046 project.read_with(cx, |project, _| project.agent_location()),
1047 Some(AgentLocation {
1048 buffer: buffer.downgrade(),
1049 position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(0, 5)))
1050 })
1051 );
1052
1053 chunks_tx.unbounded_send("hallucinated new</new_").unwrap();
1054 chunks_tx.unbounded_send("text>").unwrap();
1055 cx.run_until_parked();
1056 assert_eq!(drain_events(&mut events), vec![]);
1057 assert_eq!(
1058 buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
1059 "abXcY\ndef\nghi"
1060 );
1061 assert_eq!(
1062 project.read_with(cx, |project, _| project.agent_location()),
1063 Some(AgentLocation {
1064 buffer: buffer.downgrade(),
1065 position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(0, 5)))
1066 })
1067 );
1068
1069 chunks_tx.unbounded_send("<old_text>gh").unwrap();
1070 chunks_tx.unbounded_send("i</old_text>").unwrap();
1071 chunks_tx.unbounded_send("<new_text>").unwrap();
1072 cx.run_until_parked();
1073 assert_eq!(drain_events(&mut events), vec![]);
1074 assert_eq!(
1075 buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
1076 "abXcY\ndef\nghi"
1077 );
1078 assert_eq!(
1079 project.read_with(cx, |project, _| project.agent_location()),
1080 Some(AgentLocation {
1081 buffer: buffer.downgrade(),
1082 position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(0, 5)))
1083 })
1084 );
1085
1086 chunks_tx.unbounded_send("GHI</new_text>").unwrap();
1087 cx.run_until_parked();
1088 assert_eq!(
1089 drain_events(&mut events),
1090 vec![EditAgentOutputEvent::Edited]
1091 );
1092 assert_eq!(
1093 buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
1094 "abXcY\ndef\nGHI"
1095 );
1096 assert_eq!(
1097 project.read_with(cx, |project, _| project.agent_location()),
1098 Some(AgentLocation {
1099 buffer: buffer.downgrade(),
1100 position: buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(2, 3)))
1101 })
1102 );
1103
1104 drop(chunks_tx);
1105 apply.await.unwrap();
1106 assert_eq!(
1107 buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
1108 "abXcY\ndef\nGHI"
1109 );
1110 assert_eq!(drain_events(&mut events), vec![]);
1111 assert_eq!(
1112 project.read_with(cx, |project, _| project.agent_location()),
1113 None
1114 );
1115 }
1116
1117 #[gpui::test]
1118 async fn test_overwrite_events(cx: &mut TestAppContext) {
1119 let agent = init_test(cx).await;
1120 let project = agent
1121 .action_log
1122 .read_with(cx, |log, _| log.project().clone());
1123 let buffer = cx.new(|cx| Buffer::local("abc\ndef\nghi", cx));
1124 let (chunks_tx, chunks_rx) = mpsc::unbounded();
1125 let (apply, mut events) = agent.overwrite_with_chunks(
1126 buffer.clone(),
1127 chunks_rx.map(|chunk: &str| Ok(chunk.to_string())),
1128 &mut cx.to_async(),
1129 );
1130
1131 cx.run_until_parked();
1132 assert_eq!(
1133 drain_events(&mut events),
1134 vec![EditAgentOutputEvent::Edited]
1135 );
1136 assert_eq!(
1137 buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
1138 ""
1139 );
1140 assert_eq!(
1141 project.read_with(cx, |project, _| project.agent_location()),
1142 Some(AgentLocation {
1143 buffer: buffer.downgrade(),
1144 position: language::Anchor::MAX
1145 })
1146 );
1147
1148 chunks_tx.unbounded_send("jkl\n").unwrap();
1149 cx.run_until_parked();
1150 assert_eq!(
1151 drain_events(&mut events),
1152 vec![EditAgentOutputEvent::Edited]
1153 );
1154 assert_eq!(
1155 buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
1156 "jkl\n"
1157 );
1158 assert_eq!(
1159 project.read_with(cx, |project, _| project.agent_location()),
1160 Some(AgentLocation {
1161 buffer: buffer.downgrade(),
1162 position: language::Anchor::MAX
1163 })
1164 );
1165
1166 chunks_tx.unbounded_send("mno\n").unwrap();
1167 cx.run_until_parked();
1168 assert_eq!(
1169 drain_events(&mut events),
1170 vec![EditAgentOutputEvent::Edited]
1171 );
1172 assert_eq!(
1173 buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
1174 "jkl\nmno\n"
1175 );
1176 assert_eq!(
1177 project.read_with(cx, |project, _| project.agent_location()),
1178 Some(AgentLocation {
1179 buffer: buffer.downgrade(),
1180 position: language::Anchor::MAX
1181 })
1182 );
1183
1184 chunks_tx.unbounded_send("pqr").unwrap();
1185 cx.run_until_parked();
1186 assert_eq!(
1187 drain_events(&mut events),
1188 vec![EditAgentOutputEvent::Edited]
1189 );
1190 assert_eq!(
1191 buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
1192 "jkl\nmno\npqr"
1193 );
1194 assert_eq!(
1195 project.read_with(cx, |project, _| project.agent_location()),
1196 Some(AgentLocation {
1197 buffer: buffer.downgrade(),
1198 position: language::Anchor::MAX
1199 })
1200 );
1201
1202 drop(chunks_tx);
1203 apply.await.unwrap();
1204 assert_eq!(
1205 buffer.read_with(cx, |buffer, _| buffer.snapshot().text()),
1206 "jkl\nmno\npqr"
1207 );
1208 assert_eq!(drain_events(&mut events), vec![]);
1209 assert_eq!(
1210 project.read_with(cx, |project, _| project.agent_location()),
1211 None
1212 );
1213 }
1214
1215 #[gpui::test]
1216 fn test_resolve_location(cx: &mut App) {
1217 assert_location_resolution(
1218 concat!(
1219 " Lorem\n",
1220 "« ipsum»\n",
1221 " dolor sit amet\n",
1222 " consecteur",
1223 ),
1224 "ipsum",
1225 cx,
1226 );
1227
1228 assert_location_resolution(
1229 concat!(
1230 " Lorem\n",
1231 "« ipsum\n",
1232 " dolor sit amet»\n",
1233 " consecteur",
1234 ),
1235 "ipsum\ndolor sit amet",
1236 cx,
1237 );
1238
1239 assert_location_resolution(
1240 &"
1241 «fn foo1(a: usize) -> usize {
1242 40
1243 }»
1244
1245 fn foo2(b: usize) -> usize {
1246 42
1247 }
1248 "
1249 .unindent(),
1250 "fn foo1(a: usize) -> u32 {\n40\n}",
1251 cx,
1252 );
1253
1254 assert_location_resolution(
1255 &"
1256 class Something {
1257 one() { return 1; }
1258 « two() { return 2222; }
1259 three() { return 333; }
1260 four() { return 4444; }
1261 five() { return 5555; }
1262 six() { return 6666; }»
1263 seven() { return 7; }
1264 eight() { return 8; }
1265 }
1266 "
1267 .unindent(),
1268 &"
1269 two() { return 2222; }
1270 four() { return 4444; }
1271 five() { return 5555; }
1272 six() { return 6666; }
1273 "
1274 .unindent(),
1275 cx,
1276 );
1277
1278 assert_location_resolution(
1279 &"
1280 use std::ops::Range;
1281 use std::sync::Mutex;
1282 use std::{
1283 collections::HashMap,
1284 env,
1285 ffi::{OsStr, OsString},
1286 fs,
1287 io::{BufRead, BufReader},
1288 mem,
1289 path::{Path, PathBuf},
1290 process::Command,
1291 sync::LazyLock,
1292 time::SystemTime,
1293 };
1294 "
1295 .unindent(),
1296 &"
1297 use std::collections::{HashMap, HashSet};
1298 use std::ffi::{OsStr, OsString};
1299 use std::fmt::Write as _;
1300 use std::fs;
1301 use std::io::{BufReader, Read, Write};
1302 use std::mem;
1303 use std::path::{Path, PathBuf};
1304 use std::process::Command;
1305 use std::sync::Arc;
1306 "
1307 .unindent(),
1308 cx,
1309 );
1310
1311 assert_location_resolution(
1312 indoc! {"
1313 impl Foo {
1314 fn new() -> Self {
1315 Self {
1316 subscriptions: vec![
1317 cx.observe_window_activation(window, |editor, window, cx| {
1318 let active = window.is_window_active();
1319 editor.blink_manager.update(cx, |blink_manager, cx| {
1320 if active {
1321 blink_manager.enable(cx);
1322 } else {
1323 blink_manager.disable(cx);
1324 }
1325 });
1326 }),
1327 ];
1328 }
1329 }
1330 }
1331 "},
1332 concat!(
1333 " editor.blink_manager.update(cx, |blink_manager, cx| {\n",
1334 " blink_manager.enable(cx);\n",
1335 " });",
1336 ),
1337 cx,
1338 );
1339
1340 assert_location_resolution(
1341 indoc! {r#"
1342 let tool = cx
1343 .update(|cx| working_set.tool(&tool_name, cx))
1344 .map_err(|err| {
1345 anyhow!("Failed to look up tool '{}': {}", tool_name, err)
1346 })?;
1347
1348 let Some(tool) = tool else {
1349 return Err(anyhow!("Tool '{}' not found", tool_name));
1350 };
1351
1352 let project = project.clone();
1353 let action_log = action_log.clone();
1354 let messages = messages.clone();
1355 let tool_result = cx
1356 .update(|cx| tool.run(invocation.input, &messages, project, action_log, cx))
1357 .map_err(|err| anyhow!("Failed to start tool '{}': {}", tool_name, err))?;
1358
1359 tasks.push(tool_result.output);
1360 "#},
1361 concat!(
1362 "let tool_result = cx\n",
1363 " .update(|cx| tool.run(invocation.input, &messages, project, action_log, cx))\n",
1364 " .output;",
1365 ),
1366 cx,
1367 );
1368 }
1369
1370 #[gpui::test(iterations = 100)]
1371 async fn test_indent_new_text_chunks(mut rng: StdRng) {
1372 let chunks = to_random_chunks(&mut rng, " abc\n def\n ghi");
1373 let new_text_chunks = stream::iter(chunks.iter().enumerate().map(|(index, chunk)| {
1374 Ok(EditParserEvent::NewTextChunk {
1375 chunk: chunk.clone(),
1376 done: index == chunks.len() - 1,
1377 })
1378 }));
1379 let indented_chunks =
1380 EditAgent::reindent_new_text_chunks(IndentDelta::Spaces(2), new_text_chunks)
1381 .collect::<Vec<_>>()
1382 .await;
1383 let new_text = indented_chunks
1384 .into_iter()
1385 .collect::<Result<String>>()
1386 .unwrap();
1387 assert_eq!(new_text, " abc\n def\n ghi");
1388 }
1389
1390 #[gpui::test(iterations = 100)]
1391 async fn test_outdent_new_text_chunks(mut rng: StdRng) {
1392 let chunks = to_random_chunks(&mut rng, "\t\t\t\tabc\n\t\tdef\n\t\t\t\t\t\tghi");
1393 let new_text_chunks = stream::iter(chunks.iter().enumerate().map(|(index, chunk)| {
1394 Ok(EditParserEvent::NewTextChunk {
1395 chunk: chunk.clone(),
1396 done: index == chunks.len() - 1,
1397 })
1398 }));
1399 let indented_chunks =
1400 EditAgent::reindent_new_text_chunks(IndentDelta::Tabs(-2), new_text_chunks)
1401 .collect::<Vec<_>>()
1402 .await;
1403 let new_text = indented_chunks
1404 .into_iter()
1405 .collect::<Result<String>>()
1406 .unwrap();
1407 assert_eq!(new_text, "\t\tabc\ndef\n\t\t\t\tghi");
1408 }
1409
1410 #[gpui::test(iterations = 100)]
1411 async fn test_random_indents(mut rng: StdRng) {
1412 let len = rng.gen_range(1..=100);
1413 let new_text = util::RandomCharIter::new(&mut rng)
1414 .with_simple_text()
1415 .take(len)
1416 .collect::<String>();
1417 let new_text = new_text
1418 .split('\n')
1419 .map(|line| format!("{}{}", " ".repeat(rng.gen_range(0..=8)), line))
1420 .collect::<Vec<_>>()
1421 .join("\n");
1422 let delta = IndentDelta::Spaces(rng.gen_range(-4..=4));
1423
1424 let chunks = to_random_chunks(&mut rng, &new_text);
1425 let new_text_chunks = stream::iter(chunks.iter().enumerate().map(|(index, chunk)| {
1426 Ok(EditParserEvent::NewTextChunk {
1427 chunk: chunk.clone(),
1428 done: index == chunks.len() - 1,
1429 })
1430 }));
1431 let reindented_chunks = EditAgent::reindent_new_text_chunks(delta, new_text_chunks)
1432 .collect::<Vec<_>>()
1433 .await;
1434 let actual_reindented_text = reindented_chunks
1435 .into_iter()
1436 .collect::<Result<String>>()
1437 .unwrap();
1438 let expected_reindented_text = new_text
1439 .split('\n')
1440 .map(|line| {
1441 if let Some(ix) = line.find(|c| c != ' ') {
1442 let new_indent = cmp::max(0, ix as isize + delta.len()) as usize;
1443 format!("{}{}", " ".repeat(new_indent), &line[ix..])
1444 } else {
1445 line.to_string()
1446 }
1447 })
1448 .collect::<Vec<_>>()
1449 .join("\n");
1450 assert_eq!(actual_reindented_text, expected_reindented_text);
1451 }
1452
1453 #[track_caller]
1454 fn assert_location_resolution(text_with_expected_range: &str, query: &str, cx: &mut App) {
1455 let (text, _) = marked_text_ranges(text_with_expected_range, false);
1456 let buffer = cx.new(|cx| Buffer::local(text.clone(), cx));
1457 let snapshot = buffer.read(cx).snapshot();
1458 let mut ranges = Vec::new();
1459 ranges.extend(EditAgent::resolve_location(&snapshot, query));
1460 let text_with_actual_range = generate_marked_text(&text, &ranges, false);
1461 pretty_assertions::assert_eq!(text_with_actual_range, text_with_expected_range);
1462 }
1463
1464 fn to_random_chunks(rng: &mut StdRng, input: &str) -> Vec<String> {
1465 let chunk_count = rng.gen_range(1..=cmp::min(input.len(), 50));
1466 let mut chunk_indices = (0..input.len()).choose_multiple(rng, chunk_count);
1467 chunk_indices.sort();
1468 chunk_indices.push(input.len());
1469
1470 let mut chunks = Vec::new();
1471 let mut last_ix = 0;
1472 for chunk_ix in chunk_indices {
1473 chunks.push(input[last_ix..chunk_ix].to_string());
1474 last_ix = chunk_ix;
1475 }
1476 chunks
1477 }
1478
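    /// Splits `output` into random chunks and yields them with random delays,
    /// approximating how a streaming completion arrives from a model.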
1479 fn simulate_llm_output(
1480 output: &str,
1481 rng: &mut StdRng,
1482 cx: &mut TestAppContext,
1483 ) -> impl 'static + Send + Stream<Item = Result<String, LanguageModelCompletionError>> {
1484 let executor = cx.executor();
1485 stream::iter(to_random_chunks(rng, output).into_iter().map(Ok)).then(move |chunk| {
1486 let executor = executor.clone();
1487 async move {
1488 executor.simulate_random_delay().await;
1489 chunk
1490 }
1491 })
1492 }
1493
1494 async fn init_test(cx: &mut TestAppContext) -> EditAgent {
1495 cx.update(settings::init);
1496 cx.update(Project::init_settings);
1497 let project = Project::test(FakeFs::new(cx.executor()), [], cx).await;
1498 let model = Arc::new(FakeLanguageModel::default());
1499 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1500 EditAgent::new(model, project, action_log, Templates::new())
1501 }
1502
1503 fn drain_events(
1504 stream: &mut UnboundedReceiver<EditAgentOutputEvent>,
1505 ) -> Vec<EditAgentOutputEvent> {
1506 let mut events = Vec::new();
1507 while let Ok(Some(event)) = stream.try_next() {
1508 events.push(event);
1509 }
1510 events
1511 }
1512}