1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use clock;
4use collections::{BTreeMap, HashMap};
5use fs::MTime;
6use futures::{FutureExt, StreamExt, channel::mpsc};
7use gpui::{
8 App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
9};
10use language::{Anchor, Buffer, BufferEvent, Point, ToOffset, ToPoint};
11use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
12use std::{
13 cmp,
14 ops::Range,
15 path::{Path, PathBuf},
16 sync::Arc,
17};
18use text::{Edit, Patch, Rope};
19use util::{RangeExt, ResultExt as _};
20
/// Stores undo information for a single buffer's rejected edits
#[derive(Clone)]
pub struct PerBufferUndo {
    /// Weak handle to the buffer; the undo is skipped if it can't be upgraded.
    pub buffer: WeakEntity<Buffer>,
    /// Anchor ranges paired with the agent-authored text to re-insert on undo.
    pub edits_to_restore: Vec<(Range<Anchor>, String)>,
    /// How the agent affected this buffer (modified vs. created).
    pub status: UndoBufferStatus,
}
28
/// Tracks the buffer status for undo purposes
#[derive(Clone, Debug)]
pub enum UndoBufferStatus {
    /// Buffer existed before the agent touched it; individual hunks were rejected.
    Modified,
    /// Buffer was created by the agent.
    /// - `had_existing_content: true` - Agent overwrote an existing file. On reject, the
    ///   original content was restored. Undo is supported: we restore the agent's content.
    /// - `had_existing_content: false` - Agent created a new file that didn't exist before.
    ///   On reject, the file was deleted. Undo is NOT currently supported (would require
    ///   recreating the file). Future TODO.
    Created {
        had_existing_content: bool,
    },
}
43
/// Stores undo information for the most recent reject operation
#[derive(Clone)]
pub struct LastRejectUndo {
    /// Per-buffer undo information, one entry per buffer whose reject can be undone.
    pub buffers: Vec<PerBufferUndo>,
}
50
/// Tracks actions performed by tools in a thread
pub struct ActionLog {
    /// Buffers that we want to notify the model about when they change.
    tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
    /// The project this action log is associated with
    project: Entity<Project>,
    /// An action log to forward all public methods to.
    /// Useful in cases like subagents, where we want to track individual diffs for this subagent,
    /// but also want to associate the reads/writes with a parent review experience
    linked_action_log: Option<Entity<ActionLog>>,
    /// Stores undo information for the most recent reject operation
    /// (cleared once consumed by `undo_last_reject`).
    last_reject_undo: Option<LastRejectUndo>,
    /// Tracks the last time files were read by the agent, keyed by absolute
    /// path, to detect external modifications
    file_read_times: HashMap<PathBuf, MTime>,
}
66
67impl ActionLog {
68 /// Creates a new, empty action log associated with the given project.
69 pub fn new(project: Entity<Project>) -> Self {
70 Self {
71 tracked_buffers: BTreeMap::default(),
72 project,
73 linked_action_log: None,
74 last_reject_undo: None,
75 file_read_times: HashMap::default(),
76 }
77 }
78
79 pub fn with_linked_action_log(mut self, linked_action_log: Entity<ActionLog>) -> Self {
80 self.linked_action_log = Some(linked_action_log);
81 self
82 }
83
    /// The project this action log is associated with.
    pub fn project(&self) -> &Entity<Project> {
        &self.project
    }
87
    /// Returns the on-disk mtime recorded when the agent last read the file at
    /// `path`, if any. Used to detect modifications made outside the agent.
    pub fn file_read_time(&self, path: &Path) -> Option<MTime> {
        self.file_read_times.get(path).copied()
    }
91
92 fn update_file_read_time(&mut self, buffer: &Entity<Buffer>, cx: &App) {
93 let buffer = buffer.read(cx);
94 if let Some(file) = buffer.file() {
95 if let Some(local_file) = file.as_local() {
96 if let Some(mtime) = file.disk_state().mtime() {
97 let abs_path = local_file.abs_path(cx);
98 self.file_read_times.insert(abs_path, mtime);
99 }
100 }
101 }
102 }
103
104 fn remove_file_read_time(&mut self, buffer: &Entity<Buffer>, cx: &App) {
105 let buffer = buffer.read(cx);
106 if let Some(file) = buffer.file() {
107 if let Some(local_file) = file.as_local() {
108 let abs_path = local_file.abs_path(cx);
109 self.file_read_times.remove(&abs_path);
110 }
111 }
112 }
113
    /// Registers `buffer` in `tracked_buffers` (or refreshes an existing
    /// entry) and returns a mutable reference to its tracking state.
    ///
    /// `is_created` marks the buffer as created by the agent; in that case the
    /// diff base starts empty so the whole buffer shows up as one unreviewed
    /// insertion. Re-tracking an already-tracked buffer as created preserves
    /// (or captures) the pre-existing content so a later reject can restore it.
    fn track_buffer_internal(
        &mut self,
        buffer: Entity<Buffer>,
        is_created: bool,
        cx: &mut Context<Self>,
    ) -> &mut TrackedBuffer {
        let status = if is_created {
            // Removing the old entry (if any) forces `or_insert_with` below to
            // rebuild the tracker with the computed `Created` status.
            if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
                match tracked.status {
                    // Already marked created: keep whatever original content we saved.
                    TrackedBufferStatus::Created {
                        existing_file_content,
                    } => TrackedBufferStatus::Created {
                        existing_file_content,
                    },
                    // Previously modified/deleted: the old diff base is the
                    // content that existed before this "create" overwrote it.
                    TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
                        TrackedBufferStatus::Created {
                            existing_file_content: Some(tracked.diff_base),
                        }
                    }
                }
            } else if buffer
                .read(cx)
                .file()
                .is_some_and(|file| file.disk_state().exists())
            {
                // The agent is overwriting a file that already exists on disk;
                // snapshot its current content for potential restoration on reject.
                TrackedBufferStatus::Created {
                    existing_file_content: Some(buffer.read(cx).as_rope().clone()),
                }
            } else {
                // Brand-new file: nothing to restore on reject.
                TrackedBufferStatus::Created {
                    existing_file_content: None,
                }
            }
        } else {
            TrackedBufferStatus::Modified
        };

        let tracked_buffer = self
            .tracked_buffers
            .entry(buffer.clone())
            .or_insert_with(|| {
                // Keep language servers aware of the buffer while it's tracked.
                let open_lsp_handle = self.project.update(cx, |project, cx| {
                    project.register_buffer_with_language_servers(&buffer, cx)
                });

                let text_snapshot = buffer.read(cx).text_snapshot();
                let language = buffer.read(cx).language().cloned();
                let language_registry = buffer.read(cx).language_registry();
                let diff = cx.new(|cx| {
                    let mut diff = BufferDiff::new(&text_snapshot, cx);
                    diff.language_changed(language, language_registry, cx);
                    diff
                });
                let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
                let diff_base;
                let unreviewed_edits;
                if is_created {
                    // Created buffers diff against an empty base: every row of
                    // the buffer is a single pending insertion.
                    diff_base = Rope::default();
                    unreviewed_edits = Patch::new(vec![Edit {
                        old: 0..1,
                        new: 0..text_snapshot.max_point().row + 1,
                    }])
                } else {
                    // Read/modified buffers start with no pending edits; the
                    // base is the content as the agent saw it.
                    diff_base = buffer.read(cx).as_rope().clone();
                    unreviewed_edits = Patch::default();
                }
                TrackedBuffer {
                    buffer: buffer.clone(),
                    diff_base,
                    unreviewed_edits,
                    snapshot: text_snapshot,
                    status,
                    version: buffer.read(cx).version(),
                    diff,
                    diff_update: diff_update_tx,
                    _open_lsp_handle: open_lsp_handle,
                    // Background task that keeps the review diff current; it
                    // exits when `diff_update_tx` is dropped with the tracker.
                    _maintain_diff: cx.spawn({
                        let buffer = buffer.clone();
                        async move |this, cx| {
                            Self::maintain_diff(this, buffer, diff_update_rx, cx)
                                .await
                                .ok();
                        }
                    }),
                    _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
                }
            });
        tracked_buffer.version = buffer.read(cx).version();
        tracked_buffer
    }
204
205 fn handle_buffer_event(
206 &mut self,
207 buffer: Entity<Buffer>,
208 event: &BufferEvent,
209 cx: &mut Context<Self>,
210 ) {
211 match event {
212 BufferEvent::Edited { .. } => {
213 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
214 return;
215 };
216 let buffer_version = buffer.read(cx).version();
217 if !buffer_version.changed_since(&tracked_buffer.version) {
218 return;
219 }
220 self.handle_buffer_edited(buffer, cx);
221 }
222 BufferEvent::FileHandleChanged => {
223 self.handle_buffer_file_changed(buffer, cx);
224 }
225 _ => {}
226 };
227 }
228
229 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
230 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
231 return;
232 };
233 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
234 }
235
    /// Reconciles tracking state when a buffer's backing file changes on disk
    /// (deleted externally, re-created, renamed, etc.).
    fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        match tracked_buffer.status {
            TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
                if buffer
                    .read(cx)
                    .file()
                    .is_some_and(|file| file.disk_state().is_deleted())
                {
                    // If the buffer had been edited by a tool, but it got
                    // deleted externally, we want to stop tracking it.
                    self.tracked_buffers.remove(&buffer);
                }
                cx.notify();
            }
            TrackedBufferStatus::Deleted => {
                if buffer
                    .read(cx)
                    .file()
                    .is_some_and(|file| !file.disk_state().is_deleted())
                {
                    // If the buffer had been deleted by a tool, but it got
                    // resurrected externally, we want to clear the edits we
                    // were tracking and reset the buffer's state.
                    self.tracked_buffers.remove(&buffer);
                    self.track_buffer_internal(buffer, false, cx);
                }
                cx.notify();
            }
        }
    }
270
    /// Long-running task (one per tracked buffer) that keeps the buffer's
    /// review diff up to date.
    ///
    /// Reacts to two event sources, preferring buffer updates when both are
    /// ready (`select_biased!`):
    /// - snapshots sent through `buffer_updates` whenever the buffer is edited
    ///   (by the agent or the user), and
    /// - git diff changes where HEAD moved, which lets us auto-accept agent
    ///   edits the user has since committed.
    ///
    /// Terminates when the update channel closes (i.e. when the
    /// `TrackedBuffer` owning the sender is dropped).
    async fn maintain_diff(
        this: WeakEntity<Self>,
        buffer: Entity<Buffer>,
        mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
        let git_diff = this
            .update(cx, |this, cx| {
                this.project.update(cx, |project, cx| {
                    project.open_uncommitted_diff(buffer.clone(), cx)
                })
            })?
            .await
            .ok();
        let buffer_repo = git_store.read_with(cx, |git_store, cx| {
            git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        });

        // Signal only when the repository's HEAD commit actually changed;
        // other diff churn is already handled via buffer updates.
        let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
        let _repo_subscription =
            if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
                cx.update(|cx| {
                    let mut old_head = buffer_repo.read(cx).head_commit.clone();
                    Some(cx.subscribe(git_diff, move |_, event, cx| {
                        if let buffer_diff::BufferDiffEvent::DiffChanged { .. } = event {
                            let new_head = buffer_repo.read(cx).head_commit.clone();
                            if new_head != old_head {
                                old_head = new_head;
                                git_diff_updates_tx.send(()).ok();
                            }
                        }
                    }))
                })
            } else {
                None
            };

        loop {
            futures::select_biased! {
                buffer_update = buffer_updates.next() => {
                    if let Some((author, buffer_snapshot)) = buffer_update {
                        Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
                    } else {
                        // Sender dropped: the buffer is no longer tracked.
                        break;
                    }
                }
                _ = git_diff_updates_rx.changed().fuse() => {
                    if let Some(git_diff) = git_diff.as_ref() {
                        Self::keep_committed_edits(&this, &buffer, git_diff, cx).await?;
                    }
                }
            }
        }

        Ok(())
    }
328
    /// Recomputes the review diff for `buffer` after an edit.
    ///
    /// For user-authored changes, non-conflicting edits are first folded into
    /// the diff base (on a background thread) so they stop showing up as
    /// unreviewed; agent-authored changes leave the base untouched and remain
    /// pending review.
    async fn track_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        author: ChangeAuthor,
        buffer_snapshot: text::BufferSnapshot,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let rebase = this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;

            // Diffing can be expensive, so run the rebase off the main thread.
            let rebase = cx.background_spawn({
                let mut base_text = tracked_buffer.diff_base.clone();
                let old_snapshot = tracked_buffer.snapshot.clone();
                let new_snapshot = buffer_snapshot.clone();
                let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let edits = diff_snapshots(&old_snapshot, &new_snapshot);
                async move {
                    if let ChangeAuthor::User = author {
                        // Fold user edits that don't overlap pending agent
                        // edits into the base so they aren't flagged for review.
                        apply_non_conflicting_edits(
                            &unreviewed_edits,
                            edits,
                            &mut base_text,
                            new_snapshot.as_rope(),
                        );
                    }

                    (Arc::from(base_text.to_string().as_str()), base_text)
                }
            });

            anyhow::Ok(rebase)
        })??;
        let (new_base_text, new_diff_base) = rebase.await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
376
    /// Auto-accepts unreviewed agent edits that the user has since committed.
    ///
    /// Line-diffs the agent's diff base against the git diff base (committed
    /// text); when a committed hunk is identical to a pending agent hunk, the
    /// hunk is folded into the agent diff base so it no longer shows as
    /// unreviewed. Heavy diff work runs on a background thread.
    async fn keep_committed_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        git_diff: &Entity<BufferDiff>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let buffer_snapshot = this.read_with(cx, |this, _cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok(tracked_buffer.snapshot.clone())
        })??;
        let (new_base_text, new_diff_base) = this
            .read_with(cx, |this, cx| {
                let tracked_buffer = this
                    .tracked_buffers
                    .get(buffer)
                    .context("buffer not tracked")?;
                let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let agent_diff_base = tracked_buffer.diff_base.clone();
                let git_diff_base = git_diff.read(cx).base_text(cx).as_rope().clone();
                let buffer_text = tracked_buffer.snapshot.as_rope().clone();
                anyhow::Ok(cx.background_spawn(async move {
                    let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
                    // Row edits that transform the agent's base into the committed text.
                    let committed_edits = language::line_diff(
                        &agent_diff_base.to_string(),
                        &git_diff_base.to_string(),
                    )
                    .into_iter()
                    .map(|(old, new)| Edit { old, new });

                    let mut new_agent_diff_base = agent_diff_base.clone();
                    // Row shift accumulated in `new_agent_diff_base` by
                    // earlier accepted hunks.
                    let mut row_delta = 0i32;
                    for committed in committed_edits {
                        while let Some(unreviewed) = old_unreviewed_edits.peek() {
                            // If the committed edit matches the unreviewed
                            // edit, assume the user wants to keep it.
                            if committed.old == unreviewed.old {
                                let unreviewed_new =
                                    buffer_text.slice_rows(unreviewed.new.clone()).to_string();
                                let committed_new =
                                    git_diff_base.slice_rows(committed.new.clone()).to_string();
                                if unreviewed_new == committed_new {
                                    // Splice the accepted text into the base
                                    // at delta-adjusted rows (clamped to the end).
                                    let old_byte_start =
                                        new_agent_diff_base.point_to_offset(Point::new(
                                            (unreviewed.old.start as i32 + row_delta) as u32,
                                            0,
                                        ));
                                    let old_byte_end =
                                        new_agent_diff_base.point_to_offset(cmp::min(
                                            Point::new(
                                                (unreviewed.old.end as i32 + row_delta) as u32,
                                                0,
                                            ),
                                            new_agent_diff_base.max_point(),
                                        ));
                                    new_agent_diff_base
                                        .replace(old_byte_start..old_byte_end, &unreviewed_new);
                                    row_delta +=
                                        unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
                                }
                            } else if unreviewed.old.start >= committed.old.end {
                                // Unreviewed edit lies after this committed
                                // hunk; move on to the next committed hunk.
                                break;
                            }

                            // NOTE(review): an unreviewed edit that overlaps
                            // (but doesn't equal) the committed hunk is
                            // consumed here without being accepted —
                            // presumably intentional; confirm.
                            old_unreviewed_edits.next().unwrap();
                        }
                    }

                    (
                        Arc::from(new_agent_diff_base.to_string().as_str()),
                        new_agent_diff_base,
                    )
                }))
            })??
            .await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
465
    /// Applies a freshly computed diff base to the tracked buffer: updates the
    /// `BufferDiff` entity, recomputes the row-level unreviewed edits from the
    /// resulting hunks, then stores the new base and snapshot on the tracker.
    async fn update_diff(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        buffer_snapshot: text::BufferSnapshot,
        new_base_text: Arc<str>,
        new_diff_base: Rope,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let (diff, language) = this.read_with(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok((
                tracked_buffer.diff.clone(),
                buffer.read(cx).language().cloned(),
            ))
        })??;
        let update = diff
            .update(cx, |diff, cx| {
                diff.update_diff(
                    buffer_snapshot.clone(),
                    Some(new_base_text),
                    Some(true),
                    language,
                    cx,
                )
            })
            .await;
        diff.update(cx, |diff, cx| {
            diff.set_snapshot(update.clone(), &buffer_snapshot, cx)
        })
        .await;
        let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx));

        // Rebuild the row-level unreviewed-edit patch from every hunk in the
        // updated diff, off the main thread.
        let unreviewed_edits = cx
            .background_spawn({
                let buffer_snapshot = buffer_snapshot.clone();
                let new_diff_base = new_diff_base.clone();
                async move {
                    let mut unreviewed_edits = Patch::default();
                    for hunk in diff_snapshot.hunks_intersecting_range(
                        Anchor::min_for_buffer(buffer_snapshot.remote_id())
                            ..Anchor::max_for_buffer(buffer_snapshot.remote_id()),
                        &buffer_snapshot,
                    ) {
                        // Translate the hunk's byte range in the base and its
                        // anchor range in the buffer into a row-level edit.
                        let old_range = new_diff_base
                            .offset_to_point(hunk.diff_base_byte_range.start)
                            ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
                        let new_range = hunk.range.start..hunk.range.end;
                        unreviewed_edits.push(point_to_row_edit(
                            Edit {
                                old: old_range,
                                new: new_range,
                            },
                            &new_diff_base,
                            buffer_snapshot.as_rope(),
                        ));
                    }
                    unreviewed_edits
                }
            })
            .await;
        this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;
            tracked_buffer.diff_base = new_diff_base;
            tracked_buffer.snapshot = buffer_snapshot;
            tracked_buffer.unreviewed_edits = unreviewed_edits;
            cx.notify();
            anyhow::Ok(())
        })?
    }
541
    /// Track a buffer as read by agent, so we can notify the model about user edits.
    /// Also records the file's mtime for external-modification detection.
    pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.buffer_read_impl(buffer, true, cx);
    }
546
    /// Shared implementation for [`Self::buffer_read`].
    ///
    /// `record_file_read_time` is false when the call is being mirrored into a
    /// linked action log, since that agent didn't itself read the file.
    fn buffer_read_impl(
        &mut self,
        buffer: Entity<Buffer>,
        record_file_read_time: bool,
        cx: &mut Context<Self>,
    ) {
        if let Some(linked_action_log) = &self.linked_action_log {
            // We don't want to share read times since the other agent hasn't read it necessarily
            linked_action_log.update(cx, |log, cx| {
                log.buffer_read_impl(buffer.clone(), false, cx);
            });
        }
        if record_file_read_time {
            self.update_file_read_time(&buffer, cx);
        }
        self.track_buffer_internal(buffer, false, cx);
    }
564
    /// Mark a buffer as created by agent, so we can refresh it in the context.
    /// Also records the file's mtime for external-modification detection.
    pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.buffer_created_impl(buffer, true, cx);
    }
569
    /// Shared implementation for [`Self::buffer_created`].
    ///
    /// `record_file_read_time` is false when mirroring into a linked log.
    fn buffer_created_impl(
        &mut self,
        buffer: Entity<Buffer>,
        record_file_read_time: bool,
        cx: &mut Context<Self>,
    ) {
        if let Some(linked_action_log) = &self.linked_action_log {
            // We don't want to share read times since the other agent hasn't read it necessarily
            linked_action_log.update(cx, |log, cx| {
                log.buffer_created_impl(buffer.clone(), false, cx);
            });
        }
        if record_file_read_time {
            self.update_file_read_time(&buffer, cx);
        }
        self.track_buffer_internal(buffer, true, cx);
    }
587
    /// Mark a buffer as edited by agent, so we can refresh it in the context.
    /// Also records the file's mtime for external-modification detection.
    pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.buffer_edited_impl(buffer, true, cx);
    }
592
    /// Shared implementation for [`Self::buffer_edited`].
    ///
    /// Ensures the buffer is tracked, resurrects a previously "deleted"
    /// status to `Modified`, records the edited version, and schedules an
    /// agent-authored diff update.
    fn buffer_edited_impl(
        &mut self,
        buffer: Entity<Buffer>,
        record_file_read_time: bool,
        cx: &mut Context<Self>,
    ) {
        if let Some(linked_action_log) = &self.linked_action_log {
            // We don't want to share read times since the other agent hasn't read it necessarily
            linked_action_log.update(cx, |log, cx| {
                log.buffer_edited_impl(buffer.clone(), false, cx);
            });
        }
        if record_file_read_time {
            self.update_file_read_time(&buffer, cx);
        }
        // Capture the version before tracking so the tracker records the
        // state that includes this edit.
        let new_version = buffer.read(cx).version();
        let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
        if let TrackedBufferStatus::Deleted = tracked_buffer.status {
            // Editing an agent-deleted buffer effectively un-deletes it.
            tracked_buffer.status = TrackedBufferStatus::Modified;
        }

        tracked_buffer.version = new_version;
        tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
    }
617
    /// Marks `buffer` as about to be deleted by the agent.
    ///
    /// Agent-created buffers are simply untracked (nothing left to review).
    /// Modified buffers flip to `Deleted`; when there is no linked log, the
    /// buffer text is cleared so the deletion appears as one large removal in
    /// the review diff.
    pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        // Ok to propagate file read time removal to linked action log
        self.remove_file_read_time(&buffer, cx);
        let has_linked_action_log = self.linked_action_log.is_some();
        let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
        match tracked_buffer.status {
            TrackedBufferStatus::Created { .. } => {
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            TrackedBufferStatus::Modified => {
                tracked_buffer.status = TrackedBufferStatus::Deleted;
                if !has_linked_action_log {
                    // Clearing the text makes the deletion reviewable as a diff.
                    buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
                    tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
                }
            }

            TrackedBufferStatus::Deleted => {}
        }

        if let Some(linked_action_log) = &mut self.linked_action_log {
            linked_action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        }

        // With a linked log, the diff update is deferred until after the
        // linked log has observed the deletion.
        if has_linked_action_log && let Some(tracked_buffer) = self.tracked_buffers.get(&buffer) {
            tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
        }

        cx.notify();
    }
649
    /// Accepts (keeps) the agent's unreviewed edits whose rows intersect
    /// `buffer_range`, folding them into the diff base so they no longer need
    /// review. For agent-deleted buffers, accepting drops tracking entirely.
    pub fn keep_edits_in_range(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_range: Range<impl language::ToPoint>,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
        match tracked_buffer.status {
            TrackedBufferStatus::Deleted => {
                // Keeping a deletion means accepting it wholesale.
                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            _ => {
                let buffer = buffer.read(cx);
                let buffer_range =
                    buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
                // Row shift accumulated in the diff base by edits kept so far
                // in this pass.
                let mut delta = 0i32;
                tracked_buffer.unreviewed_edits.retain_mut(|edit| {
                    edit.old.start = (edit.old.start as i32 + delta) as u32;
                    edit.old.end = (edit.old.end as i32 + delta) as u32;

                    if buffer_range.end.row < edit.new.start
                        || buffer_range.start.row > edit.new.end
                    {
                        // Outside the accepted range: keep as unreviewed.
                        true
                    } else {
                        // Splice the buffer's current rows for this edit into
                        // the diff base (ranges clamped to the rope ends).
                        let old_range = tracked_buffer
                            .diff_base
                            .point_to_offset(Point::new(edit.old.start, 0))
                            ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                Point::new(edit.old.end, 0),
                                tracked_buffer.diff_base.max_point(),
                            ));
                        let new_range = tracked_buffer
                            .snapshot
                            .point_to_offset(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.point_to_offset(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        tracked_buffer.diff_base.replace(
                            old_range,
                            &tracked_buffer
                                .snapshot
                                .text_for_range(new_range)
                                .collect::<String>(),
                        );
                        delta += edit.new_len() as i32 - edit.old_len() as i32;
                        metrics.add_edit(edit);
                        false
                    }
                });
                // Once a created buffer has all its content accepted, treat
                // further changes as ordinary modifications.
                if tracked_buffer.unreviewed_edits.is_empty()
                    && let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status
                {
                    tracked_buffer.status = TrackedBufferStatus::Modified;
                }
                tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
            }
        }
        if let Some(telemetry) = telemetry {
            telemetry_report_accepted_edits(&telemetry, metrics);
        }
    }
720
    /// Reverts the agent's unreviewed edits that intersect `buffer_ranges`,
    /// returning a save/delete task plus (when supported) the undo info
    /// needed to re-apply the rejected content later.
    ///
    /// Behavior depends on how the agent touched the buffer:
    /// - `Created` over existing content: the original content is restored and
    ///   saved; undo restores the agent's full text.
    /// - `Created` from scratch: the file is deleted, but only if the buffer
    ///   still contains exactly what the agent wrote; undo is unsupported.
    /// - `Deleted`: the text is restored from the diff base, saved, and
    ///   tracking restarts as a plain read.
    /// - `Modified`: only hunks overlapping the given row ranges are reverted
    ///   to their diff-base text; undo restores the agent's text per hunk.
    pub fn reject_edits_in_ranges(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_ranges: Vec<Range<impl language::ToPoint>>,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) -> (Task<Result<()>>, Option<PerBufferUndo>) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return (Task::ready(Ok(())), None);
        };

        let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
        let mut undo_info: Option<PerBufferUndo> = None;
        let task = match &tracked_buffer.status {
            TrackedBufferStatus::Created {
                existing_file_content,
            } => {
                let task = if let Some(existing_file_content) = existing_file_content {
                    // Capture the agent's content before restoring existing file content
                    let agent_content = buffer.read(cx).text();

                    // Replace the whole buffer with the pre-agent content in
                    // one transaction.
                    buffer.update(cx, |buffer, cx| {
                        buffer.start_transaction();
                        buffer.set_text("", cx);
                        for chunk in existing_file_content.chunks() {
                            buffer.append(chunk, cx);
                        }
                        buffer.end_transaction(cx);
                    });

                    undo_info = Some(PerBufferUndo {
                        buffer: buffer.downgrade(),
                        edits_to_restore: vec![(Anchor::MIN..Anchor::MAX, agent_content)],
                        status: UndoBufferStatus::Created {
                            had_existing_content: true,
                        },
                    });

                    self.project
                        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
                } else {
                    // For a file created by AI with no pre-existing content,
                    // only delete the file if we're certain it contains only AI content
                    // with no edits from the user.

                    let initial_version = tracked_buffer.version.clone();
                    let current_version = buffer.read(cx).version();

                    let current_content = buffer.read(cx).text();
                    let tracked_content = tracked_buffer.snapshot.text();

                    let is_ai_only_content =
                        initial_version == current_version && current_content == tracked_content;

                    if is_ai_only_content {
                        buffer
                            .read(cx)
                            .entry_id(cx)
                            .and_then(|entry_id| {
                                self.project.update(cx, |project, cx| {
                                    project.delete_entry(entry_id, false, cx)
                                })
                            })
                            .unwrap_or(Task::ready(Ok(())))
                    } else {
                        // Not sure how to disentangle edits made by the user
                        // from edits made by the AI at this point.
                        // For now, preserve both to avoid data loss.
                        //
                        // TODO: Better solution (disable "Reject" after user makes some
                        // edit or find a way to differentiate between AI and user edits)
                        Task::ready(Ok(()))
                    }
                };

                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                cx.notify();
                task
            }
            TrackedBufferStatus::Deleted => {
                // Restore the pre-deletion content from the diff base.
                buffer.update(cx, |buffer, cx| {
                    buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
                });
                let save = self
                    .project
                    .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));

                // Clear all tracked edits for this buffer and start over as if we just read it.
                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                self.buffer_read(buffer.clone(), cx);
                cx.notify();
                save
            }
            TrackedBufferStatus::Modified => {
                let edits_to_restore = buffer.update(cx, |buffer, cx| {
                    // Row ranges the caller asked to reject, in buffer order.
                    let mut buffer_row_ranges = buffer_ranges
                        .into_iter()
                        .map(|range| {
                            range.start.to_point(buffer).row..range.end.to_point(buffer).row
                        })
                        .peekable();

                    let mut edits_to_revert = Vec::new();
                    let mut edits_for_undo = Vec::new();
                    for edit in tracked_buffer.unreviewed_edits.edits() {
                        // Anchor the edit's current rows so ranges stay valid
                        // while we apply the reverts below.
                        let new_range = tracked_buffer
                            .snapshot
                            .anchor_before(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.anchor_after(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        let new_row_range = new_range.start.to_point(buffer).row
                            ..new_range.end.to_point(buffer).row;

                        // Advance through the requested ranges until one
                        // overlaps (revert) or lies past this edit (skip).
                        let mut revert = false;
                        while let Some(buffer_row_range) = buffer_row_ranges.peek() {
                            if buffer_row_range.end < new_row_range.start {
                                buffer_row_ranges.next();
                            } else if buffer_row_range.start > new_row_range.end {
                                break;
                            } else {
                                revert = true;
                                break;
                            }
                        }

                        if revert {
                            metrics.add_edit(edit);
                            let old_range = tracked_buffer
                                .diff_base
                                .point_to_offset(Point::new(edit.old.start, 0))
                                ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                    Point::new(edit.old.end, 0),
                                    tracked_buffer.diff_base.max_point(),
                                ));
                            let old_text = tracked_buffer
                                .diff_base
                                .chunks_in_range(old_range)
                                .collect::<String>();

                            // Capture the agent's text before we revert it (for undo)
                            let new_range_offset =
                                new_range.start.to_offset(buffer)..new_range.end.to_offset(buffer);
                            let agent_text =
                                buffer.text_for_range(new_range_offset).collect::<String>();
                            edits_for_undo.push((new_range.clone(), agent_text));

                            edits_to_revert.push((new_range, old_text));
                        }
                    }

                    buffer.edit(edits_to_revert, None, cx);
                    edits_for_undo
                });

                if !edits_to_restore.is_empty() {
                    undo_info = Some(PerBufferUndo {
                        buffer: buffer.downgrade(),
                        edits_to_restore,
                        status: UndoBufferStatus::Modified,
                    });
                }

                self.project
                    .update(cx, |project, cx| project.save_buffer(buffer, cx))
            }
        };
        if let Some(telemetry) = telemetry {
            telemetry_report_rejected_edits(&telemetry, metrics);
        }
        (task, undo_info)
    }
896
    /// Accepts every unreviewed edit in every tracked buffer.
    ///
    /// Agent-deleted buffers are dropped from tracking entirely; all others
    /// have their diff base fast-forwarded to the current content so nothing
    /// remains to review.
    pub fn keep_all_edits(
        &mut self,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) {
        self.tracked_buffers.retain(|buffer, tracked_buffer| {
            // Report the accepted edits before clearing the tracker state.
            let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
            metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
            if let Some(telemetry) = telemetry.as_ref() {
                telemetry_report_accepted_edits(telemetry, metrics);
            }
            match tracked_buffer.status {
                TrackedBufferStatus::Deleted => false,
                _ => {
                    // Accepting a creation downgrades it to a plain modification.
                    if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
                        tracked_buffer.status = TrackedBufferStatus::Modified;
                    }
                    tracked_buffer.unreviewed_edits.clear();
                    tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
                    tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
                    true
                }
            }
        });

        cx.notify();
    }
924
    /// Rejects every unreviewed edit in every changed buffer, recording undo
    /// information so the operation can be reversed via
    /// [`Self::undo_last_reject`]. Returns a task that resolves when all
    /// per-buffer reject tasks finish (errors are logged, not propagated).
    pub fn reject_all_edits(
        &mut self,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) -> Task<()> {
        // Clear any previous undo state before starting a new reject operation
        self.last_reject_undo = None;

        let mut undo_buffers = Vec::new();
        let mut futures = Vec::new();

        for buffer in self.changed_buffers(cx).into_keys() {
            // Reject across the whole buffer.
            let buffer_ranges = vec![Anchor::min_max_range_for_buffer(
                buffer.read(cx).remote_id(),
            )];
            let (reject_task, undo_info) =
                self.reject_edits_in_ranges(buffer, buffer_ranges, telemetry.clone(), cx);

            if let Some(undo) = undo_info {
                undo_buffers.push(undo);
            }

            futures.push(async move {
                reject_task.await.log_err();
            });
        }

        // Store the undo information if we have any
        if !undo_buffers.is_empty() {
            self.last_reject_undo = Some(LastRejectUndo {
                buffers: undo_buffers,
            });
        }

        let task = futures::future::join_all(futures);
        cx.background_spawn(async move {
            task.await;
        })
    }
964
    /// Whether a reject operation was recorded and can still be undone.
    pub fn has_pending_undo(&self) -> bool {
        self.last_reject_undo.is_some()
    }
968
    /// Replaces the stored undo state, overwriting any previous one.
    pub fn set_last_reject_undo(&mut self, undo: LastRejectUndo) {
        self.last_reject_undo = Some(undo);
    }
972
973 /// Undoes the most recent reject operation, restoring the rejected agent changes.
974 /// This is a best-effort operation: if buffers have been closed or modified externally,
975 /// those buffers will be skipped.
976 pub fn undo_last_reject(&mut self, cx: &mut Context<Self>) -> Task<()> {
977 let Some(undo) = self.last_reject_undo.take() else {
978 return Task::ready(());
979 };
980
981 let mut save_tasks = Vec::with_capacity(undo.buffers.len());
982
983 for per_buffer_undo in undo.buffers {
984 // Skip if the buffer entity has been deallocated
985 let Some(buffer) = per_buffer_undo.buffer.upgrade() else {
986 continue;
987 };
988
989 buffer.update(cx, |buffer, cx| {
990 let mut valid_edits = Vec::new();
991
992 for (anchor_range, text_to_restore) in per_buffer_undo.edits_to_restore {
993 if anchor_range.start.buffer_id == Some(buffer.remote_id())
994 && anchor_range.end.buffer_id == Some(buffer.remote_id())
995 {
996 valid_edits.push((anchor_range, text_to_restore));
997 }
998 }
999
1000 if !valid_edits.is_empty() {
1001 buffer.edit(valid_edits, None, cx);
1002 }
1003 });
1004
1005 if !self.tracked_buffers.contains_key(&buffer) {
1006 self.buffer_edited(buffer.clone(), cx);
1007 }
1008
1009 let save = self
1010 .project
1011 .update(cx, |project, cx| project.save_buffer(buffer, cx));
1012 save_tasks.push(save);
1013 }
1014
1015 cx.notify();
1016
1017 cx.background_spawn(async move {
1018 futures::future::join_all(save_tasks).await;
1019 })
1020 }
1021
    /// Returns the set of buffers that contain edits that haven't been reviewed by the user,
    /// paired with their review diffs.
    pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
        self.tracked_buffers
            .iter()
            .filter(|(_, tracked)| tracked.has_edits(cx))
            .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
            .collect()
    }
1030
    /// Returns the total number of lines added and removed across all unreviewed buffers.
    pub fn diff_stats(&self, cx: &App) -> DiffStats {
        DiffStats::all_files(&self.changed_buffers(cx), cx)
    }
1035
    /// Iterate over buffers changed since last read or edited by the model.
    /// Buffers whose files were deleted on disk are excluded.
    pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
        self.tracked_buffers
            .iter()
            .filter(|(buffer, tracked)| {
                let buffer = buffer.read(cx);

                // Stale when the buffer advanced past the version we recorded
                // at the last read/edit.
                tracked.version != buffer.version
                    && buffer
                        .file()
                        .is_some_and(|file| !file.disk_state().is_deleted())
            })
            .map(|(buffer, _)| buffer)
    }
1050}
1051
/// Line-level summary of a diff: how many lines were added and removed.
#[derive(Default, Debug, Clone, Copy)]
pub struct DiffStats {
    /// Number of lines added relative to the diff base.
    pub lines_added: u32,
    /// Number of lines removed relative to the diff base.
    pub lines_removed: u32,
}
1057
1058impl DiffStats {
1059 pub fn single_file(buffer: &Buffer, diff: &BufferDiff, cx: &App) -> Self {
1060 let mut stats = DiffStats::default();
1061 let diff_snapshot = diff.snapshot(cx);
1062 let buffer_snapshot = buffer.snapshot();
1063 let base_text = diff_snapshot.base_text();
1064
1065 for hunk in diff_snapshot.hunks(&buffer_snapshot) {
1066 let added_rows = hunk.range.end.row.saturating_sub(hunk.range.start.row);
1067 stats.lines_added += added_rows;
1068
1069 let base_start = hunk.diff_base_byte_range.start.to_point(base_text).row;
1070 let base_end = hunk.diff_base_byte_range.end.to_point(base_text).row;
1071 let removed_rows = base_end.saturating_sub(base_start);
1072 stats.lines_removed += removed_rows;
1073 }
1074
1075 stats
1076 }
1077
1078 pub fn all_files(
1079 changed_buffers: &BTreeMap<Entity<Buffer>, Entity<BufferDiff>>,
1080 cx: &App,
1081 ) -> Self {
1082 let mut total = DiffStats::default();
1083 for (buffer, diff) in changed_buffers {
1084 let stats = DiffStats::single_file(buffer.read(cx), diff.read(cx), cx);
1085 total.lines_added += stats.lines_added;
1086 total.lines_removed += stats.lines_removed;
1087 }
1088 total
1089 }
1090}
1091
/// Identifiers attached to edit accepted/rejected telemetry events.
#[derive(Clone)]
pub struct ActionLogTelemetry {
    /// Telemetry identifier of the agent whose edits are being reported.
    pub agent_telemetry_id: SharedString,
    /// Identifier of the session the edits belong to.
    pub session_id: Arc<str>,
}
1097
/// Accumulated line counts (plus buffer language) for a batch of edits
/// being reported to telemetry.
struct ActionLogMetrics {
    /// Total lines removed across recorded edits.
    lines_removed: u32,
    /// Total lines added across recorded edits.
    lines_added: u32,
    /// Language of the buffer the edits apply to, if known.
    language: Option<SharedString>,
}
1103
1104impl ActionLogMetrics {
1105 fn for_buffer(buffer: &Buffer) -> Self {
1106 Self {
1107 language: buffer.language().map(|l| l.name().0),
1108 lines_removed: 0,
1109 lines_added: 0,
1110 }
1111 }
1112
1113 fn add_edits(&mut self, edits: &[Edit<u32>]) {
1114 for edit in edits {
1115 self.add_edit(edit);
1116 }
1117 }
1118
1119 fn add_edit(&mut self, edit: &Edit<u32>) {
1120 self.lines_added += edit.new_len();
1121 self.lines_removed += edit.old_len();
1122 }
1123}
1124
/// Emits an "Agent Edits Accepted" telemetry event carrying the given metrics.
fn telemetry_report_accepted_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
    telemetry::event!(
        "Agent Edits Accepted",
        agent = telemetry.agent_telemetry_id,
        session = telemetry.session_id,
        language = metrics.language,
        lines_added = metrics.lines_added,
        lines_removed = metrics.lines_removed
    );
}
1135
/// Emits an "Agent Edits Rejected" telemetry event carrying the given metrics.
fn telemetry_report_rejected_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
    telemetry::event!(
        "Agent Edits Rejected",
        agent = telemetry.agent_telemetry_id,
        session = telemetry.session_id,
        language = metrics.language,
        lines_added = metrics.lines_added,
        lines_removed = metrics.lines_removed
    );
}
1146
/// Applies to `old_text` the subset of row-based `edits` (edits from `old_text`
/// toward `new_text`) that do not intersect any edit already recorded in `patch`,
/// mutating `old_text` in place. Returns whether at least one edit was applied.
///
/// `applied_delta` tracks the row shift introduced by edits we applied here;
/// `rebased_delta` tracks the row shift of old edits we skipped past. Their
/// difference translates a new edit's coordinates into `old_text` coordinates.
fn apply_non_conflicting_edits(
    patch: &Patch<u32>,
    edits: Vec<Edit<u32>>,
    old_text: &mut Rope,
    new_text: &Rope,
) -> bool {
    let mut old_edits = patch.edits().iter().cloned().peekable();
    let mut new_edits = edits.into_iter().peekable();
    let mut applied_delta = 0i32;
    let mut rebased_delta = 0i32;
    let mut has_made_changes = false;

    while let Some(mut new_edit) = new_edits.next() {
        let mut conflict = false;

        // Push all the old edits that are before this new edit or that intersect with it.
        while let Some(old_edit) = old_edits.peek() {
            if new_edit.old.end < old_edit.new.start
                || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
            {
                // Old edit lies strictly after the new edit: stop scanning.
                break;
            } else if new_edit.old.start > old_edit.new.end
                || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
            {
                // Old edit lies strictly before the new edit: consume it and
                // record the row shift it contributes.
                let old_edit = old_edits.next().unwrap();
                rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
            } else {
                // The ranges intersect, so this new edit conflicts and is skipped.
                conflict = true;
                if new_edits
                    .peek()
                    .is_some_and(|next_edit| next_edit.old.overlaps(&old_edit.new))
                {
                    // The following new edit also overlaps the same old edit;
                    // advance to it so it is marked conflicting too.
                    new_edit = new_edits.next().unwrap();
                } else {
                    let old_edit = old_edits.next().unwrap();
                    rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
                }
            }
        }

        if !conflict {
            // This edit doesn't intersect with any old edit, so we can apply it to the old text.
            new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
            new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
            // Convert row ranges to byte ranges, clamping to the end of each rope.
            let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
                ..old_text.point_to_offset(cmp::min(
                    Point::new(new_edit.old.end, 0),
                    old_text.max_point(),
                ));
            let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
                ..new_text.point_to_offset(cmp::min(
                    Point::new(new_edit.new.end, 0),
                    new_text.max_point(),
                ));

            old_text.replace(
                old_bytes,
                &new_text.chunks_in_range(new_bytes).collect::<String>(),
            );
            applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
            has_made_changes = true;
        }
    }
    has_made_changes
}
1212
1213fn diff_snapshots(
1214 old_snapshot: &text::BufferSnapshot,
1215 new_snapshot: &text::BufferSnapshot,
1216) -> Vec<Edit<u32>> {
1217 let mut edits = new_snapshot
1218 .edits_since::<Point>(&old_snapshot.version)
1219 .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
1220 .peekable();
1221 let mut row_edits = Vec::new();
1222 while let Some(mut edit) = edits.next() {
1223 while let Some(next_edit) = edits.peek() {
1224 if edit.old.end >= next_edit.old.start {
1225 edit.old.end = next_edit.old.end;
1226 edit.new.end = next_edit.new.end;
1227 edits.next();
1228 } else {
1229 break;
1230 }
1231 }
1232 row_edits.push(edit);
1233 }
1234 row_edits
1235}
1236
1237fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
1238 if edit.old.start.column == old_text.line_len(edit.old.start.row)
1239 && new_text
1240 .chars_at(new_text.point_to_offset(edit.new.start))
1241 .next()
1242 == Some('\n')
1243 && edit.old.start != old_text.max_point()
1244 {
1245 Edit {
1246 old: edit.old.start.row + 1..edit.old.end.row + 1,
1247 new: edit.new.start.row + 1..edit.new.end.row + 1,
1248 }
1249 } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
1250 Edit {
1251 old: edit.old.start.row..edit.old.end.row,
1252 new: edit.new.start.row..edit.new.end.row,
1253 }
1254 } else {
1255 Edit {
1256 old: edit.old.start.row..edit.old.end.row + 1,
1257 new: edit.new.start.row..edit.new.end.row + 1,
1258 }
1259 }
1260}
1261
/// Who authored a change to a tracked buffer.
#[derive(Copy, Clone, Debug)]
enum ChangeAuthor {
    /// The change was made by the user.
    User,
    /// The change was made by the agent.
    Agent,
}
1267
/// Lifecycle state of a buffer tracked by the action log.
#[derive(Debug)]
enum TrackedBufferStatus {
    /// Created by the agent. When it overwrote an existing file, the file's
    /// previous content is retained here (presumably so it can be restored
    /// on reject — confirm against the reject path).
    Created { existing_file_content: Option<Rope> },
    /// An existing buffer was modified.
    Modified,
    /// The buffer's file was deleted.
    Deleted,
}
1274
/// State the action log keeps for one buffer it is tracking.
pub struct TrackedBuffer {
    buffer: Entity<Buffer>,
    // Text the unreviewed diff is computed against.
    diff_base: Rope,
    // Row-based edits the user has not yet accepted or rejected.
    unreviewed_edits: Patch<u32>,
    status: TrackedBufferStatus,
    // Buffer version as of the last time the model read or edited it.
    version: clock::Global,
    diff: Entity<BufferDiff>,
    snapshot: text::BufferSnapshot,
    // Channel used to queue diff recomputations; consumed by `_maintain_diff`.
    diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
    // Keeps the buffer registered with the language server while tracked.
    _open_lsp_handle: OpenLspBufferHandle,
    _maintain_diff: Task<()>,
    _subscription: Subscription,
}
1288
1289impl TrackedBuffer {
1290 #[cfg(any(test, feature = "test-support"))]
1291 pub fn diff(&self) -> &Entity<BufferDiff> {
1292 &self.diff
1293 }
1294
1295 #[cfg(any(test, feature = "test-support"))]
1296 pub fn diff_base_len(&self) -> usize {
1297 self.diff_base.len()
1298 }
1299
1300 fn has_edits(&self, cx: &App) -> bool {
1301 self.diff
1302 .read(cx)
1303 .snapshot(cx)
1304 .hunks(self.buffer.read(cx))
1305 .next()
1306 .is_some()
1307 }
1308
1309 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
1310 self.diff_update
1311 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
1312 .ok();
1313 }
1314}
1315
/// A buffer with changes, paired with the diff describing them.
pub struct ChangedBuffer {
    /// Diff of the buffer against its tracked base text.
    pub diff: Entity<BufferDiff>,
}
1319
1320#[cfg(test)]
1321mod tests {
1322 use super::*;
1323 use buffer_diff::DiffHunkStatusKind;
1324 use gpui::TestAppContext;
1325 use language::Point;
1326 use project::{FakeFs, Fs, Project, RemoveOptions};
1327 use rand::prelude::*;
1328 use serde_json::json;
1329 use settings::SettingsStore;
1330 use std::env;
1331 use util::{RandomCharIter, path};
1332
    /// Installs the test logger before any test in this binary runs.
    #[ctor::ctor]
    fn init_logger() {
        zlog::init_test();
    }
1337
    /// Installs the global settings store that buffer/project machinery requires.
    fn init_test(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
        });
    }
1344
    /// Makes two agent edits in one file, then verifies that `keep_edits_in_range`
    /// clears only the hunks overlapping the kept range.
    #[gpui::test(iterations = 10)]
    async fn test_keep_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Two separate single-character edits attributed to the agent.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndEf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(2, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(4, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Keeping a range covering only the second hunk leaves the first unreviewed.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(2, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\n".into(),
                }],
            )]
        );

        // Keeping a range covering the remaining hunk clears everything.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1422
    /// Deletes two lines as the agent, then verifies that a user undo removes the
    /// corresponding hunk and that keeping the remaining hunk clears the log.
    #[gpui::test(iterations = 10)]
    async fn test_deletions(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Two line deletions, each finalized as its own transaction so the
        // later undo reverts only the second one.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
                    .unwrap();
                buffer.finalize_last_transaction();
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
                    .unwrap();
                buffer.finalize_last_transaction();
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nghi\njkl\npqr"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "mno\n".into(),
                    }
                ],
            )]
        );

        // Undoing the second deletion restores "mno" and drops its hunk.
        buffer.update(cx, |buffer, cx| buffer.undo(cx));
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nghi\njkl\nmno\npqr"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(1, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "def\n".into(),
                }],
            )]
        );

        // Keeping the remaining deletion hunk clears the log.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1507
    /// Verifies that user edits near or inside an agent hunk neither grow nor
    /// clear it, and that keeping a range overlapping the hunk clears the log.
    #[gpui::test(iterations = 10)]
    async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent edit spanning two lines produces a single modified hunk.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndeF\nGHI\njkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // User edits on lines adjacent to the hunk leave it unchanged.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(
                [
                    (Point::new(0, 2)..Point::new(0, 2), "X"),
                    (Point::new(3, 0)..Point::new(3, 0), "Y"),
                ],
                None,
                cx,
            )
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // A user edit inside the hunk also leaves it unchanged.
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndZeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // Keeping a range that overlaps the hunk's start clears it.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1604
    /// Creates a brand-new file via the agent and verifies the whole content
    /// shows as one added hunk that survives further edits until kept.
    #[gpui::test(iterations = 10)]
    async fn test_creating_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // A user edit extends the single added hunk rather than splitting it.
        buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 6),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Keeping the hunk's range clears the log.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), 0..5, None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1663
    /// Overwrites an existing file via `buffer_created`, then verifies that
    /// rejecting the edits restores the file's original content.
    #[gpui::test(iterations = 10)]
    async fn test_overwriting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        // The overwrite shows as a single added hunk with an empty base.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 19),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting any range inside the hunk restores the pre-overwrite content.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx);
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }
1722
    /// Edits a file, then overwrites it via `buffer_created`; verifies that
    /// rejecting afterwards restores the file's original on-disk content.
    #[gpui::test(iterations = 10)]
    async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        // First, a regular agent edit produces a modified hunk.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 37),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "Lorem ipsum dolor".into(),
                }],
            )]
        );

        // Then the agent overwrites the whole file; the hunk becomes Added.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 9),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting restores the original file content, not the intermediate edit.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx);
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }
1803
    /// Tracks two agent-deleted files and verifies the log reacts correctly to
    /// external recreation, tool recreation, and external deletion.
    #[gpui::test(iterations = 10)]
    async fn test_deleting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file1": "lorem\n", "file2": "ipsum\n"}),
        )
        .await;

        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let file1_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();
        let file2_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
            .unwrap();

        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let buffer1 = project
            .update(cx, |project, cx| {
                project.open_buffer(file1_path.clone(), cx)
            })
            .await
            .unwrap();
        let buffer2 = project
            .update(cx, |project, cx| {
                project.open_buffer(file2_path.clone(), cx)
            })
            .await
            .unwrap();

        // Agent deletes both files; each shows up as a deleted hunk.
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| {
                project.delete_file(file1_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        project
            .update(cx, |project, cx| {
                project.delete_file(file2_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![
                (
                    buffer1.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "lorem\n".into(),
                    }]
                ),
                (
                    buffer2.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "ipsum\n".into(),
                    }],
                )
            ]
        );

        // Simulate file1 being recreated externally.
        fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
            .await;

        // Simulate file2 being recreated by a tool.
        let buffer2 = project
            .update(cx, |project, cx| project.open_buffer(file2_path, cx))
            .await
            .unwrap();
        action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
        buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
        action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();
        // Only the tool-recreated file remains tracked, now as an added hunk.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer2.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Simulate file2 being deleted externally.
        fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1913
    /// Verifies `reject_edits_in_ranges`: non-overlapping ranges are ignored,
    /// and overlapping ranges revert exactly the hunks they touch.
    #[gpui::test(iterations = 10)]
    async fn test_reject_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Two agent edits: a multi-line replacement and a single-char change.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // If the rejected range doesn't overlap with any hunk, we ignore it.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Rejecting a range overlapping the first hunk reverts only that hunk.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(1, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(4, 0)..Point::new(4, 3),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "mno".into(),
                }],
            )]
        );

        // Rejecting the remaining hunk restores the original file text.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2054
    /// Rejects two hunks in a single `reject_edits_in_ranges` call and verifies
    /// the buffer reverts synchronously, before the returned task completes.
    #[gpui::test(iterations = 10)]
    async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        action_log.update(cx, |log, cx| {
            let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
                ..buffer.read(cx).anchor_before(Point::new(1, 0));
            let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
                ..buffer.read(cx).anchor_before(Point::new(5, 3));

            let (task, _) =
                log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], None, cx);
            task.detach();
            // The buffer text is already reverted before the task is awaited.
            assert_eq!(
                buffer.read_with(cx, |buffer, _| buffer.text()),
                "abc\ndef\nghi\njkl\nmno"
            );
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2131
    /// Deletes a file via the agent, then verifies that rejecting the deletion
    /// recreates the file on disk with its original content.
    #[gpui::test(iterations = 10)]
    async fn test_reject_deleted_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "content"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // Agent deletes the file; the log shows a single deleted hunk.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| {
                project.delete_file(file_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "content".into(),
                }]
            )]
        );

        // Rejecting the deletion restores both the buffer and the file on disk.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
        assert!(fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2190
    /// Rejecting the "added" hunk for a file the agent created from scratch
    /// should delete the file from disk.
    #[gpui::test(iterations = 10)]
    async fn test_reject_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        // Simulate the agent creating the file and writing its content.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        cx.run_until_parked();
        // The whole file shows up as one "added" hunk with empty old text.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 7),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Reject a range whose end (column 11) is past the 7-char content;
        // presumably it is clipped to the buffer bounds so the whole hunk is
        // covered — TODO confirm the clipping behavior.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 11)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        // Rejecting the creation removes the file entirely.
        assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2246
    /// If the user edits a file the agent created, rejecting the agent's edits
    /// must NOT delete the file or discard the user's additions.
    #[gpui::test]
    async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();

        // User makes additional edits
        // (note: no `buffer_edited` call — this edit is not attributed to the agent)
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| {
                buffer.edit([(10..10, "\nuser added this line")], None, cx);
            });
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // Reject all
        // The oversized range (100 rows) is presumably clipped to the buffer
        // bounds so that every hunk is covered — TODO confirm.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(100, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();

        // File should still contain all the content
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        let content = buffer.read_with(cx, |buffer, _| buffer.text());
        assert_eq!(content, "ai content\nuser added this line");
    }
2314
2315 #[gpui::test]
2316 async fn test_reject_after_accepting_hunk_on_created_file(cx: &mut TestAppContext) {
2317 init_test(cx);
2318
2319 let fs = FakeFs::new(cx.executor());
2320 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2321 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2322
2323 let file_path = project
2324 .read_with(cx, |project, cx| {
2325 project.find_project_path("dir/new_file", cx)
2326 })
2327 .unwrap();
2328 let buffer = project
2329 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
2330 .await
2331 .unwrap();
2332
2333 // AI creates file with initial content
2334 cx.update(|cx| {
2335 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2336 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
2337 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2338 });
2339 project
2340 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2341 .await
2342 .unwrap();
2343 cx.run_until_parked();
2344 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2345
2346 // User accepts the single hunk
2347 action_log.update(cx, |log, cx| {
2348 let buffer_range = Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id());
2349 log.keep_edits_in_range(buffer.clone(), buffer_range, None, cx)
2350 });
2351 cx.run_until_parked();
2352 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2353 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2354
2355 // AI modifies the file
2356 cx.update(|cx| {
2357 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
2358 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2359 });
2360 project
2361 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2362 .await
2363 .unwrap();
2364 cx.run_until_parked();
2365 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2366
2367 // User rejects the hunk
2368 action_log
2369 .update(cx, |log, cx| {
2370 let (task, _) = log.reject_edits_in_ranges(
2371 buffer.clone(),
2372 vec![Anchor::min_max_range_for_buffer(
2373 buffer.read(cx).remote_id(),
2374 )],
2375 None,
2376 cx,
2377 );
2378 task
2379 })
2380 .await
2381 .unwrap();
2382 cx.run_until_parked();
2383 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await,);
2384 assert_eq!(
2385 buffer.read_with(cx, |buffer, _| buffer.text()),
2386 "ai content v1"
2387 );
2388 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2389 }
2390
    /// Same scenario as the hunk-level test above, but driven through the
    /// bulk "Accept All" / "Reject All" entry points: after accepting the
    /// creation, rejecting later agent edits restores the accepted content.
    #[gpui::test]
    async fn test_reject_edits_on_previously_accepted_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();

        // User clicks "Accept All"
        action_log.update(cx, |log, cx| log.keep_all_edits(None, cx));
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared

        // AI modifies file again
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User clicks "Reject All"
        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();
        // File survives with the previously accepted (v1) content.
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "ai content v1"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2451
    /// Fuzz test: interleaves random keep/reject operations with random agent
    /// and user edits, then verifies (in `quiesce`) that replaying the
    /// tracked unreviewed edits onto the diff base reproduces the buffer.
    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        init_test(cx);

        // Number of random operations per run; overridable via `OPERATIONS`.
        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

        for _ in 0..operations {
            // 25% keep, 25% reject, 50% edit (agent or user with equal odds).
            match rng.random_range(0..100) {
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, None, cx)
                    });
                }
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            let (task, _) =
                                log.reject_edits_in_ranges(buffer.clone(), vec![range], None, cx);
                            task
                        })
                        .await
                        .unwrap();
                }
                _ => {
                    let is_agent_edit = rng.random_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        // Only agent edits are reported to the action log; user
                        // edits are picked up via buffer subscriptions instead.
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            // Occasionally check invariants mid-run, not just at the end.
            if rng.random_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

        // Verifies the tracked diff is consistent: applying every unreviewed
        // edit to `diff_base` must reconstruct the current buffer text.
        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    // Map the edit's row range back into `old_text` and splice
                    // in the corresponding rows from the current buffer.
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }
2545
    /// Simulates successive git commits (by moving the repo HEAD) and checks
    /// that hunks whose content now matches HEAD are automatically treated as
    /// kept, while hunks the commit didn't (exactly) incorporate survive.
    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        // Initial HEAD matches the working copy exactly.
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        // All five agent edits start out unreviewed.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        // Only the three hunks the commit didn't exactly match remain.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Make another commit that accepts the NEW line but with different content
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into())],
            "0000002",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer,
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2707
    /// Rejecting all edits should stash undo state; `undo_last_reject` then
    /// restores the agent's edit and clears the pending-undo flag.
    #[gpui::test]
    async fn test_undo_last_reject(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "abc\ndef\nghi"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Track the buffer and make an agent edit
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit(
                        [(Point::new(1, 0)..Point::new(1, 3), "AGENT_EDIT")],
                        None,
                        cx,
                    )
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();

        // Verify the agent edit is there
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nAGENT_EDIT\nghi"
        );
        assert!(!unreviewed_hunks(&action_log, cx).is_empty());

        // Reject all edits
        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();

        // Verify the buffer is back to original
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi"
        );
        assert!(unreviewed_hunks(&action_log, cx).is_empty());

        // Verify undo state is available
        assert!(action_log.read_with(cx, |log, _| log.has_pending_undo()));

        // Undo the reject
        action_log
            .update(cx, |log, cx| log.undo_last_reject(cx))
            .await;

        cx.run_until_parked();

        // Verify the agent edit is restored
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nAGENT_EDIT\nghi"
        );

        // Verify undo state is cleared (undo is single-shot, not a stack)
        assert!(!action_log.read_with(cx, |log, _| log.has_pending_undo()));
    }
2786
    /// A `buffer_read` on a child log should forward to the linked parent log,
    /// so both report the buffer as stale after a subsequent user edit.
    #[gpui::test]
    async fn test_linked_action_log_buffer_read(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Only the child is told about the read; forwarding should reach the parent.
        cx.update(|cx| {
            child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
        });

        // Neither log considers the buffer stale immediately after reading it.
        let child_stale = cx.read(|cx| {
            child_log
                .read(cx)
                .stale_buffers(cx)
                .cloned()
                .collect::<Vec<_>>()
        });
        let parent_stale = cx.read(|cx| {
            parent_log
                .read(cx)
                .stale_buffers(cx)
                .cloned()
                .collect::<Vec<_>>()
        });
        assert!(child_stale.is_empty());
        assert!(parent_stale.is_empty());

        // Simulate a user edit after the agent read the file.
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| {
                buffer.edit([(0..5, "goodbye")], None, cx).unwrap();
            });
        });
        cx.run_until_parked();

        // Both child and parent should see the buffer as stale because both tracked
        // it at the pre-edit version via buffer_read forwarding.
        let child_stale = cx.read(|cx| {
            child_log
                .read(cx)
                .stale_buffers(cx)
                .cloned()
                .collect::<Vec<_>>()
        });
        let parent_stale = cx.read(|cx| {
            parent_log
                .read(cx)
                .stale_buffers(cx)
                .cloned()
                .collect::<Vec<_>>()
        });
        assert_eq!(child_stale, vec![buffer.clone()]);
        assert_eq!(parent_stale, vec![buffer]);
    }
2856
    /// A `buffer_edited` on a child log should forward to the linked parent
    /// log, so both report the same unreviewed hunk.
    #[gpui::test]
    async fn test_linked_action_log_buffer_edited(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Only the child log is notified; the parent learns via forwarding.
        cx.update(|cx| {
            child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 0)..Point::new(1, 3), "DEF")], None, cx)
                    .unwrap();
            });
            child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();

        let expected_hunks = vec![(
            buffer,
            vec![HunkStatus {
                range: Point::new(1, 0)..Point::new(2, 0),
                diff_status: DiffHunkStatusKind::Modified,
                old_text: "def\n".into(),
            }],
        )];
        assert_eq!(
            unreviewed_hunks(&child_log, cx),
            expected_hunks,
            "child should track the agent edit"
        );
        assert_eq!(
            unreviewed_hunks(&parent_log, cx),
            expected_hunks,
            "parent should also track the agent edit via linked log forwarding"
        );
    }
2907
    /// A `buffer_created` on a child log should forward to the linked parent
    /// log, so both report the new file's "added" hunk.
    #[gpui::test]
    async fn test_linked_action_log_buffer_created(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Child log records the creation and edit; parent learns via forwarding.
        cx.update(|cx| {
            child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("hello", cx));
            child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();

        let expected_hunks = vec![(
            buffer.clone(),
            vec![HunkStatus {
                range: Point::new(0, 0)..Point::new(0, 5),
                diff_status: DiffHunkStatusKind::Added,
                old_text: "".into(),
            }],
        )];
        assert_eq!(
            unreviewed_hunks(&child_log, cx),
            expected_hunks,
            "child should track the created file"
        );
        assert_eq!(
            unreviewed_hunks(&parent_log, cx),
            expected_hunks,
            "parent should also track the created file via linked log forwarding"
        );
    }
2959
    /// A `will_delete_buffer` on a child log should forward to the linked
    /// parent log, so both report the file's "deleted" hunk.
    #[gpui::test]
    async fn test_linked_action_log_will_delete_buffer(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "hello\n"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // Child log records the pending deletion, then the file is removed.
        cx.update(|cx| {
            child_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.delete_file(file_path, false, cx))
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();

        let expected_hunks = vec![(
            buffer.clone(),
            vec![HunkStatus {
                range: Point::new(0, 0)..Point::new(0, 0),
                diff_status: DiffHunkStatusKind::Deleted,
                old_text: "hello\n".into(),
            }],
        )];
        assert_eq!(
            unreviewed_hunks(&child_log, cx),
            expected_hunks,
            "child should track the deleted file"
        );
        assert_eq!(
            unreviewed_hunks(&parent_log, cx),
            expected_hunks,
            "parent should also track the deleted file via linked log forwarding"
        );
    }
3009
    /// Simulates the subagent scenario: two child logs linked to the same parent, each
    /// editing a different file. The parent accumulates all edits while each child
    /// only sees its own.
    #[gpui::test]
    async fn test_linked_action_log_independent_tracking(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file_a": "content of a",
                "file_b": "content of b",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log_1 =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
        let child_log_2 =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_a_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/file_a", cx)
            })
            .unwrap();
        let file_b_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/file_b", cx)
            })
            .unwrap();
        let buffer_a = project
            .update(cx, |project, cx| project.open_buffer(file_a_path, cx))
            .await
            .unwrap();
        let buffer_b = project
            .update(cx, |project, cx| project.open_buffer(file_b_path, cx))
            .await
            .unwrap();

        // Each child edits its own file; neither is told about the other's.
        cx.update(|cx| {
            child_log_1.update(cx, |log, cx| log.buffer_read(buffer_a.clone(), cx));
            buffer_a.update(cx, |buffer, cx| {
                buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap();
            });
            child_log_1.update(cx, |log, cx| log.buffer_edited(buffer_a.clone(), cx));

            child_log_2.update(cx, |log, cx| log.buffer_read(buffer_b.clone(), cx));
            buffer_b.update(cx, |buffer, cx| {
                buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap();
            });
            child_log_2.update(cx, |log, cx| log.buffer_edited(buffer_b.clone(), cx));
        });
        cx.run_until_parked();

        let child_1_changed: Vec<_> = cx.read(|cx| {
            child_log_1
                .read(cx)
                .changed_buffers(cx)
                .into_keys()
                .collect()
        });
        let child_2_changed: Vec<_> = cx.read(|cx| {
            child_log_2
                .read(cx)
                .changed_buffers(cx)
                .into_keys()
                .collect()
        });
        let parent_changed: Vec<_> = cx.read(|cx| {
            parent_log
                .read(cx)
                .changed_buffers(cx)
                .into_keys()
                .collect()
        });

        assert_eq!(
            child_1_changed,
            vec![buffer_a.clone()],
            "child 1 should only track file_a"
        );
        assert_eq!(
            child_2_changed,
            vec![buffer_b.clone()],
            "child 2 should only track file_b"
        );
        // The parent sees both buffers, but their ordering is not asserted.
        assert_eq!(parent_changed.len(), 2, "parent should track both files");
        assert!(
            parent_changed.contains(&buffer_a) && parent_changed.contains(&buffer_b),
            "parent should contain both buffer_a and buffer_b"
        );
    }
3105
3106 #[gpui::test]
3107 async fn test_file_read_time_recorded_on_buffer_read(cx: &mut TestAppContext) {
3108 init_test(cx);
3109
3110 let fs = FakeFs::new(cx.executor());
3111 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3112 .await;
3113 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3114 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3115
3116 let file_path = project
3117 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3118 .unwrap();
3119 let buffer = project
3120 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3121 .await
3122 .unwrap();
3123
3124 let abs_path = PathBuf::from(path!("/dir/file"));
3125 assert!(
3126 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3127 "file_read_time should be None before buffer_read"
3128 );
3129
3130 cx.update(|cx| {
3131 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
3132 });
3133
3134 assert!(
3135 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3136 "file_read_time should be recorded after buffer_read"
3137 );
3138 }
3139
3140 #[gpui::test]
3141 async fn test_file_read_time_recorded_on_buffer_edited(cx: &mut TestAppContext) {
3142 init_test(cx);
3143
3144 let fs = FakeFs::new(cx.executor());
3145 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3146 .await;
3147 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3148 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3149
3150 let file_path = project
3151 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3152 .unwrap();
3153 let buffer = project
3154 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3155 .await
3156 .unwrap();
3157
3158 let abs_path = PathBuf::from(path!("/dir/file"));
3159 assert!(
3160 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3161 "file_read_time should be None before buffer_edited"
3162 );
3163
3164 cx.update(|cx| {
3165 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
3166 });
3167
3168 assert!(
3169 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3170 "file_read_time should be recorded after buffer_edited"
3171 );
3172 }
3173
3174 #[gpui::test]
3175 async fn test_file_read_time_recorded_on_buffer_created(cx: &mut TestAppContext) {
3176 init_test(cx);
3177
3178 let fs = FakeFs::new(cx.executor());
3179 fs.insert_tree(path!("/dir"), json!({"file": "existing content"}))
3180 .await;
3181 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3182 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3183
3184 let file_path = project
3185 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3186 .unwrap();
3187 let buffer = project
3188 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3189 .await
3190 .unwrap();
3191
3192 let abs_path = PathBuf::from(path!("/dir/file"));
3193 assert!(
3194 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3195 "file_read_time should be None before buffer_created"
3196 );
3197
3198 cx.update(|cx| {
3199 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
3200 });
3201
3202 assert!(
3203 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3204 "file_read_time should be recorded after buffer_created"
3205 );
3206 }
3207
3208 #[gpui::test]
3209 async fn test_file_read_time_removed_on_delete(cx: &mut TestAppContext) {
3210 init_test(cx);
3211
3212 let fs = FakeFs::new(cx.executor());
3213 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3214 .await;
3215 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3216 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3217
3218 let file_path = project
3219 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3220 .unwrap();
3221 let buffer = project
3222 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3223 .await
3224 .unwrap();
3225
3226 let abs_path = PathBuf::from(path!("/dir/file"));
3227
3228 cx.update(|cx| {
3229 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
3230 });
3231 assert!(
3232 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3233 "file_read_time should exist after buffer_read"
3234 );
3235
3236 cx.update(|cx| {
3237 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
3238 });
3239 assert!(
3240 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3241 "file_read_time should be removed after will_delete_buffer"
3242 );
3243 }
3244
3245 #[gpui::test]
3246 async fn test_file_read_time_not_forwarded_to_linked_action_log(cx: &mut TestAppContext) {
3247 init_test(cx);
3248
3249 let fs = FakeFs::new(cx.executor());
3250 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3251 .await;
3252 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3253 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
3254 let child_log =
3255 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
3256
3257 let file_path = project
3258 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3259 .unwrap();
3260 let buffer = project
3261 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3262 .await
3263 .unwrap();
3264
3265 let abs_path = PathBuf::from(path!("/dir/file"));
3266
3267 cx.update(|cx| {
3268 child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
3269 });
3270 assert!(
3271 child_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3272 "child should record file_read_time on buffer_read"
3273 );
3274 assert!(
3275 parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3276 "parent should NOT get file_read_time from child's buffer_read"
3277 );
3278
3279 cx.update(|cx| {
3280 child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
3281 });
3282 assert!(
3283 parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3284 "parent should NOT get file_read_time from child's buffer_edited"
3285 );
3286
3287 cx.update(|cx| {
3288 child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
3289 });
3290 assert!(
3291 parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3292 "parent should NOT get file_read_time from child's buffer_created"
3293 );
3294 }
3295
    /// Snapshot of a single diff hunk, used by tests to compare actual hunks
    /// against expected values via `PartialEq`.
    #[derive(Debug, PartialEq)]
    struct HunkStatus {
        // Buffer range (in points) the hunk covers.
        range: Range<Point>,
        // Kind of the hunk's diff status (see `DiffHunkStatusKind`).
        diff_status: DiffHunkStatusKind,
        // Text the hunk replaced, extracted from the diff's base text.
        old_text: String,
    }
3302
3303 fn unreviewed_hunks(
3304 action_log: &Entity<ActionLog>,
3305 cx: &TestAppContext,
3306 ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
3307 cx.read(|cx| {
3308 action_log
3309 .read(cx)
3310 .changed_buffers(cx)
3311 .into_iter()
3312 .map(|(buffer, diff)| {
3313 let snapshot = buffer.read(cx).snapshot();
3314 (
3315 buffer,
3316 diff.read(cx)
3317 .snapshot(cx)
3318 .hunks(&snapshot)
3319 .map(|hunk| HunkStatus {
3320 diff_status: hunk.status().kind,
3321 range: hunk.range,
3322 old_text: diff
3323 .read(cx)
3324 .base_text(cx)
3325 .text_for_range(hunk.diff_base_byte_range)
3326 .collect(),
3327 })
3328 .collect(),
3329 )
3330 })
3331 .collect()
3332 })
3333 }
3334}