1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use clock;
4use collections::{BTreeMap, HashMap};
5use fs::MTime;
6use futures::{FutureExt, StreamExt, channel::mpsc};
7use gpui::{
8 App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
9};
10use language::{Anchor, Buffer, BufferEvent, Point, ToOffset, ToPoint};
11use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
12use std::{
13 cmp,
14 ops::Range,
15 path::{Path, PathBuf},
16 sync::Arc,
17};
18use text::{Edit, Patch, Rope};
19use util::{RangeExt, ResultExt as _};
20
/// Stores undo information for a single buffer's rejected edits
#[derive(Clone)]
pub struct PerBufferUndo {
    // Weak handle so undo doesn't keep a closed buffer alive; upgraded (and
    // skipped if gone) when the undo is applied.
    pub buffer: WeakEntity<Buffer>,
    // Anchor ranges paired with the agent text that was reverted; replaying
    // these edits restores the rejected agent content.
    pub edits_to_restore: Vec<(Range<Anchor>, String)>,
    // What the buffer's tracked status was at reject time (see `UndoBufferStatus`).
    pub status: UndoBufferStatus,
}
28
/// Tracks the buffer status for undo purposes
#[derive(Clone, Debug)]
pub enum UndoBufferStatus {
    /// Buffer existed before the agent edited it; undo restores the agent's edits.
    Modified,
    /// Buffer was created by the agent.
    /// - `had_existing_content: true` - Agent overwrote an existing file. On reject, the
    ///   original content was restored. Undo is supported: we restore the agent's content.
    /// - `had_existing_content: false` - Agent created a new file that didn't exist before.
    ///   On reject, the file was deleted. Undo is NOT currently supported (would require
    ///   recreating the file). Future TODO.
    Created {
        had_existing_content: bool,
    },
}
43
/// Stores undo information for the most recent reject operation
#[derive(Clone)]
pub struct LastRejectUndo {
    /// Per-buffer undo information, one entry per buffer whose edits were
    /// rejected and can be restored.
    pub buffers: Vec<PerBufferUndo>,
}
50
/// Tracks actions performed by tools in a thread
pub struct ActionLog {
    /// Buffers that we want to notify the model about when they change.
    tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
    /// The project this action log is associated with
    project: Entity<Project>,
    /// An action log to forward all public methods to
    /// Useful in cases like subagents, where we want to track individual diffs for this subagent,
    /// but also want to associate the reads/writes with a parent review experience
    linked_action_log: Option<Entity<ActionLog>>,
    /// Stores undo information for the most recent reject operation
    last_reject_undo: Option<LastRejectUndo>,
    /// Tracks the last time files were read by the agent, to detect external modifications.
    /// Keyed by absolute path; values are on-disk mtimes captured at read time.
    file_read_times: HashMap<PathBuf, MTime>,
}
66
67impl ActionLog {
68 /// Creates a new, empty action log associated with the given project.
69 pub fn new(project: Entity<Project>) -> Self {
70 Self {
71 tracked_buffers: BTreeMap::default(),
72 project,
73 linked_action_log: None,
74 last_reject_undo: None,
75 file_read_times: HashMap::default(),
76 }
77 }
78
    /// Builder-style setter: links this log to a parent log so that reads,
    /// writes, and deletions are forwarded to it as well.
    pub fn with_linked_action_log(mut self, linked_action_log: Entity<ActionLog>) -> Self {
        self.linked_action_log = Some(linked_action_log);
        self
    }
83
    /// Returns the project this action log is associated with.
    pub fn project(&self) -> &Entity<Project> {
        &self.project
    }
87
    /// Returns the mtime recorded when the agent last read the file at
    /// `path` (absolute), or `None` if it was never recorded.
    pub fn file_read_time(&self, path: &Path) -> Option<MTime> {
        self.file_read_times.get(path).copied()
    }
91
92 fn update_file_read_time(&mut self, buffer: &Entity<Buffer>, cx: &App) {
93 let buffer = buffer.read(cx);
94 if let Some(file) = buffer.file() {
95 if let Some(local_file) = file.as_local() {
96 if let Some(mtime) = file.disk_state().mtime() {
97 let abs_path = local_file.abs_path(cx);
98 self.file_read_times.insert(abs_path, mtime);
99 }
100 }
101 }
102 }
103
104 fn remove_file_read_time(&mut self, buffer: &Entity<Buffer>, cx: &App) {
105 let buffer = buffer.read(cx);
106 if let Some(file) = buffer.file() {
107 if let Some(local_file) = file.as_local() {
108 let abs_path = local_file.abs_path(cx);
109 self.file_read_times.remove(&abs_path);
110 }
111 }
112 }
113
    /// Ensures `buffer` is tracked, returning its `TrackedBuffer` entry.
    ///
    /// When `is_created` is true, computes a `Created` status that preserves
    /// any previously-known on-disk content so a later reject can restore it.
    /// First-time tracking also registers the buffer with language servers,
    /// builds a `BufferDiff`, and spawns the diff-maintenance task.
    fn track_buffer_internal(
        &mut self,
        buffer: Entity<Buffer>,
        is_created: bool,
        cx: &mut Context<Self>,
    ) -> &mut TrackedBuffer {
        let status = if is_created {
            // Re-creating an already-tracked buffer: keep whatever original
            // content we already captured, or capture the current diff base
            // as the pre-existing content.
            if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
                match tracked.status {
                    TrackedBufferStatus::Created {
                        existing_file_content,
                    } => TrackedBufferStatus::Created {
                        existing_file_content,
                    },
                    TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
                        TrackedBufferStatus::Created {
                            existing_file_content: Some(tracked.diff_base),
                        }
                    }
                }
            } else if buffer
                .read(cx)
                .file()
                .is_some_and(|file| file.disk_state().exists())
            {
                // The agent is overwriting a file that exists on disk:
                // capture its current content so reject can restore it.
                TrackedBufferStatus::Created {
                    existing_file_content: Some(buffer.read(cx).as_rope().clone()),
                }
            } else {
                // Brand-new file: nothing to restore on reject.
                TrackedBufferStatus::Created {
                    existing_file_content: None,
                }
            }
        } else {
            TrackedBufferStatus::Modified
        };

        let tracked_buffer = self
            .tracked_buffers
            .entry(buffer.clone())
            .or_insert_with(|| {
                // Keep language servers aware of the buffer while we track it.
                let open_lsp_handle = self.project.update(cx, |project, cx| {
                    project.register_buffer_with_language_servers(&buffer, cx)
                });

                let text_snapshot = buffer.read(cx).text_snapshot();
                let language = buffer.read(cx).language().cloned();
                let language_registry = buffer.read(cx).language_registry();
                let diff = cx.new(|cx| {
                    let mut diff = BufferDiff::new(&text_snapshot, cx);
                    diff.language_changed(language, language_registry, cx);
                    diff
                });
                let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
                let diff_base;
                let unreviewed_edits;
                if is_created {
                    // Created buffers diff against empty content: the whole
                    // buffer is one unreviewed insertion.
                    diff_base = Rope::default();
                    unreviewed_edits = Patch::new(vec![Edit {
                        old: 0..1,
                        new: 0..text_snapshot.max_point().row + 1,
                    }])
                } else {
                    // Modified buffers start clean: diff base is the content
                    // as of tracking, with no unreviewed edits yet.
                    diff_base = buffer.read(cx).as_rope().clone();
                    unreviewed_edits = Patch::default();
                }
                TrackedBuffer {
                    buffer: buffer.clone(),
                    diff_base,
                    unreviewed_edits,
                    snapshot: text_snapshot,
                    status,
                    version: buffer.read(cx).version(),
                    diff,
                    diff_update: diff_update_tx,
                    _open_lsp_handle: open_lsp_handle,
                    // Long-lived task that recomputes the diff whenever
                    // updates arrive on `diff_update_rx`.
                    _maintain_diff: cx.spawn({
                        let buffer = buffer.clone();
                        async move |this, cx| {
                            Self::maintain_diff(this, buffer, diff_update_rx, cx)
                                .await
                                .ok();
                        }
                    }),
                    _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
                }
            });
        // Whether newly inserted or pre-existing, refresh the tracked version
        // to the buffer's current version.
        tracked_buffer.version = buffer.read(cx).version();
        tracked_buffer
    }
204
    /// Subscription callback for tracked-buffer events: schedules a diff
    /// update on edits and reconciles tracking state on file handle changes.
    fn handle_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::Edited { .. } => {
                let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
                    return;
                };
                // Ignore edit events that don't advance past the version we
                // already processed (e.g. our own bookkeeping updates).
                let buffer_version = buffer.read(cx).version();
                if !buffer_version.changed_since(&tracked_buffer.version) {
                    return;
                }
                self.handle_buffer_edited(buffer, cx);
            }
            BufferEvent::FileHandleChanged => {
                self.handle_buffer_file_changed(buffer, cx);
            }
            _ => {}
        };
    }
228
229 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
230 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
231 return;
232 };
233 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
234 }
235
    /// Reconciles tracking state when a tracked buffer's file handle changes
    /// on disk (external deletion or resurrection).
    fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        match tracked_buffer.status {
            TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
                if buffer
                    .read(cx)
                    .file()
                    .is_some_and(|file| file.disk_state().is_deleted())
                {
                    // If the buffer had been edited by a tool, but it got
                    // deleted externally, we want to stop tracking it.
                    self.tracked_buffers.remove(&buffer);
                }
                cx.notify();
            }
            TrackedBufferStatus::Deleted => {
                if buffer
                    .read(cx)
                    .file()
                    .is_some_and(|file| !file.disk_state().is_deleted())
                {
                    // If the buffer had been deleted by a tool, but it got
                    // resurrected externally, we want to clear the edits we
                    // were tracking and reset the buffer's state.
                    self.tracked_buffers.remove(&buffer);
                    self.track_buffer_internal(buffer, false, cx);
                }
                cx.notify();
            }
        }
    }
270
    /// Long-running task that keeps a tracked buffer's diff up to date.
    ///
    /// Processes buffer snapshots arriving on `buffer_updates` (preferred via
    /// `select_biased!`) and, when the git HEAD commit changes, re-runs
    /// `keep_committed_edits` so committed agent edits are treated as kept.
    /// Exits when the update channel closes.
    async fn maintain_diff(
        this: WeakEntity<Self>,
        buffer: Entity<Buffer>,
        mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
        let git_diff = this
            .update(cx, |this, cx| {
                this.project.update(cx, |project, cx| {
                    project.open_uncommitted_diff(buffer.clone(), cx)
                })
            })?
            .await
            .ok();
        let buffer_repo = git_store.read_with(cx, |git_store, cx| {
            git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        });

        // Watch for HEAD-commit changes: a DiffChanged event with a new head
        // commit means something was committed, so we notify the loop below.
        let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
        let _repo_subscription =
            if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
                cx.update(|cx| {
                    let mut old_head = buffer_repo.read(cx).head_commit.clone();
                    Some(cx.subscribe(git_diff, move |_, event, cx| {
                        if let buffer_diff::BufferDiffEvent::DiffChanged { .. } = event {
                            let new_head = buffer_repo.read(cx).head_commit.clone();
                            if new_head != old_head {
                                old_head = new_head;
                                git_diff_updates_tx.send(()).ok();
                            }
                        }
                    }))
                })
            } else {
                None
            };

        loop {
            futures::select_biased! {
                buffer_update = buffer_updates.next() => {
                    if let Some((author, buffer_snapshot)) = buffer_update {
                        Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
                    } else {
                        // Channel closed: the TrackedBuffer was dropped.
                        break;
                    }
                }
                _ = git_diff_updates_rx.changed().fuse() => {
                    if let Some(git_diff) = git_diff.as_ref() {
                        Self::keep_committed_edits(&this, &buffer, git_diff, cx).await?;
                    }
                }
            }
        }

        Ok(())
    }
328
    /// Incorporates a new buffer snapshot into the tracked diff.
    ///
    /// For user-authored changes, non-conflicting edits are folded into the
    /// diff base (so they don't show as unreviewed agent edits); agent edits
    /// leave the base untouched. The heavy diffing runs on a background task,
    /// then `update_diff` refreshes the visible diff and unreviewed edits.
    async fn track_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        author: ChangeAuthor,
        buffer_snapshot: text::BufferSnapshot,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let rebase = this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;

            let rebase = cx.background_spawn({
                let mut base_text = tracked_buffer.diff_base.clone();
                let old_snapshot = tracked_buffer.snapshot.clone();
                let new_snapshot = buffer_snapshot.clone();
                let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let edits = diff_snapshots(&old_snapshot, &new_snapshot);
                async move {
                    if let ChangeAuthor::User = author {
                        // Fold user edits that don't overlap unreviewed agent
                        // edits into the base text.
                        apply_non_conflicting_edits(
                            &unreviewed_edits,
                            edits,
                            &mut base_text,
                            new_snapshot.as_rope(),
                        );
                    }

                    // Return both string form (for BufferDiff) and rope form
                    // (for our own bookkeeping).
                    (Arc::from(base_text.to_string().as_str()), base_text)
                }
            });

            anyhow::Ok(rebase)
        })??;
        let (new_base_text, new_diff_base) = rebase.await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
376
    /// Marks unreviewed agent edits as kept when they were committed to git.
    ///
    /// Line-diffs the agent's diff base against the git diff base; any
    /// unreviewed edit whose old range and new content exactly match a
    /// committed edit is absorbed into a new diff base, removing it from
    /// review. Finishes by refreshing the diff via `update_diff`.
    async fn keep_committed_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        git_diff: &Entity<BufferDiff>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let buffer_snapshot = this.read_with(cx, |this, _cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok(tracked_buffer.snapshot.clone())
        })??;
        let (new_base_text, new_diff_base) = this
            .read_with(cx, |this, cx| {
                let tracked_buffer = this
                    .tracked_buffers
                    .get(buffer)
                    .context("buffer not tracked")?;
                let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let agent_diff_base = tracked_buffer.diff_base.clone();
                let git_diff_base = git_diff.read(cx).base_text(cx).as_rope().clone();
                let buffer_text = tracked_buffer.snapshot.as_rope().clone();
                anyhow::Ok(cx.background_spawn(async move {
                    let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
                    // Row-based edits transforming the agent's diff base into
                    // what git now has as the base (i.e. what got committed).
                    let committed_edits = language::line_diff(
                        &agent_diff_base.to_string(),
                        &git_diff_base.to_string(),
                    )
                    .into_iter()
                    .map(|(old, new)| Edit { old, new });

                    let mut new_agent_diff_base = agent_diff_base.clone();
                    // Row offset accumulated by earlier replacements; maps
                    // original old-rows into `new_agent_diff_base` rows.
                    let mut row_delta = 0i32;
                    for committed in committed_edits {
                        while let Some(unreviewed) = old_unreviewed_edits.peek() {
                            // If the committed edit matches the unreviewed
                            // edit, assume the user wants to keep it.
                            if committed.old == unreviewed.old {
                                let unreviewed_new =
                                    buffer_text.slice_rows(unreviewed.new.clone()).to_string();
                                let committed_new =
                                    git_diff_base.slice_rows(committed.new.clone()).to_string();
                                if unreviewed_new == committed_new {
                                    let old_byte_start =
                                        new_agent_diff_base.point_to_offset(Point::new(
                                            (unreviewed.old.start as i32 + row_delta) as u32,
                                            0,
                                        ));
                                    let old_byte_end =
                                        new_agent_diff_base.point_to_offset(cmp::min(
                                            Point::new(
                                                (unreviewed.old.end as i32 + row_delta) as u32,
                                                0,
                                            ),
                                            new_agent_diff_base.max_point(),
                                        ));
                                    new_agent_diff_base
                                        .replace(old_byte_start..old_byte_end, &unreviewed_new);
                                    row_delta +=
                                        unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
                                }
                            } else if unreviewed.old.start >= committed.old.end {
                                // This unreviewed edit is past the committed
                                // edit; move on to the next committed edit.
                                break;
                            }

                            old_unreviewed_edits.next().unwrap();
                        }
                    }

                    (
                        Arc::from(new_agent_diff_base.to_string().as_str()),
                        new_agent_diff_base,
                    )
                }))
            })??
            .await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
465
    /// Applies a recomputed diff base to the tracked buffer's `BufferDiff`
    /// and rebuilds the unreviewed-edit patch from the resulting hunks,
    /// then stores the new base/snapshot/edits and notifies observers.
    async fn update_diff(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        buffer_snapshot: text::BufferSnapshot,
        new_base_text: Arc<str>,
        new_diff_base: Rope,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let (diff, language) = this.read_with(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok((
                tracked_buffer.diff.clone(),
                buffer.read(cx).language().cloned(),
            ))
        })??;
        let update = diff
            .update(cx, |diff, cx| {
                diff.update_diff(
                    buffer_snapshot.clone(),
                    Some(new_base_text),
                    Some(true),
                    language,
                    cx,
                )
            })
            .await;
        diff.update(cx, |diff, cx| {
            diff.set_snapshot(update.clone(), &buffer_snapshot, cx)
        })
        .await;
        let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx));

        // Convert every hunk in the fresh diff into a row-based edit,
        // off the main thread.
        let unreviewed_edits = cx
            .background_spawn({
                let buffer_snapshot = buffer_snapshot.clone();
                let new_diff_base = new_diff_base.clone();
                async move {
                    let mut unreviewed_edits = Patch::default();
                    for hunk in diff_snapshot.hunks_intersecting_range(
                        Anchor::min_for_buffer(buffer_snapshot.remote_id())
                            ..Anchor::max_for_buffer(buffer_snapshot.remote_id()),
                        &buffer_snapshot,
                    ) {
                        let old_range = new_diff_base
                            .offset_to_point(hunk.diff_base_byte_range.start)
                            ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
                        let new_range = hunk.range.start..hunk.range.end;
                        unreviewed_edits.push(point_to_row_edit(
                            Edit {
                                old: old_range,
                                new: new_range,
                            },
                            &new_diff_base,
                            buffer_snapshot.as_rope(),
                        ));
                    }
                    unreviewed_edits
                }
            })
            .await;
        // Commit the new state back onto the tracked buffer.
        this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;
            tracked_buffer.diff_base = new_diff_base;
            tracked_buffer.snapshot = buffer_snapshot;
            tracked_buffer.unreviewed_edits = unreviewed_edits;
            cx.notify();
            anyhow::Ok(())
        })?
    }
541
    /// Track a buffer as read by agent, so we can notify the model about user edits.
    /// Also records the file's current mtime for external-modification detection.
    pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.buffer_read_impl(buffer, true, cx);
    }
546
547 fn buffer_read_impl(
548 &mut self,
549 buffer: Entity<Buffer>,
550 record_file_read_time: bool,
551 cx: &mut Context<Self>,
552 ) {
553 if let Some(linked_action_log) = &self.linked_action_log {
554 // We don't want to share read times since the other agent hasn't read it necessarily
555 linked_action_log.update(cx, |log, cx| {
556 log.buffer_read_impl(buffer.clone(), false, cx);
557 });
558 }
559 if record_file_read_time {
560 self.update_file_read_time(&buffer, cx);
561 }
562 self.track_buffer_internal(buffer, false, cx);
563 }
564
    /// Mark a buffer as created by agent, so we can refresh it in the context.
    /// Also records the file's current mtime for external-modification detection.
    pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.buffer_created_impl(buffer, true, cx);
    }
569
570 fn buffer_created_impl(
571 &mut self,
572 buffer: Entity<Buffer>,
573 record_file_read_time: bool,
574 cx: &mut Context<Self>,
575 ) {
576 if let Some(linked_action_log) = &self.linked_action_log {
577 // We don't want to share read times since the other agent hasn't read it necessarily
578 linked_action_log.update(cx, |log, cx| {
579 log.buffer_created_impl(buffer.clone(), false, cx);
580 });
581 }
582 if record_file_read_time {
583 self.update_file_read_time(&buffer, cx);
584 }
585 self.track_buffer_internal(buffer, true, cx);
586 }
587
    /// Mark a buffer as edited by agent, so we can refresh it in the context.
    /// Also records the file's current mtime for external-modification detection.
    pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.buffer_edited_impl(buffer, true, cx);
    }
592
593 fn buffer_edited_impl(
594 &mut self,
595 buffer: Entity<Buffer>,
596 record_file_read_time: bool,
597 cx: &mut Context<Self>,
598 ) {
599 if let Some(linked_action_log) = &self.linked_action_log {
600 // We don't want to share read times since the other agent hasn't read it necessarily
601 linked_action_log.update(cx, |log, cx| {
602 log.buffer_edited_impl(buffer.clone(), false, cx);
603 });
604 }
605 if record_file_read_time {
606 self.update_file_read_time(&buffer, cx);
607 }
608 let new_version = buffer.read(cx).version();
609 let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
610 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
611 tracked_buffer.status = TrackedBufferStatus::Modified;
612 }
613
614 tracked_buffer.version = new_version;
615 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
616 }
617
    /// Records that the agent is about to delete `buffer`.
    ///
    /// A buffer the agent itself created is simply untracked; a modified
    /// buffer is marked `Deleted` (and, when there is no linked log, its
    /// text is cleared so the diff shows a full deletion). The deletion is
    /// forwarded to the linked log if one exists.
    pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        // Ok to propagate file read time removal to linked action log
        self.remove_file_read_time(&buffer, cx);
        let has_linked_action_log = self.linked_action_log.is_some();
        let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
        match tracked_buffer.status {
            TrackedBufferStatus::Created { .. } => {
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            TrackedBufferStatus::Modified => {
                tracked_buffer.status = TrackedBufferStatus::Deleted;
                if !has_linked_action_log {
                    buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
                    tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
                }
            }

            TrackedBufferStatus::Deleted => {}
        }

        if let Some(linked_action_log) = &mut self.linked_action_log {
            linked_action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        }

        // When linked, the diff update was deferred above; schedule it now
        // if the buffer is still tracked.
        if has_linked_action_log && let Some(tracked_buffer) = self.tracked_buffers.get(&buffer) {
            tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
        }

        cx.notify();
    }
649
    /// Accepts ("keeps") all unreviewed edits intersecting `buffer_range`:
    /// the kept edits are folded into the diff base so they no longer show
    /// as pending. A deleted buffer is simply untracked. Reports accepted
    /// edits to telemetry when provided.
    pub fn keep_edits_in_range(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_range: Range<impl language::ToPoint>,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
        match tracked_buffer.status {
            TrackedBufferStatus::Deleted => {
                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            _ => {
                let buffer = buffer.read(cx);
                let buffer_range =
                    buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
                // Row offset introduced into the diff base by edits we've
                // already folded in during this pass.
                let mut delta = 0i32;
                tracked_buffer.unreviewed_edits.retain_mut(|edit| {
                    edit.old.start = (edit.old.start as i32 + delta) as u32;
                    edit.old.end = (edit.old.end as i32 + delta) as u32;

                    if buffer_range.end.row < edit.new.start
                        || buffer_range.start.row > edit.new.end
                    {
                        // Outside the kept range: leave it unreviewed.
                        true
                    } else {
                        // Inside the kept range: copy the new text over the
                        // corresponding rows of the diff base and drop the edit.
                        let old_range = tracked_buffer
                            .diff_base
                            .point_to_offset(Point::new(edit.old.start, 0))
                            ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                Point::new(edit.old.end, 0),
                                tracked_buffer.diff_base.max_point(),
                            ));
                        let new_range = tracked_buffer
                            .snapshot
                            .point_to_offset(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.point_to_offset(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        tracked_buffer.diff_base.replace(
                            old_range,
                            &tracked_buffer
                                .snapshot
                                .text_for_range(new_range)
                                .collect::<String>(),
                        );
                        delta += edit.new_len() as i32 - edit.old_len() as i32;
                        metrics.add_edit(edit);
                        false
                    }
                });
                // Once a created buffer has no pending edits, treat it as an
                // ordinary modified buffer from here on.
                if tracked_buffer.unreviewed_edits.is_empty()
                    && let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status
                {
                    tracked_buffer.status = TrackedBufferStatus::Modified;
                }
                tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
            }
        }
        if let Some(telemetry) = telemetry {
            telemetry_report_accepted_edits(&telemetry, metrics);
        }
    }
720
    /// Rejects unreviewed agent edits intersecting `buffer_ranges`, reverting
    /// them to the diff-base content.
    ///
    /// Behavior depends on the tracked status:
    /// - `Created` with prior content: restores the original file content.
    /// - `Created` without prior content: deletes the file, but only when the
    ///   buffer provably contains only agent content.
    /// - `Deleted`: restores the buffer from the diff base.
    /// - `Modified`: reverts only the intersecting edits.
    ///
    /// Returns the save/delete task plus optional per-buffer undo info that
    /// lets `undo_last_reject` restore the rejected agent content.
    pub fn reject_edits_in_ranges(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_ranges: Vec<Range<impl language::ToPoint>>,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) -> (Task<Result<()>>, Option<PerBufferUndo>) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return (Task::ready(Ok(())), None);
        };

        let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
        let mut undo_info: Option<PerBufferUndo> = None;
        let task = match &tracked_buffer.status {
            TrackedBufferStatus::Created {
                existing_file_content,
            } => {
                let task = if let Some(existing_file_content) = existing_file_content {
                    // Capture the agent's content before restoring existing file content
                    let agent_content = buffer.read(cx).text();
                    let buffer_id = buffer.read(cx).remote_id();

                    // Swap the whole buffer back to the pre-agent content in
                    // a single transaction.
                    buffer.update(cx, |buffer, cx| {
                        buffer.start_transaction();
                        buffer.set_text("", cx);
                        for chunk in existing_file_content.chunks() {
                            buffer.append(chunk, cx);
                        }
                        buffer.end_transaction(cx);
                    });

                    undo_info = Some(PerBufferUndo {
                        buffer: buffer.downgrade(),
                        edits_to_restore: vec![(
                            Anchor::min_for_buffer(buffer_id)..Anchor::max_for_buffer(buffer_id),
                            agent_content,
                        )],
                        status: UndoBufferStatus::Created {
                            had_existing_content: true,
                        },
                    });

                    self.project
                        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
                } else {
                    // For a file created by AI with no pre-existing content,
                    // only delete the file if we're certain it contains only AI content
                    // with no edits from the user.

                    let initial_version = tracked_buffer.version.clone();
                    let current_version = buffer.read(cx).version();

                    let current_content = buffer.read(cx).text();
                    let tracked_content = tracked_buffer.snapshot.text();

                    let is_ai_only_content =
                        initial_version == current_version && current_content == tracked_content;

                    if is_ai_only_content {
                        let task = buffer
                            .read(cx)
                            .entry_id(cx)
                            .and_then(|entry_id| {
                                self.project
                                    .update(cx, |project, cx| project.delete_entry(entry_id, cx))
                            })
                            .unwrap_or_else(|| Task::ready(Ok(())));

                        cx.background_spawn(async move {
                            task.await?;
                            Ok(())
                        })
                    } else {
                        // Not sure how to disentangle edits made by the user
                        // from edits made by the AI at this point.
                        // For now, preserve both to avoid data loss.
                        //
                        // TODO: Better solution (disable "Reject" after user makes some
                        // edit or find a way to differentiate between AI and user edits)
                        Task::ready(Ok(()))
                    }
                };

                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                cx.notify();
                task
            }
            TrackedBufferStatus::Deleted => {
                // Resurrect the deleted buffer from the diff base, save it,
                // and re-track it as freshly read.
                buffer.update(cx, |buffer, cx| {
                    buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
                });
                let save = self
                    .project
                    .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));

                // Clear all tracked edits for this buffer and start over as if we just read it.
                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                self.buffer_read(buffer.clone(), cx);
                cx.notify();
                save
            }
            TrackedBufferStatus::Modified => {
                let edits_to_restore = buffer.update(cx, |buffer, cx| {
                    let mut buffer_row_ranges = buffer_ranges
                        .into_iter()
                        .map(|range| {
                            range.start.to_point(buffer).row..range.end.to_point(buffer).row
                        })
                        .peekable();

                    let mut edits_to_revert = Vec::new();
                    let mut edits_for_undo = Vec::new();
                    for edit in tracked_buffer.unreviewed_edits.edits() {
                        let new_range = tracked_buffer
                            .snapshot
                            .anchor_before(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.anchor_after(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        let new_row_range = new_range.start.to_point(buffer).row
                            ..new_range.end.to_point(buffer).row;

                        // Decide whether this edit intersects any requested
                        // row range (both iterators advance in order).
                        let mut revert = false;
                        while let Some(buffer_row_range) = buffer_row_ranges.peek() {
                            if buffer_row_range.end < new_row_range.start {
                                buffer_row_ranges.next();
                            } else if buffer_row_range.start > new_row_range.end {
                                break;
                            } else {
                                revert = true;
                                break;
                            }
                        }

                        if revert {
                            metrics.add_edit(edit);
                            let old_range = tracked_buffer
                                .diff_base
                                .point_to_offset(Point::new(edit.old.start, 0))
                                ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                    Point::new(edit.old.end, 0),
                                    tracked_buffer.diff_base.max_point(),
                                ));
                            let old_text = tracked_buffer
                                .diff_base
                                .chunks_in_range(old_range)
                                .collect::<String>();

                            // Capture the agent's text before we revert it (for undo)
                            let new_range_offset =
                                new_range.start.to_offset(buffer)..new_range.end.to_offset(buffer);
                            let agent_text =
                                buffer.text_for_range(new_range_offset).collect::<String>();
                            edits_for_undo.push((new_range.clone(), agent_text));

                            edits_to_revert.push((new_range, old_text));
                        }
                    }

                    buffer.edit(edits_to_revert, None, cx);
                    edits_for_undo
                });

                if !edits_to_restore.is_empty() {
                    undo_info = Some(PerBufferUndo {
                        buffer: buffer.downgrade(),
                        edits_to_restore,
                        status: UndoBufferStatus::Modified,
                    });
                }

                self.project
                    .update(cx, |project, cx| project.save_buffer(buffer, cx))
            }
        };
        if let Some(telemetry) = telemetry {
            telemetry_report_rejected_edits(&telemetry, metrics);
        }
        (task, undo_info)
    }
904
    /// Accepts all unreviewed edits in every tracked buffer: deleted buffers
    /// are untracked, and the remaining buffers get their diff base reset to
    /// the current content. Reports each buffer's edits to telemetry.
    pub fn keep_all_edits(
        &mut self,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) {
        self.tracked_buffers.retain(|buffer, tracked_buffer| {
            let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
            metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
            if let Some(telemetry) = telemetry.as_ref() {
                telemetry_report_accepted_edits(telemetry, metrics);
            }
            match tracked_buffer.status {
                TrackedBufferStatus::Deleted => false,
                _ => {
                    // An accepted "created" buffer becomes an ordinary
                    // modified buffer from here on.
                    if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
                        tracked_buffer.status = TrackedBufferStatus::Modified;
                    }
                    tracked_buffer.unreviewed_edits.clear();
                    tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
                    tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
                    true
                }
            }
        });

        cx.notify();
    }
932
    /// Rejects all unreviewed edits in every changed buffer, collecting
    /// per-buffer undo info into `last_reject_undo` so the operation can be
    /// undone via [`ActionLog::undo_last_reject`]. Returns a task that
    /// completes when all per-buffer reject tasks finish (errors are logged).
    pub fn reject_all_edits(
        &mut self,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) -> Task<()> {
        // Clear any previous undo state before starting a new reject operation
        self.last_reject_undo = None;

        let mut undo_buffers = Vec::new();
        let mut futures = Vec::new();

        for buffer in self.changed_buffers(cx).into_keys() {
            // Reject across the entire buffer.
            let buffer_ranges = vec![Anchor::min_max_range_for_buffer(
                buffer.read(cx).remote_id(),
            )];
            let (reject_task, undo_info) =
                self.reject_edits_in_ranges(buffer, buffer_ranges, telemetry.clone(), cx);

            if let Some(undo) = undo_info {
                undo_buffers.push(undo);
            }

            futures.push(async move {
                reject_task.await.log_err();
            });
        }

        // Store the undo information if we have any
        if !undo_buffers.is_empty() {
            self.last_reject_undo = Some(LastRejectUndo {
                buffers: undo_buffers,
            });
        }

        let task = futures::future::join_all(futures);
        cx.background_spawn(async move {
            task.await;
        })
    }
972
    /// Returns true if a reject operation can currently be undone.
    pub fn has_pending_undo(&self) -> bool {
        self.last_reject_undo.is_some()
    }
976
    /// Replaces the stored undo state for the most recent reject operation.
    pub fn set_last_reject_undo(&mut self, undo: LastRejectUndo) {
        self.last_reject_undo = Some(undo);
    }
980
    /// Undoes the most recent reject operation, restoring the rejected agent changes.
    /// This is a best-effort operation: if buffers have been closed or modified externally,
    /// those buffers will be skipped.
    ///
    /// Consumes the stored undo state, re-applies the captured agent edits,
    /// re-tracks any untracked buffer as agent-edited, and returns a task
    /// that resolves once all modified buffers have been saved.
    pub fn undo_last_reject(&mut self, cx: &mut Context<Self>) -> Task<()> {
        let Some(undo) = self.last_reject_undo.take() else {
            return Task::ready(());
        };

        let mut save_tasks = Vec::with_capacity(undo.buffers.len());

        for per_buffer_undo in undo.buffers {
            // Skip if the buffer entity has been deallocated
            let Some(buffer) = per_buffer_undo.buffer.upgrade() else {
                continue;
            };

            buffer.update(cx, |buffer, cx| {
                let mut valid_edits = Vec::new();

                // Only apply edits whose anchors belong to this buffer.
                for (anchor_range, text_to_restore) in per_buffer_undo.edits_to_restore {
                    if anchor_range.start.buffer_id == buffer.remote_id()
                        && anchor_range.end.buffer_id == buffer.remote_id()
                    {
                        valid_edits.push((anchor_range, text_to_restore));
                    }
                }

                if !valid_edits.is_empty() {
                    buffer.edit(valid_edits, None, cx);
                }
            });

            // The reject may have untracked this buffer; re-track it so the
            // restored agent content shows up as unreviewed again.
            if !self.tracked_buffers.contains_key(&buffer) {
                self.buffer_edited(buffer.clone(), cx);
            }

            let save = self
                .project
                .update(cx, |project, cx| project.save_buffer(buffer, cx));
            save_tasks.push(save);
        }

        cx.notify();

        cx.background_spawn(async move {
            futures::future::join_all(save_tasks).await;
        })
    }
1029
1030 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
1031 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
1032 self.tracked_buffers
1033 .iter()
1034 .filter(|(_, tracked)| tracked.has_edits(cx))
1035 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
1036 .collect()
1037 }
1038
    /// Returns the total number of lines added and removed across all unreviewed buffers.
    pub fn diff_stats(&self, cx: &App) -> DiffStats {
        DiffStats::all_files(&self.changed_buffers(cx), cx)
    }
1043
1044 /// Iterate over buffers changed since last read or edited by the model
1045 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
1046 self.tracked_buffers
1047 .iter()
1048 .filter(|(buffer, tracked)| {
1049 let buffer = buffer.read(cx);
1050
1051 tracked.version != buffer.version
1052 && buffer
1053 .file()
1054 .is_some_and(|file| !file.disk_state().is_deleted())
1055 })
1056 .map(|(buffer, _)| buffer)
1057 }
1058}
1059
/// Line-level summary of a diff: how many lines were added and removed
/// relative to the diff's base text.
#[derive(Default, Debug, Clone, Copy)]
pub struct DiffStats {
    /// Number of lines added relative to the diff base.
    pub lines_added: u32,
    /// Number of lines removed from the diff base.
    pub lines_removed: u32,
}
1065
1066impl DiffStats {
1067 pub fn single_file(buffer: &Buffer, diff: &BufferDiff, cx: &App) -> Self {
1068 let mut stats = DiffStats::default();
1069 let diff_snapshot = diff.snapshot(cx);
1070 let buffer_snapshot = buffer.snapshot();
1071 let base_text = diff_snapshot.base_text();
1072
1073 for hunk in diff_snapshot.hunks(&buffer_snapshot) {
1074 let added_rows = hunk.range.end.row.saturating_sub(hunk.range.start.row);
1075 stats.lines_added += added_rows;
1076
1077 let base_start = hunk.diff_base_byte_range.start.to_point(base_text).row;
1078 let base_end = hunk.diff_base_byte_range.end.to_point(base_text).row;
1079 let removed_rows = base_end.saturating_sub(base_start);
1080 stats.lines_removed += removed_rows;
1081 }
1082
1083 stats
1084 }
1085
1086 pub fn all_files(
1087 changed_buffers: &BTreeMap<Entity<Buffer>, Entity<BufferDiff>>,
1088 cx: &App,
1089 ) -> Self {
1090 let mut total = DiffStats::default();
1091 for (buffer, diff) in changed_buffers {
1092 let stats = DiffStats::single_file(buffer.read(cx), diff.read(cx), cx);
1093 total.lines_added += stats.lines_added;
1094 total.lines_removed += stats.lines_removed;
1095 }
1096 total
1097 }
1098}
1099
/// Identifiers attached to telemetry events emitted for accepted/rejected
/// agent edits.
#[derive(Clone)]
pub struct ActionLogTelemetry {
    /// Telemetry identifier of the agent that produced the edits.
    pub agent_telemetry_id: SharedString,
    /// Identifier of the session the edits belong to.
    pub session_id: Arc<str>,
}
1105
/// Accumulated edit statistics for a single buffer, reported in telemetry
/// events when the user accepts or rejects agent edits.
struct ActionLogMetrics {
    // Sum of the old-range lengths of recorded edits (reported as lines removed).
    lines_removed: u32,
    // Sum of the new-range lengths of recorded edits (reported as lines added).
    lines_added: u32,
    // Name of the buffer's language, if one was detected.
    language: Option<SharedString>,
}
1111
1112impl ActionLogMetrics {
1113 fn for_buffer(buffer: &Buffer) -> Self {
1114 Self {
1115 language: buffer.language().map(|l| l.name().0),
1116 lines_removed: 0,
1117 lines_added: 0,
1118 }
1119 }
1120
1121 fn add_edits(&mut self, edits: &[Edit<u32>]) {
1122 for edit in edits {
1123 self.add_edit(edit);
1124 }
1125 }
1126
1127 fn add_edit(&mut self, edit: &Edit<u32>) {
1128 self.lines_added += edit.new_len();
1129 self.lines_removed += edit.old_len();
1130 }
1131}
1132
/// Emits a telemetry event recording that the user accepted agent edits,
/// tagged with the agent/session identifiers and line-count metrics.
fn telemetry_report_accepted_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
    telemetry::event!(
        "Agent Edits Accepted",
        agent = telemetry.agent_telemetry_id,
        session = telemetry.session_id,
        language = metrics.language,
        lines_added = metrics.lines_added,
        lines_removed = metrics.lines_removed
    );
}
1143
/// Emits a telemetry event recording that the user rejected agent edits,
/// tagged with the agent/session identifiers and line-count metrics.
fn telemetry_report_rejected_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
    telemetry::event!(
        "Agent Edits Rejected",
        agent = telemetry.agent_telemetry_id,
        session = telemetry.session_id,
        language = metrics.language,
        lines_added = metrics.lines_added,
        lines_removed = metrics.lines_removed
    );
}
1154
/// Applies the row-based `edits` (which take `old_text` toward `new_text`)
/// onto `old_text`, skipping any edit that overlaps an edit already tracked
/// in `patch`. Returns whether at least one edit was applied.
fn apply_non_conflicting_edits(
    patch: &Patch<u32>,
    edits: Vec<Edit<u32>>,
    old_text: &mut Rope,
    new_text: &Rope,
) -> bool {
    let mut old_edits = patch.edits().iter().cloned().peekable();
    let mut new_edits = edits.into_iter().peekable();
    // Cumulative row delta from new edits already applied to `old_text`.
    let mut applied_delta = 0i32;
    // Cumulative row delta of patch edits we've advanced past; their effect is
    // already reflected in `old_text`'s coordinates.
    let mut rebased_delta = 0i32;
    let mut has_made_changes = false;

    while let Some(mut new_edit) = new_edits.next() {
        let mut conflict = false;

        // Push all the old edits that are before this new edit or that intersect with it.
        while let Some(old_edit) = old_edits.peek() {
            if new_edit.old.end < old_edit.new.start
                || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
            {
                // Old edit lies strictly after the new edit; revisit it for later new edits.
                break;
            } else if new_edit.old.start > old_edit.new.end
                || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
            {
                // Old edit lies strictly before the new edit; fold in its row delta.
                let old_edit = old_edits.next().unwrap();
                rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
            } else {
                // Ranges intersect: this new edit conflicts with a tracked edit and
                // must not be applied.
                conflict = true;
                if new_edits
                    .peek()
                    .is_some_and(|next_edit| next_edit.old.overlaps(&old_edit.new))
                {
                    // The following new edit overlaps the same old edit: discard the
                    // current new edit and keep checking with the next one.
                    new_edit = new_edits.next().unwrap();
                } else {
                    let old_edit = old_edits.next().unwrap();
                    rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
                }
            }
        }

        if !conflict {
            // This edit doesn't intersect with any old edit, so we can apply it to the old text.
            new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
            new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
            // Convert row ranges to byte ranges, clamping end rows to the rope's extent.
            let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
                ..old_text.point_to_offset(cmp::min(
                    Point::new(new_edit.old.end, 0),
                    old_text.max_point(),
                ));
            let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
                ..new_text.point_to_offset(cmp::min(
                    Point::new(new_edit.new.end, 0),
                    new_text.max_point(),
                ));

            old_text.replace(
                old_bytes,
                &new_text.chunks_in_range(new_bytes).collect::<String>(),
            );
            applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
            has_made_changes = true;
        }
    }
    has_made_changes
}
1220
1221fn diff_snapshots(
1222 old_snapshot: &text::BufferSnapshot,
1223 new_snapshot: &text::BufferSnapshot,
1224) -> Vec<Edit<u32>> {
1225 let mut edits = new_snapshot
1226 .edits_since::<Point>(&old_snapshot.version)
1227 .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
1228 .peekable();
1229 let mut row_edits = Vec::new();
1230 while let Some(mut edit) = edits.next() {
1231 while let Some(next_edit) = edits.peek() {
1232 if edit.old.end >= next_edit.old.start {
1233 edit.old.end = next_edit.old.end;
1234 edit.new.end = next_edit.new.end;
1235 edits.next();
1236 } else {
1237 break;
1238 }
1239 }
1240 row_edits.push(edit);
1241 }
1242 row_edits
1243}
1244
/// Converts a `Point`-based edit into a whole-row (`u32` row index) edit.
fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
    if edit.old.start.column == old_text.line_len(edit.old.start.row)
        && new_text
            .chars_at(new_text.point_to_offset(edit.new.start))
            .next()
            == Some('\n')
        && edit.old.start != old_text.max_point()
    {
        // The edit starts at the end of a line and the inserted text begins
        // with a newline: attribute the edit to the following row instead.
        Edit {
            old: edit.old.start.row + 1..edit.old.end.row + 1,
            new: edit.new.start.row + 1..edit.new.end.row + 1,
        }
    } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
        // The edit is already aligned to row boundaries on both sides: use
        // its row numbers as-is.
        Edit {
            old: edit.old.start.row..edit.old.end.row,
            new: edit.new.start.row..edit.new.end.row,
        }
    } else {
        // The edit touches a partial line: widen it to cover the full end rows.
        Edit {
            old: edit.old.start.row..edit.old.end.row + 1,
            new: edit.new.start.row..edit.new.end.row + 1,
        }
    }
}
1269
/// Who authored a buffer change: the human user or the agent.
#[derive(Copy, Clone, Debug)]
enum ChangeAuthor {
    User,
    Agent,
}
1275
/// Lifecycle state of a tracked buffer's file, from the agent's perspective.
#[derive(Debug)]
enum TrackedBufferStatus {
    /// The agent created (or overwrote) the file. When it overwrote an
    /// existing file, the prior content is retained so it can be restored.
    Created { existing_file_content: Option<Rope> },
    /// The agent modified an existing file.
    Modified,
    /// The agent deleted the file.
    Deleted,
}
1282
/// Per-buffer bookkeeping for edits made by the agent that the user has not
/// yet reviewed.
pub struct TrackedBuffer {
    buffer: Entity<Buffer>,
    // Base text that the unreviewed diff is computed against.
    diff_base: Rope,
    // Row-based edits relative to `diff_base` that are still unreviewed.
    unreviewed_edits: Patch<u32>,
    // Whether the agent created, modified, or deleted this buffer's file.
    status: TrackedBufferStatus,
    // Buffer version as of the last time the model read or edited it
    // (compared against the buffer's current version in `stale_buffers`).
    version: clock::Global,
    // Diff shown to the user for reviewing the unreviewed edits.
    diff: Entity<BufferDiff>,
    // Text snapshot of the buffer — NOTE(review): presumably updated by the
    // diff-maintenance task; confirm against `_maintain_diff`'s body.
    snapshot: text::BufferSnapshot,
    // Sends (author, snapshot) pairs to the task that recomputes the diff.
    diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
    // Keeps the buffer registered with the LSP store while it is tracked.
    _open_lsp_handle: OpenLspBufferHandle,
    // Background task that processes `diff_update` messages.
    _maintain_diff: Task<()>,
    // Keeps the buffer-event subscription alive for the tracking lifetime.
    _subscription: Subscription,
}
1296
1297impl TrackedBuffer {
1298 #[cfg(any(test, feature = "test-support"))]
1299 pub fn diff(&self) -> &Entity<BufferDiff> {
1300 &self.diff
1301 }
1302
1303 #[cfg(any(test, feature = "test-support"))]
1304 pub fn diff_base_len(&self) -> usize {
1305 self.diff_base.len()
1306 }
1307
1308 fn has_edits(&self, cx: &App) -> bool {
1309 self.diff
1310 .read(cx)
1311 .snapshot(cx)
1312 .hunks(self.buffer.read(cx))
1313 .next()
1314 .is_some()
1315 }
1316
1317 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
1318 self.diff_update
1319 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
1320 .ok();
1321 }
1322}
1323
/// Holds the diff entity describing a changed buffer's unreviewed edits.
pub struct ChangedBuffer {
    pub diff: Entity<BufferDiff>,
}
1327
1328#[cfg(test)]
1329mod tests {
1330 use super::*;
1331 use buffer_diff::DiffHunkStatusKind;
1332 use gpui::TestAppContext;
1333 use language::Point;
1334 use project::{FakeFs, Fs, Project, RemoveOptions};
1335 use rand::prelude::*;
1336 use serde_json::json;
1337 use settings::SettingsStore;
1338 use std::env;
1339 use util::{RandomCharIter, path};
1340
    /// Runs once at binary startup (via `ctor`) to initialize test logging.
    #[ctor::ctor]
    fn init_logger() {
        zlog::init_test();
    }
1345
1346 fn init_test(cx: &mut TestAppContext) {
1347 cx.update(|cx| {
1348 let settings_store = SettingsStore::test(cx);
1349 cx.set_global(settings_store);
1350 });
1351 }
1352
    /// Agent edits produce unreviewed hunks, and `keep_edits_in_range` clears
    /// only the hunks that intersect the given range.
    #[gpui::test(iterations = 10)]
    async fn test_keep_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent reads the buffer, then makes two edits on different lines.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndEf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(2, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(4, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Keeping a range covering only the second hunk leaves the first unreviewed.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(2, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\n".into(),
                }],
            )]
        );

        // Keeping the rest of the range clears all remaining hunks.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1430
    /// Line deletions appear as `Deleted` hunks; undoing one deletion removes
    /// its hunk, and keeping the remaining hunk clears the log.
    #[gpui::test(iterations = 10)]
    async fn test_deletions(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent deletes two separate lines, each in its own transaction.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
                    .unwrap();
                buffer.finalize_last_transaction();
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
                    .unwrap();
                buffer.finalize_last_transaction();
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nghi\njkl\npqr"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "mno\n".into(),
                    }
                ],
            )]
        );

        // Undo reverts the most recent deletion, leaving only the first hunk.
        buffer.update(cx, |buffer, cx| buffer.undo(cx));
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nghi\njkl\nmno\npqr"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(1, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "def\n".into(),
                }],
            )]
        );

        // Keeping the remaining deletion hunk clears the log.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1515
    /// User edits that surround or fall inside an agent hunk leave the hunk's
    /// tracking unchanged; keeping a range intersecting the hunk clears it.
    #[gpui::test(iterations = 10)]
    async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent makes a multi-line edit spanning rows 1-2.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndeF\nGHI\njkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // User edits on either side of the hunk: the hunk is unaffected.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(
                [
                    (Point::new(0, 2)..Point::new(0, 2), "X"),
                    (Point::new(3, 0)..Point::new(3, 0), "Y"),
                ],
                None,
                cx,
            )
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // User edit inside the hunk: still tracked with the same old text.
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndZeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // Keeping a range that intersects the hunk clears it.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1612
    /// A file created by the agent is tracked as a single `Added` hunk that
    /// keeps covering the whole buffer as it grows, until the edits are kept.
    #[gpui::test(iterations = 10)]
    async fn test_creating_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        // Agent creates a brand-new file and fills in its content.
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // A further edit simply extends the same `Added` hunk.
        buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 6),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Keeping the hunk's range clears the log.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), 0..5, None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1671
    /// Rejecting an agent overwrite of an existing file restores the file's
    /// original content.
    #[gpui::test(iterations = 10)]
    async fn test_overwriting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        // Agent overwrites the existing file (`buffer_created` + `set_text`).
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 19),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting any range intersecting the hunk restores the old content.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx);
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }
1730
    /// If the agent edits a file and then overwrites it, rejecting the
    /// overwrite restores the file's original on-disk content — not the
    /// intermediate agent-edited state.
    #[gpui::test(iterations = 10)]
    async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        // Agent first appends to the file as a normal modification.
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 37),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "Lorem ipsum dolor".into(),
                }],
            )]
        );

        // Agent then overwrites the file entirely; the hunk becomes `Added`.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 9),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting restores the original content from before the overwrite.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx);
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }
1811
    /// Deletions announced via `will_delete_buffer` appear as `Deleted` hunks;
    /// files recreated externally or by a tool, and files deleted externally,
    /// update the log accordingly.
    #[gpui::test(iterations = 10)]
    async fn test_deleting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file1": "lorem\n", "file2": "ipsum\n"}),
        )
        .await;

        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let file1_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();
        let file2_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
            .unwrap();

        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let buffer1 = project
            .update(cx, |project, cx| {
                project.open_buffer(file1_path.clone(), cx)
            })
            .await
            .unwrap();
        let buffer2 = project
            .update(cx, |project, cx| {
                project.open_buffer(file2_path.clone(), cx)
            })
            .await
            .unwrap();

        // Agent deletes both files; each shows a whole-file `Deleted` hunk.
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| {
                project.delete_file(file1_path.clone(), cx)
            })
            .unwrap()
            .await
            .unwrap();
        project
            .update(cx, |project, cx| {
                project.delete_file(file2_path.clone(), cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![
                (
                    buffer1.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "lorem\n".into(),
                    }]
                ),
                (
                    buffer2.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "ipsum\n".into(),
                    }],
                )
            ]
        );

        // Simulate file1 being recreated externally.
        fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
            .await;

        // Simulate file2 being recreated by a tool.
        let buffer2 = project
            .update(cx, |project, cx| project.open_buffer(file2_path, cx))
            .await
            .unwrap();
        action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
        buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
        action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
            .await
            .unwrap();

        // Only the tool-recreated file remains tracked, now as `Added`.
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer2.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Simulate file2 being deleted externally.
        fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1921
    /// `reject_edits_in_ranges` ignores ranges that miss every hunk and
    /// reverts only the hunks intersecting the given range.
    #[gpui::test(iterations = 10)]
    async fn test_reject_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent makes two edits: a multi-line replacement and a one-char change.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // If the rejected range doesn't overlap with any hunk, we ignore it.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Rejecting a range over the first hunk reverts only that hunk.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(1, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(4, 0)..Point::new(4, 3),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "mno".into(),
                }],
            )]
        );

        // Rejecting a range touching the remaining hunk reverts it as well.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2062
    /// Rejecting several anchor ranges in one call reverts all of the
    /// corresponding hunks, and the buffer text is restored synchronously
    /// (before the returned task completes).
    #[gpui::test(iterations = 10)]
    async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent makes two edits in different parts of the buffer.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Reject both hunks at once via anchor ranges; the text reverts
        // immediately, inside the same update.
        action_log.update(cx, |log, cx| {
            let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
                ..buffer.read(cx).anchor_before(Point::new(1, 0));
            let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
                ..buffer.read(cx).anchor_before(Point::new(5, 3));

            let (task, _) =
                log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], None, cx);
            task.detach();
            assert_eq!(
                buffer.read_with(cx, |buffer, _| buffer.text()),
                "abc\ndef\nghi\njkl\nmno"
            );
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2139
    /// Rejecting a tracked file deletion recreates the file on disk with its
    /// original content.
    #[gpui::test(iterations = 10)]
    async fn test_reject_deleted_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "content"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // Agent deletes the file; the log records a whole-file `Deleted` hunk.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.delete_file(file_path.clone(), cx))
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "content".into(),
                }]
            )]
        );

        // Rejecting the deletion restores the file and its content on disk.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
        assert!(fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2196
2197 #[gpui::test(iterations = 10)]
2198 async fn test_reject_created_file(cx: &mut TestAppContext) {
2199 init_test(cx);
2200
2201 let fs = FakeFs::new(cx.executor());
2202 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2203 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2204 let file_path = project
2205 .read_with(cx, |project, cx| {
2206 project.find_project_path("dir/new_file", cx)
2207 })
2208 .unwrap();
2209 let buffer = project
2210 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2211 .await
2212 .unwrap();
2213 cx.update(|cx| {
2214 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2215 buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
2216 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2217 });
2218 project
2219 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2220 .await
2221 .unwrap();
2222 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2223 cx.run_until_parked();
2224 assert_eq!(
2225 unreviewed_hunks(&action_log, cx),
2226 vec![(
2227 buffer.clone(),
2228 vec![HunkStatus {
2229 range: Point::new(0, 0)..Point::new(0, 7),
2230 diff_status: DiffHunkStatusKind::Added,
2231 old_text: "".into(),
2232 }],
2233 )]
2234 );
2235
2236 action_log
2237 .update(cx, |log, cx| {
2238 let (task, _) = log.reject_edits_in_ranges(
2239 buffer.clone(),
2240 vec![Point::new(0, 0)..Point::new(0, 11)],
2241 None,
2242 cx,
2243 );
2244 task
2245 })
2246 .await
2247 .unwrap();
2248 cx.run_until_parked();
2249 assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
2250 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2251 }
2252
    #[gpui::test]
    async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
        // When a user has edited an agent-created file, rejecting the agent's
        // edits must preserve the file and all of its contents rather than
        // deleting it.
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();

        // User makes additional edits, appended at offset 10 — the end of
        // "ai content". Note: no buffer_edited call, so this edit is treated
        // as a user (not agent) edit.
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| {
                buffer.edit([(10..10, "\nuser added this line")], None, cx);
            });
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // Reject all (the range deliberately overshoots the buffer and gets
        // clipped to its extent).
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(100, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();

        // File should still contain all the content
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        let content = buffer.read_with(cx, |buffer, _| buffer.text());
        assert_eq!(content, "ai content\nuser added this line");
    }
2320
2321 #[gpui::test]
2322 async fn test_reject_after_accepting_hunk_on_created_file(cx: &mut TestAppContext) {
2323 init_test(cx);
2324
2325 let fs = FakeFs::new(cx.executor());
2326 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2327 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2328
2329 let file_path = project
2330 .read_with(cx, |project, cx| {
2331 project.find_project_path("dir/new_file", cx)
2332 })
2333 .unwrap();
2334 let buffer = project
2335 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
2336 .await
2337 .unwrap();
2338
2339 // AI creates file with initial content
2340 cx.update(|cx| {
2341 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2342 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
2343 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2344 });
2345 project
2346 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2347 .await
2348 .unwrap();
2349 cx.run_until_parked();
2350 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2351
2352 // User accepts the single hunk
2353 action_log.update(cx, |log, cx| {
2354 let buffer_range = Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id());
2355 log.keep_edits_in_range(buffer.clone(), buffer_range, None, cx)
2356 });
2357 cx.run_until_parked();
2358 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2359 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2360
2361 // AI modifies the file
2362 cx.update(|cx| {
2363 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
2364 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2365 });
2366 project
2367 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2368 .await
2369 .unwrap();
2370 cx.run_until_parked();
2371 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2372
2373 // User rejects the hunk
2374 action_log
2375 .update(cx, |log, cx| {
2376 let (task, _) = log.reject_edits_in_ranges(
2377 buffer.clone(),
2378 vec![Anchor::min_max_range_for_buffer(
2379 buffer.read(cx).remote_id(),
2380 )],
2381 None,
2382 cx,
2383 );
2384 task
2385 })
2386 .await
2387 .unwrap();
2388 cx.run_until_parked();
2389 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await,);
2390 assert_eq!(
2391 buffer.read_with(cx, |buffer, _| buffer.text()),
2392 "ai content v1"
2393 );
2394 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2395 }
2396
    #[gpui::test]
    async fn test_reject_edits_on_previously_accepted_created_file(cx: &mut TestAppContext) {
        // After "Accept All" on a created file, a later "Reject All" of new
        // agent edits should restore the previously accepted snapshot (v1)
        // rather than deleting the file.
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();

        // User clicks "Accept All"
        action_log.update(cx, |log, cx| log.keep_all_edits(None, cx));
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared

        // AI modifies file again
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User clicks "Reject All"
        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();
        // The file remains, rolled back to the accepted v1 content.
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "ai content v1"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2457
    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        // Randomized fuzz test: interleaves keep/reject operations with agent
        // and user edits, periodically validating that the tracked diff stays
        // internally consistent (see `quiesce` below).
        init_test(cx);

        // Number of random operations per iteration; overridable via the
        // OPERATIONS environment variable.
        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

        for _ in 0..operations {
            match rng.random_range(0..100) {
                // ~25%: accept (keep) edits in a random byte range.
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, None, cx)
                    });
                }
                // ~25%: reject edits in a random byte range.
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            let (task, _) =
                                log.reject_edits_in_ranges(buffer.clone(), vec![range], None, cx);
                            task
                        })
                        .await
                        .unwrap();
                }
                // ~50%: random edit, attributed to the agent half the time
                // (only agent edits notify the action log).
                _ => {
                    let is_agent_edit = rng.random_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            // Occasionally settle and validate mid-run.
            if rng.random_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

        /// Waits for background work to finish, then checks the invariant
        /// that replaying the tracked unreviewed edits onto the diff base
        /// reproduces the buffer's current text exactly.
        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    // Map the edit's row range back onto the old text
                    // (clamped to its end) and splice in the corresponding
                    // rows from the current buffer.
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }
2551
    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        // A git commit should auto-accept any unreviewed hunks whose content
        // the new HEAD reproduces exactly; hunks the commit altered or omitted
        // remain pending for review.
        init_test(cx);

        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Apply a batch of agent edits covering all hunk kinds (modification,
        // deletion, addition) at the start, middle, and end of the file.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        // All five edits are initially unreviewed.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        // Only the exactly-matched hunks were auto-accepted; three remain.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Make another commit that accepts the NEW line but with different content
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into())],
            "0000002",
        );
        cx.run_until_parked();
        // GGG matched and is accepted; the addition (content differs) and the
        // j -> J edit are still pending.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer,
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2713
    #[gpui::test]
    async fn test_undo_last_reject(cx: &mut TestAppContext) {
        // End-to-end check of reject + undo: rejecting restores the original
        // text, then undo_last_reject re-applies the agent's edit and clears
        // the pending-undo state.
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "abc\ndef\nghi"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Track the buffer and make an agent edit
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit(
                        [(Point::new(1, 0)..Point::new(1, 3), "AGENT_EDIT")],
                        None,
                        cx,
                    )
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();

        // Verify the agent edit is there
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nAGENT_EDIT\nghi"
        );
        assert!(!unreviewed_hunks(&action_log, cx).is_empty());

        // Reject all edits
        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();

        // Verify the buffer is back to original
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi"
        );
        assert!(unreviewed_hunks(&action_log, cx).is_empty());

        // Verify undo state is available
        assert!(action_log.read_with(cx, |log, _| log.has_pending_undo()));

        // Undo the reject
        action_log
            .update(cx, |log, cx| log.undo_last_reject(cx))
            .await;

        cx.run_until_parked();

        // Verify the agent edit is restored
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nAGENT_EDIT\nghi"
        );

        // Verify undo state is cleared (undo_last_reject is one-shot)
        assert!(!action_log.read_with(cx, |log, _| log.has_pending_undo()));
    }
2792
2793 #[gpui::test]
2794 async fn test_linked_action_log_buffer_read(cx: &mut TestAppContext) {
2795 init_test(cx);
2796
2797 let fs = FakeFs::new(cx.executor());
2798 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
2799 .await;
2800 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2801 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
2802 let child_log =
2803 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
2804
2805 let file_path = project
2806 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
2807 .unwrap();
2808 let buffer = project
2809 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2810 .await
2811 .unwrap();
2812
2813 cx.update(|cx| {
2814 child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
2815 });
2816
2817 // Neither log considers the buffer stale immediately after reading it.
2818 let child_stale = cx.read(|cx| {
2819 child_log
2820 .read(cx)
2821 .stale_buffers(cx)
2822 .cloned()
2823 .collect::<Vec<_>>()
2824 });
2825 let parent_stale = cx.read(|cx| {
2826 parent_log
2827 .read(cx)
2828 .stale_buffers(cx)
2829 .cloned()
2830 .collect::<Vec<_>>()
2831 });
2832 assert!(child_stale.is_empty());
2833 assert!(parent_stale.is_empty());
2834
2835 // Simulate a user edit after the agent read the file.
2836 cx.update(|cx| {
2837 buffer.update(cx, |buffer, cx| {
2838 buffer.edit([(0..5, "goodbye")], None, cx).unwrap();
2839 });
2840 });
2841 cx.run_until_parked();
2842
2843 // Both child and parent should see the buffer as stale because both tracked
2844 // it at the pre-edit version via buffer_read forwarding.
2845 let child_stale = cx.read(|cx| {
2846 child_log
2847 .read(cx)
2848 .stale_buffers(cx)
2849 .cloned()
2850 .collect::<Vec<_>>()
2851 });
2852 let parent_stale = cx.read(|cx| {
2853 parent_log
2854 .read(cx)
2855 .stale_buffers(cx)
2856 .cloned()
2857 .collect::<Vec<_>>()
2858 });
2859 assert_eq!(child_stale, vec![buffer.clone()]);
2860 assert_eq!(parent_stale, vec![buffer]);
2861 }
2862
    #[gpui::test]
    async fn test_linked_action_log_buffer_edited(cx: &mut TestAppContext) {
        // An agent edit made through a child (subagent) log should surface
        // identical unreviewed hunks on both the child and its linked parent.
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent edit through the child log: read, edit, then notify.
        cx.update(|cx| {
            child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 0)..Point::new(1, 3), "DEF")], None, cx)
                    .unwrap();
            });
            child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();

        let expected_hunks = vec![(
            buffer,
            vec![HunkStatus {
                range: Point::new(1, 0)..Point::new(2, 0),
                diff_status: DiffHunkStatusKind::Modified,
                old_text: "def\n".into(),
            }],
        )];
        assert_eq!(
            unreviewed_hunks(&child_log, cx),
            expected_hunks,
            "child should track the agent edit"
        );
        assert_eq!(
            unreviewed_hunks(&parent_log, cx),
            expected_hunks,
            "parent should also track the agent edit via linked log forwarding"
        );
    }
2913
    #[gpui::test]
    async fn test_linked_action_log_buffer_created(cx: &mut TestAppContext) {
        // A file created through a child (subagent) log should surface the
        // same "added" hunk on both the child and its linked parent.
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent creates the file and writes its content via the child log.
        cx.update(|cx| {
            child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("hello", cx));
            child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();

        let expected_hunks = vec![(
            buffer.clone(),
            vec![HunkStatus {
                range: Point::new(0, 0)..Point::new(0, 5),
                diff_status: DiffHunkStatusKind::Added,
                old_text: "".into(),
            }],
        )];
        assert_eq!(
            unreviewed_hunks(&child_log, cx),
            expected_hunks,
            "child should track the created file"
        );
        assert_eq!(
            unreviewed_hunks(&parent_log, cx),
            expected_hunks,
            "parent should also track the created file via linked log forwarding"
        );
    }
2965
    #[gpui::test]
    async fn test_linked_action_log_will_delete_buffer(cx: &mut TestAppContext) {
        // A deletion reported through a child (subagent) log should surface
        // the same "deleted" hunk on both the child and its linked parent.
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "hello\n"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // Notify the child log before performing the deletion.
        cx.update(|cx| {
            child_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.delete_file(file_path, cx))
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();

        let expected_hunks = vec![(
            buffer.clone(),
            vec![HunkStatus {
                range: Point::new(0, 0)..Point::new(0, 0),
                diff_status: DiffHunkStatusKind::Deleted,
                old_text: "hello\n".into(),
            }],
        )];
        assert_eq!(
            unreviewed_hunks(&child_log, cx),
            expected_hunks,
            "child should track the deleted file"
        );
        assert_eq!(
            unreviewed_hunks(&parent_log, cx),
            expected_hunks,
            "parent should also track the deleted file via linked log forwarding"
        );
    }
3015
3016 /// Simulates the subagent scenario: two child logs linked to the same parent, each
3017 /// editing a different file. The parent accumulates all edits while each child
3018 /// only sees its own.
3019 #[gpui::test]
3020 async fn test_linked_action_log_independent_tracking(cx: &mut TestAppContext) {
3021 init_test(cx);
3022
3023 let fs = FakeFs::new(cx.executor());
3024 fs.insert_tree(
3025 path!("/dir"),
3026 json!({
3027 "file_a": "content of a",
3028 "file_b": "content of b",
3029 }),
3030 )
3031 .await;
3032 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3033 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
3034 let child_log_1 =
3035 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
3036 let child_log_2 =
3037 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
3038
3039 let file_a_path = project
3040 .read_with(cx, |project, cx| {
3041 project.find_project_path("dir/file_a", cx)
3042 })
3043 .unwrap();
3044 let file_b_path = project
3045 .read_with(cx, |project, cx| {
3046 project.find_project_path("dir/file_b", cx)
3047 })
3048 .unwrap();
3049 let buffer_a = project
3050 .update(cx, |project, cx| project.open_buffer(file_a_path, cx))
3051 .await
3052 .unwrap();
3053 let buffer_b = project
3054 .update(cx, |project, cx| project.open_buffer(file_b_path, cx))
3055 .await
3056 .unwrap();
3057
3058 cx.update(|cx| {
3059 child_log_1.update(cx, |log, cx| log.buffer_read(buffer_a.clone(), cx));
3060 buffer_a.update(cx, |buffer, cx| {
3061 buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap();
3062 });
3063 child_log_1.update(cx, |log, cx| log.buffer_edited(buffer_a.clone(), cx));
3064
3065 child_log_2.update(cx, |log, cx| log.buffer_read(buffer_b.clone(), cx));
3066 buffer_b.update(cx, |buffer, cx| {
3067 buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap();
3068 });
3069 child_log_2.update(cx, |log, cx| log.buffer_edited(buffer_b.clone(), cx));
3070 });
3071 cx.run_until_parked();
3072
3073 let child_1_changed: Vec<_> = cx.read(|cx| {
3074 child_log_1
3075 .read(cx)
3076 .changed_buffers(cx)
3077 .into_keys()
3078 .collect()
3079 });
3080 let child_2_changed: Vec<_> = cx.read(|cx| {
3081 child_log_2
3082 .read(cx)
3083 .changed_buffers(cx)
3084 .into_keys()
3085 .collect()
3086 });
3087 let parent_changed: Vec<_> = cx.read(|cx| {
3088 parent_log
3089 .read(cx)
3090 .changed_buffers(cx)
3091 .into_keys()
3092 .collect()
3093 });
3094
3095 assert_eq!(
3096 child_1_changed,
3097 vec![buffer_a.clone()],
3098 "child 1 should only track file_a"
3099 );
3100 assert_eq!(
3101 child_2_changed,
3102 vec![buffer_b.clone()],
3103 "child 2 should only track file_b"
3104 );
3105 assert_eq!(parent_changed.len(), 2, "parent should track both files");
3106 assert!(
3107 parent_changed.contains(&buffer_a) && parent_changed.contains(&buffer_b),
3108 "parent should contain both buffer_a and buffer_b"
3109 );
3110 }
3111
3112 #[gpui::test]
3113 async fn test_file_read_time_recorded_on_buffer_read(cx: &mut TestAppContext) {
3114 init_test(cx);
3115
3116 let fs = FakeFs::new(cx.executor());
3117 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3118 .await;
3119 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3120 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3121
3122 let file_path = project
3123 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3124 .unwrap();
3125 let buffer = project
3126 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3127 .await
3128 .unwrap();
3129
3130 let abs_path = PathBuf::from(path!("/dir/file"));
3131 assert!(
3132 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3133 "file_read_time should be None before buffer_read"
3134 );
3135
3136 cx.update(|cx| {
3137 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
3138 });
3139
3140 assert!(
3141 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3142 "file_read_time should be recorded after buffer_read"
3143 );
3144 }
3145
3146 #[gpui::test]
3147 async fn test_file_read_time_recorded_on_buffer_edited(cx: &mut TestAppContext) {
3148 init_test(cx);
3149
3150 let fs = FakeFs::new(cx.executor());
3151 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3152 .await;
3153 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3154 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3155
3156 let file_path = project
3157 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3158 .unwrap();
3159 let buffer = project
3160 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3161 .await
3162 .unwrap();
3163
3164 let abs_path = PathBuf::from(path!("/dir/file"));
3165 assert!(
3166 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3167 "file_read_time should be None before buffer_edited"
3168 );
3169
3170 cx.update(|cx| {
3171 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
3172 });
3173
3174 assert!(
3175 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3176 "file_read_time should be recorded after buffer_edited"
3177 );
3178 }
3179
3180 #[gpui::test]
3181 async fn test_file_read_time_recorded_on_buffer_created(cx: &mut TestAppContext) {
3182 init_test(cx);
3183
3184 let fs = FakeFs::new(cx.executor());
3185 fs.insert_tree(path!("/dir"), json!({"file": "existing content"}))
3186 .await;
3187 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3188 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3189
3190 let file_path = project
3191 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3192 .unwrap();
3193 let buffer = project
3194 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3195 .await
3196 .unwrap();
3197
3198 let abs_path = PathBuf::from(path!("/dir/file"));
3199 assert!(
3200 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3201 "file_read_time should be None before buffer_created"
3202 );
3203
3204 cx.update(|cx| {
3205 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
3206 });
3207
3208 assert!(
3209 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3210 "file_read_time should be recorded after buffer_created"
3211 );
3212 }
3213
3214 #[gpui::test]
3215 async fn test_file_read_time_removed_on_delete(cx: &mut TestAppContext) {
3216 init_test(cx);
3217
3218 let fs = FakeFs::new(cx.executor());
3219 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3220 .await;
3221 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3222 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3223
3224 let file_path = project
3225 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3226 .unwrap();
3227 let buffer = project
3228 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3229 .await
3230 .unwrap();
3231
3232 let abs_path = PathBuf::from(path!("/dir/file"));
3233
3234 cx.update(|cx| {
3235 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
3236 });
3237 assert!(
3238 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3239 "file_read_time should exist after buffer_read"
3240 );
3241
3242 cx.update(|cx| {
3243 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
3244 });
3245 assert!(
3246 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3247 "file_read_time should be removed after will_delete_buffer"
3248 );
3249 }
3250
3251 #[gpui::test]
3252 async fn test_file_read_time_not_forwarded_to_linked_action_log(cx: &mut TestAppContext) {
3253 init_test(cx);
3254
3255 let fs = FakeFs::new(cx.executor());
3256 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3257 .await;
3258 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3259 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
3260 let child_log =
3261 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
3262
3263 let file_path = project
3264 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3265 .unwrap();
3266 let buffer = project
3267 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3268 .await
3269 .unwrap();
3270
3271 let abs_path = PathBuf::from(path!("/dir/file"));
3272
3273 cx.update(|cx| {
3274 child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
3275 });
3276 assert!(
3277 child_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3278 "child should record file_read_time on buffer_read"
3279 );
3280 assert!(
3281 parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3282 "parent should NOT get file_read_time from child's buffer_read"
3283 );
3284
3285 cx.update(|cx| {
3286 child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
3287 });
3288 assert!(
3289 parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3290 "parent should NOT get file_read_time from child's buffer_edited"
3291 );
3292
3293 cx.update(|cx| {
3294 child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
3295 });
3296 assert!(
3297 parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3298 "parent should NOT get file_read_time from child's buffer_created"
3299 );
3300 }
3301
    /// Plain snapshot of a single diff hunk, used by tests to compare the
    /// hunks produced by `ActionLog::changed_buffers` against expected values.
    #[derive(Debug, PartialEq)]
    struct HunkStatus {
        /// Buffer range (in points) that the hunk covers in the current text.
        range: Range<Point>,
        /// Kind of change the hunk represents (added / deleted / modified).
        diff_status: DiffHunkStatusKind,
        /// Text from the diff base that this hunk replaced.
        old_text: String,
    }
3308
3309 fn unreviewed_hunks(
3310 action_log: &Entity<ActionLog>,
3311 cx: &TestAppContext,
3312 ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
3313 cx.read(|cx| {
3314 action_log
3315 .read(cx)
3316 .changed_buffers(cx)
3317 .into_iter()
3318 .map(|(buffer, diff)| {
3319 let snapshot = buffer.read(cx).snapshot();
3320 (
3321 buffer,
3322 diff.read(cx)
3323 .snapshot(cx)
3324 .hunks(&snapshot)
3325 .map(|hunk| HunkStatus {
3326 diff_status: hunk.status().kind,
3327 range: hunk.range,
3328 old_text: diff
3329 .read(cx)
3330 .base_text(cx)
3331 .text_for_range(hunk.diff_base_byte_range)
3332 .collect(),
3333 })
3334 .collect(),
3335 )
3336 })
3337 .collect()
3338 })
3339 }
3340}