1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use clock;
4use collections::{BTreeMap, HashMap};
5use fs::MTime;
6use futures::{FutureExt, StreamExt, channel::mpsc};
7use gpui::{
8 App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
9};
10use language::{Anchor, Buffer, BufferEvent, Point, ToOffset, ToPoint};
11use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
12use std::{
13 cmp,
14 ops::Range,
15 path::{Path, PathBuf},
16 sync::Arc,
17};
18use text::{Edit, Patch, Rope};
19use util::{RangeExt, ResultExt as _};
20
/// Stores undo information for a single buffer's rejected edits
#[derive(Clone)]
pub struct PerBufferUndo {
    /// Weak handle to the buffer; undo is skipped if it has been dropped.
    pub buffer: WeakEntity<Buffer>,
    /// Anchor ranges paired with the agent-authored text to re-insert at each range.
    pub edits_to_restore: Vec<(Range<Anchor>, String)>,
    /// How the buffer was affected (modified vs. created) at reject time.
    pub status: UndoBufferStatus,
}
28
/// Tracks the buffer status for undo purposes
#[derive(Clone, Debug)]
pub enum UndoBufferStatus {
    /// Buffer existed before the agent touched it; rejected hunks can be
    /// re-applied from `edits_to_restore`.
    Modified,
    /// Buffer was created by the agent.
    /// - `had_existing_content: true` - Agent overwrote an existing file. On reject, the
    ///   original content was restored. Undo is supported: we restore the agent's content.
    /// - `had_existing_content: false` - Agent created a new file that didn't exist before.
    ///   On reject, the file was deleted. Undo is NOT currently supported (would require
    ///   recreating the file). Future TODO.
    Created {
        had_existing_content: bool,
    },
}
43
/// Stores undo information for the most recent reject operation
#[derive(Clone)]
pub struct LastRejectUndo {
    /// Per-buffer undo information, one entry per buffer whose rejection
    /// produced something restorable.
    pub buffers: Vec<PerBufferUndo>,
}
50
/// Tracks actions performed by tools in a thread
pub struct ActionLog {
    /// Buffers that we want to notify the model about when they change.
    /// Each entry holds the diff base, unreviewed edits, and maintenance tasks
    /// for one buffer.
    tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
    /// The project this action log is associated with
    project: Entity<Project>,
    /// An action log to forward all public methods to
    /// Useful in cases like subagents, where we want to track individual diffs for this subagent,
    /// but also want to associate the reads/writes with a parent review experience
    linked_action_log: Option<Entity<ActionLog>>,
    /// Stores undo information for the most recent reject operation
    last_reject_undo: Option<LastRejectUndo>,
    /// Tracks the last time files were read by the agent, to detect external modifications.
    /// Keyed by absolute path; only local files with an on-disk mtime are recorded.
    file_read_times: HashMap<PathBuf, MTime>,
}
66
67impl ActionLog {
    /// Creates a new, empty action log associated with the given project.
    /// No buffers are tracked and no undo state exists initially.
    pub fn new(project: Entity<Project>) -> Self {
        Self {
            tracked_buffers: BTreeMap::default(),
            project,
            linked_action_log: None,
            last_reject_undo: None,
            file_read_times: HashMap::default(),
        }
    }
78
    /// Builder-style setter: links this log to a parent log so that reads,
    /// writes, and deletions are mirrored into it (see `buffer_*_impl`).
    pub fn with_linked_action_log(mut self, linked_action_log: Entity<ActionLog>) -> Self {
        self.linked_action_log = Some(linked_action_log);
        self
    }
83
    /// The project this action log is associated with.
    pub fn project(&self) -> &Entity<Project> {
        &self.project
    }
87
    /// Returns the on-disk mtime recorded the last time the agent read the
    /// file at `path`, if any. Used to detect external modifications.
    pub fn file_read_time(&self, path: &Path) -> Option<MTime> {
        self.file_read_times.get(path).copied()
    }
91
92 fn update_file_read_time(&mut self, buffer: &Entity<Buffer>, cx: &App) {
93 let buffer = buffer.read(cx);
94 if let Some(file) = buffer.file() {
95 if let Some(local_file) = file.as_local() {
96 if let Some(mtime) = file.disk_state().mtime() {
97 let abs_path = local_file.abs_path(cx);
98 self.file_read_times.insert(abs_path, mtime);
99 }
100 }
101 }
102 }
103
104 fn remove_file_read_time(&mut self, buffer: &Entity<Buffer>, cx: &App) {
105 let buffer = buffer.read(cx);
106 if let Some(file) = buffer.file() {
107 if let Some(local_file) = file.as_local() {
108 let abs_path = local_file.abs_path(cx);
109 self.file_read_times.remove(&abs_path);
110 }
111 }
112 }
113
    /// Ensures `buffer` has a `TrackedBuffer` entry and returns it.
    ///
    /// When `is_created`, the status is recomputed so that the content which
    /// existed *before* the agent's write is preserved as
    /// `existing_file_content` (taken from a prior tracked state, or captured
    /// from disk if the file already exists). Otherwise the candidate status
    /// is `Modified`. Note: the computed status is only applied when a new
    /// entry is inserted; an already-tracked buffer (not removed above)
    /// keeps its existing status.
    ///
    /// New entries register the buffer with language servers, create a
    /// `BufferDiff`, and spawn the long-lived diff-maintenance task.
    fn track_buffer_internal(
        &mut self,
        buffer: Entity<Buffer>,
        is_created: bool,
        cx: &mut Context<Self>,
    ) -> &mut TrackedBuffer {
        let status = if is_created {
            // Remove any prior entry so we can fold its state into the new
            // `Created` status (and so `or_insert_with` below rebuilds it).
            if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
                match tracked.status {
                    TrackedBufferStatus::Created {
                        existing_file_content,
                    } => TrackedBufferStatus::Created {
                        existing_file_content,
                    },
                    TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
                        // The old diff base is the pre-agent content.
                        TrackedBufferStatus::Created {
                            existing_file_content: Some(tracked.diff_base),
                        }
                    }
                }
            } else if buffer
                .read(cx)
                .file()
                .is_some_and(|file| file.disk_state().exists())
            {
                // Agent is overwriting a file that already exists on disk:
                // remember its current content so "reject" can restore it.
                TrackedBufferStatus::Created {
                    existing_file_content: Some(buffer.read(cx).as_rope().clone()),
                }
            } else {
                TrackedBufferStatus::Created {
                    existing_file_content: None,
                }
            }
        } else {
            TrackedBufferStatus::Modified
        };

        let tracked_buffer = self
            .tracked_buffers
            .entry(buffer.clone())
            .or_insert_with(|| {
                // Keep language servers aware of the buffer while tracked.
                let open_lsp_handle = self.project.update(cx, |project, cx| {
                    project.register_buffer_with_language_servers(&buffer, cx)
                });

                let text_snapshot = buffer.read(cx).text_snapshot();
                let language = buffer.read(cx).language().cloned();
                let language_registry = buffer.read(cx).language_registry();
                let diff = cx.new(|cx| {
                    let mut diff = BufferDiff::new(&text_snapshot, cx);
                    diff.language_changed(language, language_registry, cx);
                    diff
                });
                let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
                let diff_base;
                let unreviewed_edits;
                if is_created {
                    // A created buffer diffs against empty content: every
                    // current row counts as one unreviewed insertion.
                    diff_base = Rope::default();
                    unreviewed_edits = Patch::new(vec![Edit {
                        old: 0..1,
                        new: 0..text_snapshot.max_point().row + 1,
                    }])
                } else {
                    // A read/modified buffer starts with its current content
                    // as the base and no unreviewed edits.
                    diff_base = buffer.read(cx).as_rope().clone();
                    unreviewed_edits = Patch::default();
                }
                TrackedBuffer {
                    buffer: buffer.clone(),
                    diff_base,
                    unreviewed_edits,
                    snapshot: text_snapshot,
                    status,
                    version: buffer.read(cx).version(),
                    diff,
                    diff_update: diff_update_tx,
                    _open_lsp_handle: open_lsp_handle,
                    // Runs until the diff_update sender is dropped.
                    _maintain_diff: cx.spawn({
                        let buffer = buffer.clone();
                        async move |this, cx| {
                            Self::maintain_diff(this, buffer, diff_update_rx, cx)
                                .await
                                .ok();
                        }
                    }),
                    _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
                }
            });
        tracked_buffer.version = buffer.read(cx).version();
        tracked_buffer
    }
204
    /// Routes buffer events into tracking updates: edits schedule a diff
    /// update, file-handle changes re-evaluate the tracked status.
    fn handle_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::Edited { .. } => {
                let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
                    return;
                };
                // Ignore edit notifications that don't advance the buffer
                // past the version we already track (e.g. our own updates).
                let buffer_version = buffer.read(cx).version();
                if !buffer_version.changed_since(&tracked_buffer.version) {
                    return;
                }
                self.handle_buffer_edited(buffer, cx);
            }
            BufferEvent::FileHandleChanged => {
                self.handle_buffer_file_changed(buffer, cx);
            }
            _ => {}
        };
    }
228
229 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
230 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
231 return;
232 };
233 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
234 }
235
    /// Reconciles tracking state when the buffer's file handle changes on
    /// disk (external deletion or resurrection).
    fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        match tracked_buffer.status {
            TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
                if buffer
                    .read(cx)
                    .file()
                    .is_some_and(|file| file.disk_state().is_deleted())
                {
                    // If the buffer had been edited by a tool, but it got
                    // deleted externally, we want to stop tracking it.
                    self.tracked_buffers.remove(&buffer);
                }
                cx.notify();
            }
            TrackedBufferStatus::Deleted => {
                if buffer
                    .read(cx)
                    .file()
                    .is_some_and(|file| !file.disk_state().is_deleted())
                {
                    // If the buffer had been deleted by a tool, but it got
                    // resurrected externally, we want to clear the edits we
                    // were tracking and reset the buffer's state.
                    self.tracked_buffers.remove(&buffer);
                    self.track_buffer_internal(buffer, false, cx);
                }
                cx.notify();
            }
        }
    }
270
    /// Long-running task keeping one tracked buffer's diff up to date.
    ///
    /// Reacts to two streams: buffer snapshots pushed through
    /// `buffer_updates` (agent/user edits, handled by `track_edits`), and git
    /// HEAD-commit changes observed via the buffer's uncommitted diff
    /// (handled by `keep_committed_edits`, so committed edits are treated as
    /// reviewed). Completes when the update channel closes.
    async fn maintain_diff(
        this: WeakEntity<Self>,
        buffer: Entity<Buffer>,
        mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
        let git_diff = this
            .update(cx, |this, cx| {
                this.project.update(cx, |project, cx| {
                    project.open_uncommitted_diff(buffer.clone(), cx)
                })
            })?
            .await
            // Missing uncommitted diff (e.g. no repository) just disables
            // the commit-tracking half of the loop below.
            .ok();
        let buffer_repo = git_store.read_with(cx, |git_store, cx| {
            git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        });

        // Signal the select loop only when the repository's HEAD commit
        // actually changes, not on every uncommitted-diff event.
        let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
        let _repo_subscription =
            if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
                cx.update(|cx| {
                    let mut old_head = buffer_repo.read(cx).head_commit.clone();
                    Some(cx.subscribe(git_diff, move |_, event, cx| {
                        if let buffer_diff::BufferDiffEvent::DiffChanged { .. } = event {
                            let new_head = buffer_repo.read(cx).head_commit.clone();
                            if new_head != old_head {
                                old_head = new_head;
                                git_diff_updates_tx.send(()).ok();
                            }
                        }
                    }))
                })
            } else {
                None
            };

        loop {
            futures::select_biased! {
                buffer_update = buffer_updates.next() => {
                    if let Some((author, buffer_snapshot)) = buffer_update {
                        Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
                    } else {
                        // Sender dropped: the buffer is no longer tracked.
                        break;
                    }
                }
                _ = git_diff_updates_rx.changed().fuse() => {
                    if let Some(git_diff) = git_diff.as_ref() {
                        Self::keep_committed_edits(&this, &buffer, git_diff, cx).await?;
                    }
                }
            }
        }

        Ok(())
    }
328
    /// Incorporates a new buffer snapshot into the tracked diff.
    ///
    /// For user-authored changes, non-conflicting edits are folded into the
    /// diff base (via `apply_non_conflicting_edits`) so they don't appear as
    /// unreviewed agent edits; agent-authored changes leave the base
    /// untouched. The heavy diffing runs on the background executor, then
    /// `update_diff` recomputes the unreviewed-edit patch.
    async fn track_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        author: ChangeAuthor,
        buffer_snapshot: text::BufferSnapshot,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let rebase = this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;

            // Clone everything the background task needs so no entity state
            // is borrowed across the await below.
            let rebase = cx.background_spawn({
                let mut base_text = tracked_buffer.diff_base.clone();
                let old_snapshot = tracked_buffer.snapshot.clone();
                let new_snapshot = buffer_snapshot.clone();
                let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let edits = diff_snapshots(&old_snapshot, &new_snapshot);
                async move {
                    if let ChangeAuthor::User = author {
                        apply_non_conflicting_edits(
                            &unreviewed_edits,
                            edits,
                            &mut base_text,
                            new_snapshot.as_rope(),
                        );
                    }

                    // Return both string (for BufferDiff) and rope forms.
                    (Arc::from(base_text.to_string().as_str()), base_text)
                }
            });

            anyhow::Ok(rebase)
        })??;
        let (new_base_text, new_diff_base) = rebase.await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
376
    /// After a git commit, marks unreviewed edits that exactly match the
    /// committed content as kept.
    ///
    /// Line-diffs the agent's diff base against the git diff base; for each
    /// committed edit that aligns with an unreviewed edit (same old row range
    /// and identical new text), the committed content is folded into the
    /// agent's diff base so the edit no longer shows as unreviewed. Then the
    /// diff is recomputed via `update_diff`.
    async fn keep_committed_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        git_diff: &Entity<BufferDiff>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let buffer_snapshot = this.read_with(cx, |this, _cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok(tracked_buffer.snapshot.clone())
        })??;
        let (new_base_text, new_diff_base) = this
            .read_with(cx, |this, cx| {
                let tracked_buffer = this
                    .tracked_buffers
                    .get(buffer)
                    .context("buffer not tracked")?;
                let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let agent_diff_base = tracked_buffer.diff_base.clone();
                let git_diff_base = git_diff.read(cx).base_text(cx).as_rope().clone();
                let buffer_text = tracked_buffer.snapshot.as_rope().clone();
                anyhow::Ok(cx.background_spawn(async move {
                    let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
                    // Row-range edits describing what the commit changed
                    // relative to the agent's diff base.
                    let committed_edits = language::line_diff(
                        &agent_diff_base.to_string(),
                        &git_diff_base.to_string(),
                    )
                    .into_iter()
                    .map(|(old, new)| Edit { old, new });

                    let mut new_agent_diff_base = agent_diff_base.clone();
                    // Tracks how earlier replacements shifted row positions
                    // in `new_agent_diff_base`.
                    let mut row_delta = 0i32;
                    for committed in committed_edits {
                        while let Some(unreviewed) = old_unreviewed_edits.peek() {
                            // If the committed edit matches the unreviewed
                            // edit, assume the user wants to keep it.
                            if committed.old == unreviewed.old {
                                let unreviewed_new =
                                    buffer_text.slice_rows(unreviewed.new.clone()).to_string();
                                let committed_new =
                                    git_diff_base.slice_rows(committed.new.clone()).to_string();
                                if unreviewed_new == committed_new {
                                    let old_byte_start =
                                        new_agent_diff_base.point_to_offset(Point::new(
                                            (unreviewed.old.start as i32 + row_delta) as u32,
                                            0,
                                        ));
                                    // Clamp to the base's end so a trailing
                                    // edit can't index past the rope.
                                    let old_byte_end =
                                        new_agent_diff_base.point_to_offset(cmp::min(
                                            Point::new(
                                                (unreviewed.old.end as i32 + row_delta) as u32,
                                                0,
                                            ),
                                            new_agent_diff_base.max_point(),
                                        ));
                                    new_agent_diff_base
                                        .replace(old_byte_start..old_byte_end, &unreviewed_new);
                                    row_delta +=
                                        unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
                                }
                            } else if unreviewed.old.start >= committed.old.end {
                                // Unreviewed edit lies beyond this committed
                                // edit; advance to the next committed edit.
                                break;
                            }

                            old_unreviewed_edits.next().unwrap();
                        }
                    }

                    (
                        Arc::from(new_agent_diff_base.to_string().as_str()),
                        new_agent_diff_base,
                    )
                }))
            })??
            .await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
465
    /// Applies a new diff base to a tracked buffer.
    ///
    /// Updates the `BufferDiff` entity against `new_base_text`, recomputes
    /// the row-based unreviewed-edit patch from the resulting hunks on the
    /// background executor, then stores the new base, snapshot, and patch
    /// back onto the tracked buffer and notifies observers.
    async fn update_diff(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        buffer_snapshot: text::BufferSnapshot,
        new_base_text: Arc<str>,
        new_diff_base: Rope,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let (diff, language) = this.read_with(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok((
                tracked_buffer.diff.clone(),
                buffer.read(cx).language().cloned(),
            ))
        })??;
        let update = diff
            .update(cx, |diff, cx| {
                diff.update_diff(
                    buffer_snapshot.clone(),
                    Some(new_base_text),
                    Some(true),
                    language,
                    cx,
                )
            })
            .await;
        diff.update(cx, |diff, cx| {
            diff.set_snapshot(update.clone(), &buffer_snapshot, cx)
        })
        .await;
        let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx));

        // Rebuild the unreviewed-edit patch by converting every hunk's byte
        // ranges (in the new diff base) into row ranges.
        let unreviewed_edits = cx
            .background_spawn({
                let buffer_snapshot = buffer_snapshot.clone();
                let new_diff_base = new_diff_base.clone();
                async move {
                    let mut unreviewed_edits = Patch::default();
                    for hunk in diff_snapshot.hunks_intersecting_range(
                        Anchor::min_for_buffer(buffer_snapshot.remote_id())
                            ..Anchor::max_for_buffer(buffer_snapshot.remote_id()),
                        &buffer_snapshot,
                    ) {
                        let old_range = new_diff_base
                            .offset_to_point(hunk.diff_base_byte_range.start)
                            ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
                        let new_range = hunk.range.start..hunk.range.end;
                        unreviewed_edits.push(point_to_row_edit(
                            Edit {
                                old: old_range,
                                new: new_range,
                            },
                            &new_diff_base,
                            buffer_snapshot.as_rope(),
                        ));
                    }
                    unreviewed_edits
                }
            })
            .await;
        this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;
            tracked_buffer.diff_base = new_diff_base;
            tracked_buffer.snapshot = buffer_snapshot;
            tracked_buffer.unreviewed_edits = unreviewed_edits;
            cx.notify();
            anyhow::Ok(())
        })?
    }
541
    /// Track a buffer as read by agent, so we can notify the model about user edits.
    /// Also records the file's on-disk mtime for external-modification detection.
    pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.buffer_read_impl(buffer, true, cx);
    }
546
    /// Shared implementation of `buffer_read`. `record_file_read_time` is
    /// false when forwarding to a linked log, whose agent did not itself
    /// read the file.
    fn buffer_read_impl(
        &mut self,
        buffer: Entity<Buffer>,
        record_file_read_time: bool,
        cx: &mut Context<Self>,
    ) {
        if let Some(linked_action_log) = &self.linked_action_log {
            // We don't want to share read times since the other agent hasn't read it necessarily
            linked_action_log.update(cx, |log, cx| {
                log.buffer_read_impl(buffer.clone(), false, cx);
            });
        }
        if record_file_read_time {
            self.update_file_read_time(&buffer, cx);
        }
        self.track_buffer_internal(buffer, false, cx);
    }
564
    /// Mark a buffer as created by agent, so we can refresh it in the context.
    /// Also records the file's on-disk mtime for external-modification detection.
    pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.buffer_created_impl(buffer, true, cx);
    }
569
    /// Shared implementation of `buffer_created`. `record_file_read_time` is
    /// false when forwarding to a linked log, whose agent did not itself
    /// touch the file.
    fn buffer_created_impl(
        &mut self,
        buffer: Entity<Buffer>,
        record_file_read_time: bool,
        cx: &mut Context<Self>,
    ) {
        if let Some(linked_action_log) = &self.linked_action_log {
            // We don't want to share read times since the other agent hasn't read it necessarily
            linked_action_log.update(cx, |log, cx| {
                log.buffer_created_impl(buffer.clone(), false, cx);
            });
        }
        if record_file_read_time {
            self.update_file_read_time(&buffer, cx);
        }
        self.track_buffer_internal(buffer, true, cx);
    }
587
    /// Mark a buffer as edited by agent, so we can refresh it in the context.
    /// Also records the file's on-disk mtime for external-modification detection.
    pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.buffer_edited_impl(buffer, true, cx);
    }
592
    /// Shared implementation of `buffer_edited`. Re-marks a previously
    /// deleted buffer as `Modified`, advances the tracked version, and
    /// schedules an agent-attributed diff update.
    fn buffer_edited_impl(
        &mut self,
        buffer: Entity<Buffer>,
        record_file_read_time: bool,
        cx: &mut Context<Self>,
    ) {
        if let Some(linked_action_log) = &self.linked_action_log {
            // We don't want to share read times since the other agent hasn't read it necessarily
            linked_action_log.update(cx, |log, cx| {
                log.buffer_edited_impl(buffer.clone(), false, cx);
            });
        }
        if record_file_read_time {
            self.update_file_read_time(&buffer, cx);
        }
        let new_version = buffer.read(cx).version();
        let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
        // Editing a buffer the agent previously deleted resurrects it.
        if let TrackedBufferStatus::Deleted = tracked_buffer.status {
            tracked_buffer.status = TrackedBufferStatus::Modified;
        }

        tracked_buffer.version = new_version;
        tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
    }
617
    /// Records that the agent is about to delete `buffer`.
    ///
    /// A buffer the agent itself created is simply untracked (net effect is
    /// nothing). A modified buffer transitions to `Deleted`; when there is no
    /// linked log, its text is cleared immediately and a diff update is
    /// scheduled. With a linked log, clearing is deferred until after the
    /// linked log has processed the deletion, then the diff update is
    /// scheduled here.
    pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        // Ok to propagate file read time removal to linked action log
        self.remove_file_read_time(&buffer, cx);
        let has_linked_action_log = self.linked_action_log.is_some();
        let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
        match tracked_buffer.status {
            TrackedBufferStatus::Created { .. } => {
                // Agent-created then agent-deleted: nothing left to review.
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            TrackedBufferStatus::Modified => {
                tracked_buffer.status = TrackedBufferStatus::Deleted;
                if !has_linked_action_log {
                    buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
                    tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
                }
            }

            // Already marked deleted: nothing further to do.
            TrackedBufferStatus::Deleted => {}
        }

        if let Some(linked_action_log) = &mut self.linked_action_log {
            linked_action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        }

        if has_linked_action_log && let Some(tracked_buffer) = self.tracked_buffers.get(&buffer) {
            tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
        }

        cx.notify();
    }
649
    /// Accepts ("keeps") all unreviewed edits intersecting `buffer_range`.
    ///
    /// For a deleted buffer, keeping means untracking it entirely. Otherwise
    /// each intersecting edit is folded into the diff base (so it no longer
    /// diffs) and removed from the unreviewed patch; edits outside the range
    /// are retained with their old rows shifted to account for the base text
    /// growing/shrinking. Reports kept edits to telemetry if provided.
    pub fn keep_edits_in_range(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_range: Range<impl language::ToPoint>,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
        match tracked_buffer.status {
            TrackedBufferStatus::Deleted => {
                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            _ => {
                let buffer = buffer.read(cx);
                let buffer_range =
                    buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
                // Accumulates the row shift caused by edits accepted so far.
                let mut delta = 0i32;
                tracked_buffer.unreviewed_edits.retain_mut(|edit| {
                    edit.old.start = (edit.old.start as i32 + delta) as u32;
                    edit.old.end = (edit.old.end as i32 + delta) as u32;

                    if buffer_range.end.row < edit.new.start
                        || buffer_range.start.row > edit.new.end
                    {
                        // No intersection with the kept range: retain as-is.
                        true
                    } else {
                        // Replace the edit's old rows in the diff base with
                        // the buffer's current rows, effectively accepting it.
                        let old_range = tracked_buffer
                            .diff_base
                            .point_to_offset(Point::new(edit.old.start, 0))
                            ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                Point::new(edit.old.end, 0),
                                tracked_buffer.diff_base.max_point(),
                            ));
                        let new_range = tracked_buffer
                            .snapshot
                            .point_to_offset(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.point_to_offset(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        tracked_buffer.diff_base.replace(
                            old_range,
                            &tracked_buffer
                                .snapshot
                                .text_for_range(new_range)
                                .collect::<String>(),
                        );
                        delta += edit.new_len() as i32 - edit.old_len() as i32;
                        metrics.add_edit(edit);
                        false
                    }
                });
                // Once all creation edits are kept, the buffer behaves like
                // any other modified buffer.
                if tracked_buffer.unreviewed_edits.is_empty()
                    && let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status
                {
                    tracked_buffer.status = TrackedBufferStatus::Modified;
                }
                tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
            }
        }
        if let Some(telemetry) = telemetry {
            telemetry_report_accepted_edits(&telemetry, metrics);
        }
    }
720
    /// Rejects unreviewed edits intersecting `buffer_ranges`, reverting the
    /// buffer and returning a save task plus optional undo information.
    ///
    /// Behavior depends on the tracked status:
    /// - `Created` over an existing file: restore the original content, save,
    ///   and return undo info holding the agent's full content.
    /// - `Created` brand-new file: delete the file only if it provably still
    ///   contains exactly the agent's content; otherwise keep it. No undo.
    /// - `Deleted`: restore the diff base text, save, and re-track as read.
    /// - `Modified`: revert each intersecting edit to its diff-base text,
    ///   capturing the agent text per range for undo, then save.
    pub fn reject_edits_in_ranges(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_ranges: Vec<Range<impl language::ToPoint>>,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) -> (Task<Result<()>>, Option<PerBufferUndo>) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return (Task::ready(Ok(())), None);
        };

        let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
        let mut undo_info: Option<PerBufferUndo> = None;
        let task = match &tracked_buffer.status {
            TrackedBufferStatus::Created {
                existing_file_content,
            } => {
                let task = if let Some(existing_file_content) = existing_file_content {
                    // Capture the agent's content before restoring existing file content
                    let agent_content = buffer.read(cx).text();
                    let buffer_id = buffer.read(cx).remote_id();

                    // Swap the whole buffer back to the pre-agent content in
                    // a single transaction.
                    buffer.update(cx, |buffer, cx| {
                        buffer.start_transaction();
                        buffer.set_text("", cx);
                        for chunk in existing_file_content.chunks() {
                            buffer.append(chunk, cx);
                        }
                        buffer.end_transaction(cx);
                    });

                    undo_info = Some(PerBufferUndo {
                        buffer: buffer.downgrade(),
                        edits_to_restore: vec![(
                            Anchor::min_for_buffer(buffer_id)..Anchor::max_for_buffer(buffer_id),
                            agent_content,
                        )],
                        status: UndoBufferStatus::Created {
                            had_existing_content: true,
                        },
                    });

                    self.project
                        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
                } else {
                    // For a file created by AI with no pre-existing content,
                    // only delete the file if we're certain it contains only AI content
                    // with no edits from the user.

                    let initial_version = tracked_buffer.version.clone();
                    let current_version = buffer.read(cx).version();

                    let current_content = buffer.read(cx).text();
                    let tracked_content = tracked_buffer.snapshot.text();

                    let is_ai_only_content =
                        initial_version == current_version && current_content == tracked_content;

                    if is_ai_only_content {
                        let task = buffer
                            .read(cx)
                            .entry_id(cx)
                            .and_then(|entry_id| {
                                self.project.update(cx, |project, cx| {
                                    project.delete_entry(entry_id, false, cx)
                                })
                            })
                            // No worktree entry to delete: treat as done.
                            .unwrap_or_else(|| Task::ready(Ok(None)));

                        cx.background_spawn(async move {
                            task.await?;
                            Ok(())
                        })
                    } else {
                        // Not sure how to disentangle edits made by the user
                        // from edits made by the AI at this point.
                        // For now, preserve both to avoid data loss.
                        //
                        // TODO: Better solution (disable "Reject" after user makes some
                        // edit or find a way to differentiate between AI and user edits)
                        Task::ready(Ok(()))
                    }
                };

                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                cx.notify();
                task
            }
            TrackedBufferStatus::Deleted => {
                // Resurrect the deleted content from the diff base.
                buffer.update(cx, |buffer, cx| {
                    buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
                });
                let save = self
                    .project
                    .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));

                // Clear all tracked edits for this buffer and start over as if we just read it.
                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                self.buffer_read(buffer.clone(), cx);
                cx.notify();
                save
            }
            TrackedBufferStatus::Modified => {
                let edits_to_restore = buffer.update(cx, |buffer, cx| {
                    let mut buffer_row_ranges = buffer_ranges
                        .into_iter()
                        .map(|range| {
                            range.start.to_point(buffer).row..range.end.to_point(buffer).row
                        })
                        .peekable();

                    let mut edits_to_revert = Vec::new();
                    let mut edits_for_undo = Vec::new();
                    for edit in tracked_buffer.unreviewed_edits.edits() {
                        // Anchor the edit's new rows so positions survive
                        // the reverting edits applied below.
                        let new_range = tracked_buffer
                            .snapshot
                            .anchor_before(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.anchor_after(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        let new_row_range = new_range.start.to_point(buffer).row
                            ..new_range.end.to_point(buffer).row;

                        // Advance through the sorted request ranges until one
                        // intersects (revert) or passes (skip) this edit.
                        let mut revert = false;
                        while let Some(buffer_row_range) = buffer_row_ranges.peek() {
                            if buffer_row_range.end < new_row_range.start {
                                buffer_row_ranges.next();
                            } else if buffer_row_range.start > new_row_range.end {
                                break;
                            } else {
                                revert = true;
                                break;
                            }
                        }

                        if revert {
                            metrics.add_edit(edit);
                            let old_range = tracked_buffer
                                .diff_base
                                .point_to_offset(Point::new(edit.old.start, 0))
                                ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                    Point::new(edit.old.end, 0),
                                    tracked_buffer.diff_base.max_point(),
                                ));
                            let old_text = tracked_buffer
                                .diff_base
                                .chunks_in_range(old_range)
                                .collect::<String>();

                            // Capture the agent's text before we revert it (for undo)
                            let new_range_offset =
                                new_range.start.to_offset(buffer)..new_range.end.to_offset(buffer);
                            let agent_text =
                                buffer.text_for_range(new_range_offset).collect::<String>();
                            edits_for_undo.push((new_range.clone(), agent_text));

                            edits_to_revert.push((new_range, old_text));
                        }
                    }

                    buffer.edit(edits_to_revert, None, cx);
                    edits_for_undo
                });

                if !edits_to_restore.is_empty() {
                    undo_info = Some(PerBufferUndo {
                        buffer: buffer.downgrade(),
                        edits_to_restore,
                        status: UndoBufferStatus::Modified,
                    });
                }

                self.project
                    .update(cx, |project, cx| project.save_buffer(buffer, cx))
            }
        };
        if let Some(telemetry) = telemetry {
            telemetry_report_rejected_edits(&telemetry, metrics);
        }
        (task, undo_info)
    }
905
    /// Accepts every unreviewed edit across all tracked buffers.
    ///
    /// Deleted buffers are simply untracked; for every other buffer the
    /// current content becomes the new diff base (clearing the unreviewed
    /// patch) and `Created` buffers become `Modified`. All kept edits are
    /// reported to telemetry if provided.
    pub fn keep_all_edits(
        &mut self,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) {
        self.tracked_buffers.retain(|buffer, tracked_buffer| {
            let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
            metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
            if let Some(telemetry) = telemetry.as_ref() {
                telemetry_report_accepted_edits(telemetry, metrics);
            }
            match tracked_buffer.status {
                // Accepting a deletion: stop tracking the buffer.
                TrackedBufferStatus::Deleted => false,
                _ => {
                    if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
                        tracked_buffer.status = TrackedBufferStatus::Modified;
                    }
                    // Collapse the diff: current content is the new base.
                    tracked_buffer.unreviewed_edits.clear();
                    tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
                    tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
                    true
                }
            }
        });

        cx.notify();
    }
933
    /// Rejects every unreviewed edit across all changed buffers.
    ///
    /// Collects per-buffer undo information so the operation can later be
    /// reversed via `undo_last_reject`, and returns a task that resolves
    /// once every per-buffer reject/save task has completed (errors are
    /// logged, not propagated).
    pub fn reject_all_edits(
        &mut self,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) -> Task<()> {
        // Clear any previous undo state before starting a new reject operation
        self.last_reject_undo = None;

        let mut undo_buffers = Vec::new();
        let mut futures = Vec::new();

        for buffer in self.changed_buffers(cx).into_keys() {
            // Reject the buffer's full range.
            let buffer_ranges = vec![Anchor::min_max_range_for_buffer(
                buffer.read(cx).remote_id(),
            )];
            let (reject_task, undo_info) =
                self.reject_edits_in_ranges(buffer, buffer_ranges, telemetry.clone(), cx);

            if let Some(undo) = undo_info {
                undo_buffers.push(undo);
            }

            futures.push(async move {
                reject_task.await.log_err();
            });
        }

        // Store the undo information if we have any
        if !undo_buffers.is_empty() {
            self.last_reject_undo = Some(LastRejectUndo {
                buffers: undo_buffers,
            });
        }

        let task = futures::future::join_all(futures);
        cx.background_spawn(async move {
            task.await;
        })
    }
973
    /// Whether a reject operation has stored undo state that
    /// `undo_last_reject` could apply.
    pub fn has_pending_undo(&self) -> bool {
        self.last_reject_undo.is_some()
    }
977
    /// Replaces the stored undo state for the most recent reject operation.
    pub fn set_last_reject_undo(&mut self, undo: LastRejectUndo) {
        self.last_reject_undo = Some(undo);
    }
981
    /// Undoes the most recent reject operation, restoring the rejected agent changes.
    /// This is a best-effort operation: if buffers have been closed or modified externally,
    /// those buffers will be skipped.
    ///
    /// Consumes the stored undo state, re-applies each buffer's captured
    /// agent text at its anchor ranges, re-tracks any buffer that is no
    /// longer tracked, and returns a task resolving when all saves finish.
    pub fn undo_last_reject(&mut self, cx: &mut Context<Self>) -> Task<()> {
        let Some(undo) = self.last_reject_undo.take() else {
            return Task::ready(());
        };

        let mut save_tasks = Vec::with_capacity(undo.buffers.len());

        for per_buffer_undo in undo.buffers {
            // Skip if the buffer entity has been deallocated
            let Some(buffer) = per_buffer_undo.buffer.upgrade() else {
                continue;
            };

            buffer.update(cx, |buffer, cx| {
                let mut valid_edits = Vec::new();

                // Only apply anchors that still belong to this buffer.
                for (anchor_range, text_to_restore) in per_buffer_undo.edits_to_restore {
                    if anchor_range.start.buffer_id == buffer.remote_id()
                        && anchor_range.end.buffer_id == buffer.remote_id()
                    {
                        valid_edits.push((anchor_range, text_to_restore));
                    }
                }

                if !valid_edits.is_empty() {
                    buffer.edit(valid_edits, None, cx);
                }
            });

            // Rejecting may have untracked the buffer; resume tracking the
            // restored agent edits.
            if !self.tracked_buffers.contains_key(&buffer) {
                self.buffer_edited(buffer.clone(), cx);
            }

            let save = self
                .project
                .update(cx, |project, cx| project.save_buffer(buffer, cx));
            save_tasks.push(save);
        }

        cx.notify();

        cx.background_spawn(async move {
            futures::future::join_all(save_tasks).await;
        })
    }
1030
1031 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
1032 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
1033 self.tracked_buffers
1034 .iter()
1035 .filter(|(_, tracked)| tracked.has_edits(cx))
1036 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
1037 .collect()
1038 }
1039
    /// Returns the total number of lines added and removed across all unreviewed buffers.
    pub fn diff_stats(&self, cx: &App) -> DiffStats {
        DiffStats::all_files(&self.changed_buffers(cx), cx)
    }
1044
1045 /// Iterate over buffers changed since last read or edited by the model
1046 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
1047 self.tracked_buffers
1048 .iter()
1049 .filter(|(buffer, tracked)| {
1050 let buffer = buffer.read(cx);
1051
1052 tracked.version != buffer.version
1053 && buffer
1054 .file()
1055 .is_some_and(|file| !file.disk_state().is_deleted())
1056 })
1057 .map(|(buffer, _)| buffer)
1058 }
1059}
1060
/// Line counts summarizing a diff.
#[derive(Default, Debug, Clone, Copy)]
pub struct DiffStats {
    /// Number of lines present in the new text but not in the diff base.
    pub lines_added: u32,
    /// Number of lines present in the diff base but not in the new text.
    pub lines_removed: u32,
}
1066
1067impl DiffStats {
1068 pub fn single_file(buffer: &Buffer, diff: &BufferDiff, cx: &App) -> Self {
1069 let mut stats = DiffStats::default();
1070 let diff_snapshot = diff.snapshot(cx);
1071 let buffer_snapshot = buffer.snapshot();
1072 let base_text = diff_snapshot.base_text();
1073
1074 for hunk in diff_snapshot.hunks(&buffer_snapshot) {
1075 let added_rows = hunk.range.end.row.saturating_sub(hunk.range.start.row);
1076 stats.lines_added += added_rows;
1077
1078 let base_start = hunk.diff_base_byte_range.start.to_point(base_text).row;
1079 let base_end = hunk.diff_base_byte_range.end.to_point(base_text).row;
1080 let removed_rows = base_end.saturating_sub(base_start);
1081 stats.lines_removed += removed_rows;
1082 }
1083
1084 stats
1085 }
1086
1087 pub fn all_files(
1088 changed_buffers: &BTreeMap<Entity<Buffer>, Entity<BufferDiff>>,
1089 cx: &App,
1090 ) -> Self {
1091 let mut total = DiffStats::default();
1092 for (buffer, diff) in changed_buffers {
1093 let stats = DiffStats::single_file(buffer.read(cx), diff.read(cx), cx);
1094 total.lines_added += stats.lines_added;
1095 total.lines_removed += stats.lines_removed;
1096 }
1097 total
1098 }
1099}
1100
/// Identifiers attached to the telemetry events emitted by the action log.
#[derive(Clone)]
pub struct ActionLogTelemetry {
    /// Identifier of the agent that produced the edits.
    pub agent_telemetry_id: SharedString,
    /// Identifier of the agent session the edits belong to.
    pub session_id: Arc<str>,
}
1106
/// Per-buffer line counts reported alongside accept/reject telemetry events.
struct ActionLogMetrics {
    /// Rows removed by the counted edits.
    lines_removed: u32,
    /// Rows added by the counted edits.
    lines_added: u32,
    /// Language of the buffer the edits were made in, if known.
    language: Option<SharedString>,
}
1112
1113impl ActionLogMetrics {
1114 fn for_buffer(buffer: &Buffer) -> Self {
1115 Self {
1116 language: buffer.language().map(|l| l.name().0),
1117 lines_removed: 0,
1118 lines_added: 0,
1119 }
1120 }
1121
1122 fn add_edits(&mut self, edits: &[Edit<u32>]) {
1123 for edit in edits {
1124 self.add_edit(edit);
1125 }
1126 }
1127
1128 fn add_edit(&mut self, edit: &Edit<u32>) {
1129 self.lines_added += edit.new_len();
1130 self.lines_removed += edit.old_len();
1131 }
1132}
1133
/// Emits an "Agent Edits Accepted" telemetry event carrying the agent/session
/// identifiers and the accepted line counts.
fn telemetry_report_accepted_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
    telemetry::event!(
        "Agent Edits Accepted",
        agent = telemetry.agent_telemetry_id,
        session = telemetry.session_id,
        language = metrics.language,
        lines_added = metrics.lines_added,
        lines_removed = metrics.lines_removed
    );
}
1144
/// Emits an "Agent Edits Rejected" telemetry event carrying the agent/session
/// identifiers and the rejected line counts.
fn telemetry_report_rejected_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
    telemetry::event!(
        "Agent Edits Rejected",
        agent = telemetry.agent_telemetry_id,
        session = telemetry.session_id,
        language = metrics.language,
        lines_added = metrics.lines_added,
        lines_removed = metrics.lines_removed
    );
}
1155
/// Applies to `old_text` the subset of `edits` that don't intersect any edit
/// in `patch`, copying the replacement text out of `new_text`.
///
/// Coordinates: `edits`' `old` row ranges are compared against `patch`'s `new`
/// ranges, so `edits` are expressed relative to the text `patch` produces,
/// while `old_text` is the text on `patch`'s old side. Returns `true` if at
/// least one edit was applied.
fn apply_non_conflicting_edits(
    patch: &Patch<u32>,
    edits: Vec<Edit<u32>>,
    old_text: &mut Rope,
    new_text: &Rope,
) -> bool {
    let mut old_edits = patch.edits().iter().cloned().peekable();
    let mut new_edits = edits.into_iter().peekable();
    // Net row delta from the new edits already applied to `old_text`.
    let mut applied_delta = 0i32;
    // Net row delta from patch edits we've skipped past; used to translate
    // coordinates from `patch`'s new side back into `old_text` rows.
    let mut rebased_delta = 0i32;
    let mut has_made_changes = false;

    while let Some(mut new_edit) = new_edits.next() {
        let mut conflict = false;

        // Push all the old edits that are before this new edit or that intersect with it.
        while let Some(old_edit) = old_edits.peek() {
            if new_edit.old.end < old_edit.new.start
                || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
            {
                // Old edit is entirely after the new edit: stop scanning.
                break;
            } else if new_edit.old.start > old_edit.new.end
                || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
            {
                // Old edit is entirely before the new edit: skip past it and
                // record its size change for later coordinate translation.
                let old_edit = old_edits.next().unwrap();
                rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
            } else {
                // The ranges intersect: this new edit conflicts and is dropped.
                conflict = true;
                if new_edits
                    .peek()
                    .is_some_and(|next_edit| next_edit.old.overlaps(&old_edit.new))
                {
                    // The following new edit overlaps the same old edit, so
                    // evaluate it against this old edit as well.
                    new_edit = new_edits.next().unwrap();
                } else {
                    let old_edit = old_edits.next().unwrap();
                    rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
                }
            }
        }

        if !conflict {
            // This edit doesn't intersect with any old edit, so we can apply it to the old text.
            new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
            new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
            // Convert row ranges to byte ranges, clamping at the rope's end.
            let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
                ..old_text.point_to_offset(cmp::min(
                    Point::new(new_edit.old.end, 0),
                    old_text.max_point(),
                ));
            let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
                ..new_text.point_to_offset(cmp::min(
                    Point::new(new_edit.new.end, 0),
                    new_text.max_point(),
                ));

            old_text.replace(
                old_bytes,
                &new_text.chunks_in_range(new_bytes).collect::<String>(),
            );
            applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
            has_made_changes = true;
        }
    }
    has_made_changes
}
1221
1222fn diff_snapshots(
1223 old_snapshot: &text::BufferSnapshot,
1224 new_snapshot: &text::BufferSnapshot,
1225) -> Vec<Edit<u32>> {
1226 let mut edits = new_snapshot
1227 .edits_since::<Point>(&old_snapshot.version)
1228 .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
1229 .peekable();
1230 let mut row_edits = Vec::new();
1231 while let Some(mut edit) = edits.next() {
1232 while let Some(next_edit) = edits.peek() {
1233 if edit.old.end >= next_edit.old.start {
1234 edit.old.end = next_edit.old.end;
1235 edit.new.end = next_edit.new.end;
1236 edits.next();
1237 } else {
1238 break;
1239 }
1240 }
1241 row_edits.push(edit);
1242 }
1243 row_edits
1244}
1245
/// Converts a point-based edit into a row-range edit.
///
/// Three cases, in order:
/// - The edit begins at the end of a line, the new text at the edit start is a
///   newline, and the edit doesn't start at the end of the file: attribute the
///   change to the following rows rather than the row the newline follows.
/// - The edit starts and ends at column 0 on both sides: it covers whole
///   lines, so the row ranges are used as-is.
/// - Otherwise the edit touches the interior of its final line, so both row
///   ranges are extended by one row to include it.
fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
    if edit.old.start.column == old_text.line_len(edit.old.start.row)
        && new_text
            .chars_at(new_text.point_to_offset(edit.new.start))
            .next()
            == Some('\n')
        && edit.old.start != old_text.max_point()
    {
        Edit {
            old: edit.old.start.row + 1..edit.old.end.row + 1,
            new: edit.new.start.row + 1..edit.new.end.row + 1,
        }
    } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
        Edit {
            old: edit.old.start.row..edit.old.end.row,
            new: edit.new.start.row..edit.new.end.row,
        }
    } else {
        Edit {
            old: edit.old.start.row..edit.old.end.row + 1,
            new: edit.new.start.row..edit.new.end.row + 1,
        }
    }
}
1270
/// Who made a change to a tracked buffer: the human user or the agent.
#[derive(Copy, Clone, Debug)]
enum ChangeAuthor {
    User,
    Agent,
}
1276
/// Lifecycle state of a buffer tracked by the action log.
#[derive(Debug)]
enum TrackedBufferStatus {
    /// The agent created the file. If it overwrote an existing file, the prior
    /// content is retained so it can be restored on reject.
    Created { existing_file_content: Option<Rope> },
    /// The agent modified a pre-existing file.
    Modified,
    /// The agent deleted the file.
    Deleted,
}
1283
/// Per-buffer state kept for every buffer the agent has touched.
pub struct TrackedBuffer {
    /// The tracked buffer itself.
    buffer: Entity<Buffer>,
    /// Text the buffer is diffed against to produce unreviewed hunks.
    diff_base: Rope,
    /// Row edits made by the agent that the user hasn't reviewed yet.
    unreviewed_edits: Patch<u32>,
    /// Whether the agent created, modified, or deleted this buffer.
    status: TrackedBufferStatus,
    /// Buffer version as of the model's last read/edit (see `stale_buffers`).
    version: clock::Global,
    /// Diff between `diff_base` and the buffer's current contents.
    diff: Entity<BufferDiff>,
    // NOTE(review): appears to be the snapshot the diff was last computed
    // against — confirm against the diff-maintenance task.
    snapshot: text::BufferSnapshot,
    /// Channel used to request asynchronous diff recomputation.
    diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
    /// Keeps language servers alive for this buffer while it's tracked.
    _open_lsp_handle: OpenLspBufferHandle,
    /// Background task that services `diff_update` requests.
    _maintain_diff: Task<()>,
    /// Subscription to the buffer's events.
    _subscription: Subscription,
}
1297
1298impl TrackedBuffer {
1299 #[cfg(any(test, feature = "test-support"))]
1300 pub fn diff(&self) -> &Entity<BufferDiff> {
1301 &self.diff
1302 }
1303
1304 #[cfg(any(test, feature = "test-support"))]
1305 pub fn diff_base_len(&self) -> usize {
1306 self.diff_base.len()
1307 }
1308
1309 fn has_edits(&self, cx: &App) -> bool {
1310 self.diff
1311 .read(cx)
1312 .snapshot(cx)
1313 .hunks(self.buffer.read(cx))
1314 .next()
1315 .is_some()
1316 }
1317
1318 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
1319 self.diff_update
1320 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
1321 .ok();
1322 }
1323}
1324
/// Public wrapper exposing the diff of a changed buffer.
pub struct ChangedBuffer {
    /// Diff between the buffer's unreviewed base and its current contents.
    pub diff: Entity<BufferDiff>,
}
1328
1329#[cfg(test)]
1330mod tests {
1331 use super::*;
1332 use buffer_diff::DiffHunkStatusKind;
1333 use gpui::TestAppContext;
1334 use language::Point;
1335 use project::{FakeFs, Fs, Project, RemoveOptions};
1336 use rand::prelude::*;
1337 use serde_json::json;
1338 use settings::SettingsStore;
1339 use std::env;
1340 use util::{RandomCharIter, path};
1341
    /// Installs the test logger before any test in this binary runs.
    #[ctor::ctor]
    fn init_logger() {
        zlog::init_test();
    }
1346
    /// Installs a test `SettingsStore` global, required before creating
    /// projects and buffers in these tests.
    fn init_test(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
        });
    }
1353
    /// Agent edits produce unreviewed hunks; keeping edits in a range removes
    /// only the hunks intersecting that range.
    #[gpui::test(iterations = 10)]
    async fn test_keep_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Two separate agent edits: one on line 1, one on line 4.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndEf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(2, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(4, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Keeping the second hunk's range leaves only the first hunk.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(2, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\n".into(),
                }],
            )]
        );

        // Keeping the whole buffer clears all remaining hunks.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1431
    /// Line deletions produce `Deleted` hunks; undoing one deletion removes
    /// its hunk, and keeping the remaining range clears the log.
    #[gpui::test(iterations = 10)]
    async fn test_deletions(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Delete two separate lines as distinct transactions so they can be
        // undone independently later.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
                    .unwrap();
                buffer.finalize_last_transaction();
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
                    .unwrap();
                buffer.finalize_last_transaction();
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nghi\njkl\npqr"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "mno\n".into(),
                    }
                ],
            )]
        );

        // Undoing the last transaction restores "mno" and drops its hunk.
        buffer.update(cx, |buffer, cx| buffer.undo(cx));
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nghi\njkl\nmno\npqr"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(1, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "def\n".into(),
                }],
            )]
        );

        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1516
    /// User edits made after (and even inside) an agent hunk don't create new
    /// unreviewed hunks; keeping the range then clears the log.
    #[gpui::test(iterations = 10)]
    async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent edit spanning lines 1-2.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndeF\nGHI\njkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // User edits outside the hunk: the hunk is unchanged.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(
                [
                    (Point::new(0, 2)..Point::new(0, 2), "X"),
                    (Point::new(3, 0)..Point::new(3, 0), "Y"),
                ],
                None,
                cx,
            )
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // A user edit inside the hunk still doesn't change the hunk bounds.
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndZeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1613
    /// A file created by the agent shows up as a single `Added` hunk that
    /// grows with further edits and clears once kept.
    #[gpui::test(iterations = 10)]
    async fn test_creating_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // A user edit merges into the same Added hunk (base is still empty).
        buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 6),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), 0..5, None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1672
    /// When the agent overwrites an existing file, rejecting the edit restores
    /// the file's original content.
    #[gpui::test(iterations = 10)]
    async fn test_overwriting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        // `buffer_created` on an existing file marks an overwrite; the whole
        // new content appears as one Added hunk.
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 19),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting any range intersecting the hunk restores the original file.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx);
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }
1731
    /// Overwriting a file the agent had already edited resets the tracked
    /// state; rejecting then restores the file's original on-disk content.
    #[gpui::test(iterations = 10)]
    async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        // First, a regular agent edit yields a Modified hunk.
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 37),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "Lorem ipsum dolor".into(),
                }],
            )]
        );

        // Then the agent rewrites the whole file: the hunk becomes Added.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 9),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting restores the original pre-agent content, not the
        // intermediate edited version.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx);
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }
1812
    /// Agent deletions produce Deleted hunks; recreating a file (externally or
    /// by the agent) updates the tracked state accordingly, and an external
    /// deletion clears the tracking.
    #[gpui::test(iterations = 10)]
    async fn test_deleting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file1": "lorem\n", "file2": "ipsum\n"}),
        )
        .await;

        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let file1_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();
        let file2_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
            .unwrap();

        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let buffer1 = project
            .update(cx, |project, cx| {
                project.open_buffer(file1_path.clone(), cx)
            })
            .await
            .unwrap();
        let buffer2 = project
            .update(cx, |project, cx| {
                project.open_buffer(file2_path.clone(), cx)
            })
            .await
            .unwrap();

        // The agent deletes both files.
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| {
                project.delete_file(file1_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        project
            .update(cx, |project, cx| {
                project.delete_file(file2_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![
                (
                    buffer1.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "lorem\n".into(),
                    }]
                ),
                (
                    buffer2.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "ipsum\n".into(),
                    }],
                )
            ]
        );

        // Simulate file1 being recreated externally.
        fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
            .await;

        // Simulate file2 being recreated by a tool.
        let buffer2 = project
            .update(cx, |project, cx| project.open_buffer(file2_path, cx))
            .await
            .unwrap();
        action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
        buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
        action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
            .await
            .unwrap();

        // file1's deletion hunk disappears; file2 now shows as Added.
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer2.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Simulate file2 being deleted externally.
        fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1922
    /// Rejecting ranges reverts only the hunks they intersect; non-overlapping
    /// reject ranges are ignored.
    #[gpui::test(iterations = 10)]
    async fn test_reject_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Two agent edits: a multi-line insertion and a single-char change.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // If the rejected range doesn't overlap with any hunk, we ignore it.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Rejecting a range overlapping the first hunk reverts only that hunk.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(1, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(4, 0)..Point::new(4, 3),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "mno".into(),
                }],
            )]
        );

        // Rejecting the remaining hunk restores the original text entirely.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2063
    /// Rejecting several anchor ranges in a single call reverts all matching
    /// hunks synchronously, before the returned task completes.
    #[gpui::test(iterations = 10)]
    async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        action_log.update(cx, |log, cx| {
            let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
                ..buffer.read(cx).anchor_before(Point::new(1, 0));
            let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
                ..buffer.read(cx).anchor_before(Point::new(5, 3));

            let (task, _) =
                log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], None, cx);
            task.detach();
            // The buffer text is already reverted here, before awaiting.
            assert_eq!(
                buffer.read_with(cx, |buffer, _| buffer.text()),
                "abc\ndef\nghi\njkl\nmno"
            );
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2140
    /// Rejecting an agent-performed file deletion recreates the file on disk
    /// with its original content.
    #[gpui::test(iterations = 10)]
    async fn test_reject_deleted_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "content"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // The agent deletes the file.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| {
                project.delete_file(file_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "content".into(),
                }]
            )]
        );

        // Rejecting the deletion hunk brings the file back.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
        assert!(fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2199
2200 #[gpui::test(iterations = 10)]
2201 async fn test_reject_created_file(cx: &mut TestAppContext) {
2202 init_test(cx);
2203
2204 let fs = FakeFs::new(cx.executor());
2205 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2206 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2207 let file_path = project
2208 .read_with(cx, |project, cx| {
2209 project.find_project_path("dir/new_file", cx)
2210 })
2211 .unwrap();
2212 let buffer = project
2213 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2214 .await
2215 .unwrap();
2216 cx.update(|cx| {
2217 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2218 buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
2219 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2220 });
2221 project
2222 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2223 .await
2224 .unwrap();
2225 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2226 cx.run_until_parked();
2227 assert_eq!(
2228 unreviewed_hunks(&action_log, cx),
2229 vec![(
2230 buffer.clone(),
2231 vec![HunkStatus {
2232 range: Point::new(0, 0)..Point::new(0, 7),
2233 diff_status: DiffHunkStatusKind::Added,
2234 old_text: "".into(),
2235 }],
2236 )]
2237 );
2238
2239 action_log
2240 .update(cx, |log, cx| {
2241 let (task, _) = log.reject_edits_in_ranges(
2242 buffer.clone(),
2243 vec![Point::new(0, 0)..Point::new(0, 11)],
2244 None,
2245 cx,
2246 );
2247 task
2248 })
2249 .await
2250 .unwrap();
2251 cx.run_until_parked();
2252 assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
2253 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2254 }
2255
    // If the user has edited a file after the agent created it, rejecting the
    // agent's edits must NOT delete the file or discard the user's work.
    #[gpui::test]
    async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();

        // User makes additional edits (not reported via `buffer_edited`, so
        // the log treats them as user-authored)
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| {
                buffer.edit([(10..10, "\nuser added this line")], None, cx);
            });
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // Reject all (the oversized range is effectively the whole buffer)
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(100, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();

        // File should still contain all the content
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        let content = buffer.read_with(cx, |buffer, _| buffer.text());
        assert_eq!(content, "ai content\nuser added this line");
    }
2323
2324 #[gpui::test]
2325 async fn test_reject_after_accepting_hunk_on_created_file(cx: &mut TestAppContext) {
2326 init_test(cx);
2327
2328 let fs = FakeFs::new(cx.executor());
2329 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2330 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2331
2332 let file_path = project
2333 .read_with(cx, |project, cx| {
2334 project.find_project_path("dir/new_file", cx)
2335 })
2336 .unwrap();
2337 let buffer = project
2338 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
2339 .await
2340 .unwrap();
2341
2342 // AI creates file with initial content
2343 cx.update(|cx| {
2344 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2345 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
2346 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2347 });
2348 project
2349 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2350 .await
2351 .unwrap();
2352 cx.run_until_parked();
2353 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2354
2355 // User accepts the single hunk
2356 action_log.update(cx, |log, cx| {
2357 let buffer_range = Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id());
2358 log.keep_edits_in_range(buffer.clone(), buffer_range, None, cx)
2359 });
2360 cx.run_until_parked();
2361 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2362 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2363
2364 // AI modifies the file
2365 cx.update(|cx| {
2366 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
2367 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2368 });
2369 project
2370 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2371 .await
2372 .unwrap();
2373 cx.run_until_parked();
2374 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2375
2376 // User rejects the hunk
2377 action_log
2378 .update(cx, |log, cx| {
2379 let (task, _) = log.reject_edits_in_ranges(
2380 buffer.clone(),
2381 vec![Anchor::min_max_range_for_buffer(
2382 buffer.read(cx).remote_id(),
2383 )],
2384 None,
2385 cx,
2386 );
2387 task
2388 })
2389 .await
2390 .unwrap();
2391 cx.run_until_parked();
2392 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await,);
2393 assert_eq!(
2394 buffer.read_with(cx, |buffer, _| buffer.text()),
2395 "ai content v1"
2396 );
2397 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2398 }
2399
    // Same scenario as above but driven through the bulk "Accept All" /
    // "Reject All" entry points instead of per-hunk ranges: rejecting edits on
    // a created file whose creation was already accepted rolls back to the
    // accepted content and keeps the file on disk.
    #[gpui::test]
    async fn test_reject_edits_on_previously_accepted_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();

        // User clicks "Accept All"
        action_log.update(cx, |log, cx| log.keep_all_edits(None, cx));
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared

        // AI modifies file again
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User clicks "Reject All": only v2 is rolled back, not the creation
        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "ai content v1"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2460
    // Randomized stress test: interleaves keep/reject operations with agent
    // and user edits, then checks that the tracked diff stays consistent with
    // the live buffer. `OPERATIONS` env var overrides the iteration count.
    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        init_test(cx);

        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

        for _ in 0..operations {
            // 25% keep, 25% reject, 50% edit (agent or user with equal odds).
            match rng.random_range(0..100) {
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, None, cx)
                    });
                }
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            let (task, _) =
                                log.reject_edits_in_ranges(buffer.clone(), vec![range], None, cx);
                            task
                        })
                        .await
                        .unwrap();
                }
                _ => {
                    // User edits are NOT reported via `buffer_edited`, so the
                    // log must attribute them correctly on its own.
                    let is_agent_edit = rng.random_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            // Occasionally verify mid-run, not just at the end.
            if rng.random_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

        // Replays the tracked unreviewed edits onto the stored diff base and
        // asserts the result equals the live buffer text — i.e. the tracked
        // diff fully explains the difference between base and buffer.
        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    // Because `old_text` is rewritten in place, in order, the
                    // edit's `new` row coordinates can index it directly.
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }
2554
    // When a git commit lands content matching some pending agent edits,
    // exactly those hunks are auto-kept; hunks the commit didn't adopt (or
    // adopted with different content) remain unreviewed.
    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        // HEAD starts identical to the working copy.
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // One agent pass producing five distinct hunks across the file.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        // Only the two matched hunks were auto-kept; three remain.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Make another commit that accepts the NEW line but with different content
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into())],
            "0000002",
        );
        cx.run_until_parked();
        // GGG now matches and is kept; the addition (different content) and
        // the last-line edit are still pending.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer,
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2716
    // Reject-then-undo round trip: rejecting all agent edits records undo
    // state, undoing the reject restores the agent's text and clears the
    // pending-undo flag.
    #[gpui::test]
    async fn test_undo_last_reject(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "abc\ndef\nghi"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Track the buffer and make an agent edit
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit(
                        [(Point::new(1, 0)..Point::new(1, 3), "AGENT_EDIT")],
                        None,
                        cx,
                    )
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();

        // Verify the agent edit is there
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nAGENT_EDIT\nghi"
        );
        assert!(!unreviewed_hunks(&action_log, cx).is_empty());

        // Reject all edits
        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();

        // Verify the buffer is back to original
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi"
        );
        assert!(unreviewed_hunks(&action_log, cx).is_empty());

        // Verify undo state is available
        assert!(action_log.read_with(cx, |log, _| log.has_pending_undo()));

        // Undo the reject
        action_log
            .update(cx, |log, cx| log.undo_last_reject(cx))
            .await;

        cx.run_until_parked();

        // Verify the agent edit is restored
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nAGENT_EDIT\nghi"
        );

        // Verify undo state is cleared
        assert!(!action_log.read_with(cx, |log, _| log.has_pending_undo()));
    }
2795
2796 #[gpui::test]
2797 async fn test_linked_action_log_buffer_read(cx: &mut TestAppContext) {
2798 init_test(cx);
2799
2800 let fs = FakeFs::new(cx.executor());
2801 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
2802 .await;
2803 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2804 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
2805 let child_log =
2806 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
2807
2808 let file_path = project
2809 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
2810 .unwrap();
2811 let buffer = project
2812 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2813 .await
2814 .unwrap();
2815
2816 cx.update(|cx| {
2817 child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
2818 });
2819
2820 // Neither log considers the buffer stale immediately after reading it.
2821 let child_stale = cx.read(|cx| {
2822 child_log
2823 .read(cx)
2824 .stale_buffers(cx)
2825 .cloned()
2826 .collect::<Vec<_>>()
2827 });
2828 let parent_stale = cx.read(|cx| {
2829 parent_log
2830 .read(cx)
2831 .stale_buffers(cx)
2832 .cloned()
2833 .collect::<Vec<_>>()
2834 });
2835 assert!(child_stale.is_empty());
2836 assert!(parent_stale.is_empty());
2837
2838 // Simulate a user edit after the agent read the file.
2839 cx.update(|cx| {
2840 buffer.update(cx, |buffer, cx| {
2841 buffer.edit([(0..5, "goodbye")], None, cx).unwrap();
2842 });
2843 });
2844 cx.run_until_parked();
2845
2846 // Both child and parent should see the buffer as stale because both tracked
2847 // it at the pre-edit version via buffer_read forwarding.
2848 let child_stale = cx.read(|cx| {
2849 child_log
2850 .read(cx)
2851 .stale_buffers(cx)
2852 .cloned()
2853 .collect::<Vec<_>>()
2854 });
2855 let parent_stale = cx.read(|cx| {
2856 parent_log
2857 .read(cx)
2858 .stale_buffers(cx)
2859 .cloned()
2860 .collect::<Vec<_>>()
2861 });
2862 assert_eq!(child_stale, vec![buffer.clone()]);
2863 assert_eq!(parent_stale, vec![buffer]);
2864 }
2865
    // An agent edit reported on a child log must appear as an unreviewed hunk
    // on both the child and its linked parent log.
    #[gpui::test]
    async fn test_linked_action_log_buffer_edited(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Read + edit + report, all through the child log only.
        cx.update(|cx| {
            child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 0)..Point::new(1, 3), "DEF")], None, cx)
                    .unwrap();
            });
            child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();

        let expected_hunks = vec![(
            buffer,
            vec![HunkStatus {
                range: Point::new(1, 0)..Point::new(2, 0),
                diff_status: DiffHunkStatusKind::Modified,
                old_text: "def\n".into(),
            }],
        )];
        assert_eq!(
            unreviewed_hunks(&child_log, cx),
            expected_hunks,
            "child should track the agent edit"
        );
        assert_eq!(
            unreviewed_hunks(&parent_log, cx),
            expected_hunks,
            "parent should also track the agent edit via linked log forwarding"
        );
    }
2916
    // A file creation reported on a child log must appear as an "Added" hunk
    // on both the child and its linked parent log.
    #[gpui::test]
    async fn test_linked_action_log_buffer_created(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Creation, content, and edit report all go through the child only.
        cx.update(|cx| {
            child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("hello", cx));
            child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();

        let expected_hunks = vec![(
            buffer.clone(),
            vec![HunkStatus {
                range: Point::new(0, 0)..Point::new(0, 5),
                diff_status: DiffHunkStatusKind::Added,
                old_text: "".into(),
            }],
        )];
        assert_eq!(
            unreviewed_hunks(&child_log, cx),
            expected_hunks,
            "child should track the created file"
        );
        assert_eq!(
            unreviewed_hunks(&parent_log, cx),
            expected_hunks,
            "parent should also track the created file via linked log forwarding"
        );
    }
2968
    // A deletion announced on a child log must appear as a "Deleted" hunk on
    // both the child and its linked parent log.
    #[gpui::test]
    async fn test_linked_action_log_will_delete_buffer(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "hello\n"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // Only the child is told about the deletion; the parent learns of it
        // through forwarding.
        cx.update(|cx| {
            child_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.delete_file(file_path, false, cx))
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();

        let expected_hunks = vec![(
            buffer.clone(),
            vec![HunkStatus {
                range: Point::new(0, 0)..Point::new(0, 0),
                diff_status: DiffHunkStatusKind::Deleted,
                old_text: "hello\n".into(),
            }],
        )];
        assert_eq!(
            unreviewed_hunks(&child_log, cx),
            expected_hunks,
            "child should track the deleted file"
        );
        assert_eq!(
            unreviewed_hunks(&parent_log, cx),
            expected_hunks,
            "parent should also track the deleted file via linked log forwarding"
        );
    }
3018
    /// Simulates the subagent scenario: two child logs linked to the same parent, each
    /// editing a different file. The parent accumulates all edits while each child
    /// only sees its own.
    #[gpui::test]
    async fn test_linked_action_log_independent_tracking(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file_a": "content of a",
                "file_b": "content of b",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log_1 =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
        let child_log_2 =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_a_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/file_a", cx)
            })
            .unwrap();
        let file_b_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/file_b", cx)
            })
            .unwrap();
        let buffer_a = project
            .update(cx, |project, cx| project.open_buffer(file_a_path, cx))
            .await
            .unwrap();
        let buffer_b = project
            .update(cx, |project, cx| project.open_buffer(file_b_path, cx))
            .await
            .unwrap();

        // Child 1 edits file_a; child 2 edits file_b. Neither sees the other's
        // file, but the shared parent should see both.
        cx.update(|cx| {
            child_log_1.update(cx, |log, cx| log.buffer_read(buffer_a.clone(), cx));
            buffer_a.update(cx, |buffer, cx| {
                buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap();
            });
            child_log_1.update(cx, |log, cx| log.buffer_edited(buffer_a.clone(), cx));

            child_log_2.update(cx, |log, cx| log.buffer_read(buffer_b.clone(), cx));
            buffer_b.update(cx, |buffer, cx| {
                buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap();
            });
            child_log_2.update(cx, |log, cx| log.buffer_edited(buffer_b.clone(), cx));
        });
        cx.run_until_parked();

        let child_1_changed: Vec<_> = cx.read(|cx| {
            child_log_1
                .read(cx)
                .changed_buffers(cx)
                .into_keys()
                .collect()
        });
        let child_2_changed: Vec<_> = cx.read(|cx| {
            child_log_2
                .read(cx)
                .changed_buffers(cx)
                .into_keys()
                .collect()
        });
        let parent_changed: Vec<_> = cx.read(|cx| {
            parent_log
                .read(cx)
                .changed_buffers(cx)
                .into_keys()
                .collect()
        });

        assert_eq!(
            child_1_changed,
            vec![buffer_a.clone()],
            "child 1 should only track file_a"
        );
        assert_eq!(
            child_2_changed,
            vec![buffer_b.clone()],
            "child 2 should only track file_b"
        );
        // Parent ordering is not asserted, only membership.
        assert_eq!(parent_changed.len(), 2, "parent should track both files");
        assert!(
            parent_changed.contains(&buffer_a) && parent_changed.contains(&buffer_b),
            "parent should contain both buffer_a and buffer_b"
        );
    }
3114
3115 #[gpui::test]
3116 async fn test_file_read_time_recorded_on_buffer_read(cx: &mut TestAppContext) {
3117 init_test(cx);
3118
3119 let fs = FakeFs::new(cx.executor());
3120 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3121 .await;
3122 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3123 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3124
3125 let file_path = project
3126 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3127 .unwrap();
3128 let buffer = project
3129 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3130 .await
3131 .unwrap();
3132
3133 let abs_path = PathBuf::from(path!("/dir/file"));
3134 assert!(
3135 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3136 "file_read_time should be None before buffer_read"
3137 );
3138
3139 cx.update(|cx| {
3140 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
3141 });
3142
3143 assert!(
3144 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3145 "file_read_time should be recorded after buffer_read"
3146 );
3147 }
3148
3149 #[gpui::test]
3150 async fn test_file_read_time_recorded_on_buffer_edited(cx: &mut TestAppContext) {
3151 init_test(cx);
3152
3153 let fs = FakeFs::new(cx.executor());
3154 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3155 .await;
3156 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3157 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3158
3159 let file_path = project
3160 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3161 .unwrap();
3162 let buffer = project
3163 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3164 .await
3165 .unwrap();
3166
3167 let abs_path = PathBuf::from(path!("/dir/file"));
3168 assert!(
3169 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3170 "file_read_time should be None before buffer_edited"
3171 );
3172
3173 cx.update(|cx| {
3174 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
3175 });
3176
3177 assert!(
3178 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3179 "file_read_time should be recorded after buffer_edited"
3180 );
3181 }
3182
3183 #[gpui::test]
3184 async fn test_file_read_time_recorded_on_buffer_created(cx: &mut TestAppContext) {
3185 init_test(cx);
3186
3187 let fs = FakeFs::new(cx.executor());
3188 fs.insert_tree(path!("/dir"), json!({"file": "existing content"}))
3189 .await;
3190 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3191 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3192
3193 let file_path = project
3194 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3195 .unwrap();
3196 let buffer = project
3197 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3198 .await
3199 .unwrap();
3200
3201 let abs_path = PathBuf::from(path!("/dir/file"));
3202 assert!(
3203 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3204 "file_read_time should be None before buffer_created"
3205 );
3206
3207 cx.update(|cx| {
3208 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
3209 });
3210
3211 assert!(
3212 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3213 "file_read_time should be recorded after buffer_created"
3214 );
3215 }
3216
3217 #[gpui::test]
3218 async fn test_file_read_time_removed_on_delete(cx: &mut TestAppContext) {
3219 init_test(cx);
3220
3221 let fs = FakeFs::new(cx.executor());
3222 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3223 .await;
3224 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3225 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3226
3227 let file_path = project
3228 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3229 .unwrap();
3230 let buffer = project
3231 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3232 .await
3233 .unwrap();
3234
3235 let abs_path = PathBuf::from(path!("/dir/file"));
3236
3237 cx.update(|cx| {
3238 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
3239 });
3240 assert!(
3241 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3242 "file_read_time should exist after buffer_read"
3243 );
3244
3245 cx.update(|cx| {
3246 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
3247 });
3248 assert!(
3249 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3250 "file_read_time should be removed after will_delete_buffer"
3251 );
3252 }
3253
3254 #[gpui::test]
3255 async fn test_file_read_time_not_forwarded_to_linked_action_log(cx: &mut TestAppContext) {
3256 init_test(cx);
3257
3258 let fs = FakeFs::new(cx.executor());
3259 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3260 .await;
3261 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3262 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
3263 let child_log =
3264 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
3265
3266 let file_path = project
3267 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3268 .unwrap();
3269 let buffer = project
3270 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3271 .await
3272 .unwrap();
3273
3274 let abs_path = PathBuf::from(path!("/dir/file"));
3275
3276 cx.update(|cx| {
3277 child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
3278 });
3279 assert!(
3280 child_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3281 "child should record file_read_time on buffer_read"
3282 );
3283 assert!(
3284 parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3285 "parent should NOT get file_read_time from child's buffer_read"
3286 );
3287
3288 cx.update(|cx| {
3289 child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
3290 });
3291 assert!(
3292 parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3293 "parent should NOT get file_read_time from child's buffer_edited"
3294 );
3295
3296 cx.update(|cx| {
3297 child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
3298 });
3299 assert!(
3300 parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3301 "parent should NOT get file_read_time from child's buffer_created"
3302 );
3303 }
3304
3305 #[derive(Debug, PartialEq)]
3306 struct HunkStatus {
3307 range: Range<Point>,
3308 diff_status: DiffHunkStatusKind,
3309 old_text: String,
3310 }
3311
3312 fn unreviewed_hunks(
3313 action_log: &Entity<ActionLog>,
3314 cx: &TestAppContext,
3315 ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
3316 cx.read(|cx| {
3317 action_log
3318 .read(cx)
3319 .changed_buffers(cx)
3320 .into_iter()
3321 .map(|(buffer, diff)| {
3322 let snapshot = buffer.read(cx).snapshot();
3323 (
3324 buffer,
3325 diff.read(cx)
3326 .snapshot(cx)
3327 .hunks(&snapshot)
3328 .map(|hunk| HunkStatus {
3329 diff_status: hunk.status().kind,
3330 range: hunk.range,
3331 old_text: diff
3332 .read(cx)
3333 .base_text(cx)
3334 .text_for_range(hunk.diff_base_byte_range)
3335 .collect(),
3336 })
3337 .collect(),
3338 )
3339 })
3340 .collect()
3341 })
3342 }
3343}