1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use clock;
4use collections::{BTreeMap, HashMap};
5use fs::MTime;
6use futures::{FutureExt, StreamExt, channel::mpsc};
7use gpui::{
8 App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
9};
10use language::{Anchor, Buffer, BufferEvent, Point, ToOffset, ToPoint};
11use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
12use std::{
13 cmp,
14 ops::Range,
15 path::{Path, PathBuf},
16 sync::Arc,
17};
18use text::{Edit, Patch, Rope};
19use util::{RangeExt, ResultExt as _};
20
/// Stores undo information for a single buffer's rejected edits
#[derive(Clone)]
pub struct PerBufferUndo {
    /// The buffer the rejected edits belonged to. Held weakly so pending
    /// undo state doesn't keep a closed buffer alive; undo skips it if the
    /// entity has been dropped.
    pub buffer: WeakEntity<Buffer>,
    /// Anchor ranges paired with the agent-authored text to re-insert there
    /// when the reject operation is undone.
    pub edits_to_restore: Vec<(Range<Anchor>, String)>,
    /// How the buffer was affected at reject time (modified vs. created),
    /// which determines what undo can restore.
    pub status: UndoBufferStatus,
}
28
/// Tracks the buffer status for undo purposes
#[derive(Clone, Debug)]
pub enum UndoBufferStatus {
    /// Buffer existed before the agent edited it; on reject, individual edits
    /// were reverted and can be re-applied on undo.
    Modified,
    /// Buffer was created by the agent.
    /// - `had_existing_content: true` - Agent overwrote an existing file. On reject, the
    ///   original content was restored. Undo is supported: we restore the agent's content.
    /// - `had_existing_content: false` - Agent created a new file that didn't exist before.
    ///   On reject, the file was deleted. Undo is NOT currently supported (would require
    ///   recreating the file). Future TODO.
    Created {
        had_existing_content: bool,
    },
}
43
/// Stores undo information for the most recent reject operation
#[derive(Clone)]
pub struct LastRejectUndo {
    /// Per-buffer undo information, one entry for each buffer whose rejected
    /// edits can be restored.
    pub buffers: Vec<PerBufferUndo>,
}
50
/// Tracks actions performed by tools in a thread
pub struct ActionLog {
    /// Buffers that we want to notify the model about when they change.
    tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
    /// The project this action log is associated with
    project: Entity<Project>,
    /// An action log to forward all public methods to
    /// Useful in cases like subagents, where we want to track individual diffs for this subagent,
    /// but also want to associate the reads/writes with a parent review experience
    linked_action_log: Option<Entity<ActionLog>>,
    /// Stores undo information for the most recent reject operation.
    /// Cleared at the start of each new reject, and taken by `undo_last_reject`.
    last_reject_undo: Option<LastRejectUndo>,
    /// Tracks the last time files were read by the agent, to detect external modifications.
    /// Keyed by absolute path; only populated for local files with an on-disk mtime.
    file_read_times: HashMap<PathBuf, MTime>,
}
66
67impl ActionLog {
68 /// Creates a new, empty action log associated with the given project.
69 pub fn new(project: Entity<Project>) -> Self {
70 Self {
71 tracked_buffers: BTreeMap::default(),
72 project,
73 linked_action_log: None,
74 last_reject_undo: None,
75 file_read_times: HashMap::default(),
76 }
77 }
78
79 pub fn with_linked_action_log(mut self, linked_action_log: Entity<ActionLog>) -> Self {
80 self.linked_action_log = Some(linked_action_log);
81 self
82 }
83
84 pub fn project(&self) -> &Entity<Project> {
85 &self.project
86 }
87
88 pub fn file_read_time(&self, path: &Path) -> Option<MTime> {
89 self.file_read_times.get(path).copied()
90 }
91
92 fn update_file_read_time(&mut self, buffer: &Entity<Buffer>, cx: &App) {
93 let buffer = buffer.read(cx);
94 if let Some(file) = buffer.file() {
95 if let Some(local_file) = file.as_local() {
96 if let Some(mtime) = file.disk_state().mtime() {
97 let abs_path = local_file.abs_path(cx);
98 self.file_read_times.insert(abs_path, mtime);
99 }
100 }
101 }
102 }
103
104 fn remove_file_read_time(&mut self, buffer: &Entity<Buffer>, cx: &App) {
105 let buffer = buffer.read(cx);
106 if let Some(file) = buffer.file() {
107 if let Some(local_file) = file.as_local() {
108 let abs_path = local_file.abs_path(cx);
109 self.file_read_times.remove(&abs_path);
110 }
111 }
112 }
113
    /// Begins (or refreshes) tracking of `buffer` and returns its tracking
    /// entry. `is_created` indicates the agent is creating the file rather
    /// than editing an existing one; created buffers diff against empty
    /// content so the entire file shows up as one unreviewed insertion.
    fn track_buffer_internal(
        &mut self,
        buffer: Entity<Buffer>,
        is_created: bool,
        cx: &mut Context<Self>,
    ) -> &mut TrackedBuffer {
        let status = if is_created {
            if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
                // Already tracked: preserve any previously captured file
                // content so a later reject can still restore it. Removing
                // the entry here also guarantees `or_insert_with` below
                // rebuilds the tracked state with this new status.
                match tracked.status {
                    TrackedBufferStatus::Created {
                        existing_file_content,
                    } => TrackedBufferStatus::Created {
                        existing_file_content,
                    },
                    TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
                        TrackedBufferStatus::Created {
                            existing_file_content: Some(tracked.diff_base),
                        }
                    }
                }
            } else if buffer
                .read(cx)
                .file()
                .is_some_and(|file| file.disk_state().exists())
            {
                // The agent is overwriting a file that exists on disk:
                // remember its current content so reject can restore it.
                TrackedBufferStatus::Created {
                    existing_file_content: Some(buffer.read(cx).as_rope().clone()),
                }
            } else {
                // Brand-new file; nothing to restore on reject.
                TrackedBufferStatus::Created {
                    existing_file_content: None,
                }
            }
        } else {
            TrackedBufferStatus::Modified
        };

        let tracked_buffer = self
            .tracked_buffers
            .entry(buffer.clone())
            .or_insert_with(|| {
                // Keep language servers registered for this buffer while we
                // track it.
                let open_lsp_handle = self.project.update(cx, |project, cx| {
                    project.register_buffer_with_language_servers(&buffer, cx)
                });

                let text_snapshot = buffer.read(cx).text_snapshot();
                let language = buffer.read(cx).language().cloned();
                let language_registry = buffer.read(cx).language_registry();
                let diff = cx.new(|cx| {
                    let mut diff = BufferDiff::new(&text_snapshot, cx);
                    diff.language_changed(language, language_registry, cx);
                    diff
                });
                let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
                let diff_base;
                let unreviewed_edits;
                if is_created {
                    // Empty base: one edit spanning every row of the buffer.
                    diff_base = Rope::default();
                    unreviewed_edits = Patch::new(vec![Edit {
                        old: 0..1,
                        new: 0..text_snapshot.max_point().row + 1,
                    }])
                } else {
                    // Existing content becomes the base; no unreviewed edits yet.
                    diff_base = buffer.read(cx).as_rope().clone();
                    unreviewed_edits = Patch::default();
                }
                TrackedBuffer {
                    buffer: buffer.clone(),
                    diff_base,
                    unreviewed_edits,
                    snapshot: text_snapshot,
                    status,
                    version: buffer.read(cx).version(),
                    diff,
                    diff_update: diff_update_tx,
                    _open_lsp_handle: open_lsp_handle,
                    // Background task that keeps the diff in sync; ends when
                    // the update channel is dropped with this entry.
                    _maintain_diff: cx.spawn({
                        let buffer = buffer.clone();
                        async move |this, cx| {
                            Self::maintain_diff(this, buffer, diff_update_rx, cx)
                                .await
                                .ok();
                        }
                    }),
                    _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
                }
            });
        tracked_buffer.version = buffer.read(cx).version();
        tracked_buffer
    }
204
205 fn handle_buffer_event(
206 &mut self,
207 buffer: Entity<Buffer>,
208 event: &BufferEvent,
209 cx: &mut Context<Self>,
210 ) {
211 match event {
212 BufferEvent::Edited { .. } => {
213 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
214 return;
215 };
216 let buffer_version = buffer.read(cx).version();
217 if !buffer_version.changed_since(&tracked_buffer.version) {
218 return;
219 }
220 self.handle_buffer_edited(buffer, cx);
221 }
222 BufferEvent::FileHandleChanged => {
223 self.handle_buffer_file_changed(buffer, cx);
224 }
225 _ => {}
226 };
227 }
228
229 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
230 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
231 return;
232 };
233 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
234 }
235
236 fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
237 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
238 return;
239 };
240
241 match tracked_buffer.status {
242 TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
243 if buffer
244 .read(cx)
245 .file()
246 .is_some_and(|file| file.disk_state().is_deleted())
247 {
248 // If the buffer had been edited by a tool, but it got
249 // deleted externally, we want to stop tracking it.
250 self.tracked_buffers.remove(&buffer);
251 }
252 cx.notify();
253 }
254 TrackedBufferStatus::Deleted => {
255 if buffer
256 .read(cx)
257 .file()
258 .is_some_and(|file| !file.disk_state().is_deleted())
259 {
260 // If the buffer had been deleted by a tool, but it got
261 // resurrected externally, we want to clear the edits we
262 // were tracking and reset the buffer's state.
263 self.tracked_buffers.remove(&buffer);
264 self.track_buffer_internal(buffer, false, cx);
265 }
266 cx.notify();
267 }
268 }
269 }
270
    /// Long-running task that keeps a tracked buffer's diff up to date.
    ///
    /// Listens on two sources, biased toward buffer updates:
    /// - snapshots arriving on `buffer_updates` whenever the buffer changes;
    /// - git HEAD changes (observed via the uncommitted diff), so edits that
    ///   get committed are automatically treated as reviewed.
    ///
    /// Terminates when the update channel closes, i.e. when the
    /// corresponding `TrackedBuffer` entry is dropped.
    async fn maintain_diff(
        this: WeakEntity<Self>,
        buffer: Entity<Buffer>,
        mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
        let git_diff = this
            .update(cx, |this, cx| {
                this.project.update(cx, |project, cx| {
                    project.open_uncommitted_diff(buffer.clone(), cx)
                })
            })?
            .await
            .ok();
        let buffer_repo = git_store.read_with(cx, |git_store, cx| {
            git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        });

        // Signal `git_diff_updates_rx` only when the repository's HEAD commit
        // actually changes, not on every diff recalculation.
        let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
        let _repo_subscription =
            if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
                cx.update(|cx| {
                    let mut old_head = buffer_repo.read(cx).head_commit.clone();
                    Some(cx.subscribe(git_diff, move |_, event, cx| {
                        if let buffer_diff::BufferDiffEvent::DiffChanged { .. } = event {
                            let new_head = buffer_repo.read(cx).head_commit.clone();
                            if new_head != old_head {
                                old_head = new_head;
                                git_diff_updates_tx.send(()).ok();
                            }
                        }
                    }))
                })
            } else {
                None
            };

        loop {
            futures::select_biased! {
                buffer_update = buffer_updates.next() => {
                    if let Some((author, buffer_snapshot)) = buffer_update {
                        Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
                    } else {
                        // Channel closed: the buffer is no longer tracked.
                        break;
                    }
                }
                _ = git_diff_updates_rx.changed().fuse() => {
                    if let Some(git_diff) = git_diff.as_ref() {
                        Self::keep_committed_edits(&this, &buffer, git_diff, cx).await?;
                    }
                }
            }
        }

        Ok(())
    }
328
    /// Incorporates a new buffer snapshot into the tracked diff.
    ///
    /// User-authored changes are rebased into the diff base (where they
    /// don't conflict with unreviewed agent edits) so they don't appear as
    /// unreviewed; agent-authored changes leave the base untouched and thus
    /// remain part of the unreviewed diff.
    async fn track_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        author: ChangeAuthor,
        buffer_snapshot: text::BufferSnapshot,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let rebase = this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;

            // Diffing can be expensive for large buffers, so rebase on a
            // background thread using cloned state.
            let rebase = cx.background_spawn({
                let mut base_text = tracked_buffer.diff_base.clone();
                let old_snapshot = tracked_buffer.snapshot.clone();
                let new_snapshot = buffer_snapshot.clone();
                let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let edits = diff_snapshots(&old_snapshot, &new_snapshot);
                async move {
                    if let ChangeAuthor::User = author {
                        apply_non_conflicting_edits(
                            &unreviewed_edits,
                            edits,
                            &mut base_text,
                            new_snapshot.as_rope(),
                        );
                    }

                    (Arc::from(base_text.to_string().as_str()), base_text)
                }
            });

            anyhow::Ok(rebase)
        })??;
        let (new_base_text, new_diff_base) = rebase.await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
376
    /// Marks unreviewed edits as reviewed when they appear to have been
    /// committed to git.
    ///
    /// Diffs the agent's base text against git's base text; any unreviewed
    /// edit whose old row range and new content exactly match a committed
    /// edit is folded into the agent diff base (tracking a running row
    /// delta), removing it from the review queue.
    async fn keep_committed_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        git_diff: &Entity<BufferDiff>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let buffer_snapshot = this.read_with(cx, |this, _cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok(tracked_buffer.snapshot.clone())
        })??;
        let (new_base_text, new_diff_base) = this
            .read_with(cx, |this, cx| {
                let tracked_buffer = this
                    .tracked_buffers
                    .get(buffer)
                    .context("buffer not tracked")?;
                let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let agent_diff_base = tracked_buffer.diff_base.clone();
                let git_diff_base = git_diff.read(cx).base_text(cx).as_rope().clone();
                let buffer_text = tracked_buffer.snapshot.as_rope().clone();
                anyhow::Ok(cx.background_spawn(async move {
                    let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
                    // Row-level edits that transform the agent's base into
                    // git's base, i.e. what has been committed.
                    let committed_edits = language::line_diff(
                        &agent_diff_base.to_string(),
                        &git_diff_base.to_string(),
                    )
                    .into_iter()
                    .map(|(old, new)| Edit { old, new });

                    let mut new_agent_diff_base = agent_diff_base.clone();
                    // Tracks how replacements so far have shifted rows in
                    // `new_agent_diff_base` relative to the original base.
                    let mut row_delta = 0i32;
                    for committed in committed_edits {
                        while let Some(unreviewed) = old_unreviewed_edits.peek() {
                            // If the committed edit matches the unreviewed
                            // edit, assume the user wants to keep it.
                            if committed.old == unreviewed.old {
                                let unreviewed_new =
                                    buffer_text.slice_rows(unreviewed.new.clone()).to_string();
                                let committed_new =
                                    git_diff_base.slice_rows(committed.new.clone()).to_string();
                                if unreviewed_new == committed_new {
                                    let old_byte_start =
                                        new_agent_diff_base.point_to_offset(Point::new(
                                            (unreviewed.old.start as i32 + row_delta) as u32,
                                            0,
                                        ));
                                    let old_byte_end =
                                        new_agent_diff_base.point_to_offset(cmp::min(
                                            Point::new(
                                                (unreviewed.old.end as i32 + row_delta) as u32,
                                                0,
                                            ),
                                            new_agent_diff_base.max_point(),
                                        ));
                                    new_agent_diff_base
                                        .replace(old_byte_start..old_byte_end, &unreviewed_new);
                                    row_delta +=
                                        unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
                                }
                            } else if unreviewed.old.start >= committed.old.end {
                                // Unreviewed edit lies beyond this committed
                                // edit; move on to the next committed edit.
                                break;
                            }

                            old_unreviewed_edits.next().unwrap();
                        }
                    }

                    (
                        Arc::from(new_agent_diff_base.to_string().as_str()),
                        new_agent_diff_base,
                    )
                }))
            })??
            .await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
465
    /// Recomputes the `BufferDiff` against `new_base_text`, rebuilds the
    /// row-based `unreviewed_edits` patch from the resulting hunks, and then
    /// stores the new base, snapshot, and edits on the tracked buffer.
    async fn update_diff(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        buffer_snapshot: text::BufferSnapshot,
        new_base_text: Arc<str>,
        new_diff_base: Rope,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let (diff, language) = this.read_with(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok((
                tracked_buffer.diff.clone(),
                buffer.read(cx).language().cloned(),
            ))
        })??;
        let update = diff
            .update(cx, |diff, cx| {
                diff.update_diff(
                    buffer_snapshot.clone(),
                    Some(new_base_text),
                    Some(true),
                    language,
                    cx,
                )
            })
            .await;
        diff.update(cx, |diff, cx| {
            diff.set_snapshot(update.clone(), &buffer_snapshot, cx)
        })
        .await;
        let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx));

        // Derive row-level unreviewed edits from the diff's hunks on a
        // background thread.
        let unreviewed_edits = cx
            .background_spawn({
                let buffer_snapshot = buffer_snapshot.clone();
                let new_diff_base = new_diff_base.clone();
                async move {
                    let mut unreviewed_edits = Patch::default();
                    for hunk in diff_snapshot.hunks_intersecting_range(
                        Anchor::min_for_buffer(buffer_snapshot.remote_id())
                            ..Anchor::max_for_buffer(buffer_snapshot.remote_id()),
                        &buffer_snapshot,
                    ) {
                        let old_range = new_diff_base
                            .offset_to_point(hunk.diff_base_byte_range.start)
                            ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
                        let new_range = hunk.range.start..hunk.range.end;
                        unreviewed_edits.push(point_to_row_edit(
                            Edit {
                                old: old_range,
                                new: new_range,
                            },
                            &new_diff_base,
                            buffer_snapshot.as_rope(),
                        ));
                    }
                    unreviewed_edits
                }
            })
            .await;
        // Commit the new state; fails only if the buffer stopped being
        // tracked while we were computing.
        this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;
            tracked_buffer.diff_base = new_diff_base;
            tracked_buffer.snapshot = buffer_snapshot;
            tracked_buffer.unreviewed_edits = unreviewed_edits;
            cx.notify();
            anyhow::Ok(())
        })?
    }
541
542 /// Track a buffer as read by agent, so we can notify the model about user edits.
543 pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
544 self.buffer_read_impl(buffer, true, cx);
545 }
546
547 fn buffer_read_impl(
548 &mut self,
549 buffer: Entity<Buffer>,
550 record_file_read_time: bool,
551 cx: &mut Context<Self>,
552 ) {
553 if let Some(linked_action_log) = &self.linked_action_log {
554 // We don't want to share read times since the other agent hasn't read it necessarily
555 linked_action_log.update(cx, |log, cx| {
556 log.buffer_read_impl(buffer.clone(), false, cx);
557 });
558 }
559 if record_file_read_time {
560 self.update_file_read_time(&buffer, cx);
561 }
562 self.track_buffer_internal(buffer, false, cx);
563 }
564
565 /// Mark a buffer as created by agent, so we can refresh it in the context
566 pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
567 self.buffer_created_impl(buffer, true, cx);
568 }
569
570 fn buffer_created_impl(
571 &mut self,
572 buffer: Entity<Buffer>,
573 record_file_read_time: bool,
574 cx: &mut Context<Self>,
575 ) {
576 if let Some(linked_action_log) = &self.linked_action_log {
577 // We don't want to share read times since the other agent hasn't read it necessarily
578 linked_action_log.update(cx, |log, cx| {
579 log.buffer_created_impl(buffer.clone(), false, cx);
580 });
581 }
582 if record_file_read_time {
583 self.update_file_read_time(&buffer, cx);
584 }
585 self.track_buffer_internal(buffer, true, cx);
586 }
587
588 /// Mark a buffer as edited by agent, so we can refresh it in the context
589 pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
590 self.buffer_edited_impl(buffer, true, cx);
591 }
592
593 fn buffer_edited_impl(
594 &mut self,
595 buffer: Entity<Buffer>,
596 record_file_read_time: bool,
597 cx: &mut Context<Self>,
598 ) {
599 if let Some(linked_action_log) = &self.linked_action_log {
600 // We don't want to share read times since the other agent hasn't read it necessarily
601 linked_action_log.update(cx, |log, cx| {
602 log.buffer_edited_impl(buffer.clone(), false, cx);
603 });
604 }
605 if record_file_read_time {
606 self.update_file_read_time(&buffer, cx);
607 }
608 let new_version = buffer.read(cx).version();
609 let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
610 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
611 tracked_buffer.status = TrackedBufferStatus::Modified;
612 }
613
614 tracked_buffer.version = new_version;
615 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
616 }
617
    /// Records that the agent is about to delete `buffer`.
    ///
    /// A buffer the agent itself created is simply forgotten (the creation
    /// and deletion cancel out). A modified buffer is marked deleted and,
    /// when not linked to a parent log, emptied so the whole file appears
    /// removed in the diff.
    pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        // Ok to propagate file read time removal to linked action log
        self.remove_file_read_time(&buffer, cx);
        let has_linked_action_log = self.linked_action_log.is_some();
        let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
        match tracked_buffer.status {
            TrackedBufferStatus::Created { .. } => {
                // Agent-created then agent-deleted: nothing left to review.
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            TrackedBufferStatus::Modified => {
                tracked_buffer.status = TrackedBufferStatus::Deleted;
                if !has_linked_action_log {
                    // Clear the buffer so the entire file reads as removed.
                    buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
                    tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
                }
            }

            TrackedBufferStatus::Deleted => {}
        }

        if let Some(linked_action_log) = &mut self.linked_action_log {
            linked_action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        }

        // When linked, the parent log does the buffer clearing above; we
        // still refresh our own diff if we're still tracking the buffer.
        if has_linked_action_log && let Some(tracked_buffer) = self.tracked_buffers.get(&buffer) {
            tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
        }

        cx.notify();
    }
649
    /// Accepts ("keeps") every unreviewed edit that intersects
    /// `buffer_range`, folding it into the diff base so it no longer appears
    /// for review. Reports accepted-edit metrics when `telemetry` is given.
    pub fn keep_edits_in_range(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_range: Range<impl language::ToPoint>,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
        match tracked_buffer.status {
            TrackedBufferStatus::Deleted => {
                // Keeping a deletion means accepting it wholesale: drop the
                // tracking entry entirely.
                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            _ => {
                let buffer = buffer.read(cx);
                let buffer_range =
                    buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
                // Running row shift caused by edits already folded into the
                // diff base during this retain pass.
                let mut delta = 0i32;
                tracked_buffer.unreviewed_edits.retain_mut(|edit| {
                    edit.old.start = (edit.old.start as i32 + delta) as u32;
                    edit.old.end = (edit.old.end as i32 + delta) as u32;

                    if buffer_range.end.row < edit.new.start
                        || buffer_range.start.row > edit.new.end
                    {
                        // Outside the requested range: keep as unreviewed.
                        true
                    } else {
                        // Intersects the range: splice the buffer's current
                        // text for this edit into the diff base and drop it
                        // from the unreviewed set.
                        let old_range = tracked_buffer
                            .diff_base
                            .point_to_offset(Point::new(edit.old.start, 0))
                            ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                Point::new(edit.old.end, 0),
                                tracked_buffer.diff_base.max_point(),
                            ));
                        let new_range = tracked_buffer
                            .snapshot
                            .point_to_offset(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.point_to_offset(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        tracked_buffer.diff_base.replace(
                            old_range,
                            &tracked_buffer
                                .snapshot
                                .text_for_range(new_range)
                                .collect::<String>(),
                        );
                        delta += edit.new_len() as i32 - edit.old_len() as i32;
                        metrics.add_edit(edit);
                        false
                    }
                });
                // Once every edit of a created buffer is reviewed, treat it
                // like an ordinary modified buffer from here on.
                if tracked_buffer.unreviewed_edits.is_empty()
                    && let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status
                {
                    tracked_buffer.status = TrackedBufferStatus::Modified;
                }
                tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
            }
        }
        if let Some(telemetry) = telemetry {
            telemetry_report_accepted_edits(&telemetry, metrics);
        }
    }
720
    /// Rejects unreviewed edits intersecting `buffer_ranges`, restoring the
    /// pre-edit content. Returns a task that saves/deletes the affected file
    /// plus, when supported, per-buffer undo information that can re-apply
    /// the rejected agent edits.
    pub fn reject_edits_in_ranges(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_ranges: Vec<Range<impl language::ToPoint>>,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) -> (Task<Result<()>>, Option<PerBufferUndo>) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return (Task::ready(Ok(())), None);
        };

        let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
        let mut undo_info: Option<PerBufferUndo> = None;
        let task = match &tracked_buffer.status {
            TrackedBufferStatus::Created {
                existing_file_content,
            } => {
                let task = if let Some(existing_file_content) = existing_file_content {
                    // Capture the agent's content before restoring existing file content
                    let agent_content = buffer.read(cx).text();
                    let buffer_id = buffer.read(cx).remote_id();

                    // Swap the whole buffer back to the pre-existing file
                    // content in one transaction.
                    buffer.update(cx, |buffer, cx| {
                        buffer.start_transaction();
                        buffer.set_text("", cx);
                        for chunk in existing_file_content.chunks() {
                            buffer.append(chunk, cx);
                        }
                        buffer.end_transaction(cx);
                    });

                    // Undo re-applies the agent's full content over the
                    // entire buffer.
                    undo_info = Some(PerBufferUndo {
                        buffer: buffer.downgrade(),
                        edits_to_restore: vec![(
                            Anchor::min_for_buffer(buffer_id)..Anchor::max_for_buffer(buffer_id),
                            agent_content,
                        )],
                        status: UndoBufferStatus::Created {
                            had_existing_content: true,
                        },
                    });

                    self.project
                        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
                } else {
                    // For a file created by AI with no pre-existing content,
                    // only delete the file if we're certain it contains only AI content
                    // with no edits from the user.

                    let initial_version = tracked_buffer.version.clone();
                    let current_version = buffer.read(cx).version();

                    let current_content = buffer.read(cx).text();
                    let tracked_content = tracked_buffer.snapshot.text();

                    let is_ai_only_content =
                        initial_version == current_version && current_content == tracked_content;

                    if is_ai_only_content {
                        buffer
                            .read(cx)
                            .entry_id(cx)
                            .and_then(|entry_id| {
                                self.project.update(cx, |project, cx| {
                                    project.delete_entry(entry_id, false, cx)
                                })
                            })
                            .unwrap_or(Task::ready(Ok(())))
                    } else {
                        // Not sure how to disentangle edits made by the user
                        // from edits made by the AI at this point.
                        // For now, preserve both to avoid data loss.
                        //
                        // TODO: Better solution (disable "Reject" after user makes some
                        // edit or find a way to differentiate between AI and user edits)
                        Task::ready(Ok(()))
                    }
                };

                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                cx.notify();
                task
            }
            TrackedBufferStatus::Deleted => {
                // Rejecting a deletion restores the file from the diff base.
                buffer.update(cx, |buffer, cx| {
                    buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
                });
                let save = self
                    .project
                    .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));

                // Clear all tracked edits for this buffer and start over as if we just read it.
                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                self.buffer_read(buffer.clone(), cx);
                cx.notify();
                save
            }
            TrackedBufferStatus::Modified => {
                let edits_to_restore = buffer.update(cx, |buffer, cx| {
                    let mut buffer_row_ranges = buffer_ranges
                        .into_iter()
                        .map(|range| {
                            range.start.to_point(buffer).row..range.end.to_point(buffer).row
                        })
                        .peekable();

                    let mut edits_to_revert = Vec::new();
                    let mut edits_for_undo = Vec::new();
                    for edit in tracked_buffer.unreviewed_edits.edits() {
                        let new_range = tracked_buffer
                            .snapshot
                            .anchor_before(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.anchor_after(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        let new_row_range = new_range.start.to_point(buffer).row
                            ..new_range.end.to_point(buffer).row;

                        // Advance through the requested ranges to decide
                        // whether this edit intersects any of them.
                        let mut revert = false;
                        while let Some(buffer_row_range) = buffer_row_ranges.peek() {
                            if buffer_row_range.end < new_row_range.start {
                                buffer_row_ranges.next();
                            } else if buffer_row_range.start > new_row_range.end {
                                break;
                            } else {
                                revert = true;
                                break;
                            }
                        }

                        if revert {
                            metrics.add_edit(edit);
                            let old_range = tracked_buffer
                                .diff_base
                                .point_to_offset(Point::new(edit.old.start, 0))
                                ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                    Point::new(edit.old.end, 0),
                                    tracked_buffer.diff_base.max_point(),
                                ));
                            let old_text = tracked_buffer
                                .diff_base
                                .chunks_in_range(old_range)
                                .collect::<String>();

                            // Capture the agent's text before we revert it (for undo)
                            let new_range_offset =
                                new_range.start.to_offset(buffer)..new_range.end.to_offset(buffer);
                            let agent_text =
                                buffer.text_for_range(new_range_offset).collect::<String>();
                            edits_for_undo.push((new_range.clone(), agent_text));

                            edits_to_revert.push((new_range, old_text));
                        }
                    }

                    buffer.edit(edits_to_revert, None, cx);
                    edits_for_undo
                });

                if !edits_to_restore.is_empty() {
                    undo_info = Some(PerBufferUndo {
                        buffer: buffer.downgrade(),
                        edits_to_restore,
                        status: UndoBufferStatus::Modified,
                    });
                }

                self.project
                    .update(cx, |project, cx| project.save_buffer(buffer, cx))
            }
        };
        if let Some(telemetry) = telemetry {
            telemetry_report_rejected_edits(&telemetry, metrics);
        }
        (task, undo_info)
    }
900
    /// Accepts every unreviewed edit in every tracked buffer: deleted
    /// buffers are dropped from tracking, and for all others the diff base
    /// is reset to the current content so nothing remains to review.
    pub fn keep_all_edits(
        &mut self,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) {
        self.tracked_buffers.retain(|buffer, tracked_buffer| {
            let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
            metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
            if let Some(telemetry) = telemetry.as_ref() {
                telemetry_report_accepted_edits(telemetry, metrics);
            }
            match tracked_buffer.status {
                // Accepting a deletion: stop tracking the buffer entirely.
                TrackedBufferStatus::Deleted => false,
                _ => {
                    // A fully-accepted created buffer behaves like a
                    // modified one from here on.
                    if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
                        tracked_buffer.status = TrackedBufferStatus::Modified;
                    }
                    tracked_buffer.unreviewed_edits.clear();
                    tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
                    tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
                    true
                }
            }
        });

        cx.notify();
    }
928
    /// Rejects every unreviewed edit in every changed buffer, recording undo
    /// information so the operation can be reversed via `undo_last_reject`.
    /// Returns a task that completes once all affected buffers are saved.
    pub fn reject_all_edits(
        &mut self,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) -> Task<()> {
        // Clear any previous undo state before starting a new reject operation
        self.last_reject_undo = None;

        let mut undo_buffers = Vec::new();
        let mut futures = Vec::new();

        for buffer in self.changed_buffers(cx).into_keys() {
            // Reject over the whole buffer by using a min..max anchor range.
            let buffer_ranges = vec![Anchor::min_max_range_for_buffer(
                buffer.read(cx).remote_id(),
            )];
            let (reject_task, undo_info) =
                self.reject_edits_in_ranges(buffer, buffer_ranges, telemetry.clone(), cx);

            if let Some(undo) = undo_info {
                undo_buffers.push(undo);
            }

            // Saving is best-effort; log failures rather than aborting the
            // remaining buffers.
            futures.push(async move {
                reject_task.await.log_err();
            });
        }

        // Store the undo information if we have any
        if !undo_buffers.is_empty() {
            self.last_reject_undo = Some(LastRejectUndo {
                buffers: undo_buffers,
            });
        }

        let task = futures::future::join_all(futures);
        cx.background_spawn(async move {
            task.await;
        })
    }
968
969 pub fn has_pending_undo(&self) -> bool {
970 self.last_reject_undo.is_some()
971 }
972
973 pub fn set_last_reject_undo(&mut self, undo: LastRejectUndo) {
974 self.last_reject_undo = Some(undo);
975 }
976
    /// Undoes the most recent reject operation, restoring the rejected agent changes.
    /// This is a best-effort operation: if buffers have been closed or modified externally,
    /// those buffers will be skipped.
    pub fn undo_last_reject(&mut self, cx: &mut Context<Self>) -> Task<()> {
        // Taking the state means an undo can only be performed once.
        let Some(undo) = self.last_reject_undo.take() else {
            return Task::ready(());
        };

        let mut save_tasks = Vec::with_capacity(undo.buffers.len());

        for per_buffer_undo in undo.buffers {
            // Skip if the buffer entity has been deallocated
            let Some(buffer) = per_buffer_undo.buffer.upgrade() else {
                continue;
            };

            buffer.update(cx, |buffer, cx| {
                let mut valid_edits = Vec::new();

                // Only re-apply edits whose anchors still belong to this
                // buffer; stale anchors from a replaced buffer are dropped.
                for (anchor_range, text_to_restore) in per_buffer_undo.edits_to_restore {
                    if anchor_range.start.buffer_id == buffer.remote_id()
                        && anchor_range.end.buffer_id == buffer.remote_id()
                    {
                        valid_edits.push((anchor_range, text_to_restore));
                    }
                }

                if !valid_edits.is_empty() {
                    buffer.edit(valid_edits, None, cx);
                }
            });

            // Re-track the buffer so the restored agent edits show up for
            // review again.
            if !self.tracked_buffers.contains_key(&buffer) {
                self.buffer_edited(buffer.clone(), cx);
            }

            let save = self
                .project
                .update(cx, |project, cx| project.save_buffer(buffer, cx));
            save_tasks.push(save);
        }

        cx.notify();

        cx.background_spawn(async move {
            futures::future::join_all(save_tasks).await;
        })
    }
1025
1026 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
1027 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
1028 self.tracked_buffers
1029 .iter()
1030 .filter(|(_, tracked)| tracked.has_edits(cx))
1031 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
1032 .collect()
1033 }
1034
1035 /// Returns the total number of lines added and removed across all unreviewed buffers.
1036 pub fn diff_stats(&self, cx: &App) -> DiffStats {
1037 DiffStats::all_files(&self.changed_buffers(cx), cx)
1038 }
1039
1040 /// Iterate over buffers changed since last read or edited by the model
1041 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
1042 self.tracked_buffers
1043 .iter()
1044 .filter(|(buffer, tracked)| {
1045 let buffer = buffer.read(cx);
1046
1047 tracked.version != buffer.version
1048 && buffer
1049 .file()
1050 .is_some_and(|file| !file.disk_state().is_deleted())
1051 })
1052 .map(|(buffer, _)| buffer)
1053 }
1054}
1055
/// Line-based summary of a diff: how many rows were added and how many removed.
#[derive(Default, Debug, Clone, Copy)]
pub struct DiffStats {
    /// Number of rows present in the new text but not in the diff base.
    pub lines_added: u32,
    /// Number of rows present in the diff base but not in the new text.
    pub lines_removed: u32,
}
1061
1062impl DiffStats {
1063 pub fn single_file(buffer: &Buffer, diff: &BufferDiff, cx: &App) -> Self {
1064 let mut stats = DiffStats::default();
1065 let diff_snapshot = diff.snapshot(cx);
1066 let buffer_snapshot = buffer.snapshot();
1067 let base_text = diff_snapshot.base_text();
1068
1069 for hunk in diff_snapshot.hunks(&buffer_snapshot) {
1070 let added_rows = hunk.range.end.row.saturating_sub(hunk.range.start.row);
1071 stats.lines_added += added_rows;
1072
1073 let base_start = hunk.diff_base_byte_range.start.to_point(base_text).row;
1074 let base_end = hunk.diff_base_byte_range.end.to_point(base_text).row;
1075 let removed_rows = base_end.saturating_sub(base_start);
1076 stats.lines_removed += removed_rows;
1077 }
1078
1079 stats
1080 }
1081
1082 pub fn all_files(
1083 changed_buffers: &BTreeMap<Entity<Buffer>, Entity<BufferDiff>>,
1084 cx: &App,
1085 ) -> Self {
1086 let mut total = DiffStats::default();
1087 for (buffer, diff) in changed_buffers {
1088 let stats = DiffStats::single_file(buffer.read(cx), diff.read(cx), cx);
1089 total.lines_added += stats.lines_added;
1090 total.lines_removed += stats.lines_removed;
1091 }
1092 total
1093 }
1094}
1095
/// Identifiers attached to the telemetry events emitted when agent edits are
/// accepted or rejected.
#[derive(Clone)]
pub struct ActionLogTelemetry {
    /// Telemetry identifier of the agent that produced the edits.
    pub agent_telemetry_id: SharedString,
    /// Identifier of the session the edits belong to.
    pub session_id: Arc<str>,
}
1101
/// Aggregated line counts (plus the buffer's language) reported with a telemetry event.
struct ActionLogMetrics {
    /// Total rows removed across the recorded edits.
    lines_removed: u32,
    /// Total rows added across the recorded edits.
    lines_added: u32,
    /// Language of the buffer the edits apply to, if it has one.
    language: Option<SharedString>,
}
1107
1108impl ActionLogMetrics {
1109 fn for_buffer(buffer: &Buffer) -> Self {
1110 Self {
1111 language: buffer.language().map(|l| l.name().0),
1112 lines_removed: 0,
1113 lines_added: 0,
1114 }
1115 }
1116
1117 fn add_edits(&mut self, edits: &[Edit<u32>]) {
1118 for edit in edits {
1119 self.add_edit(edit);
1120 }
1121 }
1122
1123 fn add_edit(&mut self, edit: &Edit<u32>) {
1124 self.lines_added += edit.new_len();
1125 self.lines_removed += edit.old_len();
1126 }
1127}
1128
/// Emits an "Agent Edits Accepted" telemetry event carrying the session/agent
/// identifiers and the aggregated line counts.
fn telemetry_report_accepted_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
    telemetry::event!(
        "Agent Edits Accepted",
        agent = telemetry.agent_telemetry_id,
        session = telemetry.session_id,
        language = metrics.language,
        lines_added = metrics.lines_added,
        lines_removed = metrics.lines_removed
    );
}
1139
/// Emits an "Agent Edits Rejected" telemetry event carrying the session/agent
/// identifiers and the aggregated line counts.
fn telemetry_report_rejected_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
    telemetry::event!(
        "Agent Edits Rejected",
        agent = telemetry.agent_telemetry_id,
        session = telemetry.session_id,
        language = metrics.language,
        lines_added = metrics.lines_added,
        lines_removed = metrics.lines_removed
    );
}
1150
/// Applies the row-based `edits` (which map `old_text` to `new_text`) onto `old_text`,
/// skipping any edit that conflicts with an edit already recorded in `patch`.
/// Returns `true` if at least one edit was applied.
///
/// NOTE(review): `patch` appears to hold previously-recorded (unreviewed) edits whose
/// `new` coordinates are expressed in the same space as the incoming edits' `old`
/// coordinates — confirm against the caller before relying on this.
fn apply_non_conflicting_edits(
    patch: &Patch<u32>,
    edits: Vec<Edit<u32>>,
    old_text: &mut Rope,
    new_text: &Rope,
) -> bool {
    let mut old_edits = patch.edits().iter().cloned().peekable();
    let mut new_edits = edits.into_iter().peekable();
    // Net row delta from edits we have applied to `old_text` so far.
    let mut applied_delta = 0i32;
    // Net row delta from `patch` edits we have skipped past (rebased over).
    let mut rebased_delta = 0i32;
    let mut has_made_changes = false;

    while let Some(mut new_edit) = new_edits.next() {
        let mut conflict = false;

        // Push all the old edits that are before this new edit or that intersect with it.
        while let Some(old_edit) = old_edits.peek() {
            if new_edit.old.end < old_edit.new.start
                || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
            {
                // Old edit is strictly after this new edit; stop scanning.
                break;
            } else if new_edit.old.start > old_edit.new.end
                || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
            {
                // Old edit is strictly before this new edit; consume it and record its delta.
                let old_edit = old_edits.next().unwrap();
                rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
            } else {
                // Ranges overlap: this new edit conflicts with a previously-recorded edit.
                conflict = true;
                if new_edits
                    .peek()
                    .is_some_and(|next_edit| next_edit.old.overlaps(&old_edit.new))
                {
                    // The following new edit also overlaps this old edit, so drop the
                    // current new edit and keep testing against the same old edit.
                    new_edit = new_edits.next().unwrap();
                } else {
                    let old_edit = old_edits.next().unwrap();
                    rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
                }
            }
        }

        if !conflict {
            // This edit doesn't intersect with any old edit, so we can apply it to the old text.
            new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
            new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
            // Convert the row ranges to byte ranges, clamping to the rope ends.
            let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
                ..old_text.point_to_offset(cmp::min(
                    Point::new(new_edit.old.end, 0),
                    old_text.max_point(),
                ));
            let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
                ..new_text.point_to_offset(cmp::min(
                    Point::new(new_edit.new.end, 0),
                    new_text.max_point(),
                ));

            old_text.replace(
                old_bytes,
                &new_text.chunks_in_range(new_bytes).collect::<String>(),
            );
            applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
            has_made_changes = true;
        }
    }
    has_made_changes
}
1216
1217fn diff_snapshots(
1218 old_snapshot: &text::BufferSnapshot,
1219 new_snapshot: &text::BufferSnapshot,
1220) -> Vec<Edit<u32>> {
1221 let mut edits = new_snapshot
1222 .edits_since::<Point>(&old_snapshot.version)
1223 .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
1224 .peekable();
1225 let mut row_edits = Vec::new();
1226 while let Some(mut edit) = edits.next() {
1227 while let Some(next_edit) = edits.peek() {
1228 if edit.old.end >= next_edit.old.start {
1229 edit.old.end = next_edit.old.end;
1230 edit.new.end = next_edit.new.end;
1231 edits.next();
1232 } else {
1233 break;
1234 }
1235 }
1236 row_edits.push(edit);
1237 }
1238 row_edits
1239}
1240
/// Widens a point-based edit into a whole-row edit (half-open row ranges).
fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
    if edit.old.start.column == old_text.line_len(edit.old.start.row)
        && new_text
            .chars_at(new_text.point_to_offset(edit.new.start))
            .next()
            == Some('\n')
        && edit.old.start != old_text.max_point()
    {
        // The edit begins at the end of a line and the inserted text starts with a
        // newline, so attribute the change to the rows that follow instead of
        // touching the row the edit technically starts on.
        Edit {
            old: edit.old.start.row + 1..edit.old.end.row + 1,
            new: edit.new.start.row + 1..edit.new.end.row + 1,
        }
    } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
        // The edit already spans whole rows; keep the row range as-is.
        Edit {
            old: edit.old.start.row..edit.old.end.row,
            new: edit.new.start.row..edit.new.end.row,
        }
    } else {
        // The edit touches part of a row, so widen the range to include the
        // partially-edited end rows.
        Edit {
            old: edit.old.start.row..edit.old.end.row + 1,
            new: edit.new.start.row..edit.new.end.row + 1,
        }
    }
}
1265
/// Who performed a change to a tracked buffer.
#[derive(Copy, Clone, Debug)]
enum ChangeAuthor {
    /// The change was made by the user.
    User,
    /// The change was made by the agent.
    Agent,
}
1271
/// Lifecycle state of a buffer tracked by the action log.
#[derive(Debug)]
enum TrackedBufferStatus {
    /// The agent created the file. If it overwrote an existing file, the prior
    /// content is retained so it can be restored on reject.
    Created { existing_file_content: Option<Rope> },
    /// The buffer's pre-existing file has been edited.
    Modified,
    /// The buffer's file has been deleted.
    Deleted,
}
1278
/// Per-buffer state kept by the action log to maintain a live diff of
/// unreviewed agent edits.
pub struct TrackedBuffer {
    /// The buffer being tracked.
    buffer: Entity<Buffer>,
    /// The text the diff is computed against.
    diff_base: Rope,
    /// Row-based edits made since the diff base that the user hasn't reviewed yet.
    unreviewed_edits: Patch<u32>,
    /// Whether the tracked file was created, modified, or deleted.
    status: TrackedBufferStatus,
    /// Buffer version as of the last time the model read or edited it
    /// (compared in `stale_buffers`).
    version: clock::Global,
    /// Live diff between `diff_base` and the buffer's current contents.
    diff: Entity<BufferDiff>,
    // Presumably the most recently processed text snapshot, used for incremental
    // diffing — TODO(review): confirm against the update loop.
    snapshot: text::BufferSnapshot,
    /// Channel used by `schedule_diff_update` to request an async diff recomputation.
    diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
    /// Handle that keeps the buffer registered with the LSP store while tracked.
    _open_lsp_handle: OpenLspBufferHandle,
    // Background task that presumably drains `diff_update` — TODO(review): confirm.
    _maintain_diff: Task<()>,
    /// Subscription to buffer events for this tracked buffer.
    _subscription: Subscription,
}
1292
1293impl TrackedBuffer {
1294 #[cfg(any(test, feature = "test-support"))]
1295 pub fn diff(&self) -> &Entity<BufferDiff> {
1296 &self.diff
1297 }
1298
1299 #[cfg(any(test, feature = "test-support"))]
1300 pub fn diff_base_len(&self) -> usize {
1301 self.diff_base.len()
1302 }
1303
1304 fn has_edits(&self, cx: &App) -> bool {
1305 self.diff
1306 .read(cx)
1307 .snapshot(cx)
1308 .hunks(self.buffer.read(cx))
1309 .next()
1310 .is_some()
1311 }
1312
1313 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
1314 self.diff_update
1315 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
1316 .ok();
1317 }
1318}
1319
/// A buffer with unreviewed changes, exposed alongside its diff.
pub struct ChangedBuffer {
    /// Diff between the buffer's base text and its current contents.
    pub diff: Entity<BufferDiff>,
}
1323
1324#[cfg(test)]
1325mod tests {
1326 use super::*;
1327 use buffer_diff::DiffHunkStatusKind;
1328 use gpui::TestAppContext;
1329 use language::Point;
1330 use project::{FakeFs, Fs, Project, RemoveOptions};
1331 use rand::prelude::*;
1332 use serde_json::json;
1333 use settings::SettingsStore;
1334 use std::env;
1335 use util::{RandomCharIter, path};
1336
    // Runs once per test binary (via `ctor`) to set up logging before any test executes.
    #[ctor::ctor]
    fn init_logger() {
        zlog::init_test();
    }
1341
    // Installs a default `SettingsStore` global so project/buffer code can read settings.
    fn init_test(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
        });
    }
1348
    // Verifies that agent edits surface as unreviewed hunks and that
    // `keep_edits_in_range` resolves only the hunks intersecting the given range.
    #[gpui::test(iterations = 10)]
    async fn test_keep_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndEf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(2, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(4, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Keeping a range that only covers the second hunk leaves the first unreviewed.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(2, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\n".into(),
                }],
            )]
        );

        // Keeping the full range clears all remaining hunks.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1426
    // Verifies that line deletions are tracked as Deleted hunks, that undoing a
    // deletion removes its hunk, and that keeping resolves the rest.
    #[gpui::test(iterations = 10)]
    async fn test_deletions(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
                    .unwrap();
                buffer.finalize_last_transaction();
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
                    .unwrap();
                buffer.finalize_last_transaction();
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nghi\njkl\npqr"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "mno\n".into(),
                    }
                ],
            )]
        );

        // Undo restores the second deletion, leaving only the first hunk.
        buffer.update(cx, |buffer, cx| buffer.undo(cx));
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nghi\njkl\nmno\npqr"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(1, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "def\n".into(),
                }],
            )]
        );

        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1511
    // Verifies that user edits — both outside and inside an agent hunk — don't
    // change the hunk's recorded old text, and that keeping a range touching the
    // hunk clears it.
    #[gpui::test(iterations = 10)]
    async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndeF\nGHI\njkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // User edits adjacent to the hunk leave the hunk's old text unchanged.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(
                [
                    (Point::new(0, 2)..Point::new(0, 2), "X"),
                    (Point::new(3, 0)..Point::new(3, 0), "Y"),
                ],
                None,
                cx,
            )
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // A user edit inside the hunk is absorbed into it rather than tracked separately.
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndZeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1608
    // Verifies that a file created by the agent shows a single Added hunk covering
    // the whole file (which grows with further edits), and that keeping clears it.
    #[gpui::test(iterations = 10)]
    async fn test_creating_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Further edits just extend the Added hunk.
        buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 6),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), 0..5, None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1667
    // Verifies that overwriting an existing file (via `buffer_created`) records an
    // Added hunk, and that rejecting it restores the file's original content.
    #[gpui::test(iterations = 10)]
    async fn test_overwriting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 19),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting brings back the pre-overwrite content.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx);
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }
1726
    // Verifies that when the agent rewrites a file it had previously edited, the
    // diff resets to an Added hunk, and rejecting restores the file's content from
    // before the agent touched it at all.
    #[gpui::test(iterations = 10)]
    async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 37),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "Lorem ipsum dolor".into(),
                }],
            )]
        );

        // The agent now overwrites the whole file.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 9),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting restores the original on-disk content, not the intermediate edit.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx);
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }
1807
    // Verifies that tool-driven deletions produce Deleted hunks, that external or
    // tool recreation of a deleted file resets its tracking, and that an external
    // deletion clears the remaining hunks.
    #[gpui::test(iterations = 10)]
    async fn test_deleting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file1": "lorem\n", "file2": "ipsum\n"}),
        )
        .await;

        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let file1_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();
        let file2_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
            .unwrap();

        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let buffer1 = project
            .update(cx, |project, cx| {
                project.open_buffer(file1_path.clone(), cx)
            })
            .await
            .unwrap();
        let buffer2 = project
            .update(cx, |project, cx| {
                project.open_buffer(file2_path.clone(), cx)
            })
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| {
                project.delete_file(file1_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        project
            .update(cx, |project, cx| {
                project.delete_file(file2_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![
                (
                    buffer1.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "lorem\n".into(),
                    }]
                ),
                (
                    buffer2.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "ipsum\n".into(),
                    }],
                )
            ]
        );

        // Simulate file1 being recreated externally.
        fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
            .await;

        // Simulate file2 being recreated by a tool.
        let buffer2 = project
            .update(cx, |project, cx| project.open_buffer(file2_path, cx))
            .await
            .unwrap();
        action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
        buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
        action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer2.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Simulate file2 being deleted externally.
        fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1917
    // Verifies that `reject_edits_in_ranges` reverts only the hunks intersecting
    // the given ranges, and that non-overlapping ranges are no-ops.
    #[gpui::test(iterations = 10)]
    async fn test_reject_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // If the rejected range doesn't overlap with any hunk, we ignore it.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Rejecting a range covering the first hunk reverts it and leaves the second.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(1, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(4, 0)..Point::new(4, 3),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "mno".into(),
                }],
            )]
        );

        // Rejecting the remaining hunk restores the original text entirely.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2058
    // Verifies that rejecting several ranges at once reverts all of them, and that
    // the buffer text is restored synchronously (before the returned task resolves).
    #[gpui::test(iterations = 10)]
    async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        action_log.update(cx, |log, cx| {
            let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
                ..buffer.read(cx).anchor_before(Point::new(1, 0));
            let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
                ..buffer.read(cx).anchor_before(Point::new(5, 3));

            let (task, _) =
                log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], None, cx);
            task.detach();
            // The text is already restored here, before the async task completes.
            assert_eq!(
                buffer.read_with(cx, |buffer, _| buffer.text()),
                "abc\ndef\nghi\njkl\nmno"
            );
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2135
    // Verifies that rejecting a tracked deletion recreates the file on disk with
    // its original content.
    #[gpui::test(iterations = 10)]
    async fn test_reject_deleted_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "content"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| {
                project.delete_file(file_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "content".into(),
                }]
            )]
        );

        // Rejecting the deletion restores both the buffer text and the on-disk file.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
        assert!(fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2194
2195 #[gpui::test(iterations = 10)]
2196 async fn test_reject_created_file(cx: &mut TestAppContext) {
2197 init_test(cx);
2198
2199 let fs = FakeFs::new(cx.executor());
2200 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2201 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2202 let file_path = project
2203 .read_with(cx, |project, cx| {
2204 project.find_project_path("dir/new_file", cx)
2205 })
2206 .unwrap();
2207 let buffer = project
2208 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2209 .await
2210 .unwrap();
2211 cx.update(|cx| {
2212 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2213 buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
2214 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2215 });
2216 project
2217 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2218 .await
2219 .unwrap();
2220 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2221 cx.run_until_parked();
2222 assert_eq!(
2223 unreviewed_hunks(&action_log, cx),
2224 vec![(
2225 buffer.clone(),
2226 vec![HunkStatus {
2227 range: Point::new(0, 0)..Point::new(0, 7),
2228 diff_status: DiffHunkStatusKind::Added,
2229 old_text: "".into(),
2230 }],
2231 )]
2232 );
2233
2234 action_log
2235 .update(cx, |log, cx| {
2236 let (task, _) = log.reject_edits_in_ranges(
2237 buffer.clone(),
2238 vec![Point::new(0, 0)..Point::new(0, 11)],
2239 None,
2240 cx,
2241 );
2242 task
2243 })
2244 .await
2245 .unwrap();
2246 cx.run_until_parked();
2247 assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
2248 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2249 }
2250
2251 #[gpui::test]
2252 async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
2253 init_test(cx);
2254
2255 let fs = FakeFs::new(cx.executor());
2256 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2257 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2258
2259 let file_path = project
2260 .read_with(cx, |project, cx| {
2261 project.find_project_path("dir/new_file", cx)
2262 })
2263 .unwrap();
2264 let buffer = project
2265 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2266 .await
2267 .unwrap();
2268
2269 // AI creates file with initial content
2270 cx.update(|cx| {
2271 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2272 buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
2273 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2274 });
2275
2276 project
2277 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2278 .await
2279 .unwrap();
2280
2281 cx.run_until_parked();
2282
2283 // User makes additional edits
2284 cx.update(|cx| {
2285 buffer.update(cx, |buffer, cx| {
2286 buffer.edit([(10..10, "\nuser added this line")], None, cx);
2287 });
2288 });
2289
2290 project
2291 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2292 .await
2293 .unwrap();
2294
2295 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2296
2297 // Reject all
2298 action_log
2299 .update(cx, |log, cx| {
2300 let (task, _) = log.reject_edits_in_ranges(
2301 buffer.clone(),
2302 vec![Point::new(0, 0)..Point::new(100, 0)],
2303 None,
2304 cx,
2305 );
2306 task
2307 })
2308 .await
2309 .unwrap();
2310 cx.run_until_parked();
2311
2312 // File should still contain all the content
2313 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2314
2315 let content = buffer.read_with(cx, |buffer, _| buffer.text());
2316 assert_eq!(content, "ai content\nuser added this line");
2317 }
2318
2319 #[gpui::test]
2320 async fn test_reject_after_accepting_hunk_on_created_file(cx: &mut TestAppContext) {
2321 init_test(cx);
2322
2323 let fs = FakeFs::new(cx.executor());
2324 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2325 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2326
2327 let file_path = project
2328 .read_with(cx, |project, cx| {
2329 project.find_project_path("dir/new_file", cx)
2330 })
2331 .unwrap();
2332 let buffer = project
2333 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
2334 .await
2335 .unwrap();
2336
2337 // AI creates file with initial content
2338 cx.update(|cx| {
2339 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2340 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
2341 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2342 });
2343 project
2344 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2345 .await
2346 .unwrap();
2347 cx.run_until_parked();
2348 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2349
2350 // User accepts the single hunk
2351 action_log.update(cx, |log, cx| {
2352 let buffer_range = Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id());
2353 log.keep_edits_in_range(buffer.clone(), buffer_range, None, cx)
2354 });
2355 cx.run_until_parked();
2356 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2357 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2358
2359 // AI modifies the file
2360 cx.update(|cx| {
2361 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
2362 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2363 });
2364 project
2365 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2366 .await
2367 .unwrap();
2368 cx.run_until_parked();
2369 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2370
2371 // User rejects the hunk
2372 action_log
2373 .update(cx, |log, cx| {
2374 let (task, _) = log.reject_edits_in_ranges(
2375 buffer.clone(),
2376 vec![Anchor::min_max_range_for_buffer(
2377 buffer.read(cx).remote_id(),
2378 )],
2379 None,
2380 cx,
2381 );
2382 task
2383 })
2384 .await
2385 .unwrap();
2386 cx.run_until_parked();
2387 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await,);
2388 assert_eq!(
2389 buffer.read_with(cx, |buffer, _| buffer.text()),
2390 "ai content v1"
2391 );
2392 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2393 }
2394
2395 #[gpui::test]
2396 async fn test_reject_edits_on_previously_accepted_created_file(cx: &mut TestAppContext) {
2397 init_test(cx);
2398
2399 let fs = FakeFs::new(cx.executor());
2400 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2401 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2402
2403 let file_path = project
2404 .read_with(cx, |project, cx| {
2405 project.find_project_path("dir/new_file", cx)
2406 })
2407 .unwrap();
2408 let buffer = project
2409 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
2410 .await
2411 .unwrap();
2412
2413 // AI creates file with initial content
2414 cx.update(|cx| {
2415 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2416 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
2417 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2418 });
2419 project
2420 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2421 .await
2422 .unwrap();
2423 cx.run_until_parked();
2424
2425 // User clicks "Accept All"
2426 action_log.update(cx, |log, cx| log.keep_all_edits(None, cx));
2427 cx.run_until_parked();
2428 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2429 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared
2430
2431 // AI modifies file again
2432 cx.update(|cx| {
2433 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
2434 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2435 });
2436 project
2437 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2438 .await
2439 .unwrap();
2440 cx.run_until_parked();
2441 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2442
2443 // User clicks "Reject All"
2444 action_log
2445 .update(cx, |log, cx| log.reject_all_edits(None, cx))
2446 .await;
2447 cx.run_until_parked();
2448 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2449 assert_eq!(
2450 buffer.read_with(cx, |buffer, _| buffer.text()),
2451 "ai content v1"
2452 );
2453 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2454 }
2455
    /// Randomized stress test: interleaves agent edits, user edits, keeps, and
    /// rejects, then verifies that replaying the tracked unreviewed edits over
    /// the diff base reproduces the buffer's current contents.
    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        init_test(cx);

        // Number of random operations per run; override with OPERATIONS=N.
        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

        for _ in 0..operations {
            match rng.random_range(0..100) {
                // 25%: keep (accept) edits within a random byte range.
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, None, cx)
                    });
                }
                // 25%: reject edits within a random byte range.
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            let (task, _) =
                                log.reject_edits_in_ranges(buffer.clone(), vec![range], None, cx);
                            task
                        })
                        .await
                        .unwrap();
                }
                // 50%: random buffer edit, attributed to the agent half the time
                // (user edits are applied but never reported to the log).
                _ => {
                    let is_agent_edit = rng.random_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            // Occasionally settle and validate mid-run.
            if rng.random_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

        /// Waits for background work to finish, then checks the invariant:
        /// applying every unreviewed edit to the diff base must reproduce the
        /// buffer's current text.
        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
                // Replay each unreviewed edit onto the diff base. Edit positions
                // are row-based (`edit.new.start` is used as a row number), and
                // the end row is clamped to the base's max point.
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }
2549
    /// Verifies that agent hunks are auto-kept when a git commit's HEAD text
    /// matches the agent's edit exactly, and remain unreviewed when the commit
    /// differs from (or ignores) the edit.
    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        // HEAD starts out identical to the working copy.
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Apply five distinct agent edits covering the start, middle, and end
        // of the file so different hunk kinds are produced.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        // All five edits are initially unreviewed.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        // Only the two exactly-matching hunks were auto-kept; three remain.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Make another commit that accepts the NEW line but with different content
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into())],
            "0000002",
        );
        cx.run_until_parked();
        // GGG now matches and is kept; DIFFERENT != NEW, so the addition stays.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer,
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2711
2712 #[gpui::test]
2713 async fn test_undo_last_reject(cx: &mut TestAppContext) {
2714 init_test(cx);
2715
2716 let fs = FakeFs::new(cx.executor());
2717 fs.insert_tree(
2718 path!("/dir"),
2719 json!({
2720 "file1": "abc\ndef\nghi"
2721 }),
2722 )
2723 .await;
2724 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2725 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2726 let file_path = project
2727 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
2728 .unwrap();
2729
2730 let buffer = project
2731 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2732 .await
2733 .unwrap();
2734
2735 // Track the buffer and make an agent edit
2736 cx.update(|cx| {
2737 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
2738 buffer.update(cx, |buffer, cx| {
2739 buffer
2740 .edit(
2741 [(Point::new(1, 0)..Point::new(1, 3), "AGENT_EDIT")],
2742 None,
2743 cx,
2744 )
2745 .unwrap()
2746 });
2747 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2748 });
2749 cx.run_until_parked();
2750
2751 // Verify the agent edit is there
2752 assert_eq!(
2753 buffer.read_with(cx, |buffer, _| buffer.text()),
2754 "abc\nAGENT_EDIT\nghi"
2755 );
2756 assert!(!unreviewed_hunks(&action_log, cx).is_empty());
2757
2758 // Reject all edits
2759 action_log
2760 .update(cx, |log, cx| log.reject_all_edits(None, cx))
2761 .await;
2762 cx.run_until_parked();
2763
2764 // Verify the buffer is back to original
2765 assert_eq!(
2766 buffer.read_with(cx, |buffer, _| buffer.text()),
2767 "abc\ndef\nghi"
2768 );
2769 assert!(unreviewed_hunks(&action_log, cx).is_empty());
2770
2771 // Verify undo state is available
2772 assert!(action_log.read_with(cx, |log, _| log.has_pending_undo()));
2773
2774 // Undo the reject
2775 action_log
2776 .update(cx, |log, cx| log.undo_last_reject(cx))
2777 .await;
2778
2779 cx.run_until_parked();
2780
2781 // Verify the agent edit is restored
2782 assert_eq!(
2783 buffer.read_with(cx, |buffer, _| buffer.text()),
2784 "abc\nAGENT_EDIT\nghi"
2785 );
2786
2787 // Verify undo state is cleared
2788 assert!(!action_log.read_with(cx, |log, _| log.has_pending_undo()));
2789 }
2790
2791 #[gpui::test]
2792 async fn test_linked_action_log_buffer_read(cx: &mut TestAppContext) {
2793 init_test(cx);
2794
2795 let fs = FakeFs::new(cx.executor());
2796 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
2797 .await;
2798 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2799 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
2800 let child_log =
2801 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
2802
2803 let file_path = project
2804 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
2805 .unwrap();
2806 let buffer = project
2807 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2808 .await
2809 .unwrap();
2810
2811 cx.update(|cx| {
2812 child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
2813 });
2814
2815 // Neither log considers the buffer stale immediately after reading it.
2816 let child_stale = cx.read(|cx| {
2817 child_log
2818 .read(cx)
2819 .stale_buffers(cx)
2820 .cloned()
2821 .collect::<Vec<_>>()
2822 });
2823 let parent_stale = cx.read(|cx| {
2824 parent_log
2825 .read(cx)
2826 .stale_buffers(cx)
2827 .cloned()
2828 .collect::<Vec<_>>()
2829 });
2830 assert!(child_stale.is_empty());
2831 assert!(parent_stale.is_empty());
2832
2833 // Simulate a user edit after the agent read the file.
2834 cx.update(|cx| {
2835 buffer.update(cx, |buffer, cx| {
2836 buffer.edit([(0..5, "goodbye")], None, cx).unwrap();
2837 });
2838 });
2839 cx.run_until_parked();
2840
2841 // Both child and parent should see the buffer as stale because both tracked
2842 // it at the pre-edit version via buffer_read forwarding.
2843 let child_stale = cx.read(|cx| {
2844 child_log
2845 .read(cx)
2846 .stale_buffers(cx)
2847 .cloned()
2848 .collect::<Vec<_>>()
2849 });
2850 let parent_stale = cx.read(|cx| {
2851 parent_log
2852 .read(cx)
2853 .stale_buffers(cx)
2854 .cloned()
2855 .collect::<Vec<_>>()
2856 });
2857 assert_eq!(child_stale, vec![buffer.clone()]);
2858 assert_eq!(parent_stale, vec![buffer]);
2859 }
2860
2861 #[gpui::test]
2862 async fn test_linked_action_log_buffer_edited(cx: &mut TestAppContext) {
2863 init_test(cx);
2864
2865 let fs = FakeFs::new(cx.executor());
2866 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi"}))
2867 .await;
2868 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2869 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
2870 let child_log =
2871 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
2872
2873 let file_path = project
2874 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
2875 .unwrap();
2876 let buffer = project
2877 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2878 .await
2879 .unwrap();
2880
2881 cx.update(|cx| {
2882 child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
2883 buffer.update(cx, |buffer, cx| {
2884 buffer
2885 .edit([(Point::new(1, 0)..Point::new(1, 3), "DEF")], None, cx)
2886 .unwrap();
2887 });
2888 child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2889 });
2890 cx.run_until_parked();
2891
2892 let expected_hunks = vec![(
2893 buffer,
2894 vec![HunkStatus {
2895 range: Point::new(1, 0)..Point::new(2, 0),
2896 diff_status: DiffHunkStatusKind::Modified,
2897 old_text: "def\n".into(),
2898 }],
2899 )];
2900 assert_eq!(
2901 unreviewed_hunks(&child_log, cx),
2902 expected_hunks,
2903 "child should track the agent edit"
2904 );
2905 assert_eq!(
2906 unreviewed_hunks(&parent_log, cx),
2907 expected_hunks,
2908 "parent should also track the agent edit via linked log forwarding"
2909 );
2910 }
2911
2912 #[gpui::test]
2913 async fn test_linked_action_log_buffer_created(cx: &mut TestAppContext) {
2914 init_test(cx);
2915
2916 let fs = FakeFs::new(cx.executor());
2917 fs.insert_tree(path!("/dir"), json!({})).await;
2918 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2919 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
2920 let child_log =
2921 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
2922
2923 let file_path = project
2924 .read_with(cx, |project, cx| {
2925 project.find_project_path("dir/new_file", cx)
2926 })
2927 .unwrap();
2928 let buffer = project
2929 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2930 .await
2931 .unwrap();
2932
2933 cx.update(|cx| {
2934 child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2935 buffer.update(cx, |buffer, cx| buffer.set_text("hello", cx));
2936 child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2937 });
2938 project
2939 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2940 .await
2941 .unwrap();
2942 cx.run_until_parked();
2943
2944 let expected_hunks = vec![(
2945 buffer.clone(),
2946 vec![HunkStatus {
2947 range: Point::new(0, 0)..Point::new(0, 5),
2948 diff_status: DiffHunkStatusKind::Added,
2949 old_text: "".into(),
2950 }],
2951 )];
2952 assert_eq!(
2953 unreviewed_hunks(&child_log, cx),
2954 expected_hunks,
2955 "child should track the created file"
2956 );
2957 assert_eq!(
2958 unreviewed_hunks(&parent_log, cx),
2959 expected_hunks,
2960 "parent should also track the created file via linked log forwarding"
2961 );
2962 }
2963
2964 #[gpui::test]
2965 async fn test_linked_action_log_will_delete_buffer(cx: &mut TestAppContext) {
2966 init_test(cx);
2967
2968 let fs = FakeFs::new(cx.executor());
2969 fs.insert_tree(path!("/dir"), json!({"file": "hello\n"}))
2970 .await;
2971 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2972 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
2973 let child_log =
2974 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
2975
2976 let file_path = project
2977 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
2978 .unwrap();
2979 let buffer = project
2980 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
2981 .await
2982 .unwrap();
2983
2984 cx.update(|cx| {
2985 child_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
2986 });
2987 project
2988 .update(cx, |project, cx| project.delete_file(file_path, false, cx))
2989 .unwrap()
2990 .await
2991 .unwrap();
2992 cx.run_until_parked();
2993
2994 let expected_hunks = vec![(
2995 buffer.clone(),
2996 vec![HunkStatus {
2997 range: Point::new(0, 0)..Point::new(0, 0),
2998 diff_status: DiffHunkStatusKind::Deleted,
2999 old_text: "hello\n".into(),
3000 }],
3001 )];
3002 assert_eq!(
3003 unreviewed_hunks(&child_log, cx),
3004 expected_hunks,
3005 "child should track the deleted file"
3006 );
3007 assert_eq!(
3008 unreviewed_hunks(&parent_log, cx),
3009 expected_hunks,
3010 "parent should also track the deleted file via linked log forwarding"
3011 );
3012 }
3013
3014 /// Simulates the subagent scenario: two child logs linked to the same parent, each
3015 /// editing a different file. The parent accumulates all edits while each child
3016 /// only sees its own.
3017 #[gpui::test]
3018 async fn test_linked_action_log_independent_tracking(cx: &mut TestAppContext) {
3019 init_test(cx);
3020
3021 let fs = FakeFs::new(cx.executor());
3022 fs.insert_tree(
3023 path!("/dir"),
3024 json!({
3025 "file_a": "content of a",
3026 "file_b": "content of b",
3027 }),
3028 )
3029 .await;
3030 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3031 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
3032 let child_log_1 =
3033 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
3034 let child_log_2 =
3035 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
3036
3037 let file_a_path = project
3038 .read_with(cx, |project, cx| {
3039 project.find_project_path("dir/file_a", cx)
3040 })
3041 .unwrap();
3042 let file_b_path = project
3043 .read_with(cx, |project, cx| {
3044 project.find_project_path("dir/file_b", cx)
3045 })
3046 .unwrap();
3047 let buffer_a = project
3048 .update(cx, |project, cx| project.open_buffer(file_a_path, cx))
3049 .await
3050 .unwrap();
3051 let buffer_b = project
3052 .update(cx, |project, cx| project.open_buffer(file_b_path, cx))
3053 .await
3054 .unwrap();
3055
3056 cx.update(|cx| {
3057 child_log_1.update(cx, |log, cx| log.buffer_read(buffer_a.clone(), cx));
3058 buffer_a.update(cx, |buffer, cx| {
3059 buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap();
3060 });
3061 child_log_1.update(cx, |log, cx| log.buffer_edited(buffer_a.clone(), cx));
3062
3063 child_log_2.update(cx, |log, cx| log.buffer_read(buffer_b.clone(), cx));
3064 buffer_b.update(cx, |buffer, cx| {
3065 buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap();
3066 });
3067 child_log_2.update(cx, |log, cx| log.buffer_edited(buffer_b.clone(), cx));
3068 });
3069 cx.run_until_parked();
3070
3071 let child_1_changed: Vec<_> = cx.read(|cx| {
3072 child_log_1
3073 .read(cx)
3074 .changed_buffers(cx)
3075 .into_keys()
3076 .collect()
3077 });
3078 let child_2_changed: Vec<_> = cx.read(|cx| {
3079 child_log_2
3080 .read(cx)
3081 .changed_buffers(cx)
3082 .into_keys()
3083 .collect()
3084 });
3085 let parent_changed: Vec<_> = cx.read(|cx| {
3086 parent_log
3087 .read(cx)
3088 .changed_buffers(cx)
3089 .into_keys()
3090 .collect()
3091 });
3092
3093 assert_eq!(
3094 child_1_changed,
3095 vec![buffer_a.clone()],
3096 "child 1 should only track file_a"
3097 );
3098 assert_eq!(
3099 child_2_changed,
3100 vec![buffer_b.clone()],
3101 "child 2 should only track file_b"
3102 );
3103 assert_eq!(parent_changed.len(), 2, "parent should track both files");
3104 assert!(
3105 parent_changed.contains(&buffer_a) && parent_changed.contains(&buffer_b),
3106 "parent should contain both buffer_a and buffer_b"
3107 );
3108 }
3109
3110 #[gpui::test]
3111 async fn test_file_read_time_recorded_on_buffer_read(cx: &mut TestAppContext) {
3112 init_test(cx);
3113
3114 let fs = FakeFs::new(cx.executor());
3115 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3116 .await;
3117 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3118 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3119
3120 let file_path = project
3121 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3122 .unwrap();
3123 let buffer = project
3124 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3125 .await
3126 .unwrap();
3127
3128 let abs_path = PathBuf::from(path!("/dir/file"));
3129 assert!(
3130 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3131 "file_read_time should be None before buffer_read"
3132 );
3133
3134 cx.update(|cx| {
3135 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
3136 });
3137
3138 assert!(
3139 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3140 "file_read_time should be recorded after buffer_read"
3141 );
3142 }
3143
3144 #[gpui::test]
3145 async fn test_file_read_time_recorded_on_buffer_edited(cx: &mut TestAppContext) {
3146 init_test(cx);
3147
3148 let fs = FakeFs::new(cx.executor());
3149 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3150 .await;
3151 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3152 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3153
3154 let file_path = project
3155 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3156 .unwrap();
3157 let buffer = project
3158 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3159 .await
3160 .unwrap();
3161
3162 let abs_path = PathBuf::from(path!("/dir/file"));
3163 assert!(
3164 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3165 "file_read_time should be None before buffer_edited"
3166 );
3167
3168 cx.update(|cx| {
3169 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
3170 });
3171
3172 assert!(
3173 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3174 "file_read_time should be recorded after buffer_edited"
3175 );
3176 }
3177
3178 #[gpui::test]
3179 async fn test_file_read_time_recorded_on_buffer_created(cx: &mut TestAppContext) {
3180 init_test(cx);
3181
3182 let fs = FakeFs::new(cx.executor());
3183 fs.insert_tree(path!("/dir"), json!({"file": "existing content"}))
3184 .await;
3185 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3186 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3187
3188 let file_path = project
3189 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3190 .unwrap();
3191 let buffer = project
3192 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3193 .await
3194 .unwrap();
3195
3196 let abs_path = PathBuf::from(path!("/dir/file"));
3197 assert!(
3198 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3199 "file_read_time should be None before buffer_created"
3200 );
3201
3202 cx.update(|cx| {
3203 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
3204 });
3205
3206 assert!(
3207 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3208 "file_read_time should be recorded after buffer_created"
3209 );
3210 }
3211
3212 #[gpui::test]
3213 async fn test_file_read_time_removed_on_delete(cx: &mut TestAppContext) {
3214 init_test(cx);
3215
3216 let fs = FakeFs::new(cx.executor());
3217 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3218 .await;
3219 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3220 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3221
3222 let file_path = project
3223 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3224 .unwrap();
3225 let buffer = project
3226 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3227 .await
3228 .unwrap();
3229
3230 let abs_path = PathBuf::from(path!("/dir/file"));
3231
3232 cx.update(|cx| {
3233 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
3234 });
3235 assert!(
3236 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3237 "file_read_time should exist after buffer_read"
3238 );
3239
3240 cx.update(|cx| {
3241 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
3242 });
3243 assert!(
3244 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3245 "file_read_time should be removed after will_delete_buffer"
3246 );
3247 }
3248
3249 #[gpui::test]
3250 async fn test_file_read_time_not_forwarded_to_linked_action_log(cx: &mut TestAppContext) {
3251 init_test(cx);
3252
3253 let fs = FakeFs::new(cx.executor());
3254 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3255 .await;
3256 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3257 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
3258 let child_log =
3259 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
3260
3261 let file_path = project
3262 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3263 .unwrap();
3264 let buffer = project
3265 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3266 .await
3267 .unwrap();
3268
3269 let abs_path = PathBuf::from(path!("/dir/file"));
3270
3271 cx.update(|cx| {
3272 child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
3273 });
3274 assert!(
3275 child_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3276 "child should record file_read_time on buffer_read"
3277 );
3278 assert!(
3279 parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3280 "parent should NOT get file_read_time from child's buffer_read"
3281 );
3282
3283 cx.update(|cx| {
3284 child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
3285 });
3286 assert!(
3287 parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3288 "parent should NOT get file_read_time from child's buffer_edited"
3289 );
3290
3291 cx.update(|cx| {
3292 child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
3293 });
3294 assert!(
3295 parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3296 "parent should NOT get file_read_time from child's buffer_created"
3297 );
3298 }
3299
    /// Plain-data snapshot of a single diff hunk, used for test assertions.
    #[derive(Debug, PartialEq)]
    struct HunkStatus {
        // Buffer range (in points) covered by the hunk.
        range: Range<Point>,
        // The hunk's status kind, as reported by the diff.
        diff_status: DiffHunkStatusKind,
        // Text from the diff's base text that this hunk replaces.
        old_text: String,
    }
3306
3307 fn unreviewed_hunks(
3308 action_log: &Entity<ActionLog>,
3309 cx: &TestAppContext,
3310 ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
3311 cx.read(|cx| {
3312 action_log
3313 .read(cx)
3314 .changed_buffers(cx)
3315 .into_iter()
3316 .map(|(buffer, diff)| {
3317 let snapshot = buffer.read(cx).snapshot();
3318 (
3319 buffer,
3320 diff.read(cx)
3321 .snapshot(cx)
3322 .hunks(&snapshot)
3323 .map(|hunk| HunkStatus {
3324 diff_status: hunk.status().kind,
3325 range: hunk.range,
3326 old_text: diff
3327 .read(cx)
3328 .base_text(cx)
3329 .text_for_range(hunk.diff_base_byte_range)
3330 .collect(),
3331 })
3332 .collect(),
3333 )
3334 })
3335 .collect()
3336 })
3337 }
3338}