1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use clock;
4use collections::BTreeMap;
5use futures::{FutureExt, StreamExt, channel::mpsc};
6use gpui::{
7 App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
8};
9use language::{Anchor, Buffer, BufferEvent, Point, ToOffset, ToPoint};
10use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
11use std::{cmp, ops::Range, sync::Arc};
12use text::{Edit, Patch, Rope};
13use util::{RangeExt, ResultExt as _};
14
/// Stores undo information for a single buffer's rejected edits.
#[derive(Clone)]
pub struct PerBufferUndo {
    /// Weak handle so the undo state doesn't keep a closed buffer alive.
    pub buffer: WeakEntity<Buffer>,
    /// Anchor ranges paired with the agent text to re-insert when undoing.
    pub edits_to_restore: Vec<(Range<Anchor>, String)>,
    /// The tracked status the buffer had when the reject was performed.
    pub status: UndoBufferStatus,
}
22
/// Tracks the buffer status for undo purposes.
#[derive(Clone, Debug)]
pub enum UndoBufferStatus {
    /// Buffer had agent edits reverted in place.
    Modified,
    /// Buffer was created by the agent.
    /// - `had_existing_content: true` - Agent overwrote an existing file. On reject, the
    ///   original content was restored. Undo is supported: we restore the agent's content.
    /// - `had_existing_content: false` - Agent created a new file that didn't exist before.
    ///   On reject, the file was deleted. Undo is NOT currently supported (would require
    ///   recreating the file). Future TODO.
    Created {
        had_existing_content: bool,
    },
}
37
/// Stores undo information for the most recent reject operation.
#[derive(Clone)]
pub struct LastRejectUndo {
    /// Per-buffer undo information, one entry per buffer the reject touched.
    pub buffers: Vec<PerBufferUndo>,
}
44
/// Tracks actions performed by tools in a thread.
pub struct ActionLog {
    /// Buffers that we want to notify the model about when they change.
    tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
    /// The project this action log is associated with.
    project: Entity<Project>,
    /// Stores undo information for the most recent reject operation;
    /// `None` when there is nothing to undo.
    last_reject_undo: Option<LastRejectUndo>,
}
54
55impl ActionLog {
56 /// Creates a new, empty action log associated with the given project.
57 pub fn new(project: Entity<Project>) -> Self {
58 Self {
59 tracked_buffers: BTreeMap::default(),
60 project,
61 last_reject_undo: None,
62 }
63 }
64
    /// Returns the project this action log is associated with.
    pub fn project(&self) -> &Entity<Project> {
        &self.project
    }
68
    /// Starts (or refreshes) tracking of `buffer` and returns its tracking
    /// state. `is_created` means the agent just created the buffer's file, as
    /// opposed to reading or editing an existing one.
    fn track_buffer_internal(
        &mut self,
        buffer: Entity<Buffer>,
        is_created: bool,
        cx: &mut Context<Self>,
    ) -> &mut TrackedBuffer {
        let status = if is_created {
            if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
                // The buffer was already tracked. Preserve any previously
                // captured file content; for Modified/Deleted buffers, the old
                // diff base becomes the content to restore on reject.
                match tracked.status {
                    TrackedBufferStatus::Created {
                        existing_file_content,
                    } => TrackedBufferStatus::Created {
                        existing_file_content,
                    },
                    TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
                        TrackedBufferStatus::Created {
                            existing_file_content: Some(tracked.diff_base),
                        }
                    }
                }
            } else if buffer
                .read(cx)
                .file()
                .is_some_and(|file| file.disk_state().exists())
            {
                // The agent "created" a file that already exists on disk:
                // capture its current content so a reject can restore it.
                TrackedBufferStatus::Created {
                    existing_file_content: Some(buffer.read(cx).as_rope().clone()),
                }
            } else {
                // Brand-new file with no prior content.
                TrackedBufferStatus::Created {
                    existing_file_content: None,
                }
            }
        } else {
            TrackedBufferStatus::Modified
        };

        let tracked_buffer = self
            .tracked_buffers
            .entry(buffer.clone())
            .or_insert_with(|| {
                // Keep language servers aware of the buffer while we track it.
                let open_lsp_handle = self.project.update(cx, |project, cx| {
                    project.register_buffer_with_language_servers(&buffer, cx)
                });

                let text_snapshot = buffer.read(cx).text_snapshot();
                let language = buffer.read(cx).language().cloned();
                let language_registry = buffer.read(cx).language_registry();
                let diff = cx.new(|cx| {
                    let mut diff = BufferDiff::new(&text_snapshot, cx);
                    diff.language_changed(language, language_registry, cx);
                    diff
                });
                let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
                let diff_base;
                let unreviewed_edits;
                if is_created {
                    // Created buffers diff against empty content: the whole
                    // buffer is one unreviewed insertion.
                    diff_base = Rope::default();
                    unreviewed_edits = Patch::new(vec![Edit {
                        old: 0..1,
                        new: 0..text_snapshot.max_point().row + 1,
                    }])
                } else {
                    // Existing buffers diff against their current content, so
                    // nothing is unreviewed yet.
                    diff_base = buffer.read(cx).as_rope().clone();
                    unreviewed_edits = Patch::default();
                }
                TrackedBuffer {
                    buffer: buffer.clone(),
                    diff_base,
                    unreviewed_edits,
                    snapshot: text_snapshot,
                    status,
                    version: buffer.read(cx).version(),
                    diff,
                    diff_update: diff_update_tx,
                    _open_lsp_handle: open_lsp_handle,
                    // Long-lived task that processes queued diff updates.
                    _maintain_diff: cx.spawn({
                        let buffer = buffer.clone();
                        async move |this, cx| {
                            Self::maintain_diff(this, buffer, diff_update_rx, cx)
                                .await
                                .ok();
                        }
                    }),
                    _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
                }
            });
        tracked_buffer.version = buffer.read(cx).version();
        tracked_buffer
    }
159
160 fn handle_buffer_event(
161 &mut self,
162 buffer: Entity<Buffer>,
163 event: &BufferEvent,
164 cx: &mut Context<Self>,
165 ) {
166 match event {
167 BufferEvent::Edited => {
168 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
169 return;
170 };
171 let buffer_version = buffer.read(cx).version();
172 if !buffer_version.changed_since(&tracked_buffer.version) {
173 return;
174 }
175 self.handle_buffer_edited(buffer, cx);
176 }
177 BufferEvent::FileHandleChanged => {
178 self.handle_buffer_file_changed(buffer, cx);
179 }
180 _ => {}
181 };
182 }
183
184 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
185 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
186 return;
187 };
188 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
189 }
190
    /// Reconciles tracking state when a buffer's backing file changes on disk.
    fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        match tracked_buffer.status {
            TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
                if buffer
                    .read(cx)
                    .file()
                    .is_some_and(|file| file.disk_state().is_deleted())
                {
                    // If the buffer had been edited by a tool, but it got
                    // deleted externally, we want to stop tracking it.
                    self.tracked_buffers.remove(&buffer);
                }
                cx.notify();
            }
            TrackedBufferStatus::Deleted => {
                if buffer
                    .read(cx)
                    .file()
                    .is_some_and(|file| !file.disk_state().is_deleted())
                {
                    // If the buffer had been deleted by a tool, but it got
                    // resurrected externally, we want to clear the edits we
                    // were tracking and reset the buffer's state.
                    self.tracked_buffers.remove(&buffer);
                    self.track_buffer_internal(buffer, false, cx);
                }
                cx.notify();
            }
        }
    }
225
    /// Long-running task that keeps a tracked buffer's review diff current.
    ///
    /// Consumes buffer snapshots queued by `schedule_diff_update` and, when
    /// the repository's HEAD commit changes, re-checks whether unreviewed
    /// edits were committed. Exits when the update channel closes.
    async fn maintain_diff(
        this: WeakEntity<Self>,
        buffer: Entity<Buffer>,
        mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
        let git_diff = this
            .update(cx, |this, cx| {
                this.project.update(cx, |project, cx| {
                    project.open_uncommitted_diff(buffer.clone(), cx)
                })
            })?
            .await
            .ok();
        let buffer_repo = git_store.read_with(cx, |git_store, cx| {
            git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        });

        // Channel fired whenever the repository's HEAD commit changes.
        let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
        let _repo_subscription =
            if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
                cx.update(|cx| {
                    let mut old_head = buffer_repo.read(cx).head_commit.clone();
                    Some(cx.subscribe(git_diff, move |_, event, cx| {
                        if let buffer_diff::BufferDiffEvent::DiffChanged { .. } = event {
                            // Only forward diff changes caused by a new HEAD
                            // commit, not ordinary buffer edits.
                            let new_head = buffer_repo.read(cx).head_commit.clone();
                            if new_head != old_head {
                                old_head = new_head;
                                git_diff_updates_tx.send(()).ok();
                            }
                        }
                    }))
                })
            } else {
                None
            };

        loop {
            futures::select_biased! {
                buffer_update = buffer_updates.next() => {
                    if let Some((author, buffer_snapshot)) = buffer_update {
                        Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
                    } else {
                        // Channel closed: the buffer is no longer tracked.
                        break;
                    }
                }
                _ = git_diff_updates_rx.changed().fuse() => {
                    if let Some(git_diff) = git_diff.as_ref() {
                        Self::keep_committed_edits(&this, &buffer, git_diff, cx).await?;
                    }
                }
            }
        }

        Ok(())
    }
283
    /// Incorporates a new buffer snapshot into the tracked diff.
    ///
    /// User edits that don't conflict with unreviewed agent edits are folded
    /// into the diff base (so they never appear for review); agent edits
    /// leave the diff base untouched and therefore remain unreviewed.
    async fn track_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        author: ChangeAuthor,
        buffer_snapshot: text::BufferSnapshot,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let rebase = this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;

            // Rebase on a background thread — diffing can be expensive.
            let rebase = cx.background_spawn({
                let mut base_text = tracked_buffer.diff_base.clone();
                let old_snapshot = tracked_buffer.snapshot.clone();
                let new_snapshot = buffer_snapshot.clone();
                let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let edits = diff_snapshots(&old_snapshot, &new_snapshot);
                async move {
                    if let ChangeAuthor::User = author {
                        apply_non_conflicting_edits(
                            &unreviewed_edits,
                            edits,
                            &mut base_text,
                            new_snapshot.as_rope(),
                        );
                    }

                    (Arc::from(base_text.to_string().as_str()), base_text)
                }
            });

            anyhow::Ok(rebase)
        })??;
        let (new_base_text, new_diff_base) = rebase.await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
331
    /// Absorbs unreviewed edits that were committed verbatim into the agent's
    /// diff base, so they stop showing up for review after a commit.
    async fn keep_committed_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        git_diff: &Entity<BufferDiff>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let buffer_snapshot = this.read_with(cx, |this, _cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok(tracked_buffer.snapshot.clone())
        })??;
        let (new_base_text, new_diff_base) = this
            .read_with(cx, |this, cx| {
                let tracked_buffer = this
                    .tracked_buffers
                    .get(buffer)
                    .context("buffer not tracked")?;
                let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let agent_diff_base = tracked_buffer.diff_base.clone();
                let git_diff_base = git_diff.read(cx).base_text(cx).as_rope().clone();
                let buffer_text = tracked_buffer.snapshot.as_rope().clone();
                anyhow::Ok(cx.background_spawn(async move {
                    let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
                    // Row-level edits that turn the agent's diff base into the
                    // committed (git) base text.
                    let committed_edits = language::line_diff(
                        &agent_diff_base.to_string(),
                        &git_diff_base.to_string(),
                    )
                    .into_iter()
                    .map(|(old, new)| Edit { old, new });

                    let mut new_agent_diff_base = agent_diff_base.clone();
                    // Running row offset from replacements already applied to
                    // `new_agent_diff_base` below.
                    let mut row_delta = 0i32;
                    for committed in committed_edits {
                        while let Some(unreviewed) = old_unreviewed_edits.peek() {
                            // If the committed edit matches the unreviewed
                            // edit, assume the user wants to keep it.
                            if committed.old == unreviewed.old {
                                let unreviewed_new =
                                    buffer_text.slice_rows(unreviewed.new.clone()).to_string();
                                let committed_new =
                                    git_diff_base.slice_rows(committed.new.clone()).to_string();
                                if unreviewed_new == committed_new {
                                    let old_byte_start =
                                        new_agent_diff_base.point_to_offset(Point::new(
                                            (unreviewed.old.start as i32 + row_delta) as u32,
                                            0,
                                        ));
                                    let old_byte_end =
                                        new_agent_diff_base.point_to_offset(cmp::min(
                                            Point::new(
                                                (unreviewed.old.end as i32 + row_delta) as u32,
                                                0,
                                            ),
                                            new_agent_diff_base.max_point(),
                                        ));
                                    new_agent_diff_base
                                        .replace(old_byte_start..old_byte_end, &unreviewed_new);
                                    row_delta +=
                                        unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
                                }
                            } else if unreviewed.old.start >= committed.old.end {
                                // This unreviewed edit lies beyond the current
                                // committed edit; compare it against the next
                                // committed edit instead.
                                break;
                            }

                            old_unreviewed_edits.next().unwrap();
                        }
                    }

                    (
                        Arc::from(new_agent_diff_base.to_string().as_str()),
                        new_agent_diff_base,
                    )
                }))
            })??
            .await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
420
    /// Recomputes the review diff against `new_base_text`, then stores the new
    /// diff base, snapshot, and row-level unreviewed edits on the tracked
    /// buffer.
    async fn update_diff(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        buffer_snapshot: text::BufferSnapshot,
        new_base_text: Arc<str>,
        new_diff_base: Rope,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let (diff, language) = this.read_with(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok((
                tracked_buffer.diff.clone(),
                buffer.read(cx).language().cloned(),
            ))
        })??;
        let update = diff
            .update(cx, |diff, cx| {
                diff.update_diff(
                    buffer_snapshot.clone(),
                    Some(new_base_text),
                    Some(true),
                    language,
                    cx,
                )
            })
            .await;
        diff.update(cx, |diff, cx| {
            diff.set_snapshot(update.clone(), &buffer_snapshot, cx)
        })
        .await;
        let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx));

        // Derive the row-level unreviewed edits from the refreshed hunks on a
        // background thread.
        let unreviewed_edits = cx
            .background_spawn({
                let buffer_snapshot = buffer_snapshot.clone();
                let new_diff_base = new_diff_base.clone();
                async move {
                    let mut unreviewed_edits = Patch::default();
                    for hunk in diff_snapshot.hunks_intersecting_range(
                        Anchor::min_for_buffer(buffer_snapshot.remote_id())
                            ..Anchor::max_for_buffer(buffer_snapshot.remote_id()),
                        &buffer_snapshot,
                    ) {
                        let old_range = new_diff_base
                            .offset_to_point(hunk.diff_base_byte_range.start)
                            ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
                        let new_range = hunk.range.start..hunk.range.end;
                        unreviewed_edits.push(point_to_row_edit(
                            Edit {
                                old: old_range,
                                new: new_range,
                            },
                            &new_diff_base,
                            buffer_snapshot.as_rope(),
                        ));
                    }
                    unreviewed_edits
                }
            })
            .await;
        this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;
            tracked_buffer.diff_base = new_diff_base;
            tracked_buffer.snapshot = buffer_snapshot;
            tracked_buffer.unreviewed_edits = unreviewed_edits;
            cx.notify();
            anyhow::Ok(())
        })?
    }
496
    /// Track a buffer as read by agent, so we can notify the model about user edits.
    /// Reads never mark the buffer as created, so no review diff is produced.
    pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.track_buffer_internal(buffer, false, cx);
    }
501
    /// Mark a buffer as created by agent, so we can refresh it in the context.
    /// The whole buffer content becomes a single unreviewed insertion.
    pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.track_buffer_internal(buffer, true, cx);
    }
506
507 /// Mark a buffer as edited by agent, so we can refresh it in the context
508 pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
509 let new_version = buffer.read(cx).version();
510 let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
511 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
512 tracked_buffer.status = TrackedBufferStatus::Modified;
513 }
514
515 tracked_buffer.version = new_version;
516 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
517 }
518
519 pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
520 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
521 match tracked_buffer.status {
522 TrackedBufferStatus::Created { .. } => {
523 self.tracked_buffers.remove(&buffer);
524 cx.notify();
525 }
526 TrackedBufferStatus::Modified => {
527 buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
528 tracked_buffer.status = TrackedBufferStatus::Deleted;
529 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
530 }
531 TrackedBufferStatus::Deleted => {}
532 }
533 cx.notify();
534 }
535
    /// Accepts ("keeps") the agent edits whose rows intersect `buffer_range`,
    /// folding them into the diff base so they are no longer unreviewed.
    pub fn keep_edits_in_range(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_range: Range<impl language::ToPoint>,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
        match tracked_buffer.status {
            TrackedBufferStatus::Deleted => {
                // Keeping a deletion accepts it wholesale: forget the buffer.
                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            _ => {
                let buffer = buffer.read(cx);
                let buffer_range =
                    buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
                // Accumulated row shift from edits already folded into the
                // diff base during this pass.
                let mut delta = 0i32;
                tracked_buffer.unreviewed_edits.retain_mut(|edit| {
                    edit.old.start = (edit.old.start as i32 + delta) as u32;
                    edit.old.end = (edit.old.end as i32 + delta) as u32;

                    if buffer_range.end.row < edit.new.start
                        || buffer_range.start.row > edit.new.end
                    {
                        // Outside the requested range: keep it unreviewed.
                        true
                    } else {
                        // Copy the buffer's current text for this edit into
                        // the diff base, which "accepts" the edit.
                        let old_range = tracked_buffer
                            .diff_base
                            .point_to_offset(Point::new(edit.old.start, 0))
                            ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                Point::new(edit.old.end, 0),
                                tracked_buffer.diff_base.max_point(),
                            ));
                        let new_range = tracked_buffer
                            .snapshot
                            .point_to_offset(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.point_to_offset(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        tracked_buffer.diff_base.replace(
                            old_range,
                            &tracked_buffer
                                .snapshot
                                .text_for_range(new_range)
                                .collect::<String>(),
                        );
                        delta += edit.new_len() as i32 - edit.old_len() as i32;
                        metrics.add_edit(edit);
                        false
                    }
                });
                // Once every edit of a Created buffer has been accepted, it
                // behaves like an ordinary modified buffer.
                if tracked_buffer.unreviewed_edits.is_empty()
                    && let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status
                {
                    tracked_buffer.status = TrackedBufferStatus::Modified;
                }
                tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
            }
        }
        if let Some(telemetry) = telemetry {
            telemetry_report_accepted_edits(&telemetry, metrics);
        }
    }
606
    /// Reverts the agent's unreviewed edits that intersect `buffer_ranges`,
    /// returning a save task plus (when applicable) the information needed to
    /// undo the rejection.
    ///
    /// Behavior depends on the buffer's tracked status:
    /// - `Created`: restore the pre-existing file content, or delete the file
    ///   when it only contains agent content; stop tracking either way.
    /// - `Deleted`: resurrect the buffer from the diff base and re-track it.
    /// - `Modified`: revert only the edits whose rows intersect the ranges.
    pub fn reject_edits_in_ranges(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_ranges: Vec<Range<impl language::ToPoint>>,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) -> (Task<Result<()>>, Option<PerBufferUndo>) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return (Task::ready(Ok(())), None);
        };

        let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
        let mut undo_info: Option<PerBufferUndo> = None;
        let task = match &tracked_buffer.status {
            TrackedBufferStatus::Created {
                existing_file_content,
            } => {
                let task = if let Some(existing_file_content) = existing_file_content {
                    // Capture the agent's content before restoring existing file content
                    let agent_content = buffer.read(cx).text();
                    let buffer_id = buffer.read(cx).remote_id();

                    buffer.update(cx, |buffer, cx| {
                        buffer.start_transaction();
                        buffer.set_text("", cx);
                        for chunk in existing_file_content.chunks() {
                            buffer.append(chunk, cx);
                        }
                        buffer.end_transaction(cx);
                    });

                    // Undo replaces the whole buffer with the agent's content.
                    undo_info = Some(PerBufferUndo {
                        buffer: buffer.downgrade(),
                        edits_to_restore: vec![(
                            Anchor::min_for_buffer(buffer_id)..Anchor::max_for_buffer(buffer_id),
                            agent_content,
                        )],
                        status: UndoBufferStatus::Created {
                            had_existing_content: true,
                        },
                    });

                    self.project
                        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
                } else {
                    // For a file created by AI with no pre-existing content,
                    // only delete the file if we're certain it contains only AI content
                    // with no edits from the user.

                    let initial_version = tracked_buffer.version.clone();
                    let current_version = buffer.read(cx).version();

                    let current_content = buffer.read(cx).text();
                    let tracked_content = tracked_buffer.snapshot.text();

                    let is_ai_only_content =
                        initial_version == current_version && current_content == tracked_content;

                    if is_ai_only_content {
                        buffer
                            .read(cx)
                            .entry_id(cx)
                            .and_then(|entry_id| {
                                self.project.update(cx, |project, cx| {
                                    project.delete_entry(entry_id, false, cx)
                                })
                            })
                            .unwrap_or(Task::ready(Ok(())))
                    } else {
                        // Not sure how to disentangle edits made by the user
                        // from edits made by the AI at this point.
                        // For now, preserve both to avoid data loss.
                        //
                        // TODO: Better solution (disable "Reject" after user makes some
                        // edit or find a way to differentiate between AI and user edits)
                        Task::ready(Ok(()))
                    }
                };

                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                cx.notify();
                task
            }
            TrackedBufferStatus::Deleted => {
                // Rejecting a deletion restores the content the buffer had
                // before the agent emptied it.
                buffer.update(cx, |buffer, cx| {
                    buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
                });
                let save = self
                    .project
                    .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));

                // Clear all tracked edits for this buffer and start over as if we just read it.
                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                self.buffer_read(buffer.clone(), cx);
                cx.notify();
                save
            }
            TrackedBufferStatus::Modified => {
                let edits_to_restore = buffer.update(cx, |buffer, cx| {
                    let mut buffer_row_ranges = buffer_ranges
                        .into_iter()
                        .map(|range| {
                            range.start.to_point(buffer).row..range.end.to_point(buffer).row
                        })
                        .peekable();

                    let mut edits_to_revert = Vec::new();
                    let mut edits_for_undo = Vec::new();
                    for edit in tracked_buffer.unreviewed_edits.edits() {
                        let new_range = tracked_buffer
                            .snapshot
                            .anchor_before(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.anchor_after(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        let new_row_range = new_range.start.to_point(buffer).row
                            ..new_range.end.to_point(buffer).row;

                        // Revert this edit only if one of the requested row
                        // ranges intersects its current row range.
                        let mut revert = false;
                        while let Some(buffer_row_range) = buffer_row_ranges.peek() {
                            if buffer_row_range.end < new_row_range.start {
                                buffer_row_ranges.next();
                            } else if buffer_row_range.start > new_row_range.end {
                                break;
                            } else {
                                revert = true;
                                break;
                            }
                        }

                        if revert {
                            metrics.add_edit(edit);
                            let old_range = tracked_buffer
                                .diff_base
                                .point_to_offset(Point::new(edit.old.start, 0))
                                ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                    Point::new(edit.old.end, 0),
                                    tracked_buffer.diff_base.max_point(),
                                ));
                            let old_text = tracked_buffer
                                .diff_base
                                .chunks_in_range(old_range)
                                .collect::<String>();

                            // Capture the agent's text before we revert it (for undo)
                            let new_range_offset =
                                new_range.start.to_offset(buffer)..new_range.end.to_offset(buffer);
                            let agent_text =
                                buffer.text_for_range(new_range_offset).collect::<String>();
                            edits_for_undo.push((new_range.clone(), agent_text));

                            edits_to_revert.push((new_range, old_text));
                        }
                    }

                    buffer.edit(edits_to_revert, None, cx);
                    edits_for_undo
                });

                if !edits_to_restore.is_empty() {
                    undo_info = Some(PerBufferUndo {
                        buffer: buffer.downgrade(),
                        edits_to_restore,
                        status: UndoBufferStatus::Modified,
                    });
                }

                self.project
                    .update(cx, |project, cx| project.save_buffer(buffer, cx))
            }
        };
        if let Some(telemetry) = telemetry {
            telemetry_report_rejected_edits(&telemetry, metrics);
        }
        (task, undo_info)
    }
786
    /// Accepts every unreviewed edit in every tracked buffer.
    ///
    /// Deleted buffers are dropped from tracking; all other buffers have
    /// their diff base reset to the current content, clearing the review
    /// queue.
    pub fn keep_all_edits(
        &mut self,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) {
        self.tracked_buffers.retain(|buffer, tracked_buffer| {
            let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
            metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
            if let Some(telemetry) = telemetry.as_ref() {
                telemetry_report_accepted_edits(telemetry, metrics);
            }
            match tracked_buffer.status {
                TrackedBufferStatus::Deleted => false,
                _ => {
                    // An accepted Created buffer now behaves like an ordinary
                    // modified buffer.
                    if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
                        tracked_buffer.status = TrackedBufferStatus::Modified;
                    }
                    tracked_buffer.unreviewed_edits.clear();
                    tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
                    tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
                    true
                }
            }
        });

        cx.notify();
    }
814
    /// Rejects every unreviewed edit across all changed buffers, recording
    /// undo state so the operation can be reverted via `undo_last_reject`.
    /// Returns a task that resolves once all per-buffer reject tasks finish.
    pub fn reject_all_edits(
        &mut self,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) -> Task<()> {
        // Clear any previous undo state before starting a new reject operation
        self.last_reject_undo = None;

        let mut undo_buffers = Vec::new();
        let mut futures = Vec::new();

        for buffer in self.changed_buffers(cx).into_keys() {
            // Reject the buffer's entire contents.
            let buffer_ranges = vec![Anchor::min_max_range_for_buffer(
                buffer.read(cx).remote_id(),
            )];
            let (reject_task, undo_info) =
                self.reject_edits_in_ranges(buffer, buffer_ranges, telemetry.clone(), cx);

            if let Some(undo) = undo_info {
                undo_buffers.push(undo);
            }

            futures.push(async move {
                // A failure in one buffer shouldn't abort the others.
                reject_task.await.log_err();
            });
        }

        // Store the undo information if we have any
        if !undo_buffers.is_empty() {
            self.last_reject_undo = Some(LastRejectUndo {
                buffers: undo_buffers,
            });
        }

        let task = futures::future::join_all(futures);
        cx.background_spawn(async move {
            task.await;
        })
    }
854
    /// Returns true if a reject operation can currently be undone.
    pub fn has_pending_undo(&self) -> bool {
        self.last_reject_undo.is_some()
    }
858
    /// Replaces the stored undo state for the most recent reject operation.
    pub fn set_last_reject_undo(&mut self, undo: LastRejectUndo) {
        self.last_reject_undo = Some(undo);
    }
862
    /// Undoes the most recent reject operation, restoring the rejected agent changes.
    /// This is a best-effort operation: if buffers have been closed or modified externally,
    /// those buffers will be skipped.
    pub fn undo_last_reject(&mut self, cx: &mut Context<Self>) -> Task<()> {
        let Some(undo) = self.last_reject_undo.take() else {
            return Task::ready(());
        };

        let mut save_tasks = Vec::with_capacity(undo.buffers.len());

        for per_buffer_undo in undo.buffers {
            // Skip if the buffer entity has been deallocated
            let Some(buffer) = per_buffer_undo.buffer.upgrade() else {
                continue;
            };

            buffer.update(cx, |buffer, cx| {
                let mut valid_edits = Vec::new();

                // Only restore edits whose anchors still belong to this buffer.
                for (anchor_range, text_to_restore) in per_buffer_undo.edits_to_restore {
                    if anchor_range.start.buffer_id == buffer.remote_id()
                        && anchor_range.end.buffer_id == buffer.remote_id()
                    {
                        valid_edits.push((anchor_range, text_to_restore));
                    }
                }

                if !valid_edits.is_empty() {
                    buffer.edit(valid_edits, None, cx);
                }
            });

            // Re-track the buffer (as an agent edit) if the reject dropped it,
            // so the restored changes show up for review again.
            if !self.tracked_buffers.contains_key(&buffer) {
                self.buffer_edited(buffer.clone(), cx);
            }

            let save = self
                .project
                .update(cx, |project, cx| project.save_buffer(buffer, cx));
            save_tasks.push(save);
        }

        cx.notify();

        cx.background_spawn(async move {
            futures::future::join_all(save_tasks).await;
        })
    }
911
912 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
913 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
914 self.tracked_buffers
915 .iter()
916 .filter(|(_, tracked)| tracked.has_edits(cx))
917 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
918 .collect()
919 }
920
    /// Returns all tracked buffers for debugging purposes.
    /// Only available in test builds; `_cx` is currently unused.
    #[cfg(any(test, feature = "test-support"))]
    pub fn tracked_buffers_for_debug(
        &self,
        _cx: &App,
    ) -> impl Iterator<Item = (&Entity<Buffer>, &TrackedBuffer)> {
        self.tracked_buffers.iter()
    }
929
930 /// Iterate over buffers changed since last read or edited by the model
931 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
932 self.tracked_buffers
933 .iter()
934 .filter(|(buffer, tracked)| {
935 let buffer = buffer.read(cx);
936
937 tracked.version != buffer.version
938 && buffer
939 .file()
940 .is_some_and(|file| !file.disk_state().is_deleted())
941 })
942 .map(|(buffer, _)| buffer)
943 }
944}
945
/// Identifiers attached to accept/reject telemetry events.
#[derive(Clone)]
pub struct ActionLogTelemetry {
    /// Telemetry identifier of the agent that produced the edits.
    pub agent_telemetry_id: SharedString,
    /// Identifier of the agent session.
    pub session_id: Arc<str>,
}
951
/// Line-count statistics accumulated for a single buffer's edits.
struct ActionLogMetrics {
    // Total old-range rows across the counted edits.
    lines_removed: u32,
    // Total new-range rows across the counted edits.
    lines_added: u32,
    // Language name of the buffer, if known.
    language: Option<SharedString>,
}
957
958impl ActionLogMetrics {
959 fn for_buffer(buffer: &Buffer) -> Self {
960 Self {
961 language: buffer.language().map(|l| l.name().0),
962 lines_removed: 0,
963 lines_added: 0,
964 }
965 }
966
967 fn add_edits(&mut self, edits: &[Edit<u32>]) {
968 for edit in edits {
969 self.add_edit(edit);
970 }
971 }
972
973 fn add_edit(&mut self, edit: &Edit<u32>) {
974 self.lines_added += edit.new_len();
975 self.lines_removed += edit.old_len();
976 }
977}
978
/// Emits an "Agent Edits Accepted" telemetry event with line counts for one buffer.
fn telemetry_report_accepted_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
    telemetry::event!(
        "Agent Edits Accepted",
        agent = telemetry.agent_telemetry_id,
        session = telemetry.session_id,
        language = metrics.language,
        lines_added = metrics.lines_added,
        lines_removed = metrics.lines_removed
    );
}
989
/// Emits an "Agent Edits Rejected" telemetry event with line counts for one buffer.
fn telemetry_report_rejected_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
    telemetry::event!(
        "Agent Edits Rejected",
        agent = telemetry.agent_telemetry_id,
        session = telemetry.session_id,
        language = metrics.language,
        lines_added = metrics.lines_added,
        lines_removed = metrics.lines_removed
    );
}
1000
/// Applies each row edit in `edits` to `old_text` — copying the replacement
/// rows from `new_text` — unless the edit intersects an edit already present
/// in `patch`, in which case it is skipped. Returns whether `old_text` was
/// modified.
fn apply_non_conflicting_edits(
    patch: &Patch<u32>,
    edits: Vec<Edit<u32>>,
    old_text: &mut Rope,
    new_text: &Rope,
) -> bool {
    let mut old_edits = patch.edits().iter().cloned().peekable();
    let mut new_edits = edits.into_iter().peekable();
    // Row shift introduced by new edits already applied to `old_text`.
    let mut applied_delta = 0i32;
    // Row shift contributed by patch edits we've advanced past.
    let mut rebased_delta = 0i32;
    let mut has_made_changes = false;

    while let Some(mut new_edit) = new_edits.next() {
        let mut conflict = false;

        // Push all the old edits that are before this new edit or that intersect with it.
        while let Some(old_edit) = old_edits.peek() {
            if new_edit.old.end < old_edit.new.start
                || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
            {
                break;
            } else if new_edit.old.start > old_edit.new.end
                || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
            {
                let old_edit = old_edits.next().unwrap();
                rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
            } else {
                conflict = true;
                // Skip whichever side lets us make progress: if the following
                // new edit also overlaps this patch edit, advance the new
                // edits; otherwise advance past the patch edit.
                if new_edits
                    .peek()
                    .is_some_and(|next_edit| next_edit.old.overlaps(&old_edit.new))
                {
                    new_edit = new_edits.next().unwrap();
                } else {
                    let old_edit = old_edits.next().unwrap();
                    rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
                }
            }
        }

        if !conflict {
            // This edit doesn't intersect with any old edit, so we can apply it to the old text.
            new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
            new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
            let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
                ..old_text.point_to_offset(cmp::min(
                    Point::new(new_edit.old.end, 0),
                    old_text.max_point(),
                ));
            let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
                ..new_text.point_to_offset(cmp::min(
                    Point::new(new_edit.new.end, 0),
                    new_text.max_point(),
                ));

            old_text.replace(
                old_bytes,
                &new_text.chunks_in_range(new_bytes).collect::<String>(),
            );
            applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
            has_made_changes = true;
        }
    }
    has_made_changes
}
1066
1067fn diff_snapshots(
1068 old_snapshot: &text::BufferSnapshot,
1069 new_snapshot: &text::BufferSnapshot,
1070) -> Vec<Edit<u32>> {
1071 let mut edits = new_snapshot
1072 .edits_since::<Point>(&old_snapshot.version)
1073 .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
1074 .peekable();
1075 let mut row_edits = Vec::new();
1076 while let Some(mut edit) = edits.next() {
1077 while let Some(next_edit) = edits.peek() {
1078 if edit.old.end >= next_edit.old.start {
1079 edit.old.end = next_edit.old.end;
1080 edit.new.end = next_edit.new.end;
1081 edits.next();
1082 } else {
1083 break;
1084 }
1085 }
1086 row_edits.push(edit);
1087 }
1088 row_edits
1089}
1090
1091fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
1092 if edit.old.start.column == old_text.line_len(edit.old.start.row)
1093 && new_text
1094 .chars_at(new_text.point_to_offset(edit.new.start))
1095 .next()
1096 == Some('\n')
1097 && edit.old.start != old_text.max_point()
1098 {
1099 Edit {
1100 old: edit.old.start.row + 1..edit.old.end.row + 1,
1101 new: edit.new.start.row + 1..edit.new.end.row + 1,
1102 }
1103 } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
1104 Edit {
1105 old: edit.old.start.row..edit.old.end.row,
1106 new: edit.new.start.row..edit.new.end.row,
1107 }
1108 } else {
1109 Edit {
1110 old: edit.old.start.row..edit.old.end.row + 1,
1111 new: edit.new.start.row..edit.new.end.row + 1,
1112 }
1113 }
1114}
1115
/// Who authored a buffer change; sent alongside snapshots on the
/// `diff_update` channel so updates can be attributed.
#[derive(Copy, Clone, Debug)]
enum ChangeAuthor {
    /// The change was made by the user.
    User,
    /// The change was made by the agent.
    Agent,
}
1121
/// How the agent affected a tracked buffer's underlying file.
#[derive(Debug)]
enum TrackedBufferStatus {
    /// The agent created the file. `existing_file_content` holds the previous
    /// contents when a file already existed at that path, so it can be
    /// restored if the creation is rejected.
    Created { existing_file_content: Option<Rope> },
    /// The agent modified an existing file.
    Modified,
    /// The agent deleted the file.
    Deleted,
}
1128
/// Per-buffer tracking state for edits the agent has made.
pub struct TrackedBuffer {
    /// The buffer being tracked.
    buffer: Entity<Buffer>,
    /// Baseline text that unreviewed edits are diffed against.
    diff_base: Rope,
    /// Row-based edits that haven't been kept or rejected yet.
    unreviewed_edits: Patch<u32>,
    /// Whether the agent created, modified, or deleted this buffer's file.
    status: TrackedBufferStatus,
    /// Buffer version as of the last processed update.
    version: clock::Global,
    /// Diff between `diff_base` and the buffer's current contents.
    diff: Entity<BufferDiff>,
    /// Text snapshot from the most recent diff update.
    snapshot: text::BufferSnapshot,
    /// Sender used by `schedule_diff_update` to request an async diff refresh.
    diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
    /// Keeps the buffer's LSP registration alive while it is tracked.
    _open_lsp_handle: OpenLspBufferHandle,
    /// Background task that services `diff_update` requests.
    _maintain_diff: Task<()>,
    /// Buffer-event subscription; dropped when tracking stops.
    _subscription: Subscription,
}
1142
1143impl TrackedBuffer {
1144 #[cfg(any(test, feature = "test-support"))]
1145 pub fn diff(&self) -> &Entity<BufferDiff> {
1146 &self.diff
1147 }
1148
1149 #[cfg(any(test, feature = "test-support"))]
1150 pub fn diff_base_len(&self) -> usize {
1151 self.diff_base.len()
1152 }
1153
1154 fn has_edits(&self, cx: &App) -> bool {
1155 self.diff
1156 .read(cx)
1157 .snapshot(cx)
1158 .hunks(self.buffer.read(cx))
1159 .next()
1160 .is_some()
1161 }
1162
1163 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
1164 self.diff_update
1165 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
1166 .ok();
1167 }
1168}
1169
/// Diff state exposed for a buffer that has unreviewed agent changes.
pub struct ChangedBuffer {
    /// Diff of the buffer against its tracked baseline.
    pub diff: Entity<BufferDiff>,
}
1173
1174#[cfg(test)]
1175mod tests {
1176 use super::*;
1177 use buffer_diff::DiffHunkStatusKind;
1178 use gpui::TestAppContext;
1179 use language::Point;
1180 use project::{FakeFs, Fs, Project, RemoveOptions};
1181 use rand::prelude::*;
1182 use serde_json::json;
1183 use settings::SettingsStore;
1184 use std::env;
1185 use util::{RandomCharIter, path};
1186
    /// Runs once at binary startup (via `ctor`) to initialize test logging.
    #[ctor::ctor]
    fn init_logger() {
        zlog::init_test();
    }
1191
1192 fn init_test(cx: &mut TestAppContext) {
1193 cx.update(|cx| {
1194 let settings_store = SettingsStore::test(cx);
1195 cx.set_global(settings_store);
1196 });
1197 }
1198
    /// Agent edits two separate lines; keeping hunks in progressively larger
    /// ranges clears them from the unreviewed set one at a time.
    #[gpui::test(iterations = 10)]
    async fn test_keep_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Two agent edits on different lines produce two separate hunks.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndEf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(2, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(4, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Keeping a range that covers only the second hunk leaves the first.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(2, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\n".into(),
                }],
            )]
        );

        // Keeping the whole buffer range clears everything.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1276
    /// Agent deletes two lines; undoing one deletion removes its hunk, and
    /// keeping the remaining deletion hunk clears the log.
    #[gpui::test(iterations = 10)]
    async fn test_deletions(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Delete two separate lines in two separate transactions.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
                    .unwrap();
                buffer.finalize_last_transaction();
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
                    .unwrap();
                buffer.finalize_last_transaction();
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nghi\njkl\npqr"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "mno\n".into(),
                    }
                ],
            )]
        );

        // Undo restores the second deletion, so only the first hunk remains.
        buffer.update(cx, |buffer, cx| buffer.undo(cx));
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nghi\njkl\nmno\npqr"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(1, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "def\n".into(),
                }],
            )]
        );

        // Keeping the remaining deletion hunk clears the log.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1361
    /// User edits made outside and inside an agent hunk do not change the
    /// hunk's recorded range or old text; keeping then clears it.
    #[gpui::test(iterations = 10)]
    async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent edit spanning two lines produces one multi-line hunk.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndeF\nGHI\njkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // User edits adjacent to (but outside) the hunk leave it untouched.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(
                [
                    (Point::new(0, 2)..Point::new(0, 2), "X"),
                    (Point::new(3, 0)..Point::new(3, 0), "Y"),
                ],
                None,
                cx,
            )
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // A user edit inside the hunk also leaves its bounds unchanged.
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndZeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // Keeping a range overlapping the hunk clears it.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1458
    /// Agent creates a brand-new file: the whole content is one `Added` hunk
    /// that grows with further edits and disappears once kept.
    #[gpui::test(iterations = 10)]
    async fn test_creating_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // A subsequent edit extends the single Added hunk.
        buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 6),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), 0..5, None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1517
    /// Agent overwrites an existing file via `buffer_created`; rejecting the
    /// change restores the file's original contents.
    #[gpui::test(iterations = 10)]
    async fn test_overwriting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        // The overwrite appears as a single Added hunk covering the new text.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 19),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting any sub-range of the hunk restores the pre-existing file content.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx);
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }
1576
    /// Agent first modifies a file, then overwrites it entirely; rejecting
    /// after the overwrite restores the file's original on-disk contents.
    #[gpui::test(iterations = 10)]
    async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        // First, an ordinary agent modification (append).
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 37),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "Lorem ipsum dolor".into(),
                }],
            )]
        );

        // Then the agent overwrites the file entirely.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 9),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting after the overwrite restores the ORIGINAL file, not the
        // intermediate modified state.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx);
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }
1657
    /// Agent deletes two files; one is recreated externally and the other by
    /// a tool, then an external deletion clears the remaining tracking state.
    #[gpui::test(iterations = 10)]
    async fn test_deleting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file1": "lorem\n", "file2": "ipsum\n"}),
        )
        .await;

        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let file1_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();
        let file2_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
            .unwrap();

        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let buffer1 = project
            .update(cx, |project, cx| {
                project.open_buffer(file1_path.clone(), cx)
            })
            .await
            .unwrap();
        let buffer2 = project
            .update(cx, |project, cx| {
                project.open_buffer(file2_path.clone(), cx)
            })
            .await
            .unwrap();

        // Agent deletes both files; each produces a whole-file Deleted hunk.
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| {
                project.delete_file(file1_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        project
            .update(cx, |project, cx| {
                project.delete_file(file2_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![
                (
                    buffer1.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "lorem\n".into(),
                    }]
                ),
                (
                    buffer2.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "ipsum\n".into(),
                    }],
                )
            ]
        );

        // Simulate file1 being recreated externally.
        fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
            .await;

        // Simulate file2 being recreated by a tool.
        let buffer2 = project
            .update(cx, |project, cx| project.open_buffer(file2_path, cx))
            .await
            .unwrap();
        action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
        buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
        action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
            .await
            .unwrap();

        // Only the tool-recreated buffer remains tracked, as an Added hunk.
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer2.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Simulate file2 being deleted externally.
        fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1767
    /// Rejecting hunks: non-overlapping reject ranges are ignored, and each
    /// overlapping reject reverts only the hunks it touches.
    #[gpui::test(iterations = 10)]
    async fn test_reject_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent makes a multi-line edit and a single-character edit.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // If the rejected range doesn't overlap with any hunk, we ignore it.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Rejecting a range overlapping the first hunk reverts only that hunk.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(1, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(4, 0)..Point::new(4, 3),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "mno".into(),
                }],
            )]
        );

        // Rejecting the remaining hunk restores the original text entirely.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1908
    /// Rejecting several anchor ranges in one call reverts all affected hunks;
    /// the text reverts synchronously, before the returned task completes.
    #[gpui::test(iterations = 10)]
    async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        action_log.update(cx, |log, cx| {
            let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
                ..buffer.read(cx).anchor_before(Point::new(1, 0));
            let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
                ..buffer.read(cx).anchor_before(Point::new(5, 3));

            let (task, _) =
                log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], None, cx);
            task.detach();
            // The buffer text is reverted synchronously, before the task runs.
            assert_eq!(
                buffer.read_with(cx, |buffer, _| buffer.text()),
                "abc\ndef\nghi\njkl\nmno"
            );
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1985
    /// Rejecting an agent deletion recreates the file on disk with its
    /// original contents.
    #[gpui::test(iterations = 10)]
    async fn test_reject_deleted_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "content"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // Agent deletes the file.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| {
                project.delete_file(file_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "content".into(),
                }]
            )]
        );

        // Rejecting the deletion hunk restores the file and its contents.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
        assert!(fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2044
    /// Rejecting an agent-created file (that did not previously exist)
    /// deletes it from disk.
    #[gpui::test(iterations = 10)]
    async fn test_reject_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        // Agent creates the file and writes content.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 7),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting the whole-file hunk deletes the created file.
        // (The end column deliberately exceeds the content length; ranges are clamped.)
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 11)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2100
    /// If the user edited an agent-created file, rejecting must NOT delete
    /// the file — the user's content would be lost.
    #[gpui::test]
    async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();

        // User makes additional edits
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| {
                buffer.edit([(10..10, "\nuser added this line")], None, cx);
            });
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // Reject all
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(100, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();

        // File should still contain all the content
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        let content = buffer.read_with(cx, |buffer, _| buffer.text());
        assert_eq!(content, "ai content\nuser added this line");
    }
2168
2169 #[gpui::test]
2170 async fn test_reject_after_accepting_hunk_on_created_file(cx: &mut TestAppContext) {
2171 init_test(cx);
2172
2173 let fs = FakeFs::new(cx.executor());
2174 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2175 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2176
2177 let file_path = project
2178 .read_with(cx, |project, cx| {
2179 project.find_project_path("dir/new_file", cx)
2180 })
2181 .unwrap();
2182 let buffer = project
2183 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
2184 .await
2185 .unwrap();
2186
2187 // AI creates file with initial content
2188 cx.update(|cx| {
2189 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2190 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
2191 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2192 });
2193 project
2194 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2195 .await
2196 .unwrap();
2197 cx.run_until_parked();
2198 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2199
2200 // User accepts the single hunk
2201 action_log.update(cx, |log, cx| {
2202 let buffer_range = Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id());
2203 log.keep_edits_in_range(buffer.clone(), buffer_range, None, cx)
2204 });
2205 cx.run_until_parked();
2206 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2207 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2208
2209 // AI modifies the file
2210 cx.update(|cx| {
2211 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
2212 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2213 });
2214 project
2215 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2216 .await
2217 .unwrap();
2218 cx.run_until_parked();
2219 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2220
2221 // User rejects the hunk
2222 action_log
2223 .update(cx, |log, cx| {
2224 let (task, _) = log.reject_edits_in_ranges(
2225 buffer.clone(),
2226 vec![Anchor::min_max_range_for_buffer(
2227 buffer.read(cx).remote_id(),
2228 )],
2229 None,
2230 cx,
2231 );
2232 task
2233 })
2234 .await
2235 .unwrap();
2236 cx.run_until_parked();
2237 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await,);
2238 assert_eq!(
2239 buffer.read_with(cx, |buffer, _| buffer.text()),
2240 "ai content v1"
2241 );
2242 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2243 }
2244
2245 #[gpui::test]
2246 async fn test_reject_edits_on_previously_accepted_created_file(cx: &mut TestAppContext) {
2247 init_test(cx);
2248
2249 let fs = FakeFs::new(cx.executor());
2250 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2251 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2252
2253 let file_path = project
2254 .read_with(cx, |project, cx| {
2255 project.find_project_path("dir/new_file", cx)
2256 })
2257 .unwrap();
2258 let buffer = project
2259 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
2260 .await
2261 .unwrap();
2262
2263 // AI creates file with initial content
2264 cx.update(|cx| {
2265 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2266 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
2267 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2268 });
2269 project
2270 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2271 .await
2272 .unwrap();
2273 cx.run_until_parked();
2274
2275 // User clicks "Accept All"
2276 action_log.update(cx, |log, cx| log.keep_all_edits(None, cx));
2277 cx.run_until_parked();
2278 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2279 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared
2280
2281 // AI modifies file again
2282 cx.update(|cx| {
2283 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
2284 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2285 });
2286 project
2287 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2288 .await
2289 .unwrap();
2290 cx.run_until_parked();
2291 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2292
2293 // User clicks "Reject All"
2294 action_log
2295 .update(cx, |log, cx| log.reject_all_edits(None, cx))
2296 .await;
2297 cx.run_until_parked();
2298 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2299 assert_eq!(
2300 buffer.read_with(cx, |buffer, _| buffer.text()),
2301 "ai content v1"
2302 );
2303 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2304 }
2305
    /// Fuzz test: performs a random sequence of keeps, rejects, and
    /// agent/user edits, periodically verifying (via `quiesce`) that the
    /// tracked unreviewed edits are consistent with the buffer contents.
    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        init_test(cx);

        // Number of random operations per test iteration; can be overridden
        // with the `OPERATIONS` environment variable.
        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

        for _ in 0..operations {
            // Operation mix: 25% keep, 25% reject, 50% random edit.
            match rng.random_range(0..100) {
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, None, cx)
                    });
                }
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            let (task, _) =
                                log.reject_edits_in_ranges(buffer.clone(), vec![range], None, cx);
                            task
                        })
                        .await
                        .unwrap();
                }
                _ => {
                    // Half the edits are attributed to the agent (reported via
                    // `buffer_edited`); the other half simulate user edits,
                    // which are deliberately not reported to the log.
                    let is_agent_edit = rng.random_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            // Occasionally settle and check invariants mid-run.
            if rng.random_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

        // Waits for background work to finish, then checks that replaying the
        // tracked unreviewed edits on top of the diff base reproduces the
        // current buffer text exactly.
        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    // The edits appear to be row-based: `edit.new` is a row
                    // range in the current text (it is fed to `slice_rows`
                    // below) and `edit.old_len()` a row count in the base.
                    // The end point is clamped so an edit at the end of the
                    // buffer doesn't index past the base's last row.
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }
2399
    /// Verifies that a git commit acts like "keep" for unreviewed agent
    /// edits: hunks whose content matches the new HEAD text are cleared,
    /// while hunks the commit did not incorporate remain unreviewed.
    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        // HEAD starts out identical to the working copy.
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Apply a batch of agent edits covering every hunk kind and both
        // extremes of the buffer.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        // All five edits show up as unreviewed hunks.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        // Only the two committed edits were kept; the rest stay unreviewed.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Make another commit that accepts the NEW line but with different content
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into())],
            "0000002",
        );
        cx.run_until_parked();
        // GGG now matches HEAD and is kept; the NEW line does not match
        // DIFFERENT, so it (and the j -> J edit) remain unreviewed.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer,
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2561
2562 #[gpui::test]
2563 async fn test_undo_last_reject(cx: &mut TestAppContext) {
2564 init_test(cx);
2565
2566 let fs = FakeFs::new(cx.executor());
2567 fs.insert_tree(
2568 path!("/dir"),
2569 json!({
2570 "file1": "abc\ndef\nghi"
2571 }),
2572 )
2573 .await;
2574 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2575 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2576 let file_path = project
2577 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
2578 .unwrap();
2579
2580 let buffer = project
2581 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2582 .await
2583 .unwrap();
2584
2585 // Track the buffer and make an agent edit
2586 cx.update(|cx| {
2587 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
2588 buffer.update(cx, |buffer, cx| {
2589 buffer
2590 .edit(
2591 [(Point::new(1, 0)..Point::new(1, 3), "AGENT_EDIT")],
2592 None,
2593 cx,
2594 )
2595 .unwrap()
2596 });
2597 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2598 });
2599 cx.run_until_parked();
2600
2601 // Verify the agent edit is there
2602 assert_eq!(
2603 buffer.read_with(cx, |buffer, _| buffer.text()),
2604 "abc\nAGENT_EDIT\nghi"
2605 );
2606 assert!(!unreviewed_hunks(&action_log, cx).is_empty());
2607
2608 // Reject all edits
2609 action_log
2610 .update(cx, |log, cx| log.reject_all_edits(None, cx))
2611 .await;
2612 cx.run_until_parked();
2613
2614 // Verify the buffer is back to original
2615 assert_eq!(
2616 buffer.read_with(cx, |buffer, _| buffer.text()),
2617 "abc\ndef\nghi"
2618 );
2619 assert!(unreviewed_hunks(&action_log, cx).is_empty());
2620
2621 // Verify undo state is available
2622 assert!(action_log.read_with(cx, |log, _| log.has_pending_undo()));
2623
2624 // Undo the reject
2625 action_log
2626 .update(cx, |log, cx| log.undo_last_reject(cx))
2627 .await;
2628
2629 cx.run_until_parked();
2630
2631 // Verify the agent edit is restored
2632 assert_eq!(
2633 buffer.read_with(cx, |buffer, _| buffer.text()),
2634 "abc\nAGENT_EDIT\nghi"
2635 );
2636
2637 // Verify undo state is cleared
2638 assert!(!action_log.read_with(cx, |log, _| log.has_pending_undo()));
2639 }
2640
    /// Snapshot of a single diff hunk, used by these tests to assert the
    /// state of a buffer's unreviewed edits.
    #[derive(Debug, PartialEq)]
    struct HunkStatus {
        /// Point range the hunk occupies in the current buffer text.
        range: Range<Point>,
        /// Whether the hunk is an addition, deletion, or modification.
        diff_status: DiffHunkStatusKind,
        /// The text this hunk replaced in the diff base.
        old_text: String,
    }
2647
2648 fn unreviewed_hunks(
2649 action_log: &Entity<ActionLog>,
2650 cx: &TestAppContext,
2651 ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
2652 cx.read(|cx| {
2653 action_log
2654 .read(cx)
2655 .changed_buffers(cx)
2656 .into_iter()
2657 .map(|(buffer, diff)| {
2658 let snapshot = buffer.read(cx).snapshot();
2659 (
2660 buffer,
2661 diff.read(cx)
2662 .snapshot(cx)
2663 .hunks(&snapshot)
2664 .map(|hunk| HunkStatus {
2665 diff_status: hunk.status().kind,
2666 range: hunk.range,
2667 old_text: diff
2668 .read(cx)
2669 .base_text(cx)
2670 .text_for_range(hunk.diff_base_byte_range)
2671 .collect(),
2672 })
2673 .collect(),
2674 )
2675 })
2676 .collect()
2677 })
2678 }
2679}