1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use clock;
4use collections::BTreeMap;
5use futures::{FutureExt, StreamExt, channel::mpsc};
6use gpui::{
7 App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
8};
9use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
10use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
11use std::{cmp, ops::Range, sync::Arc};
12use text::{Edit, Patch, Rope};
13use util::{RangeExt, ResultExt as _};
14
15/// Tracks actions performed by tools in a thread
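///
/// A minimal usage sketch (mirroring the tests at the bottom of this file);
/// it assumes a `project`, an open `buffer`, and a gpui context `cx` already exist:
///
/// ```ignore
/// let action_log = cx.new(|_| ActionLog::new(project.clone()));
/// // The agent reads the buffer, then edits it.
/// action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
/// buffer.update(cx, |buffer, cx| buffer.set_text("new text", cx));
/// action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
/// ```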
16pub struct ActionLog {
17 /// Buffers that we want to notify the model about when they change.
18 tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
19 /// The project this action log is associated with
20 project: Entity<Project>,
21}
22
23impl ActionLog {
24 /// Creates a new, empty action log associated with the given project.
25 pub fn new(project: Entity<Project>) -> Self {
26 Self {
27 tracked_buffers: BTreeMap::default(),
28 project,
29 }
30 }
31
32 pub fn project(&self) -> &Entity<Project> {
33 &self.project
34 }
35
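    // Starts (or refreshes) tracking for `buffer`: registers it with the language
    // servers, creates a `BufferDiff`, and spawns a background task that keeps the
    // diff up to date as new snapshots arrive. `is_created` controls whether the
    // buffer is treated as newly created (empty diff base, so all content is
    // unreviewed) or as an existing file (diff base = current contents). When a
    // created file overwrites one that already exists on disk, the previous
    // contents are remembered so a later reject can restore them.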
36 fn track_buffer_internal(
37 &mut self,
38 buffer: Entity<Buffer>,
39 is_created: bool,
40 cx: &mut Context<Self>,
41 ) -> &mut TrackedBuffer {
42 let status = if is_created {
43 if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
44 match tracked.status {
45 TrackedBufferStatus::Created {
46 existing_file_content,
47 } => TrackedBufferStatus::Created {
48 existing_file_content,
49 },
50 TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
51 TrackedBufferStatus::Created {
52 existing_file_content: Some(tracked.diff_base),
53 }
54 }
55 }
56 } else if buffer
57 .read(cx)
58 .file()
59 .is_some_and(|file| file.disk_state().exists())
60 {
61 TrackedBufferStatus::Created {
62 existing_file_content: Some(buffer.read(cx).as_rope().clone()),
63 }
64 } else {
65 TrackedBufferStatus::Created {
66 existing_file_content: None,
67 }
68 }
69 } else {
70 TrackedBufferStatus::Modified
71 };
72
73 let tracked_buffer = self
74 .tracked_buffers
75 .entry(buffer.clone())
76 .or_insert_with(|| {
77 let open_lsp_handle = self.project.update(cx, |project, cx| {
78 project.register_buffer_with_language_servers(&buffer, cx)
79 });
80
81 let text_snapshot = buffer.read(cx).text_snapshot();
82 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
83 let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
                let (diff_base, unreviewed_edits) = if is_created {
                    (
                        Rope::default(),
                        Patch::new(vec![Edit {
                            old: 0..1,
                            new: 0..text_snapshot.max_point().row + 1,
                        }]),
                    )
                } else {
                    (buffer.read(cx).as_rope().clone(), Patch::default())
                };
96 TrackedBuffer {
97 buffer: buffer.clone(),
98 diff_base,
99 unreviewed_edits,
100 snapshot: text_snapshot,
101 status,
102 version: buffer.read(cx).version(),
103 diff,
104 diff_update: diff_update_tx,
105 _open_lsp_handle: open_lsp_handle,
106 _maintain_diff: cx.spawn({
107 let buffer = buffer.clone();
108 async move |this, cx| {
109 Self::maintain_diff(this, buffer, diff_update_rx, cx)
110 .await
111 .ok();
112 }
113 }),
114 _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
115 }
116 });
117 tracked_buffer.version = buffer.read(cx).version();
118 tracked_buffer
119 }
120
121 fn handle_buffer_event(
122 &mut self,
123 buffer: Entity<Buffer>,
124 event: &BufferEvent,
125 cx: &mut Context<Self>,
126 ) {
127 match event {
128 BufferEvent::Edited => self.handle_buffer_edited(buffer, cx),
129 BufferEvent::FileHandleChanged => {
130 self.handle_buffer_file_changed(buffer, cx);
131 }
132 _ => {}
133 };
134 }
135
136 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
137 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
138 return;
139 };
140 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
141 }
142
143 fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
144 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
145 return;
146 };
147
148 match tracked_buffer.status {
149 TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
150 if buffer
151 .read(cx)
152 .file()
153 .is_some_and(|file| file.disk_state() == DiskState::Deleted)
154 {
155 // If the buffer had been edited by a tool, but it got
156 // deleted externally, we want to stop tracking it.
157 self.tracked_buffers.remove(&buffer);
158 }
159 cx.notify();
160 }
161 TrackedBufferStatus::Deleted => {
162 if buffer
163 .read(cx)
164 .file()
165 .is_some_and(|file| file.disk_state() != DiskState::Deleted)
166 {
167 // If the buffer had been deleted by a tool, but it got
168 // resurrected externally, we want to clear the edits we
169 // were tracking and reset the buffer's state.
170 self.tracked_buffers.remove(&buffer);
171 self.track_buffer_internal(buffer, false, cx);
172 }
173 cx.notify();
174 }
175 }
176 }
177
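    // Long-running task owned by each `TrackedBuffer`. It waits for buffer
    // snapshots sent by `schedule_diff_update` and recomputes the unreviewed diff,
    // and it also watches for the git HEAD moving so that edits the user has
    // committed can be marked as reviewed.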
178 async fn maintain_diff(
179 this: WeakEntity<Self>,
180 buffer: Entity<Buffer>,
181 mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
182 cx: &mut AsyncApp,
183 ) -> Result<()> {
184 let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
185 let git_diff = this
186 .update(cx, |this, cx| {
187 this.project.update(cx, |project, cx| {
188 project.open_uncommitted_diff(buffer.clone(), cx)
189 })
190 })?
191 .await
192 .ok();
193 let buffer_repo = git_store.read_with(cx, |git_store, cx| {
194 git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
195 })?;
196
197 let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
198 let _repo_subscription =
199 if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
200 cx.update(|cx| {
201 let mut old_head = buffer_repo.read(cx).head_commit.clone();
202 Some(cx.subscribe(git_diff, move |_, event, cx| {
203 if let buffer_diff::BufferDiffEvent::DiffChanged { .. } = event {
204 let new_head = buffer_repo.read(cx).head_commit.clone();
205 if new_head != old_head {
206 old_head = new_head;
207 git_diff_updates_tx.send(()).ok();
208 }
209 }
210 }))
211 })?
212 } else {
213 None
214 };
215
216 loop {
217 futures::select_biased! {
218 buffer_update = buffer_updates.next() => {
219 if let Some((mut author, mut buffer_snapshot)) = buffer_update {
                        // TODO kb: `buffer.edit(...)` calls made on behalf of the agent fire this
                        // code path again, because we react to the resulting buffer edits and
                        // re-send them here as "user" edits over and over. The commented-out stub
                        // below deduplicates them, but this should be handled at the editor level.
223
224 // Drain any pending updates and keep only the latest snapshot.
225 // This coalesces rapid edits to avoid repeatedly recalculating diffs.
226 // while let Ok(Some((next_author, next_snapshot))) = buffer_updates.try_next() {
227 // // If any update was from Agent, treat the coalesced update as Agent
228 // if matches!(next_author, ChangeAuthor::Agent) {
229 // author = ChangeAuthor::Agent;
230 // }
231 // buffer_snapshot = next_snapshot;
232 // }
233 Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
234 } else {
235 break;
236 }
237 }
238 _ = git_diff_updates_rx.changed().fuse() => {
239 if let Some(git_diff) = git_diff.as_ref() {
240 Self::keep_committed_edits(&this, &buffer, git_diff, cx).await?;
241 }
242 }
243 }
244 }
245
246 Ok(())
247 }
248
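    // Converts the delta between the previously seen snapshot and the new one into
    // row-based edits. User edits that don't conflict with unreviewed agent edits
    // are folded into the diff base so they never show up as unreviewed; agent
    // edits (and conflicting user edits) are picked up by the diff recomputation
    // in `update_diff`.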
249 async fn track_edits(
250 this: &WeakEntity<ActionLog>,
251 buffer: &Entity<Buffer>,
252 author: ChangeAuthor,
253 buffer_snapshot: text::BufferSnapshot,
254 cx: &mut AsyncApp,
255 ) -> Result<()> {
256 let rebase = this.update(cx, |this, cx| {
257 let tracked_buffer = this
258 .tracked_buffers
259 .get_mut(buffer)
260 .context("buffer not tracked")?;
261
262 let old_snapshot = tracked_buffer.snapshot.clone();
263 let new_snapshot = buffer_snapshot.clone();
264
265 if !new_snapshot.version().changed_since(old_snapshot.version()) {
266 Ok(None)
267 } else {
268 let rebase = cx.background_spawn({
269 let mut base_text = tracked_buffer.diff_base.clone();
270
271 let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
272 let edits = diff_snapshots(&old_snapshot, &new_snapshot);
273 async move {
274 if let ChangeAuthor::User = author {
275 apply_non_conflicting_edits(
276 &unreviewed_edits,
277 edits,
278 &mut base_text,
279 new_snapshot.as_rope(),
280 );
281 }
282
283 (Arc::new(base_text.to_string()), base_text)
284 }
285 });
286
287 anyhow::Ok(Some(rebase))
288 }
289 })??;
290
291 if let Some(rebase) = rebase {
292 let (new_base_text, new_diff_base) = rebase.await;
293
294 Self::update_diff(
295 this,
296 buffer,
297 buffer_snapshot,
298 new_base_text,
299 new_diff_base,
300 cx,
301 )
302 .await?;
303 }
304
305 Ok(())
306 }
307
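    // Called when the buffer's git HEAD changes. Any unreviewed edit whose content
    // now matches what was committed is replayed onto the diff base, which
    // effectively marks it as reviewed without the user explicitly keeping it.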
308 async fn keep_committed_edits(
309 this: &WeakEntity<ActionLog>,
310 buffer: &Entity<Buffer>,
311 git_diff: &Entity<BufferDiff>,
312 cx: &mut AsyncApp,
313 ) -> Result<()> {
314 let buffer_snapshot = this.read_with(cx, |this, _cx| {
315 let tracked_buffer = this
316 .tracked_buffers
317 .get(buffer)
318 .context("buffer not tracked")?;
319 anyhow::Ok(tracked_buffer.snapshot.clone())
320 })??;
321 let (new_base_text, new_diff_base) = this
322 .read_with(cx, |this, cx| {
323 let tracked_buffer = this
324 .tracked_buffers
325 .get(buffer)
326 .context("buffer not tracked")?;
327 let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
328 let agent_diff_base = tracked_buffer.diff_base.clone();
329 let git_diff_base = git_diff.read(cx).base_text().as_rope().clone();
330 let buffer_text = tracked_buffer.snapshot.as_rope().clone();
331 anyhow::Ok(cx.background_spawn(async move {
332 let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
333 let committed_edits = language::line_diff(
334 &agent_diff_base.to_string(),
335 &git_diff_base.to_string(),
336 )
337 .into_iter()
338 .map(|(old, new)| Edit { old, new });
339
340 let mut new_agent_diff_base = agent_diff_base.clone();
341 let mut row_delta = 0i32;
342 for committed in committed_edits {
343 while let Some(unreviewed) = old_unreviewed_edits.peek() {
344 // If the committed edit matches the unreviewed
345 // edit, assume the user wants to keep it.
346 if committed.old == unreviewed.old {
347 let unreviewed_new =
348 buffer_text.slice_rows(unreviewed.new.clone()).to_string();
349 let committed_new =
350 git_diff_base.slice_rows(committed.new.clone()).to_string();
351 if unreviewed_new == committed_new {
352 let old_byte_start =
353 new_agent_diff_base.point_to_offset(Point::new(
354 (unreviewed.old.start as i32 + row_delta) as u32,
355 0,
356 ));
357 let old_byte_end =
358 new_agent_diff_base.point_to_offset(cmp::min(
359 Point::new(
360 (unreviewed.old.end as i32 + row_delta) as u32,
361 0,
362 ),
363 new_agent_diff_base.max_point(),
364 ));
365 new_agent_diff_base
366 .replace(old_byte_start..old_byte_end, &unreviewed_new);
367 row_delta +=
368 unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
369 }
370 } else if unreviewed.old.start >= committed.old.end {
371 break;
372 }
373
374 old_unreviewed_edits.next().unwrap();
375 }
376 }
377
378 (
379 Arc::new(new_agent_diff_base.to_string()),
380 new_agent_diff_base,
381 )
382 }))
383 })??
384 .await;
385
386 Self::update_diff(
387 this,
388 buffer,
389 buffer_snapshot,
390 new_base_text,
391 new_diff_base,
392 cx,
393 )
394 .await
395 }
396
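    // Recomputes the `BufferDiff` against `new_base_text`, derives the new set of
    // unreviewed row edits from the resulting hunks, and then stores the new diff
    // base, snapshot, and edits on the tracked buffer.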
397 async fn update_diff(
398 this: &WeakEntity<ActionLog>,
399 buffer: &Entity<Buffer>,
400 buffer_snapshot: text::BufferSnapshot,
401 new_base_text: Arc<String>,
402 new_diff_base: Rope,
403 cx: &mut AsyncApp,
404 ) -> Result<()> {
405 let (diff, language, language_registry) = this.read_with(cx, |this, cx| {
406 let tracked_buffer = this
407 .tracked_buffers
408 .get(buffer)
409 .context("buffer not tracked")?;
410 anyhow::Ok((
411 tracked_buffer.diff.clone(),
412 buffer.read(cx).language().cloned(),
413 buffer.read(cx).language_registry(),
414 ))
415 })??;
416 let diff_snapshot = BufferDiff::update_diff(
417 diff.clone(),
418 buffer_snapshot.clone(),
419 Some(new_base_text),
420 true,
421 false,
422 language,
423 language_registry,
424 cx,
425 )
426 .await;
427 let mut unreviewed_edits = Patch::default();
428 if let Ok(diff_snapshot) = diff_snapshot {
429 unreviewed_edits = cx
430 .background_spawn({
431 let diff_snapshot = diff_snapshot.clone();
432 let buffer_snapshot = buffer_snapshot.clone();
433 let new_diff_base = new_diff_base.clone();
434 async move {
435 let mut unreviewed_edits = Patch::default();
436 for hunk in diff_snapshot.hunks_intersecting_range(
437 Anchor::min_for_buffer(buffer_snapshot.remote_id())
438 ..Anchor::max_for_buffer(buffer_snapshot.remote_id()),
439 &buffer_snapshot,
440 ) {
441 let old_range = new_diff_base
442 .offset_to_point(hunk.diff_base_byte_range.start)
443 ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
444 let new_range = hunk.range.start..hunk.range.end;
445 unreviewed_edits.push(point_to_row_edit(
446 Edit {
447 old: old_range,
448 new: new_range,
449 },
450 &new_diff_base,
451 buffer_snapshot.as_rope(),
452 ));
453 }
454 unreviewed_edits
455 }
456 })
457 .await;
458
459 diff.update(cx, |diff, cx| {
460 diff.set_snapshot(diff_snapshot, &buffer_snapshot, cx);
461 })?;
462 }
463 this.update(cx, |this, cx| {
464 let tracked_buffer = this
465 .tracked_buffers
466 .get_mut(buffer)
467 .context("buffer not tracked")?;
468 tracked_buffer.diff_base = new_diff_base;
469 tracked_buffer.snapshot = buffer_snapshot;
470 tracked_buffer.unreviewed_edits = unreviewed_edits;
471 cx.notify();
472 anyhow::Ok(())
473 })?
474 }
475
    /// Tracks a buffer as read by the agent, so we can notify the model about user edits.
477 pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
478 self.track_buffer_internal(buffer, false, cx);
479 }
480
    /// Marks a buffer as created by the agent, so we can refresh it in the context.
482 pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
483 self.track_buffer_internal(buffer, true, cx);
484 }
485
    /// Marks a buffer as edited by the agent, so we can refresh it in the context.
487 pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
488 let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
489 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
490 tracked_buffer.status = TrackedBufferStatus::Modified;
491 }
492 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
493 }
494
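    /// Called before a tool deletes the file backing `buffer`. A file the agent
    /// created is simply untracked again; a modified file has its contents cleared
    /// and is marked as deleted so the deletion shows up for review.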
495 pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
496 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
497 match tracked_buffer.status {
498 TrackedBufferStatus::Created { .. } => {
499 self.tracked_buffers.remove(&buffer);
500 cx.notify();
501 }
502 TrackedBufferStatus::Modified => {
503 buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
504 tracked_buffer.status = TrackedBufferStatus::Deleted;
505 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
506 }
507 TrackedBufferStatus::Deleted => {}
508 }
509 cx.notify();
510 }
511
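    /// Marks the agent edits intersecting `buffer_range` as reviewed ("kept"): the
    /// corresponding region of the diff base is replaced with the current buffer
    /// text, so those hunks disappear from the unreviewed diff.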
512 pub fn keep_edits_in_range(
513 &mut self,
514 buffer: Entity<Buffer>,
515 buffer_range: Range<impl language::ToPoint>,
516 telemetry: Option<ActionLogTelemetry>,
517 cx: &mut Context<Self>,
518 ) {
519 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
520 return;
521 };
522
523 let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
524 match tracked_buffer.status {
525 TrackedBufferStatus::Deleted => {
526 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
527 self.tracked_buffers.remove(&buffer);
528 cx.notify();
529 }
530 _ => {
531 let buffer = buffer.read(cx);
532 let buffer_range =
533 buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
534 let mut delta = 0i32;
535 tracked_buffer.unreviewed_edits.retain_mut(|edit| {
536 edit.old.start = (edit.old.start as i32 + delta) as u32;
537 edit.old.end = (edit.old.end as i32 + delta) as u32;
538
539 if buffer_range.end.row < edit.new.start
540 || buffer_range.start.row > edit.new.end
541 {
542 true
543 } else {
544 let old_range = tracked_buffer
545 .diff_base
546 .point_to_offset(Point::new(edit.old.start, 0))
547 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
548 Point::new(edit.old.end, 0),
549 tracked_buffer.diff_base.max_point(),
550 ));
551 let new_range = tracked_buffer
552 .snapshot
553 .point_to_offset(Point::new(edit.new.start, 0))
554 ..tracked_buffer.snapshot.point_to_offset(cmp::min(
555 Point::new(edit.new.end, 0),
556 tracked_buffer.snapshot.max_point(),
557 ));
558 tracked_buffer.diff_base.replace(
559 old_range,
560 &tracked_buffer
561 .snapshot
562 .text_for_range(new_range)
563 .collect::<String>(),
564 );
565 delta += edit.new_len() as i32 - edit.old_len() as i32;
566 metrics.add_edit(edit);
567 false
568 }
569 });
570 if tracked_buffer.unreviewed_edits.is_empty()
571 && let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status
572 {
573 tracked_buffer.status = TrackedBufferStatus::Modified;
574 }
575 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
576 }
577 }
578 if let Some(telemetry) = telemetry {
579 telemetry_report_accepted_edits(&telemetry, metrics);
580 }
581 }
582
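    /// Reverts the agent edits intersecting `buffer_ranges`. Depending on the
    /// buffer's status, this deletes a file the agent created (or restores the
    /// contents it overwrote), restores a file the agent deleted, or rewrites the
    /// intersecting hunks back to their pre-edit text.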
583 pub fn reject_edits_in_ranges(
584 &mut self,
585 buffer: Entity<Buffer>,
586 buffer_ranges: Vec<Range<impl language::ToPoint>>,
587 telemetry: Option<ActionLogTelemetry>,
588 cx: &mut Context<Self>,
589 ) -> Task<Result<()>> {
590 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
591 return Task::ready(Ok(()));
592 };
593
594 let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
595 let task = match &tracked_buffer.status {
596 TrackedBufferStatus::Created {
597 existing_file_content,
598 } => {
599 let task = if let Some(existing_file_content) = existing_file_content {
600 buffer.update(cx, |buffer, cx| {
601 buffer.start_transaction();
602 buffer.set_text("", cx);
603 for chunk in existing_file_content.chunks() {
604 buffer.append(chunk, cx);
605 }
606 buffer.end_transaction(cx);
607 });
608 self.project
609 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
610 } else {
611 // For a file created by AI with no pre-existing content,
612 // only delete the file if we're certain it contains only AI content
613 // with no edits from the user.
614
615 let initial_version = tracked_buffer.version.clone();
616 let current_version = buffer.read(cx).version();
617
618 let current_content = buffer.read(cx).text();
619 let tracked_content = tracked_buffer.snapshot.text();
620
621 let is_ai_only_content =
622 initial_version == current_version && current_content == tracked_content;
623
624 if is_ai_only_content {
625 buffer
626 .read(cx)
627 .entry_id(cx)
628 .and_then(|entry_id| {
629 self.project.update(cx, |project, cx| {
630 project.delete_entry(entry_id, false, cx)
631 })
632 })
633 .unwrap_or(Task::ready(Ok(())))
634 } else {
635 // Not sure how to disentangle edits made by the user
636 // from edits made by the AI at this point.
637 // For now, preserve both to avoid data loss.
638 //
                        // TODO: Better solution (disable "Reject" after the user makes an
                        // edit, or find a way to differentiate between AI and user edits)
641 Task::ready(Ok(()))
642 }
643 };
644
645 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
646 self.tracked_buffers.remove(&buffer);
647 cx.notify();
648 task
649 }
650 TrackedBufferStatus::Deleted => {
651 buffer.update(cx, |buffer, cx| {
652 buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
653 });
654 let save = self
655 .project
656 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
657
658 // Clear all tracked edits for this buffer and start over as if we just read it.
659 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
660 self.tracked_buffers.remove(&buffer);
661 self.buffer_read(buffer.clone(), cx);
662 cx.notify();
663 save
664 }
665 TrackedBufferStatus::Modified => {
666 buffer.update(cx, |buffer, cx| {
667 let mut buffer_row_ranges = buffer_ranges
668 .into_iter()
669 .map(|range| {
670 range.start.to_point(buffer).row..range.end.to_point(buffer).row
671 })
672 .peekable();
673
674 let mut edits_to_revert = Vec::new();
675 for edit in tracked_buffer.unreviewed_edits.edits() {
676 let new_range = tracked_buffer
677 .snapshot
678 .anchor_before(Point::new(edit.new.start, 0))
679 ..tracked_buffer.snapshot.anchor_after(cmp::min(
680 Point::new(edit.new.end, 0),
681 tracked_buffer.snapshot.max_point(),
682 ));
683 let new_row_range = new_range.start.to_point(buffer).row
684 ..new_range.end.to_point(buffer).row;
685
686 let mut revert = false;
687 while let Some(buffer_row_range) = buffer_row_ranges.peek() {
688 if buffer_row_range.end < new_row_range.start {
689 buffer_row_ranges.next();
690 } else if buffer_row_range.start > new_row_range.end {
691 break;
692 } else {
693 revert = true;
694 break;
695 }
696 }
697
698 if revert {
699 metrics.add_edit(edit);
700 let old_range = tracked_buffer
701 .diff_base
702 .point_to_offset(Point::new(edit.old.start, 0))
703 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
704 Point::new(edit.old.end, 0),
705 tracked_buffer.diff_base.max_point(),
706 ));
707 let old_text = tracked_buffer
708 .diff_base
709 .chunks_in_range(old_range)
710 .collect::<String>();
711 edits_to_revert.push((new_range, old_text));
712 }
713 }
714
715 buffer.edit(edits_to_revert, None, cx);
716 });
717 self.project
718 .update(cx, |project, cx| project.save_buffer(buffer, cx))
719 }
720 };
721 if let Some(telemetry) = telemetry {
722 telemetry_report_rejected_edits(&telemetry, metrics);
723 }
724 task
725 }
726
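    /// Marks every unreviewed edit in every tracked buffer as reviewed, and stops
    /// tracking buffers whose files the agent deleted.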
727 pub fn keep_all_edits(
728 &mut self,
729 telemetry: Option<ActionLogTelemetry>,
730 cx: &mut Context<Self>,
731 ) {
732 self.tracked_buffers.retain(|buffer, tracked_buffer| {
733 let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
734 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
735 if let Some(telemetry) = telemetry.as_ref() {
736 telemetry_report_accepted_edits(telemetry, metrics);
737 }
738 match tracked_buffer.status {
739 TrackedBufferStatus::Deleted => false,
740 _ => {
741 if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
742 tracked_buffer.status = TrackedBufferStatus::Modified;
743 }
744 tracked_buffer.unreviewed_edits.clear();
745 tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
746 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
747 true
748 }
749 }
750 });
751
752 cx.notify();
753 }
754
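    /// Rejects every unreviewed edit by rejecting the full range of each changed
    /// buffer; failures for individual buffers are logged and otherwise ignored.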
755 pub fn reject_all_edits(
756 &mut self,
757 telemetry: Option<ActionLogTelemetry>,
758 cx: &mut Context<Self>,
759 ) -> Task<()> {
760 let futures = self.changed_buffers(cx).into_keys().map(|buffer| {
761 let buffer_ranges = vec![Anchor::min_max_range_for_buffer(
762 buffer.read(cx).remote_id(),
763 )];
764 let reject = self.reject_edits_in_ranges(buffer, buffer_ranges, telemetry.clone(), cx);
765
766 async move {
767 reject.await.log_err();
768 }
769 });
770
771 let task = futures::future::join_all(futures);
772 cx.background_spawn(async move {
773 task.await;
774 })
775 }
776
777 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
778 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
779 self.tracked_buffers
780 .iter()
781 .filter(|(_, tracked)| tracked.has_edits(cx))
782 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
783 .collect()
784 }
785
    /// Iterates over buffers that have changed since the model last read or edited them.
787 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
788 self.tracked_buffers
789 .iter()
790 .filter(|(buffer, tracked)| {
791 let buffer = buffer.read(cx);
792
793 tracked.version != buffer.version
794 && buffer
795 .file()
796 .is_some_and(|file| file.disk_state() != DiskState::Deleted)
797 })
798 .map(|(buffer, _)| buffer)
799 }
800}
801
802#[derive(Clone)]
803pub struct ActionLogTelemetry {
804 pub agent_telemetry_id: SharedString,
805 pub session_id: Arc<str>,
806}
807
808struct ActionLogMetrics {
809 lines_removed: u32,
810 lines_added: u32,
811 language: Option<SharedString>,
812}
813
814impl ActionLogMetrics {
815 fn for_buffer(buffer: &Buffer) -> Self {
816 Self {
817 language: buffer.language().map(|l| l.name().0),
818 lines_removed: 0,
819 lines_added: 0,
820 }
821 }
822
823 fn add_edits(&mut self, edits: &[Edit<u32>]) {
824 for edit in edits {
825 self.add_edit(edit);
826 }
827 }
828
829 fn add_edit(&mut self, edit: &Edit<u32>) {
830 self.lines_added += edit.new_len();
831 self.lines_removed += edit.old_len();
832 }
833}
834
835fn telemetry_report_accepted_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
836 telemetry::event!(
837 "Agent Edits Accepted",
838 agent = telemetry.agent_telemetry_id,
839 session = telemetry.session_id,
840 language = metrics.language,
841 lines_added = metrics.lines_added,
842 lines_removed = metrics.lines_removed
843 );
844}
845
846fn telemetry_report_rejected_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
847 telemetry::event!(
848 "Agent Edits Rejected",
849 agent = telemetry.agent_telemetry_id,
850 session = telemetry.session_id,
851 language = metrics.language,
852 lines_added = metrics.lines_added,
853 lines_removed = metrics.lines_removed
854 );
855}
856
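// Rebases `edits` (made by the user) onto `old_text`, skipping any edit that
// overlaps a hunk in `patch` (the agent's unreviewed edits). For example, if the
// agent rewrote rows 2..4 and the user then edits row 10, the user edit is copied
// into the diff base; a user edit touching row 3 conflicts and is skipped, so the
// agent's hunk stays unreviewed. Returns whether `old_text` was modified.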
857fn apply_non_conflicting_edits(
858 patch: &Patch<u32>,
859 edits: Vec<Edit<u32>>,
860 old_text: &mut Rope,
861 new_text: &Rope,
862) -> bool {
863 let mut old_edits = patch.edits().iter().cloned().peekable();
864 let mut new_edits = edits.into_iter().peekable();
865 let mut applied_delta = 0i32;
866 let mut rebased_delta = 0i32;
867 let mut has_made_changes = false;
868
869 while let Some(mut new_edit) = new_edits.next() {
870 let mut conflict = false;
871
        // Skip past all the old edits that are before this new edit or that intersect with it.
873 while let Some(old_edit) = old_edits.peek() {
874 if new_edit.old.end < old_edit.new.start
875 || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
876 {
877 break;
878 } else if new_edit.old.start > old_edit.new.end
879 || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
880 {
881 let old_edit = old_edits.next().unwrap();
882 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
883 } else {
884 conflict = true;
885 if new_edits
886 .peek()
887 .is_some_and(|next_edit| next_edit.old.overlaps(&old_edit.new))
888 {
889 new_edit = new_edits.next().unwrap();
890 } else {
891 let old_edit = old_edits.next().unwrap();
892 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
893 }
894 }
895 }
896
897 if !conflict {
898 // This edit doesn't intersect with any old edit, so we can apply it to the old text.
899 new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
900 new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
901 let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
902 ..old_text.point_to_offset(cmp::min(
903 Point::new(new_edit.old.end, 0),
904 old_text.max_point(),
905 ));
906 let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
907 ..new_text.point_to_offset(cmp::min(
908 Point::new(new_edit.new.end, 0),
909 new_text.max_point(),
910 ));
911
912 old_text.replace(
913 old_bytes,
914 &new_text.chunks_in_range(new_bytes).collect::<String>(),
915 );
916 applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
917 has_made_changes = true;
918 }
919 }
920 has_made_changes
921}
922
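// Computes the row-level edits between two snapshots of the same buffer, merging
// edits whose row ranges touch or overlap into a single edit.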
923fn diff_snapshots(
924 old_snapshot: &text::BufferSnapshot,
925 new_snapshot: &text::BufferSnapshot,
926) -> Vec<Edit<u32>> {
927 let mut edits = new_snapshot
928 .edits_since::<Point>(&old_snapshot.version)
929 .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
930 .peekable();
931 let mut row_edits = Vec::new();
932 while let Some(mut edit) = edits.next() {
933 while let Some(next_edit) = edits.peek() {
934 if edit.old.end >= next_edit.old.start {
935 edit.old.end = next_edit.old.end;
936 edit.new.end = next_edit.new.end;
937 edits.next();
938 } else {
939 break;
940 }
941 }
942 row_edits.push(edit);
943 }
944 row_edits
945}
946
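// Widens a point-based edit into whole rows. An edit that starts at the end of a
// line and whose new text begins with a newline is pushed down to start on the
// next row; an edit that already spans whole lines is kept as-is; any other edit
// is extended to include the partially edited rows.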
947fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
948 if edit.old.start.column == old_text.line_len(edit.old.start.row)
949 && new_text
950 .chars_at(new_text.point_to_offset(edit.new.start))
951 .next()
952 == Some('\n')
953 && edit.old.start != old_text.max_point()
954 {
955 Edit {
956 old: edit.old.start.row + 1..edit.old.end.row + 1,
957 new: edit.new.start.row + 1..edit.new.end.row + 1,
958 }
959 } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
960 Edit {
961 old: edit.old.start.row..edit.old.end.row,
962 new: edit.new.start.row..edit.new.end.row,
963 }
964 } else {
965 Edit {
966 old: edit.old.start.row..edit.old.end.row + 1,
967 new: edit.new.start.row..edit.new.end.row + 1,
968 }
969 }
970}
971
972#[derive(Copy, Clone, Debug)]
973enum ChangeAuthor {
974 User,
975 Agent,
976}
977
978enum TrackedBufferStatus {
979 Created { existing_file_content: Option<Rope> },
980 Modified,
981 Deleted,
982}
983
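// Per-buffer tracking state: the diff base the agent's edits are measured against,
// the unreviewed row edits, and the background task that keeps the diff current.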
984struct TrackedBuffer {
985 buffer: Entity<Buffer>,
986 diff_base: Rope,
987 unreviewed_edits: Patch<u32>,
988 status: TrackedBufferStatus,
989 version: clock::Global,
990 diff: Entity<BufferDiff>,
991 snapshot: text::BufferSnapshot,
992 diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
993 _open_lsp_handle: OpenLspBufferHandle,
994 _maintain_diff: Task<()>,
995 _subscription: Subscription,
996}
997
998impl TrackedBuffer {
999 fn has_edits(&self, cx: &App) -> bool {
1000 self.diff
1001 .read(cx)
1002 .hunks(self.buffer.read(cx), cx)
1003 .next()
1004 .is_some()
1005 }
1006
1007 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
1008 self.diff_update
1009 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
1010 .ok();
1011 }
1012}
1013
1014pub struct ChangedBuffer {
1015 pub diff: Entity<BufferDiff>,
1016}
1017
1018#[cfg(test)]
1019mod tests {
1020 use super::*;
1021 use buffer_diff::DiffHunkStatusKind;
1022 use gpui::TestAppContext;
1023 use language::Point;
1024 use project::{FakeFs, Fs, Project, RemoveOptions};
1025 use rand::prelude::*;
1026 use serde_json::json;
1027 use settings::SettingsStore;
1028 use std::env;
1029 use util::{RandomCharIter, path};
1030
1031 #[ctor::ctor]
1032 fn init_logger() {
1033 zlog::init_test();
1034 }
1035
1036 fn init_test(cx: &mut TestAppContext) {
1037 cx.update(|cx| {
1038 let settings_store = SettingsStore::test(cx);
1039 cx.set_global(settings_store);
1040 });
1041 }
1042
1043 #[gpui::test(iterations = 10)]
1044 async fn test_keep_edits(cx: &mut TestAppContext) {
1045 init_test(cx);
1046
1047 let fs = FakeFs::new(cx.executor());
1048 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1049 .await;
1050 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1051 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1052 let file_path = project
1053 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1054 .unwrap();
1055 let buffer = project
1056 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1057 .await
1058 .unwrap();
1059
1060 cx.update(|cx| {
1061 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1062 buffer.update(cx, |buffer, cx| {
1063 buffer
1064 .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
1065 .unwrap()
1066 });
1067 buffer.update(cx, |buffer, cx| {
1068 buffer
1069 .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
1070 .unwrap()
1071 });
1072 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1073 });
1074 cx.run_until_parked();
1075 assert_eq!(
1076 buffer.read_with(cx, |buffer, _| buffer.text()),
1077 "abc\ndEf\nghi\njkl\nmnO"
1078 );
1079 assert_eq!(
1080 unreviewed_hunks(&action_log, cx),
1081 vec![(
1082 buffer.clone(),
1083 vec![
1084 HunkStatus {
1085 range: Point::new(1, 0)..Point::new(2, 0),
1086 diff_status: DiffHunkStatusKind::Modified,
1087 old_text: "def\n".into(),
1088 },
1089 HunkStatus {
1090 range: Point::new(4, 0)..Point::new(4, 3),
1091 diff_status: DiffHunkStatusKind::Modified,
1092 old_text: "mno".into(),
1093 }
1094 ],
1095 )]
1096 );
1097
1098 action_log.update(cx, |log, cx| {
1099 log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), None, cx)
1100 });
1101 cx.run_until_parked();
1102 assert_eq!(
1103 unreviewed_hunks(&action_log, cx),
1104 vec![(
1105 buffer.clone(),
1106 vec![HunkStatus {
1107 range: Point::new(1, 0)..Point::new(2, 0),
1108 diff_status: DiffHunkStatusKind::Modified,
1109 old_text: "def\n".into(),
1110 }],
1111 )]
1112 );
1113
1114 action_log.update(cx, |log, cx| {
1115 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), None, cx)
1116 });
1117 cx.run_until_parked();
1118 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1119 }
1120
1121 #[gpui::test(iterations = 10)]
1122 async fn test_deletions(cx: &mut TestAppContext) {
1123 init_test(cx);
1124
1125 let fs = FakeFs::new(cx.executor());
1126 fs.insert_tree(
1127 path!("/dir"),
1128 json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
1129 )
1130 .await;
1131 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1132 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1133 let file_path = project
1134 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1135 .unwrap();
1136 let buffer = project
1137 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1138 .await
1139 .unwrap();
1140
1141 cx.update(|cx| {
1142 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1143 buffer.update(cx, |buffer, cx| {
1144 buffer
1145 .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
1146 .unwrap();
1147 buffer.finalize_last_transaction();
1148 });
1149 buffer.update(cx, |buffer, cx| {
1150 buffer
1151 .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
1152 .unwrap();
1153 buffer.finalize_last_transaction();
1154 });
1155 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1156 });
1157 cx.run_until_parked();
1158 assert_eq!(
1159 buffer.read_with(cx, |buffer, _| buffer.text()),
1160 "abc\nghi\njkl\npqr"
1161 );
1162 assert_eq!(
1163 unreviewed_hunks(&action_log, cx),
1164 vec![(
1165 buffer.clone(),
1166 vec![
1167 HunkStatus {
1168 range: Point::new(1, 0)..Point::new(1, 0),
1169 diff_status: DiffHunkStatusKind::Deleted,
1170 old_text: "def\n".into(),
1171 },
1172 HunkStatus {
1173 range: Point::new(3, 0)..Point::new(3, 0),
1174 diff_status: DiffHunkStatusKind::Deleted,
1175 old_text: "mno\n".into(),
1176 }
1177 ],
1178 )]
1179 );
1180
1181 buffer.update(cx, |buffer, cx| buffer.undo(cx));
1182 cx.run_until_parked();
1183 assert_eq!(
1184 buffer.read_with(cx, |buffer, _| buffer.text()),
1185 "abc\nghi\njkl\nmno\npqr"
1186 );
1187 assert_eq!(
1188 unreviewed_hunks(&action_log, cx),
1189 vec![(
1190 buffer.clone(),
1191 vec![HunkStatus {
1192 range: Point::new(1, 0)..Point::new(1, 0),
1193 diff_status: DiffHunkStatusKind::Deleted,
1194 old_text: "def\n".into(),
1195 }],
1196 )]
1197 );
1198
1199 action_log.update(cx, |log, cx| {
1200 log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), None, cx)
1201 });
1202 cx.run_until_parked();
1203 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1204 }
1205
1206 #[gpui::test(iterations = 10)]
1207 async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
1208 init_test(cx);
1209
1210 let fs = FakeFs::new(cx.executor());
1211 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1212 .await;
1213 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1214 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1215 let file_path = project
1216 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1217 .unwrap();
1218 let buffer = project
1219 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1220 .await
1221 .unwrap();
1222
1223 cx.update(|cx| {
1224 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1225 buffer.update(cx, |buffer, cx| {
1226 buffer
1227 .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
1228 .unwrap()
1229 });
1230 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1231 });
1232 cx.run_until_parked();
1233 assert_eq!(
1234 buffer.read_with(cx, |buffer, _| buffer.text()),
1235 "abc\ndeF\nGHI\njkl\nmno"
1236 );
1237 assert_eq!(
1238 unreviewed_hunks(&action_log, cx),
1239 vec![(
1240 buffer.clone(),
1241 vec![HunkStatus {
1242 range: Point::new(1, 0)..Point::new(3, 0),
1243 diff_status: DiffHunkStatusKind::Modified,
1244 old_text: "def\nghi\n".into(),
1245 }],
1246 )]
1247 );
1248
1249 buffer.update(cx, |buffer, cx| {
1250 buffer.edit(
1251 [
1252 (Point::new(0, 2)..Point::new(0, 2), "X"),
1253 (Point::new(3, 0)..Point::new(3, 0), "Y"),
1254 ],
1255 None,
1256 cx,
1257 )
1258 });
1259 cx.run_until_parked();
1260 assert_eq!(
1261 buffer.read_with(cx, |buffer, _| buffer.text()),
1262 "abXc\ndeF\nGHI\nYjkl\nmno"
1263 );
1264 assert_eq!(
1265 unreviewed_hunks(&action_log, cx),
1266 vec![(
1267 buffer.clone(),
1268 vec![HunkStatus {
1269 range: Point::new(1, 0)..Point::new(3, 0),
1270 diff_status: DiffHunkStatusKind::Modified,
1271 old_text: "def\nghi\n".into(),
1272 }],
1273 )]
1274 );
1275
1276 buffer.update(cx, |buffer, cx| {
1277 buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
1278 });
1279 cx.run_until_parked();
1280 assert_eq!(
1281 buffer.read_with(cx, |buffer, _| buffer.text()),
1282 "abXc\ndZeF\nGHI\nYjkl\nmno"
1283 );
1284 assert_eq!(
1285 unreviewed_hunks(&action_log, cx),
1286 vec![(
1287 buffer.clone(),
1288 vec![HunkStatus {
1289 range: Point::new(1, 0)..Point::new(3, 0),
1290 diff_status: DiffHunkStatusKind::Modified,
1291 old_text: "def\nghi\n".into(),
1292 }],
1293 )]
1294 );
1295
1296 action_log.update(cx, |log, cx| {
1297 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), None, cx)
1298 });
1299 cx.run_until_parked();
1300 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1301 }
1302
1303 #[gpui::test(iterations = 10)]
1304 async fn test_creating_files(cx: &mut TestAppContext) {
1305 init_test(cx);
1306
1307 let fs = FakeFs::new(cx.executor());
1308 fs.insert_tree(path!("/dir"), json!({})).await;
1309 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1310 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1311 let file_path = project
1312 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1313 .unwrap();
1314
1315 let buffer = project
1316 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1317 .await
1318 .unwrap();
1319 cx.update(|cx| {
1320 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1321 buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
1322 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1323 });
1324 project
1325 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1326 .await
1327 .unwrap();
1328 cx.run_until_parked();
1329 assert_eq!(
1330 unreviewed_hunks(&action_log, cx),
1331 vec![(
1332 buffer.clone(),
1333 vec![HunkStatus {
1334 range: Point::new(0, 0)..Point::new(0, 5),
1335 diff_status: DiffHunkStatusKind::Added,
1336 old_text: "".into(),
1337 }],
1338 )]
1339 );
1340
1341 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
1342 cx.run_until_parked();
1343 assert_eq!(
1344 unreviewed_hunks(&action_log, cx),
1345 vec![(
1346 buffer.clone(),
1347 vec![HunkStatus {
1348 range: Point::new(0, 0)..Point::new(0, 6),
1349 diff_status: DiffHunkStatusKind::Added,
1350 old_text: "".into(),
1351 }],
1352 )]
1353 );
1354
1355 action_log.update(cx, |log, cx| {
1356 log.keep_edits_in_range(buffer.clone(), 0..5, None, cx)
1357 });
1358 cx.run_until_parked();
1359 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1360 }
1361
1362 #[gpui::test(iterations = 10)]
1363 async fn test_overwriting_files(cx: &mut TestAppContext) {
1364 init_test(cx);
1365
1366 let fs = FakeFs::new(cx.executor());
1367 fs.insert_tree(
1368 path!("/dir"),
1369 json!({
1370 "file1": "Lorem ipsum dolor"
1371 }),
1372 )
1373 .await;
1374 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1375 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1376 let file_path = project
1377 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1378 .unwrap();
1379
1380 let buffer = project
1381 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1382 .await
1383 .unwrap();
1384 cx.update(|cx| {
1385 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1386 buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
1387 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1388 });
1389 project
1390 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1391 .await
1392 .unwrap();
1393 cx.run_until_parked();
1394 assert_eq!(
1395 unreviewed_hunks(&action_log, cx),
1396 vec![(
1397 buffer.clone(),
1398 vec![HunkStatus {
1399 range: Point::new(0, 0)..Point::new(0, 19),
1400 diff_status: DiffHunkStatusKind::Added,
1401 old_text: "".into(),
1402 }],
1403 )]
1404 );
1405
1406 action_log
1407 .update(cx, |log, cx| {
1408 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
1409 })
1410 .await
1411 .unwrap();
1412 cx.run_until_parked();
1413 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1414 assert_eq!(
1415 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1416 "Lorem ipsum dolor"
1417 );
1418 }
1419
1420 #[gpui::test(iterations = 10)]
1421 async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
1422 init_test(cx);
1423
1424 let fs = FakeFs::new(cx.executor());
1425 fs.insert_tree(
1426 path!("/dir"),
1427 json!({
1428 "file1": "Lorem ipsum dolor"
1429 }),
1430 )
1431 .await;
1432 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1433 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1434 let file_path = project
1435 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1436 .unwrap();
1437
1438 let buffer = project
1439 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1440 .await
1441 .unwrap();
1442 cx.update(|cx| {
1443 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1444 buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
1445 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1446 });
1447 project
1448 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1449 .await
1450 .unwrap();
1451 cx.run_until_parked();
1452 assert_eq!(
1453 unreviewed_hunks(&action_log, cx),
1454 vec![(
1455 buffer.clone(),
1456 vec![HunkStatus {
1457 range: Point::new(0, 0)..Point::new(0, 37),
1458 diff_status: DiffHunkStatusKind::Modified,
1459 old_text: "Lorem ipsum dolor".into(),
1460 }],
1461 )]
1462 );
1463
1464 cx.update(|cx| {
1465 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1466 buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
1467 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1468 });
1469 project
1470 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1471 .await
1472 .unwrap();
1473 cx.run_until_parked();
1474 assert_eq!(
1475 unreviewed_hunks(&action_log, cx),
1476 vec![(
1477 buffer.clone(),
1478 vec![HunkStatus {
1479 range: Point::new(0, 0)..Point::new(0, 9),
1480 diff_status: DiffHunkStatusKind::Added,
1481 old_text: "".into(),
1482 }],
1483 )]
1484 );
1485
1486 action_log
1487 .update(cx, |log, cx| {
1488 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
1489 })
1490 .await
1491 .unwrap();
1492 cx.run_until_parked();
1493 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1494 assert_eq!(
1495 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1496 "Lorem ipsum dolor"
1497 );
1498 }
1499
1500 #[gpui::test(iterations = 10)]
1501 async fn test_deleting_files(cx: &mut TestAppContext) {
1502 init_test(cx);
1503
1504 let fs = FakeFs::new(cx.executor());
1505 fs.insert_tree(
1506 path!("/dir"),
1507 json!({"file1": "lorem\n", "file2": "ipsum\n"}),
1508 )
1509 .await;
1510
1511 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1512 let file1_path = project
1513 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1514 .unwrap();
1515 let file2_path = project
1516 .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
1517 .unwrap();
1518
1519 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1520 let buffer1 = project
1521 .update(cx, |project, cx| {
1522 project.open_buffer(file1_path.clone(), cx)
1523 })
1524 .await
1525 .unwrap();
1526 let buffer2 = project
1527 .update(cx, |project, cx| {
1528 project.open_buffer(file2_path.clone(), cx)
1529 })
1530 .await
1531 .unwrap();
1532
1533 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
1534 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
1535 project
1536 .update(cx, |project, cx| {
1537 project.delete_file(file1_path.clone(), false, cx)
1538 })
1539 .unwrap()
1540 .await
1541 .unwrap();
1542 project
1543 .update(cx, |project, cx| {
1544 project.delete_file(file2_path.clone(), false, cx)
1545 })
1546 .unwrap()
1547 .await
1548 .unwrap();
1549 cx.run_until_parked();
1550 assert_eq!(
1551 unreviewed_hunks(&action_log, cx),
1552 vec![
1553 (
1554 buffer1.clone(),
1555 vec![HunkStatus {
1556 range: Point::new(0, 0)..Point::new(0, 0),
1557 diff_status: DiffHunkStatusKind::Deleted,
1558 old_text: "lorem\n".into(),
1559 }]
1560 ),
1561 (
1562 buffer2.clone(),
1563 vec![HunkStatus {
1564 range: Point::new(0, 0)..Point::new(0, 0),
1565 diff_status: DiffHunkStatusKind::Deleted,
1566 old_text: "ipsum\n".into(),
1567 }],
1568 )
1569 ]
1570 );
1571
1572 // Simulate file1 being recreated externally.
1573 fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
1574 .await;
1575
1576 // Simulate file2 being recreated by a tool.
1577 let buffer2 = project
1578 .update(cx, |project, cx| project.open_buffer(file2_path, cx))
1579 .await
1580 .unwrap();
1581 action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
1582 buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
1583 action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
1584 project
1585 .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
1586 .await
1587 .unwrap();
1588
1589 cx.run_until_parked();
1590 assert_eq!(
1591 unreviewed_hunks(&action_log, cx),
1592 vec![(
1593 buffer2.clone(),
1594 vec![HunkStatus {
1595 range: Point::new(0, 0)..Point::new(0, 5),
1596 diff_status: DiffHunkStatusKind::Added,
1597 old_text: "".into(),
1598 }],
1599 )]
1600 );
1601
1602 // Simulate file2 being deleted externally.
1603 fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
1604 .await
1605 .unwrap();
1606 cx.run_until_parked();
1607 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1608 }
1609
1610 #[gpui::test(iterations = 10)]
1611 async fn test_reject_edits(cx: &mut TestAppContext) {
1612 init_test(cx);
1613
1614 let fs = FakeFs::new(cx.executor());
1615 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1616 .await;
1617 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1618 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1619 let file_path = project
1620 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1621 .unwrap();
1622 let buffer = project
1623 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1624 .await
1625 .unwrap();
1626
1627 cx.update(|cx| {
1628 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1629 buffer.update(cx, |buffer, cx| {
1630 buffer
1631 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1632 .unwrap()
1633 });
1634 buffer.update(cx, |buffer, cx| {
1635 buffer
1636 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1637 .unwrap()
1638 });
1639 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1640 });
1641 cx.run_until_parked();
1642 assert_eq!(
1643 buffer.read_with(cx, |buffer, _| buffer.text()),
1644 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1645 );
1646 assert_eq!(
1647 unreviewed_hunks(&action_log, cx),
1648 vec![(
1649 buffer.clone(),
1650 vec![
1651 HunkStatus {
1652 range: Point::new(1, 0)..Point::new(3, 0),
1653 diff_status: DiffHunkStatusKind::Modified,
1654 old_text: "def\n".into(),
1655 },
1656 HunkStatus {
1657 range: Point::new(5, 0)..Point::new(5, 3),
1658 diff_status: DiffHunkStatusKind::Modified,
1659 old_text: "mno".into(),
1660 }
1661 ],
1662 )]
1663 );
1664
1665 // If the rejected range doesn't overlap with any hunk, we ignore it.
1666 action_log
1667 .update(cx, |log, cx| {
1668 log.reject_edits_in_ranges(
1669 buffer.clone(),
1670 vec![Point::new(4, 0)..Point::new(4, 0)],
1671 None,
1672 cx,
1673 )
1674 })
1675 .await
1676 .unwrap();
1677 cx.run_until_parked();
1678 assert_eq!(
1679 buffer.read_with(cx, |buffer, _| buffer.text()),
1680 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1681 );
1682 assert_eq!(
1683 unreviewed_hunks(&action_log, cx),
1684 vec![(
1685 buffer.clone(),
1686 vec![
1687 HunkStatus {
1688 range: Point::new(1, 0)..Point::new(3, 0),
1689 diff_status: DiffHunkStatusKind::Modified,
1690 old_text: "def\n".into(),
1691 },
1692 HunkStatus {
1693 range: Point::new(5, 0)..Point::new(5, 3),
1694 diff_status: DiffHunkStatusKind::Modified,
1695 old_text: "mno".into(),
1696 }
1697 ],
1698 )]
1699 );
1700
1701 action_log
1702 .update(cx, |log, cx| {
1703 log.reject_edits_in_ranges(
1704 buffer.clone(),
1705 vec![Point::new(0, 0)..Point::new(1, 0)],
1706 None,
1707 cx,
1708 )
1709 })
1710 .await
1711 .unwrap();
1712 cx.run_until_parked();
1713 assert_eq!(
1714 buffer.read_with(cx, |buffer, _| buffer.text()),
1715 "abc\ndef\nghi\njkl\nmnO"
1716 );
1717 assert_eq!(
1718 unreviewed_hunks(&action_log, cx),
1719 vec![(
1720 buffer.clone(),
1721 vec![HunkStatus {
1722 range: Point::new(4, 0)..Point::new(4, 3),
1723 diff_status: DiffHunkStatusKind::Modified,
1724 old_text: "mno".into(),
1725 }],
1726 )]
1727 );
1728
1729 action_log
1730 .update(cx, |log, cx| {
1731 log.reject_edits_in_ranges(
1732 buffer.clone(),
1733 vec![Point::new(4, 0)..Point::new(4, 0)],
1734 None,
1735 cx,
1736 )
1737 })
1738 .await
1739 .unwrap();
1740 cx.run_until_parked();
1741 assert_eq!(
1742 buffer.read_with(cx, |buffer, _| buffer.text()),
1743 "abc\ndef\nghi\njkl\nmno"
1744 );
1745 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1746 }
1747
1748 #[gpui::test(iterations = 10)]
1749 async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
1750 init_test(cx);
1751
1752 let fs = FakeFs::new(cx.executor());
1753 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1754 .await;
1755 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1756 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1757 let file_path = project
1758 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1759 .unwrap();
1760 let buffer = project
1761 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1762 .await
1763 .unwrap();
1764
1765 cx.update(|cx| {
1766 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1767 buffer.update(cx, |buffer, cx| {
1768 buffer
1769 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1770 .unwrap()
1771 });
1772 buffer.update(cx, |buffer, cx| {
1773 buffer
1774 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1775 .unwrap()
1776 });
1777 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1778 });
1779 cx.run_until_parked();
1780 assert_eq!(
1781 buffer.read_with(cx, |buffer, _| buffer.text()),
1782 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1783 );
1784 assert_eq!(
1785 unreviewed_hunks(&action_log, cx),
1786 vec![(
1787 buffer.clone(),
1788 vec![
1789 HunkStatus {
1790 range: Point::new(1, 0)..Point::new(3, 0),
1791 diff_status: DiffHunkStatusKind::Modified,
1792 old_text: "def\n".into(),
1793 },
1794 HunkStatus {
1795 range: Point::new(5, 0)..Point::new(5, 3),
1796 diff_status: DiffHunkStatusKind::Modified,
1797 old_text: "mno".into(),
1798 }
1799 ],
1800 )]
1801 );
1802
1803 action_log.update(cx, |log, cx| {
1804 let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
1805 ..buffer.read(cx).anchor_before(Point::new(1, 0));
1806 let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
1807 ..buffer.read(cx).anchor_before(Point::new(5, 3));
1808
1809 log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], None, cx)
1810 .detach();
1811 assert_eq!(
1812 buffer.read_with(cx, |buffer, _| buffer.text()),
1813 "abc\ndef\nghi\njkl\nmno"
1814 );
1815 });
1816 cx.run_until_parked();
1817 assert_eq!(
1818 buffer.read_with(cx, |buffer, _| buffer.text()),
1819 "abc\ndef\nghi\njkl\nmno"
1820 );
1821 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1822 }
1823
1824 #[gpui::test(iterations = 10)]
1825 async fn test_reject_deleted_file(cx: &mut TestAppContext) {
1826 init_test(cx);
1827
1828 let fs = FakeFs::new(cx.executor());
1829 fs.insert_tree(path!("/dir"), json!({"file": "content"}))
1830 .await;
1831 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1832 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1833 let file_path = project
1834 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1835 .unwrap();
1836 let buffer = project
1837 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
1838 .await
1839 .unwrap();
1840
1841 cx.update(|cx| {
1842 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
1843 });
1844 project
1845 .update(cx, |project, cx| {
1846 project.delete_file(file_path.clone(), false, cx)
1847 })
1848 .unwrap()
1849 .await
1850 .unwrap();
1851 cx.run_until_parked();
1852 assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
1853 assert_eq!(
1854 unreviewed_hunks(&action_log, cx),
1855 vec![(
1856 buffer.clone(),
1857 vec![HunkStatus {
1858 range: Point::new(0, 0)..Point::new(0, 0),
1859 diff_status: DiffHunkStatusKind::Deleted,
1860 old_text: "content".into(),
1861 }]
1862 )]
1863 );
1864
1865 action_log
1866 .update(cx, |log, cx| {
1867 log.reject_edits_in_ranges(
1868 buffer.clone(),
1869 vec![Point::new(0, 0)..Point::new(0, 0)],
1870 None,
1871 cx,
1872 )
1873 })
1874 .await
1875 .unwrap();
1876 cx.run_until_parked();
1877 assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
1878 assert!(fs.is_file(path!("/dir/file").as_ref()).await);
1879 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1880 }
1881
    #[gpui::test(iterations = 10)]
    async fn test_reject_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 7),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 11)],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

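    // Rejecting edits on an agent-created file that the user has since modified
    // should keep the file on disk with both the agent's and the user's content.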
    #[gpui::test]
    async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();

        // User makes additional edits
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| {
                buffer.edit([(10..10, "\nuser added this line")], None, cx);
            });
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // Reject all
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(100, 0)],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();

        // The file should be preserved, with both the AI content and the user's edit
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        let content = buffer.read_with(cx, |buffer, _| buffer.text());
        assert_eq!(content, "ai content\nuser added this line");
    }

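    // After the user accepts the creation hunk, rejecting a subsequent agent edit
    // should roll the buffer back to the accepted contents instead of deleting the file.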
    #[gpui::test]
    async fn test_reject_after_accepting_hunk_on_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User accepts the single hunk
        action_log.update(cx, |log, cx| {
            let buffer_range = Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id());
            log.keep_edits_in_range(buffer.clone(), buffer_range, None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // AI modifies the file
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User rejects the hunk
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Anchor::min_max_range_for_buffer(
                        buffer.read(cx).remote_id(),
                    )],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "ai content v1"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

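    // "Reject All" issued after a previous "Accept All" on a created file should
    // restore the accepted contents rather than deleting the file.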
    #[gpui::test]
    async fn test_reject_edits_on_previously_accepted_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();

        // User clicks "Accept All"
        action_log.update(cx, |log, cx| log.keep_all_edits(None, cx));
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared

        // AI modifies file again
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User clicks "Reject All"
        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "ai content v1"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

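    // Applies a random mix of agent edits, user edits, keeps, and rejects, and
    // periodically checks that the tracked diff stays consistent with the buffer.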
    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        init_test(cx);

        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

        for _ in 0..operations {
            match rng.random_range(0..100) {
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, None, cx)
                    });
                }
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            log.reject_edits_in_ranges(buffer.clone(), vec![range], None, cx)
                        })
                        .await
                        .unwrap();
                }
                _ => {
                    let is_agent_edit = rng.random_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            if rng.random_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

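        // Waits for pending diff updates, then replays the unreviewed edits onto the
        // diff base and asserts the result matches the current buffer text.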
        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }

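    // Committing to the git HEAD should automatically keep the unreviewed hunks that
    // the commit contains, leaving only the hunks it didn't include.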
    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Make another commit that accepts the GGG edit and adds a line where NEW was
        // inserted, but with different content, so the NEW hunk stays unreviewed
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into())],
            "0000002",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer,
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

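    // Simplified representation of a diff hunk used by the assertions in these tests.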
    #[derive(Debug, Clone, PartialEq, Eq)]
    struct HunkStatus {
        range: Range<Point>,
        diff_status: DiffHunkStatusKind,
        old_text: String,
    }

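    // Collects the unreviewed hunks reported by the action log for every changed buffer.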
    fn unreviewed_hunks(
        action_log: &Entity<ActionLog>,
        cx: &TestAppContext,
    ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
        cx.read(|cx| {
            action_log
                .read(cx)
                .changed_buffers(cx)
                .into_iter()
                .map(|(buffer, diff)| {
                    let snapshot = buffer.read(cx).snapshot();
                    (
                        buffer,
                        diff.read(cx)
                            .hunks(&snapshot, cx)
                            .map(|hunk| HunkStatus {
                                diff_status: hunk.status().kind,
                                range: hunk.range,
                                old_text: diff
                                    .read(cx)
                                    .base_text()
                                    .text_for_range(hunk.diff_base_byte_range)
                                    .collect(),
                            })
                            .collect(),
                    )
                })
                .collect()
        })
    }
}