use anyhow::{Context as _, Result};
use buffer_diff::BufferDiff;
use clock;
use collections::BTreeMap;
use futures::{FutureExt, StreamExt, channel::mpsc};
use gpui::{
    App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
};
use language::{Anchor, Buffer, BufferEvent, Point, ToPoint};
use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
use std::{cmp, ops::Range, sync::Arc};
use text::{Edit, Patch, Rope};
use util::{RangeExt, ResultExt as _};

/// Tracks actions performed by tools in a thread
pub struct ActionLog {
    /// Buffers that we want to notify the model about when they change.
    tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
    /// The project this action log is associated with
    project: Entity<Project>,
}

impl ActionLog {
    /// Creates a new, empty action log associated with the given project.
    pub fn new(project: Entity<Project>) -> Self {
        Self {
            tracked_buffers: BTreeMap::default(),
            project,
        }
    }

    pub fn project(&self) -> &Entity<Project> {
        &self.project
    }

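    /// Starts tracking `buffer` if it isn't tracked yet, computing its initial
    /// status and spawning the background task that keeps its diff up to date.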
    fn track_buffer_internal(
        &mut self,
        buffer: Entity<Buffer>,
        is_created: bool,
        cx: &mut Context<Self>,
    ) -> &mut TrackedBuffer {
        let status = if is_created {
            if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
                match tracked.status {
                    TrackedBufferStatus::Created {
                        existing_file_content,
                    } => TrackedBufferStatus::Created {
                        existing_file_content,
                    },
                    TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
                        TrackedBufferStatus::Created {
                            existing_file_content: Some(tracked.diff_base),
                        }
                    }
                }
            } else if buffer
                .read(cx)
                .file()
                .is_some_and(|file| file.disk_state().exists())
            {
                TrackedBufferStatus::Created {
                    existing_file_content: Some(buffer.read(cx).as_rope().clone()),
                }
            } else {
                TrackedBufferStatus::Created {
                    existing_file_content: None,
                }
            }
        } else {
            TrackedBufferStatus::Modified
        };

        let tracked_buffer = self
            .tracked_buffers
            .entry(buffer.clone())
            .or_insert_with(|| {
                let open_lsp_handle = self.project.update(cx, |project, cx| {
                    project.register_buffer_with_language_servers(&buffer, cx)
                });

                let text_snapshot = buffer.read(cx).text_snapshot();
                let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
                let diff_base;
                let unreviewed_edits;
                if is_created {
                    diff_base = Rope::default();
                    unreviewed_edits = Patch::new(vec![Edit {
                        old: 0..1,
                        new: 0..text_snapshot.max_point().row + 1,
                    }])
                } else {
                    diff_base = buffer.read(cx).as_rope().clone();
                    unreviewed_edits = Patch::default();
                }
                TrackedBuffer {
                    buffer: buffer.clone(),
                    diff_base,
                    unreviewed_edits,
                    snapshot: text_snapshot,
                    status,
                    version: buffer.read(cx).version(),
                    diff,
                    diff_update: diff_update_tx,
                    _open_lsp_handle: open_lsp_handle,
                    _maintain_diff: cx.spawn({
                        let buffer = buffer.clone();
                        async move |this, cx| {
                            Self::maintain_diff(this, buffer, diff_update_rx, cx)
                                .await
                                .ok();
                        }
                    }),
                    _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
                }
            });
        tracked_buffer.version = buffer.read(cx).version();
        tracked_buffer
    }

    fn handle_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::Edited => self.handle_buffer_edited(buffer, cx),
            BufferEvent::FileHandleChanged => {
                self.handle_buffer_file_changed(buffer, cx);
            }
            _ => {}
        };
    }

    fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };
        tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
    }

    fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        match tracked_buffer.status {
            TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
                if buffer
                    .read(cx)
                    .file()
                    .is_some_and(|file| file.disk_state().is_deleted())
                {
                    // If the buffer had been edited by a tool, but it got
                    // deleted externally, we want to stop tracking it.
                    self.tracked_buffers.remove(&buffer);
                }
                cx.notify();
            }
            TrackedBufferStatus::Deleted => {
                if buffer
                    .read(cx)
                    .file()
                    .is_some_and(|file| !file.disk_state().is_deleted())
                {
                    // If the buffer had been deleted by a tool, but it got
                    // resurrected externally, we want to clear the edits we
                    // were tracking and reset the buffer's state.
                    self.tracked_buffers.remove(&buffer);
                    self.track_buffer_internal(buffer, false, cx);
                }
                cx.notify();
            }
        }
    }

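    /// Background task that keeps the tracked buffer's diff up to date,
    /// reacting both to buffer snapshots sent via `schedule_diff_update`
    /// and to changes of the git HEAD.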
    async fn maintain_diff(
        this: WeakEntity<Self>,
        buffer: Entity<Buffer>,
        mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
        let git_diff = this
            .update(cx, |this, cx| {
                this.project.update(cx, |project, cx| {
                    project.open_uncommitted_diff(buffer.clone(), cx)
                })
            })?
            .await
            .ok();
        let buffer_repo = git_store.read_with(cx, |git_store, cx| {
            git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        })?;

        let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
        let _repo_subscription =
            if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
                cx.update(|cx| {
                    let mut old_head = buffer_repo.read(cx).head_commit.clone();
                    Some(cx.subscribe(git_diff, move |_, event, cx| {
                        if let buffer_diff::BufferDiffEvent::DiffChanged { .. } = event {
                            let new_head = buffer_repo.read(cx).head_commit.clone();
                            if new_head != old_head {
                                old_head = new_head;
                                git_diff_updates_tx.send(()).ok();
                            }
                        }
                    }))
                })?
            } else {
                None
            };

        loop {
            futures::select_biased! {
                buffer_update = buffer_updates.next() => {
                    if let Some((author, buffer_snapshot)) = buffer_update {
                        Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
                    } else {
                        break;
                    }
                }
                _ = git_diff_updates_rx.changed().fuse() => {
                    if let Some(git_diff) = git_diff.as_ref() {
                        Self::keep_committed_edits(&this, &buffer, git_diff, cx).await?;
                    }
                }
            }
        }

        Ok(())
    }

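    /// Handles a new buffer snapshot: computes the edits since the last tracked
    /// snapshot and, when they were made by the user and don't conflict with
    /// unreviewed agent edits, folds them into the diff base before refreshing
    /// the diff.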
    async fn track_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        author: ChangeAuthor,
        buffer_snapshot: text::BufferSnapshot,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let rebase = this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;

            let rebase = cx.background_spawn({
                let mut base_text = tracked_buffer.diff_base.clone();
                let old_snapshot = tracked_buffer.snapshot.clone();
                let new_snapshot = buffer_snapshot.clone();
                let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let edits = diff_snapshots(&old_snapshot, &new_snapshot);
                async move {
                    if let ChangeAuthor::User = author {
                        apply_non_conflicting_edits(
                            &unreviewed_edits,
                            edits,
                            &mut base_text,
                            new_snapshot.as_rope(),
                        );
                    }

                    (Arc::from(base_text.to_string().as_str()), base_text)
                }
            });

            anyhow::Ok(rebase)
        })??;
        let (new_base_text, new_diff_base) = rebase.await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }

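    /// Called when the git HEAD changes: unreviewed hunks whose content matches
    /// what was committed are folded into the diff base, i.e. treated as reviewed.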
    async fn keep_committed_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        git_diff: &Entity<BufferDiff>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let buffer_snapshot = this.read_with(cx, |this, _cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok(tracked_buffer.snapshot.clone())
        })??;
        let (new_base_text, new_diff_base) = this
            .read_with(cx, |this, cx| {
                let tracked_buffer = this
                    .tracked_buffers
                    .get(buffer)
                    .context("buffer not tracked")?;
                let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let agent_diff_base = tracked_buffer.diff_base.clone();
                let git_diff_base = git_diff.read(cx).base_text(cx).as_rope().clone();
                let buffer_text = tracked_buffer.snapshot.as_rope().clone();
                anyhow::Ok(cx.background_spawn(async move {
                    let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
                    let committed_edits = language::line_diff(
                        &agent_diff_base.to_string(),
                        &git_diff_base.to_string(),
                    )
                    .into_iter()
                    .map(|(old, new)| Edit { old, new });

                    let mut new_agent_diff_base = agent_diff_base.clone();
                    let mut row_delta = 0i32;
                    for committed in committed_edits {
                        while let Some(unreviewed) = old_unreviewed_edits.peek() {
                            // If the committed edit matches the unreviewed
                            // edit, assume the user wants to keep it.
                            if committed.old == unreviewed.old {
                                let unreviewed_new =
                                    buffer_text.slice_rows(unreviewed.new.clone()).to_string();
                                let committed_new =
                                    git_diff_base.slice_rows(committed.new.clone()).to_string();
                                if unreviewed_new == committed_new {
                                    let old_byte_start =
                                        new_agent_diff_base.point_to_offset(Point::new(
                                            (unreviewed.old.start as i32 + row_delta) as u32,
                                            0,
                                        ));
                                    let old_byte_end =
                                        new_agent_diff_base.point_to_offset(cmp::min(
                                            Point::new(
                                                (unreviewed.old.end as i32 + row_delta) as u32,
                                                0,
                                            ),
                                            new_agent_diff_base.max_point(),
                                        ));
                                    new_agent_diff_base
                                        .replace(old_byte_start..old_byte_end, &unreviewed_new);
                                    row_delta +=
                                        unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
                                }
                            } else if unreviewed.old.start >= committed.old.end {
                                break;
                            }

                            old_unreviewed_edits.next().unwrap();
                        }
                    }

                    (
                        Arc::from(new_agent_diff_base.to_string().as_str()),
                        new_agent_diff_base,
                    )
                }))
            })??
            .await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }

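    /// Recomputes the tracked buffer's `BufferDiff` against `new_base_text` and
    /// rebuilds the row-based unreviewed edits from the resulting hunks.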
    async fn update_diff(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        buffer_snapshot: text::BufferSnapshot,
        new_base_text: Arc<str>,
        new_diff_base: Rope,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let (diff, language) = this.read_with(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok((
                tracked_buffer.diff.clone(),
                buffer.read(cx).language().cloned(),
            ))
        })??;
        let update = diff.update(cx, |diff, cx| {
            diff.update_diff(
                buffer_snapshot.clone(),
                Some(new_base_text),
                true,
                language,
                cx,
            )
        });
        let mut unreviewed_edits = Patch::default();
        if let Ok(update) = update {
            let update = update.await;

            diff.update(cx, |diff, cx| {
                diff.set_snapshot(update.clone(), &buffer_snapshot, cx)
            })?
            .await;
            let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx))?;

            unreviewed_edits = cx
                .background_spawn({
                    let buffer_snapshot = buffer_snapshot.clone();
                    let new_diff_base = new_diff_base.clone();
                    async move {
                        let mut unreviewed_edits = Patch::default();
                        for hunk in diff_snapshot.hunks_intersecting_range(
                            Anchor::min_for_buffer(buffer_snapshot.remote_id())
                                ..Anchor::max_for_buffer(buffer_snapshot.remote_id()),
                            &buffer_snapshot,
                        ) {
                            let old_range = new_diff_base
                                .offset_to_point(hunk.diff_base_byte_range.start)
                                ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
                            let new_range = hunk.range.start..hunk.range.end;
                            unreviewed_edits.push(point_to_row_edit(
                                Edit {
                                    old: old_range,
                                    new: new_range,
                                },
                                &new_diff_base,
                                buffer_snapshot.as_rope(),
                            ));
                        }
                        unreviewed_edits
                    }
                })
                .await;
        }
        this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;
            tracked_buffer.diff_base = new_diff_base;
            tracked_buffer.snapshot = buffer_snapshot;
            tracked_buffer.unreviewed_edits = unreviewed_edits;
            cx.notify();
            anyhow::Ok(())
        })?
    }

    /// Tracks a buffer as read by the agent, so we can notify the model about user edits.
    pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.track_buffer_internal(buffer, false, cx);
    }

    /// Marks a buffer as created by the agent, so we can refresh it in the context.
    pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.track_buffer_internal(buffer, true, cx);
    }

    /// Marks a buffer as edited by the agent, so we can refresh it in the context.
    pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
        if let TrackedBufferStatus::Deleted = tracked_buffer.status {
            tracked_buffer.status = TrackedBufferStatus::Modified;
        }
        tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
    }

    pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
        match tracked_buffer.status {
            TrackedBufferStatus::Created { .. } => {
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            TrackedBufferStatus::Modified => {
                buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
                tracked_buffer.status = TrackedBufferStatus::Deleted;
                tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
            }
            TrackedBufferStatus::Deleted => {}
        }
        cx.notify();
    }

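    /// Marks the agent edits intersecting `buffer_range` as reviewed, folding
    /// them into the diff base so they no longer appear as unreviewed hunks.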
    pub fn keep_edits_in_range(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_range: Range<impl language::ToPoint>,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
        match tracked_buffer.status {
            TrackedBufferStatus::Deleted => {
                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            _ => {
                let buffer = buffer.read(cx);
                let buffer_range =
                    buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
                let mut delta = 0i32;
                tracked_buffer.unreviewed_edits.retain_mut(|edit| {
                    edit.old.start = (edit.old.start as i32 + delta) as u32;
                    edit.old.end = (edit.old.end as i32 + delta) as u32;

                    if buffer_range.end.row < edit.new.start
                        || buffer_range.start.row > edit.new.end
                    {
                        true
                    } else {
                        let old_range = tracked_buffer
                            .diff_base
                            .point_to_offset(Point::new(edit.old.start, 0))
                            ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                Point::new(edit.old.end, 0),
                                tracked_buffer.diff_base.max_point(),
                            ));
                        let new_range = tracked_buffer
                            .snapshot
                            .point_to_offset(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.point_to_offset(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        tracked_buffer.diff_base.replace(
                            old_range,
                            &tracked_buffer
                                .snapshot
                                .text_for_range(new_range)
                                .collect::<String>(),
                        );
                        delta += edit.new_len() as i32 - edit.old_len() as i32;
                        metrics.add_edit(edit);
                        false
                    }
                });
                if tracked_buffer.unreviewed_edits.is_empty()
                    && let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status
                {
                    tracked_buffer.status = TrackedBufferStatus::Modified;
                }
                tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
            }
        }
        if let Some(telemetry) = telemetry {
            telemetry_report_accepted_edits(&telemetry, metrics);
        }
    }

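    /// Reverts the agent edits intersecting `buffer_ranges`: created files are
    /// deleted or restored to their previous content, deleted files are
    /// recreated, and modified buffers have the intersecting hunks rolled back
    /// to the diff base.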
    pub fn reject_edits_in_ranges(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_ranges: Vec<Range<impl language::ToPoint>>,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return Task::ready(Ok(()));
        };

        let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
        let task = match &tracked_buffer.status {
            TrackedBufferStatus::Created {
                existing_file_content,
            } => {
                let task = if let Some(existing_file_content) = existing_file_content {
                    buffer.update(cx, |buffer, cx| {
                        buffer.start_transaction();
                        buffer.set_text("", cx);
                        for chunk in existing_file_content.chunks() {
                            buffer.append(chunk, cx);
                        }
                        buffer.end_transaction(cx);
                    });
                    self.project
                        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
                } else {
                    // For a file created by AI with no pre-existing content,
                    // only delete the file if we're certain it contains only AI content
                    // with no edits from the user.

                    let initial_version = tracked_buffer.version.clone();
                    let current_version = buffer.read(cx).version();

                    let current_content = buffer.read(cx).text();
                    let tracked_content = tracked_buffer.snapshot.text();

                    let is_ai_only_content =
                        initial_version == current_version && current_content == tracked_content;

                    if is_ai_only_content {
                        buffer
                            .read(cx)
                            .entry_id(cx)
                            .and_then(|entry_id| {
                                self.project.update(cx, |project, cx| {
                                    project.delete_entry(entry_id, false, cx)
                                })
                            })
                            .unwrap_or(Task::ready(Ok(())))
                    } else {
                        // Not sure how to disentangle edits made by the user
                        // from edits made by the AI at this point.
                        // For now, preserve both to avoid data loss.
                        //
                        // TODO: Better solution (disable "Reject" after user makes some
                        // edit or find a way to differentiate between AI and user edits)
                        Task::ready(Ok(()))
                    }
                };

                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                cx.notify();
                task
            }
            TrackedBufferStatus::Deleted => {
                buffer.update(cx, |buffer, cx| {
                    buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
                });
                let save = self
                    .project
                    .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));

                // Clear all tracked edits for this buffer and start over as if we just read it.
                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                self.buffer_read(buffer.clone(), cx);
                cx.notify();
                save
            }
            TrackedBufferStatus::Modified => {
                buffer.update(cx, |buffer, cx| {
                    let mut buffer_row_ranges = buffer_ranges
                        .into_iter()
                        .map(|range| {
                            range.start.to_point(buffer).row..range.end.to_point(buffer).row
                        })
                        .peekable();

                    let mut edits_to_revert = Vec::new();
                    for edit in tracked_buffer.unreviewed_edits.edits() {
                        let new_range = tracked_buffer
                            .snapshot
                            .anchor_before(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.anchor_after(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        let new_row_range = new_range.start.to_point(buffer).row
                            ..new_range.end.to_point(buffer).row;

                        let mut revert = false;
                        while let Some(buffer_row_range) = buffer_row_ranges.peek() {
                            if buffer_row_range.end < new_row_range.start {
                                buffer_row_ranges.next();
                            } else if buffer_row_range.start > new_row_range.end {
                                break;
                            } else {
                                revert = true;
                                break;
                            }
                        }

                        if revert {
                            metrics.add_edit(edit);
                            let old_range = tracked_buffer
                                .diff_base
                                .point_to_offset(Point::new(edit.old.start, 0))
                                ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                    Point::new(edit.old.end, 0),
                                    tracked_buffer.diff_base.max_point(),
                                ));
                            let old_text = tracked_buffer
                                .diff_base
                                .chunks_in_range(old_range)
                                .collect::<String>();
                            edits_to_revert.push((new_range, old_text));
                        }
                    }

                    buffer.edit(edits_to_revert, None, cx);
                });
                self.project
                    .update(cx, |project, cx| project.save_buffer(buffer, cx))
            }
        };
        if let Some(telemetry) = telemetry {
            telemetry_report_rejected_edits(&telemetry, metrics);
        }
        task
    }

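    /// Marks every unreviewed edit in every tracked buffer as reviewed, dropping
    /// buffers that were deleted by the agent from tracking.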
    pub fn keep_all_edits(
        &mut self,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) {
        self.tracked_buffers.retain(|buffer, tracked_buffer| {
            let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
            metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
            if let Some(telemetry) = telemetry.as_ref() {
                telemetry_report_accepted_edits(telemetry, metrics);
            }
            match tracked_buffer.status {
                TrackedBufferStatus::Deleted => false,
                _ => {
                    if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
                        tracked_buffer.status = TrackedBufferStatus::Modified;
                    }
                    tracked_buffer.unreviewed_edits.clear();
                    tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
                    tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
                    true
                }
            }
        });

        cx.notify();
    }

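    /// Rejects the unreviewed edits in every changed buffer, returning a task
    /// that resolves once all of the individual rejections have completed.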
    pub fn reject_all_edits(
        &mut self,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) -> Task<()> {
        let futures = self.changed_buffers(cx).into_keys().map(|buffer| {
            let buffer_ranges = vec![Anchor::min_max_range_for_buffer(
                buffer.read(cx).remote_id(),
            )];
            let reject = self.reject_edits_in_ranges(buffer, buffer_ranges, telemetry.clone(), cx);

            async move {
                reject.await.log_err();
            }
        });

        let task = futures::future::join_all(futures);
        cx.background_spawn(async move {
            task.await;
        })
    }

    /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
    pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
        self.tracked_buffers
            .iter()
            .filter(|(_, tracked)| tracked.has_edits(cx))
            .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
            .collect()
    }

    /// Iterates over the buffers that have changed since the model last read or edited them.
    pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
        self.tracked_buffers
            .iter()
            .filter(|(buffer, tracked)| {
                let buffer = buffer.read(cx);

                tracked.version != buffer.version
                    && buffer
                        .file()
                        .is_some_and(|file| !file.disk_state().is_deleted())
            })
            .map(|(buffer, _)| buffer)
    }
}

#[derive(Clone)]
pub struct ActionLogTelemetry {
    pub agent_telemetry_id: SharedString,
    pub session_id: Arc<str>,
}

struct ActionLogMetrics {
    lines_removed: u32,
    lines_added: u32,
    language: Option<SharedString>,
}

impl ActionLogMetrics {
    fn for_buffer(buffer: &Buffer) -> Self {
        Self {
            language: buffer.language().map(|l| l.name().0),
            lines_removed: 0,
            lines_added: 0,
        }
    }

    fn add_edits(&mut self, edits: &[Edit<u32>]) {
        for edit in edits {
            self.add_edit(edit);
        }
    }

    fn add_edit(&mut self, edit: &Edit<u32>) {
        self.lines_added += edit.new_len();
        self.lines_removed += edit.old_len();
    }
}

fn telemetry_report_accepted_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
    telemetry::event!(
        "Agent Edits Accepted",
        agent = telemetry.agent_telemetry_id,
        session = telemetry.session_id,
        language = metrics.language,
        lines_added = metrics.lines_added,
        lines_removed = metrics.lines_removed
    );
}

fn telemetry_report_rejected_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
    telemetry::event!(
        "Agent Edits Rejected",
        agent = telemetry.agent_telemetry_id,
        session = telemetry.session_id,
        language = metrics.language,
        lines_added = metrics.lines_added,
        lines_removed = metrics.lines_removed
    );
}

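/// Applies the subset of `edits` (expressed against `new_text`) that don't
/// intersect any unreviewed edit in `patch` onto `old_text`, returning whether
/// any change was made.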
fn apply_non_conflicting_edits(
    patch: &Patch<u32>,
    edits: Vec<Edit<u32>>,
    old_text: &mut Rope,
    new_text: &Rope,
) -> bool {
    let mut old_edits = patch.edits().iter().cloned().peekable();
    let mut new_edits = edits.into_iter().peekable();
    let mut applied_delta = 0i32;
    let mut rebased_delta = 0i32;
    let mut has_made_changes = false;

    while let Some(mut new_edit) = new_edits.next() {
        let mut conflict = false;

        // Push all the old edits that are before this new edit or that intersect with it.
        while let Some(old_edit) = old_edits.peek() {
            if new_edit.old.end < old_edit.new.start
                || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
            {
                break;
            } else if new_edit.old.start > old_edit.new.end
                || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
            {
                let old_edit = old_edits.next().unwrap();
                rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
            } else {
                conflict = true;
                if new_edits
                    .peek()
                    .is_some_and(|next_edit| next_edit.old.overlaps(&old_edit.new))
                {
                    new_edit = new_edits.next().unwrap();
                } else {
                    let old_edit = old_edits.next().unwrap();
                    rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
                }
            }
        }

        if !conflict {
            // This edit doesn't intersect with any old edit, so we can apply it to the old text.
            new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
            new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
            let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
                ..old_text.point_to_offset(cmp::min(
                    Point::new(new_edit.old.end, 0),
                    old_text.max_point(),
                ));
            let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
                ..new_text.point_to_offset(cmp::min(
                    Point::new(new_edit.new.end, 0),
                    new_text.max_point(),
                ));

            old_text.replace(
                old_bytes,
                &new_text.chunks_in_range(new_bytes).collect::<String>(),
            );
            applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
            has_made_changes = true;
        }
    }
    has_made_changes
}

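/// Returns the row-based edits between two snapshots of the same buffer,
/// merging adjacent and overlapping edits into a single edit.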
fn diff_snapshots(
    old_snapshot: &text::BufferSnapshot,
    new_snapshot: &text::BufferSnapshot,
) -> Vec<Edit<u32>> {
    let mut edits = new_snapshot
        .edits_since::<Point>(&old_snapshot.version)
        .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
        .peekable();
    let mut row_edits = Vec::new();
    while let Some(mut edit) = edits.next() {
        while let Some(next_edit) = edits.peek() {
            if edit.old.end >= next_edit.old.start {
                edit.old.end = next_edit.old.end;
                edit.new.end = next_edit.new.end;
                edits.next();
            } else {
                break;
            }
        }
        row_edits.push(edit);
    }
    row_edits
}

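/// Converts a point-based edit into an edit over whole rows, attributing
/// insertions that begin with a newline at the end of a line to the following
/// row and expanding partial-line edits to cover their full rows.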
fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
    if edit.old.start.column == old_text.line_len(edit.old.start.row)
        && new_text
            .chars_at(new_text.point_to_offset(edit.new.start))
            .next()
            == Some('\n')
        && edit.old.start != old_text.max_point()
    {
        Edit {
            old: edit.old.start.row + 1..edit.old.end.row + 1,
            new: edit.new.start.row + 1..edit.new.end.row + 1,
        }
    } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
        Edit {
            old: edit.old.start.row..edit.old.end.row,
            new: edit.new.start.row..edit.new.end.row,
        }
    } else {
        Edit {
            old: edit.old.start.row..edit.old.end.row + 1,
            new: edit.new.start.row..edit.new.end.row + 1,
        }
    }
}

#[derive(Copy, Clone, Debug)]
enum ChangeAuthor {
    User,
    Agent,
}

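/// The kind of change the agent has made to a tracked buffer.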
enum TrackedBufferStatus {
    Created { existing_file_content: Option<Rope> },
    Modified,
    Deleted,
}

struct TrackedBuffer {
    buffer: Entity<Buffer>,
    diff_base: Rope,
    unreviewed_edits: Patch<u32>,
    status: TrackedBufferStatus,
    version: clock::Global,
    diff: Entity<BufferDiff>,
    snapshot: text::BufferSnapshot,
    diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
    _open_lsp_handle: OpenLspBufferHandle,
    _maintain_diff: Task<()>,
    _subscription: Subscription,
}

impl TrackedBuffer {
    fn has_edits(&self, cx: &App) -> bool {
        self.diff
            .read(cx)
            .snapshot(cx)
            .hunks(self.buffer.read(cx))
            .next()
            .is_some()
    }

    fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
        self.diff_update
            .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
            .ok();
    }
}

pub struct ChangedBuffer {
    pub diff: Entity<BufferDiff>,
}

#[cfg(test)]
mod tests {
    use super::*;
    use buffer_diff::DiffHunkStatusKind;
    use gpui::TestAppContext;
    use language::Point;
    use project::{FakeFs, Fs, Project, RemoveOptions};
    use rand::prelude::*;
    use serde_json::json;
    use settings::SettingsStore;
    use std::env;
    use util::{RandomCharIter, path};

    #[ctor::ctor]
    fn init_logger() {
        zlog::init_test();
    }

    fn init_test(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
        });
    }

    #[gpui::test(iterations = 10)]
    async fn test_keep_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndEf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(2, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(4, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(2, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\n".into(),
                }],
            )]
        );

        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

    #[gpui::test(iterations = 10)]
    async fn test_deletions(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
                    .unwrap();
                buffer.finalize_last_transaction();
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
                    .unwrap();
                buffer.finalize_last_transaction();
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nghi\njkl\npqr"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "mno\n".into(),
                    }
                ],
            )]
        );

        buffer.update(cx, |buffer, cx| buffer.undo(cx));
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nghi\njkl\nmno\npqr"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(1, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "def\n".into(),
                }],
            )]
        );

        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

    #[gpui::test(iterations = 10)]
    async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndeF\nGHI\njkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        buffer.update(cx, |buffer, cx| {
            buffer.edit(
                [
                    (Point::new(0, 2)..Point::new(0, 2), "X"),
                    (Point::new(3, 0)..Point::new(3, 0), "Y"),
                ],
                None,
                cx,
            )
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        buffer.update(cx, |buffer, cx| {
            buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndZeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

    #[gpui::test(iterations = 10)]
    async fn test_creating_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 6),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), 0..5, None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

    #[gpui::test(iterations = 10)]
    async fn test_overwriting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 19),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }

    #[gpui::test(iterations = 10)]
    async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 37),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "Lorem ipsum dolor".into(),
                }],
            )]
        );

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 9),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }

    #[gpui::test(iterations = 10)]
    async fn test_deleting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file1": "lorem\n", "file2": "ipsum\n"}),
        )
        .await;

        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let file1_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();
        let file2_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
            .unwrap();

        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let buffer1 = project
            .update(cx, |project, cx| {
                project.open_buffer(file1_path.clone(), cx)
            })
            .await
            .unwrap();
        let buffer2 = project
            .update(cx, |project, cx| {
                project.open_buffer(file2_path.clone(), cx)
            })
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| {
                project.delete_file(file1_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        project
            .update(cx, |project, cx| {
                project.delete_file(file2_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![
                (
                    buffer1.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "lorem\n".into(),
                    }]
                ),
                (
                    buffer2.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "ipsum\n".into(),
                    }],
                )
            ]
        );

        // Simulate file1 being recreated externally.
        fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
            .await;

        // Simulate file2 being recreated by a tool.
        let buffer2 = project
            .update(cx, |project, cx| project.open_buffer(file2_path, cx))
            .await
            .unwrap();
        action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
        buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
        action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer2.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Simulate file2 being deleted externally.
        fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

    #[gpui::test(iterations = 10)]
    async fn test_reject_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // If the rejected range doesn't overlap with any hunk, we ignore it.
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(1, 0)],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(4, 0)..Point::new(4, 3),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "mno".into(),
                }],
            )]
        );

        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

    #[gpui::test(iterations = 10)]
    async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        action_log.update(cx, |log, cx| {
            let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
                ..buffer.read(cx).anchor_before(Point::new(1, 0));
            let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
                ..buffer.read(cx).anchor_before(Point::new(5, 3));

            log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], None, cx)
                .detach();
            assert_eq!(
                buffer.read_with(cx, |buffer, _| buffer.text()),
                "abc\ndef\nghi\njkl\nmno"
            );
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

    #[gpui::test(iterations = 10)]
    async fn test_reject_deleted_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "content"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| {
                project.delete_file(file_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "content".into(),
                }]
            )]
        );

        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 0)],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
        assert!(fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

    #[gpui::test(iterations = 10)]
    async fn test_reject_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 7),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 11)],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

    #[gpui::test]
    async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();

        // User makes additional edits
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| {
                buffer.edit([(10..10, "\nuser added this line")], None, cx);
            });
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // Reject all
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(100, 0)],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();

        // File should still contain all the content
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        let content = buffer.read_with(cx, |buffer, _| buffer.text());
        assert_eq!(content, "ai content\nuser added this line");
    }
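
    // After the user accepts the creation hunk, rejecting a later agent edit should only
    // roll the buffer back to the accepted contents rather than deleting the file.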
    #[gpui::test]
    async fn test_reject_after_accepting_hunk_on_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User accepts the single hunk
        action_log.update(cx, |log, cx| {
            let buffer_range = Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id());
            log.keep_edits_in_range(buffer.clone(), buffer_range, None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // AI modifies the file
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User rejects the hunk
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Anchor::min_max_range_for_buffer(
                        buffer.read(cx).remote_id(),
                    )],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "ai content v1"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
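
    // "Reject All" issued after a previous "Accept All" should only undo the edits made
    // since the acceptance; the created file itself survives.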
    #[gpui::test]
    async fn test_reject_edits_on_previously_accepted_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();

        // User clicks "Accept All"
        action_log.update(cx, |log, cx| log.keep_all_edits(None, cx));
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared

        // AI modifies file again
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User clicks "Reject All"
        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "ai content v1"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
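
    // Randomized stress test: interleaves agent edits, user edits, keeps, and rejects,
    // then verifies (via `quiesce`) that the tracked diff still reproduces the buffer.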
    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        init_test(cx);

        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

        for _ in 0..operations {
            match rng.random_range(0..100) {
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, None, cx)
                    });
                }
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            log.reject_edits_in_ranges(buffer.clone(), vec![range], None, cx)
                        })
                        .await
                        .unwrap();
                }
                _ => {
                    let is_agent_edit = rng.random_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            if rng.random_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

        // Replays the remaining unreviewed edits on top of the tracked diff base and
        // checks that the result matches the buffer's current text, i.e. the action
        // log never loses or corrupts an edit.
        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    // Edits are iterated in order, so everything before this edit has
                    // already been rewritten to match the new text; `edit.new.start`
                    // therefore addresses the right rows in the partially rebuilt rope.
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }
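
    // Committing file contents that match an unreviewed agent edit should implicitly
    // mark that hunk as kept, while unmatched hunks remain unreviewed.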
    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Make another commit that takes the GGG edit and replaces the NEW line with
        // different content, so the NEW hunk stays unreviewed
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into())],
            "0000002",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer,
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
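
    // Plain, comparable snapshot of a diff hunk, used by the assertions in these tests.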
    #[derive(Debug, Clone, PartialEq, Eq)]
    struct HunkStatus {
        range: Range<Point>,
        diff_status: DiffHunkStatusKind,
        old_text: String,
    }
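
    // Resolves every changed buffer tracked by the action log into point ranges and old
    // text so the tests above can assert on unreviewed hunks with plain values.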
    fn unreviewed_hunks(
        action_log: &Entity<ActionLog>,
        cx: &TestAppContext,
    ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
        cx.read(|cx| {
            action_log
                .read(cx)
                .changed_buffers(cx)
                .into_iter()
                .map(|(buffer, diff)| {
                    let snapshot = buffer.read(cx).snapshot();
                    (
                        buffer,
                        diff.read(cx)
                            .snapshot(cx)
                            .hunks(&snapshot)
                            .map(|hunk| HunkStatus {
                                diff_status: hunk.status().kind,
                                range: hunk.range,
                                old_text: diff
                                    .read(cx)
                                    .base_text(cx)
                                    .text_for_range(hunk.diff_base_byte_range)
                                    .collect(),
                            })
                            .collect(),
                    )
                })
                .collect()
        })
    }
}