1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use clock;
4use collections::BTreeMap;
5use futures::{FutureExt, StreamExt, channel::mpsc};
6use gpui::{
7 App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
8};
9use language::{Anchor, Buffer, BufferEvent, Point, ToPoint};
10use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
11use std::{cmp, ops::Range, sync::Arc};
12use text::{Edit, Patch, Rope};
13use util::{RangeExt, ResultExt as _};
14
/// Tracks the actions performed by tools in a thread.
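///
/// A minimal sketch of how a thread might drive this type (assuming a
/// `project` and a `buffer` entity are already available inside a gpui
/// context; see the tests below for complete setups):
///
/// ```ignore
/// let action_log = cx.new(|_| ActionLog::new(project.clone()));
/// // The agent reads the buffer, then edits it through a tool call.
/// action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
/// action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
/// // Later, the user reviews the resulting hunks and accepts them.
/// action_log.update(cx, |log, cx| {
///     log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(100, 0), None, cx)
/// });
/// ```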
16pub struct ActionLog {
17 /// Buffers that we want to notify the model about when they change.
18 tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
    /// The project this action log is associated with.
20 project: Entity<Project>,
21}
22
23impl ActionLog {
24 /// Creates a new, empty action log associated with the given project.
25 pub fn new(project: Entity<Project>) -> Self {
26 Self {
27 tracked_buffers: BTreeMap::default(),
28 project,
29 }
30 }
31
32 pub fn project(&self) -> &Entity<Project> {
33 &self.project
34 }
35
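    /// Starts tracking `buffer` (or refreshes an existing entry): registers it
    /// with the language servers, creates its review diff, and spawns the task
    /// that keeps that diff up to date as the buffer changes.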
36 fn track_buffer_internal(
37 &mut self,
38 buffer: Entity<Buffer>,
39 is_created: bool,
40 cx: &mut Context<Self>,
41 ) -> &mut TrackedBuffer {
42 let status = if is_created {
43 if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
44 match tracked.status {
45 TrackedBufferStatus::Created {
46 existing_file_content,
47 } => TrackedBufferStatus::Created {
48 existing_file_content,
49 },
50 TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
51 TrackedBufferStatus::Created {
52 existing_file_content: Some(tracked.diff_base),
53 }
54 }
55 }
56 } else if buffer
57 .read(cx)
58 .file()
59 .is_some_and(|file| file.disk_state().exists())
60 {
61 TrackedBufferStatus::Created {
62 existing_file_content: Some(buffer.read(cx).as_rope().clone()),
63 }
64 } else {
65 TrackedBufferStatus::Created {
66 existing_file_content: None,
67 }
68 }
69 } else {
70 TrackedBufferStatus::Modified
71 };
72
73 let tracked_buffer = self
74 .tracked_buffers
75 .entry(buffer.clone())
76 .or_insert_with(|| {
77 let open_lsp_handle = self.project.update(cx, |project, cx| {
78 project.register_buffer_with_language_servers(&buffer, cx)
79 });
80
81 let text_snapshot = buffer.read(cx).text_snapshot();
82 let language = buffer.read(cx).language().cloned();
83 let language_registry = buffer.read(cx).language_registry();
84 let diff = cx.new(|cx| {
85 let mut diff = BufferDiff::new(&text_snapshot, cx);
86 diff.language_changed(language, language_registry, cx);
87 diff
88 });
89 let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
90 let diff_base;
91 let unreviewed_edits;
92 if is_created {
93 diff_base = Rope::default();
94 unreviewed_edits = Patch::new(vec![Edit {
95 old: 0..1,
96 new: 0..text_snapshot.max_point().row + 1,
97 }])
98 } else {
99 diff_base = buffer.read(cx).as_rope().clone();
100 unreviewed_edits = Patch::default();
101 }
102 TrackedBuffer {
103 buffer: buffer.clone(),
104 diff_base,
105 unreviewed_edits,
106 snapshot: text_snapshot,
107 status,
108 version: buffer.read(cx).version(),
109 diff,
110 diff_update: diff_update_tx,
111 _open_lsp_handle: open_lsp_handle,
112 _maintain_diff: cx.spawn({
113 let buffer = buffer.clone();
114 async move |this, cx| {
115 Self::maintain_diff(this, buffer, diff_update_rx, cx)
116 .await
117 .ok();
118 }
119 }),
120 _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
121 }
122 });
123 tracked_buffer.version = buffer.read(cx).version();
124 tracked_buffer
125 }
126
127 fn handle_buffer_event(
128 &mut self,
129 buffer: Entity<Buffer>,
130 event: &BufferEvent,
131 cx: &mut Context<Self>,
132 ) {
133 match event {
134 BufferEvent::Edited => self.handle_buffer_edited(buffer, cx),
135 BufferEvent::FileHandleChanged => {
136 self.handle_buffer_file_changed(buffer, cx);
137 }
138 _ => {}
139 };
140 }
141
142 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
143 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
144 return;
145 };
146 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
147 }
148
149 fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
150 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
151 return;
152 };
153
154 match tracked_buffer.status {
155 TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
156 if buffer
157 .read(cx)
158 .file()
159 .is_some_and(|file| file.disk_state().is_deleted())
160 {
161 // If the buffer had been edited by a tool, but it got
162 // deleted externally, we want to stop tracking it.
163 self.tracked_buffers.remove(&buffer);
164 }
165 cx.notify();
166 }
167 TrackedBufferStatus::Deleted => {
168 if buffer
169 .read(cx)
170 .file()
171 .is_some_and(|file| !file.disk_state().is_deleted())
172 {
173 // If the buffer had been deleted by a tool, but it got
174 // resurrected externally, we want to clear the edits we
175 // were tracking and reset the buffer's state.
176 self.tracked_buffers.remove(&buffer);
177 self.track_buffer_internal(buffer, false, cx);
178 }
179 cx.notify();
180 }
181 }
182 }
183
184 async fn maintain_diff(
185 this: WeakEntity<Self>,
186 buffer: Entity<Buffer>,
187 mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
188 cx: &mut AsyncApp,
189 ) -> Result<()> {
190 let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
191 let git_diff = this
192 .update(cx, |this, cx| {
193 this.project.update(cx, |project, cx| {
194 project.open_uncommitted_diff(buffer.clone(), cx)
195 })
196 })?
197 .await
198 .ok();
199 let buffer_repo = git_store.read_with(cx, |git_store, cx| {
200 git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
201 })?;
202
203 let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
204 let _repo_subscription =
205 if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
206 cx.update(|cx| {
207 let mut old_head = buffer_repo.read(cx).head_commit.clone();
208 Some(cx.subscribe(git_diff, move |_, event, cx| {
209 if let buffer_diff::BufferDiffEvent::DiffChanged { .. } = event {
210 let new_head = buffer_repo.read(cx).head_commit.clone();
211 if new_head != old_head {
212 old_head = new_head;
213 git_diff_updates_tx.send(()).ok();
214 }
215 }
216 }))
217 })?
218 } else {
219 None
220 };
221
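        // Handle both buffer updates and git HEAD changes, preferring
        // buffer updates when both are ready.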
222 loop {
223 futures::select_biased! {
224 buffer_update = buffer_updates.next() => {
225 if let Some((author, buffer_snapshot)) = buffer_update {
226 Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
227 } else {
228 break;
229 }
230 }
231 _ = git_diff_updates_rx.changed().fuse() => {
232 if let Some(git_diff) = git_diff.as_ref() {
233 Self::keep_committed_edits(&this, &buffer, git_diff, cx).await?;
234 }
235 }
236 }
237 }
238
239 Ok(())
240 }
241
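    /// Recomputes the unreviewed diff after a buffer change. Non-conflicting
    /// edits made by the user are folded into the diff base so that they don't
    /// show up as unreviewed hunks.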
242 async fn track_edits(
243 this: &WeakEntity<ActionLog>,
244 buffer: &Entity<Buffer>,
245 author: ChangeAuthor,
246 buffer_snapshot: text::BufferSnapshot,
247 cx: &mut AsyncApp,
248 ) -> Result<()> {
249 let rebase = this.update(cx, |this, cx| {
250 let tracked_buffer = this
251 .tracked_buffers
252 .get_mut(buffer)
253 .context("buffer not tracked")?;
254
255 let rebase = cx.background_spawn({
256 let mut base_text = tracked_buffer.diff_base.clone();
257 let old_snapshot = tracked_buffer.snapshot.clone();
258 let new_snapshot = buffer_snapshot.clone();
259 let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
260 let edits = diff_snapshots(&old_snapshot, &new_snapshot);
261 async move {
262 if let ChangeAuthor::User = author {
263 apply_non_conflicting_edits(
264 &unreviewed_edits,
265 edits,
266 &mut base_text,
267 new_snapshot.as_rope(),
268 );
269 }
270
271 (Arc::from(base_text.to_string().as_str()), base_text)
272 }
273 });
274
275 anyhow::Ok(rebase)
276 })??;
277 let (new_base_text, new_diff_base) = rebase.await;
278
279 Self::update_diff(
280 this,
281 buffer,
282 buffer_snapshot,
283 new_base_text,
284 new_diff_base,
285 cx,
286 )
287 .await
288 }
289
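    /// Called when the buffer's git HEAD changes: unreviewed edits whose
    /// content matches what was committed are folded into the diff base, so
    /// the user doesn't have to review hunks they already committed.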
290 async fn keep_committed_edits(
291 this: &WeakEntity<ActionLog>,
292 buffer: &Entity<Buffer>,
293 git_diff: &Entity<BufferDiff>,
294 cx: &mut AsyncApp,
295 ) -> Result<()> {
296 let buffer_snapshot = this.read_with(cx, |this, _cx| {
297 let tracked_buffer = this
298 .tracked_buffers
299 .get(buffer)
300 .context("buffer not tracked")?;
301 anyhow::Ok(tracked_buffer.snapshot.clone())
302 })??;
303 let (new_base_text, new_diff_base) = this
304 .read_with(cx, |this, cx| {
305 let tracked_buffer = this
306 .tracked_buffers
307 .get(buffer)
308 .context("buffer not tracked")?;
309 let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
310 let agent_diff_base = tracked_buffer.diff_base.clone();
311 let git_diff_base = git_diff.read(cx).base_text(cx).as_rope().clone();
312 let buffer_text = tracked_buffer.snapshot.as_rope().clone();
313 anyhow::Ok(cx.background_spawn(async move {
314 let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
315 let committed_edits = language::line_diff(
316 &agent_diff_base.to_string(),
317 &git_diff_base.to_string(),
318 )
319 .into_iter()
320 .map(|(old, new)| Edit { old, new });
321
322 let mut new_agent_diff_base = agent_diff_base.clone();
323 let mut row_delta = 0i32;
324 for committed in committed_edits {
325 while let Some(unreviewed) = old_unreviewed_edits.peek() {
326 // If the committed edit matches the unreviewed
327 // edit, assume the user wants to keep it.
328 if committed.old == unreviewed.old {
329 let unreviewed_new =
330 buffer_text.slice_rows(unreviewed.new.clone()).to_string();
331 let committed_new =
332 git_diff_base.slice_rows(committed.new.clone()).to_string();
333 if unreviewed_new == committed_new {
334 let old_byte_start =
335 new_agent_diff_base.point_to_offset(Point::new(
336 (unreviewed.old.start as i32 + row_delta) as u32,
337 0,
338 ));
339 let old_byte_end =
340 new_agent_diff_base.point_to_offset(cmp::min(
341 Point::new(
342 (unreviewed.old.end as i32 + row_delta) as u32,
343 0,
344 ),
345 new_agent_diff_base.max_point(),
346 ));
347 new_agent_diff_base
348 .replace(old_byte_start..old_byte_end, &unreviewed_new);
349 row_delta +=
350 unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
351 }
352 } else if unreviewed.old.start >= committed.old.end {
353 break;
354 }
355
356 old_unreviewed_edits.next().unwrap();
357 }
358 }
359
360 (
361 Arc::from(new_agent_diff_base.to_string().as_str()),
362 new_agent_diff_base,
363 )
364 }))
365 })??
366 .await;
367
368 Self::update_diff(
369 this,
370 buffer,
371 buffer_snapshot,
372 new_base_text,
373 new_diff_base,
374 cx,
375 )
376 .await
377 }
378
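    /// Recomputes the tracked buffer's `BufferDiff` against `new_base_text`
    /// and derives the new set of `unreviewed_edits` from the resulting hunks.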
379 async fn update_diff(
380 this: &WeakEntity<ActionLog>,
381 buffer: &Entity<Buffer>,
382 buffer_snapshot: text::BufferSnapshot,
383 new_base_text: Arc<str>,
384 new_diff_base: Rope,
385 cx: &mut AsyncApp,
386 ) -> Result<()> {
387 let (diff, language) = this.read_with(cx, |this, cx| {
388 let tracked_buffer = this
389 .tracked_buffers
390 .get(buffer)
391 .context("buffer not tracked")?;
392 anyhow::Ok((
393 tracked_buffer.diff.clone(),
394 buffer.read(cx).language().cloned(),
395 ))
396 })??;
397 let update = diff.update(cx, |diff, cx| {
398 diff.update_diff(
399 buffer_snapshot.clone(),
400 Some(new_base_text),
401 true,
402 language,
403 cx,
404 )
405 });
406 let mut unreviewed_edits = Patch::default();
407 if let Ok(update) = update {
408 let update = update.await;
409
410 diff.update(cx, |diff, cx| {
411 diff.set_snapshot(update.clone(), &buffer_snapshot, cx)
412 })?
413 .await;
414 let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx))?;
415
416 unreviewed_edits = cx
417 .background_spawn({
418 let buffer_snapshot = buffer_snapshot.clone();
419 let new_diff_base = new_diff_base.clone();
420 async move {
421 let mut unreviewed_edits = Patch::default();
422 for hunk in diff_snapshot.hunks_intersecting_range(
423 Anchor::min_for_buffer(buffer_snapshot.remote_id())
424 ..Anchor::max_for_buffer(buffer_snapshot.remote_id()),
425 &buffer_snapshot,
426 ) {
427 let old_range = new_diff_base
428 .offset_to_point(hunk.diff_base_byte_range.start)
429 ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
430 let new_range = hunk.range.start..hunk.range.end;
431 unreviewed_edits.push(point_to_row_edit(
432 Edit {
433 old: old_range,
434 new: new_range,
435 },
436 &new_diff_base,
437 buffer_snapshot.as_rope(),
438 ));
439 }
440 unreviewed_edits
441 }
442 })
443 .await;
444 }
445 this.update(cx, |this, cx| {
446 let tracked_buffer = this
447 .tracked_buffers
448 .get_mut(buffer)
449 .context("buffer not tracked")?;
450 tracked_buffer.diff_base = new_diff_base;
451 tracked_buffer.snapshot = buffer_snapshot;
452 tracked_buffer.unreviewed_edits = unreviewed_edits;
453 cx.notify();
454 anyhow::Ok(())
455 })?
456 }
457
    /// Tracks a buffer the agent has read, so we can notify the model about subsequent user edits.
459 pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
460 self.track_buffer_internal(buffer, false, cx);
461 }
462
    /// Marks a buffer as created by the agent, so we can refresh it in the context.
464 pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
465 self.track_buffer_internal(buffer, true, cx);
466 }
467
    /// Marks a buffer as edited by the agent, so we can refresh it in the context.
469 pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
470 let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
471 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
472 tracked_buffer.status = TrackedBufferStatus::Modified;
473 }
474 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
475 }
476
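    /// Marks a buffer as about to be deleted by the agent, so the deletion can
    /// be reviewed (and rejected) like any other edit.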
477 pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
478 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
479 match tracked_buffer.status {
480 TrackedBufferStatus::Created { .. } => {
481 self.tracked_buffers.remove(&buffer);
482 cx.notify();
483 }
484 TrackedBufferStatus::Modified => {
485 buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
486 tracked_buffer.status = TrackedBufferStatus::Deleted;
487 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
488 }
489 TrackedBufferStatus::Deleted => {}
490 }
491 cx.notify();
492 }
493
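    /// Accepts ("keeps") every unreviewed edit whose rows intersect
    /// `buffer_range`, folding its new text into the diff base.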
494 pub fn keep_edits_in_range(
495 &mut self,
496 buffer: Entity<Buffer>,
497 buffer_range: Range<impl language::ToPoint>,
498 telemetry: Option<ActionLogTelemetry>,
499 cx: &mut Context<Self>,
500 ) {
501 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
502 return;
503 };
504
505 let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
506 match tracked_buffer.status {
507 TrackedBufferStatus::Deleted => {
508 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
509 self.tracked_buffers.remove(&buffer);
510 cx.notify();
511 }
512 _ => {
513 let buffer = buffer.read(cx);
514 let buffer_range =
515 buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
516 let mut delta = 0i32;
517 tracked_buffer.unreviewed_edits.retain_mut(|edit| {
518 edit.old.start = (edit.old.start as i32 + delta) as u32;
519 edit.old.end = (edit.old.end as i32 + delta) as u32;
520
521 if buffer_range.end.row < edit.new.start
522 || buffer_range.start.row > edit.new.end
523 {
524 true
525 } else {
526 let old_range = tracked_buffer
527 .diff_base
528 .point_to_offset(Point::new(edit.old.start, 0))
529 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
530 Point::new(edit.old.end, 0),
531 tracked_buffer.diff_base.max_point(),
532 ));
533 let new_range = tracked_buffer
534 .snapshot
535 .point_to_offset(Point::new(edit.new.start, 0))
536 ..tracked_buffer.snapshot.point_to_offset(cmp::min(
537 Point::new(edit.new.end, 0),
538 tracked_buffer.snapshot.max_point(),
539 ));
540 tracked_buffer.diff_base.replace(
541 old_range,
542 &tracked_buffer
543 .snapshot
544 .text_for_range(new_range)
545 .collect::<String>(),
546 );
547 delta += edit.new_len() as i32 - edit.old_len() as i32;
548 metrics.add_edit(edit);
549 false
550 }
551 });
552 if tracked_buffer.unreviewed_edits.is_empty()
553 && let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status
554 {
555 tracked_buffer.status = TrackedBufferStatus::Modified;
556 }
557 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
558 }
559 }
560 if let Some(telemetry) = telemetry {
561 telemetry_report_accepted_edits(&telemetry, metrics);
562 }
563 }
564
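    /// Reverts every unreviewed edit whose rows intersect one of
    /// `buffer_ranges`, restoring the corresponding text from the diff base
    /// (recreating or deleting the file when the agent deleted or created it).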
565 pub fn reject_edits_in_ranges(
566 &mut self,
567 buffer: Entity<Buffer>,
568 buffer_ranges: Vec<Range<impl language::ToPoint>>,
569 telemetry: Option<ActionLogTelemetry>,
570 cx: &mut Context<Self>,
571 ) -> Task<Result<()>> {
572 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
573 return Task::ready(Ok(()));
574 };
575
576 let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
577 let task = match &tracked_buffer.status {
578 TrackedBufferStatus::Created {
579 existing_file_content,
580 } => {
581 let task = if let Some(existing_file_content) = existing_file_content {
582 buffer.update(cx, |buffer, cx| {
583 buffer.start_transaction();
584 buffer.set_text("", cx);
585 for chunk in existing_file_content.chunks() {
586 buffer.append(chunk, cx);
587 }
588 buffer.end_transaction(cx);
589 });
590 self.project
591 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
592 } else {
593 // For a file created by AI with no pre-existing content,
594 // only delete the file if we're certain it contains only AI content
595 // with no edits from the user.
596
597 let initial_version = tracked_buffer.version.clone();
598 let current_version = buffer.read(cx).version();
599
600 let current_content = buffer.read(cx).text();
601 let tracked_content = tracked_buffer.snapshot.text();
602
603 let is_ai_only_content =
604 initial_version == current_version && current_content == tracked_content;
605
606 if is_ai_only_content {
607 buffer
608 .read(cx)
609 .entry_id(cx)
610 .and_then(|entry_id| {
611 self.project.update(cx, |project, cx| {
612 project.delete_entry(entry_id, false, cx)
613 })
614 })
615 .unwrap_or(Task::ready(Ok(())))
616 } else {
                        // We can't currently disentangle edits made by the
                        // user from edits made by the AI, so preserve both to
                        // avoid data loss.
                        //
                        // TODO: find a better solution (e.g. disable "Reject"
                        // after the user makes an edit, or find a way to
                        // differentiate between AI and user edits).
623 Task::ready(Ok(()))
624 }
625 };
626
627 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
628 self.tracked_buffers.remove(&buffer);
629 cx.notify();
630 task
631 }
632 TrackedBufferStatus::Deleted => {
633 buffer.update(cx, |buffer, cx| {
634 buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
635 });
636 let save = self
637 .project
638 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
639
640 // Clear all tracked edits for this buffer and start over as if we just read it.
641 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
642 self.tracked_buffers.remove(&buffer);
643 self.buffer_read(buffer.clone(), cx);
644 cx.notify();
645 save
646 }
647 TrackedBufferStatus::Modified => {
648 buffer.update(cx, |buffer, cx| {
649 let mut buffer_row_ranges = buffer_ranges
650 .into_iter()
651 .map(|range| {
652 range.start.to_point(buffer).row..range.end.to_point(buffer).row
653 })
654 .peekable();
655
656 let mut edits_to_revert = Vec::new();
657 for edit in tracked_buffer.unreviewed_edits.edits() {
658 let new_range = tracked_buffer
659 .snapshot
660 .anchor_before(Point::new(edit.new.start, 0))
661 ..tracked_buffer.snapshot.anchor_after(cmp::min(
662 Point::new(edit.new.end, 0),
663 tracked_buffer.snapshot.max_point(),
664 ));
665 let new_row_range = new_range.start.to_point(buffer).row
666 ..new_range.end.to_point(buffer).row;
667
668 let mut revert = false;
669 while let Some(buffer_row_range) = buffer_row_ranges.peek() {
670 if buffer_row_range.end < new_row_range.start {
671 buffer_row_ranges.next();
672 } else if buffer_row_range.start > new_row_range.end {
673 break;
674 } else {
675 revert = true;
676 break;
677 }
678 }
679
680 if revert {
681 metrics.add_edit(edit);
682 let old_range = tracked_buffer
683 .diff_base
684 .point_to_offset(Point::new(edit.old.start, 0))
685 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
686 Point::new(edit.old.end, 0),
687 tracked_buffer.diff_base.max_point(),
688 ));
689 let old_text = tracked_buffer
690 .diff_base
691 .chunks_in_range(old_range)
692 .collect::<String>();
693 edits_to_revert.push((new_range, old_text));
694 }
695 }
696
697 buffer.edit(edits_to_revert, None, cx);
698 });
699 self.project
700 .update(cx, |project, cx| project.save_buffer(buffer, cx))
701 }
702 };
703 if let Some(telemetry) = telemetry {
704 telemetry_report_rejected_edits(&telemetry, metrics);
705 }
706 task
707 }
708
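    /// Accepts all unreviewed edits across every tracked buffer.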
709 pub fn keep_all_edits(
710 &mut self,
711 telemetry: Option<ActionLogTelemetry>,
712 cx: &mut Context<Self>,
713 ) {
714 self.tracked_buffers.retain(|buffer, tracked_buffer| {
715 let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
716 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
717 if let Some(telemetry) = telemetry.as_ref() {
718 telemetry_report_accepted_edits(telemetry, metrics);
719 }
720 match tracked_buffer.status {
721 TrackedBufferStatus::Deleted => false,
722 _ => {
723 if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
724 tracked_buffer.status = TrackedBufferStatus::Modified;
725 }
726 tracked_buffer.unreviewed_edits.clear();
727 tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
728 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
729 true
730 }
731 }
732 });
733
734 cx.notify();
735 }
736
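    /// Rejects all unreviewed edits across every tracked buffer.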
737 pub fn reject_all_edits(
738 &mut self,
739 telemetry: Option<ActionLogTelemetry>,
740 cx: &mut Context<Self>,
741 ) -> Task<()> {
742 let futures = self.changed_buffers(cx).into_keys().map(|buffer| {
743 let buffer_ranges = vec![Anchor::min_max_range_for_buffer(
744 buffer.read(cx).remote_id(),
745 )];
746 let reject = self.reject_edits_in_ranges(buffer, buffer_ranges, telemetry.clone(), cx);
747
748 async move {
749 reject.await.log_err();
750 }
751 });
752
753 let task = futures::future::join_all(futures);
754 cx.background_spawn(async move {
755 task.await;
756 })
757 }
758
759 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
760 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
761 self.tracked_buffers
762 .iter()
763 .filter(|(_, tracked)| tracked.has_edits(cx))
764 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
765 .collect()
766 }
767
    /// Iterates over the buffers that have changed since the model last read or edited them.
769 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
770 self.tracked_buffers
771 .iter()
772 .filter(|(buffer, tracked)| {
773 let buffer = buffer.read(cx);
774
775 tracked.version != buffer.version
776 && buffer
777 .file()
778 .is_some_and(|file| !file.disk_state().is_deleted())
779 })
780 .map(|(buffer, _)| buffer)
781 }
782}
783
784#[derive(Clone)]
785pub struct ActionLogTelemetry {
786 pub agent_telemetry_id: SharedString,
787 pub session_id: Arc<str>,
788}
789
790struct ActionLogMetrics {
791 lines_removed: u32,
792 lines_added: u32,
793 language: Option<SharedString>,
794}
795
796impl ActionLogMetrics {
797 fn for_buffer(buffer: &Buffer) -> Self {
798 Self {
799 language: buffer.language().map(|l| l.name().0),
800 lines_removed: 0,
801 lines_added: 0,
802 }
803 }
804
805 fn add_edits(&mut self, edits: &[Edit<u32>]) {
806 for edit in edits {
807 self.add_edit(edit);
808 }
809 }
810
811 fn add_edit(&mut self, edit: &Edit<u32>) {
812 self.lines_added += edit.new_len();
813 self.lines_removed += edit.old_len();
814 }
815}
816
817fn telemetry_report_accepted_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
818 telemetry::event!(
819 "Agent Edits Accepted",
820 agent = telemetry.agent_telemetry_id,
821 session = telemetry.session_id,
822 language = metrics.language,
823 lines_added = metrics.lines_added,
824 lines_removed = metrics.lines_removed
825 );
826}
827
828fn telemetry_report_rejected_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
829 telemetry::event!(
830 "Agent Edits Rejected",
831 agent = telemetry.agent_telemetry_id,
832 session = telemetry.session_id,
833 language = metrics.language,
834 lines_added = metrics.lines_added,
835 lines_removed = metrics.lines_removed
836 );
837}
838
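/// Applies the given `edits` (whose new text lives in `new_text`) to
/// `old_text`, skipping any edit that conflicts with an edit already recorded
/// in `patch`. Returns whether `old_text` was modified.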
839fn apply_non_conflicting_edits(
840 patch: &Patch<u32>,
841 edits: Vec<Edit<u32>>,
842 old_text: &mut Rope,
843 new_text: &Rope,
844) -> bool {
845 let mut old_edits = patch.edits().iter().cloned().peekable();
846 let mut new_edits = edits.into_iter().peekable();
847 let mut applied_delta = 0i32;
848 let mut rebased_delta = 0i32;
849 let mut has_made_changes = false;
850
851 while let Some(mut new_edit) = new_edits.next() {
852 let mut conflict = false;
853
        // Consume all the old edits that come before this new edit, flagging a conflict when one intersects it.
855 while let Some(old_edit) = old_edits.peek() {
856 if new_edit.old.end < old_edit.new.start
857 || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
858 {
859 break;
860 } else if new_edit.old.start > old_edit.new.end
861 || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
862 {
863 let old_edit = old_edits.next().unwrap();
864 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
865 } else {
866 conflict = true;
867 if new_edits
868 .peek()
869 .is_some_and(|next_edit| next_edit.old.overlaps(&old_edit.new))
870 {
871 new_edit = new_edits.next().unwrap();
872 } else {
873 let old_edit = old_edits.next().unwrap();
874 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
875 }
876 }
877 }
878
879 if !conflict {
880 // This edit doesn't intersect with any old edit, so we can apply it to the old text.
881 new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
882 new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
883 let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
884 ..old_text.point_to_offset(cmp::min(
885 Point::new(new_edit.old.end, 0),
886 old_text.max_point(),
887 ));
888 let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
889 ..new_text.point_to_offset(cmp::min(
890 Point::new(new_edit.new.end, 0),
891 new_text.max_point(),
892 ));
893
894 old_text.replace(
895 old_bytes,
896 &new_text.chunks_in_range(new_bytes).collect::<String>(),
897 );
898 applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
899 has_made_changes = true;
900 }
901 }
902 has_made_changes
903}
904
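/// Computes the row-based edits that turn `old_snapshot` into `new_snapshot`,
/// coalescing adjacent or overlapping edits into single row ranges.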
905fn diff_snapshots(
906 old_snapshot: &text::BufferSnapshot,
907 new_snapshot: &text::BufferSnapshot,
908) -> Vec<Edit<u32>> {
909 let mut edits = new_snapshot
910 .edits_since::<Point>(&old_snapshot.version)
911 .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
912 .peekable();
913 let mut row_edits = Vec::new();
914 while let Some(mut edit) = edits.next() {
915 while let Some(next_edit) = edits.peek() {
916 if edit.old.end >= next_edit.old.start {
917 edit.old.end = next_edit.old.end;
918 edit.new.end = next_edit.new.end;
919 edits.next();
920 } else {
921 break;
922 }
923 }
924 row_edits.push(edit);
925 }
926 row_edits
927}
928
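/// Widens a point-based edit into an edit over whole rows. Insertions that
/// start at the end of a line with a leading newline are attributed to the
/// following rows; edits already aligned to line boundaries keep their row
/// range; anything else is expanded to cover the full rows it touches.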
929fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
930 if edit.old.start.column == old_text.line_len(edit.old.start.row)
931 && new_text
932 .chars_at(new_text.point_to_offset(edit.new.start))
933 .next()
934 == Some('\n')
935 && edit.old.start != old_text.max_point()
936 {
937 Edit {
938 old: edit.old.start.row + 1..edit.old.end.row + 1,
939 new: edit.new.start.row + 1..edit.new.end.row + 1,
940 }
941 } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
942 Edit {
943 old: edit.old.start.row..edit.old.end.row,
944 new: edit.new.start.row..edit.new.end.row,
945 }
946 } else {
947 Edit {
948 old: edit.old.start.row..edit.old.end.row + 1,
949 new: edit.new.start.row..edit.new.end.row + 1,
950 }
951 }
952}
953
954#[derive(Copy, Clone, Debug)]
955enum ChangeAuthor {
956 User,
957 Agent,
958}
959
960enum TrackedBufferStatus {
961 Created { existing_file_content: Option<Rope> },
962 Modified,
963 Deleted,
964}
965
966struct TrackedBuffer {
967 buffer: Entity<Buffer>,
968 diff_base: Rope,
969 unreviewed_edits: Patch<u32>,
970 status: TrackedBufferStatus,
971 version: clock::Global,
972 diff: Entity<BufferDiff>,
973 snapshot: text::BufferSnapshot,
974 diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
975 _open_lsp_handle: OpenLspBufferHandle,
976 _maintain_diff: Task<()>,
977 _subscription: Subscription,
978}
979
980impl TrackedBuffer {
981 fn has_edits(&self, cx: &App) -> bool {
982 self.diff
983 .read(cx)
984 .snapshot(cx)
985 .hunks(self.buffer.read(cx))
986 .next()
987 .is_some()
988 }
989
990 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
991 self.diff_update
992 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
993 .ok();
994 }
995}
996
997pub struct ChangedBuffer {
998 pub diff: Entity<BufferDiff>,
999}
1000
1001#[cfg(test)]
1002mod tests {
1003 use super::*;
1004 use buffer_diff::DiffHunkStatusKind;
1005 use gpui::TestAppContext;
1006 use language::Point;
1007 use project::{FakeFs, Fs, Project, RemoveOptions};
1008 use rand::prelude::*;
1009 use serde_json::json;
1010 use settings::SettingsStore;
1011 use std::env;
1012 use util::{RandomCharIter, path};
1013
1014 #[ctor::ctor]
1015 fn init_logger() {
1016 zlog::init_test();
1017 }
1018
1019 fn init_test(cx: &mut TestAppContext) {
1020 cx.update(|cx| {
1021 let settings_store = SettingsStore::test(cx);
1022 cx.set_global(settings_store);
1023 });
1024 }
1025
1026 #[gpui::test(iterations = 10)]
1027 async fn test_keep_edits(cx: &mut TestAppContext) {
1028 init_test(cx);
1029
1030 let fs = FakeFs::new(cx.executor());
1031 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1032 .await;
1033 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1034 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1035 let file_path = project
1036 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1037 .unwrap();
1038 let buffer = project
1039 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1040 .await
1041 .unwrap();
1042
1043 cx.update(|cx| {
1044 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1045 buffer.update(cx, |buffer, cx| {
1046 buffer
1047 .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
1048 .unwrap()
1049 });
1050 buffer.update(cx, |buffer, cx| {
1051 buffer
1052 .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
1053 .unwrap()
1054 });
1055 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1056 });
1057 cx.run_until_parked();
1058 assert_eq!(
1059 buffer.read_with(cx, |buffer, _| buffer.text()),
1060 "abc\ndEf\nghi\njkl\nmnO"
1061 );
1062 assert_eq!(
1063 unreviewed_hunks(&action_log, cx),
1064 vec![(
1065 buffer.clone(),
1066 vec![
1067 HunkStatus {
1068 range: Point::new(1, 0)..Point::new(2, 0),
1069 diff_status: DiffHunkStatusKind::Modified,
1070 old_text: "def\n".into(),
1071 },
1072 HunkStatus {
1073 range: Point::new(4, 0)..Point::new(4, 3),
1074 diff_status: DiffHunkStatusKind::Modified,
1075 old_text: "mno".into(),
1076 }
1077 ],
1078 )]
1079 );
1080
1081 action_log.update(cx, |log, cx| {
1082 log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), None, cx)
1083 });
1084 cx.run_until_parked();
1085 assert_eq!(
1086 unreviewed_hunks(&action_log, cx),
1087 vec![(
1088 buffer.clone(),
1089 vec![HunkStatus {
1090 range: Point::new(1, 0)..Point::new(2, 0),
1091 diff_status: DiffHunkStatusKind::Modified,
1092 old_text: "def\n".into(),
1093 }],
1094 )]
1095 );
1096
1097 action_log.update(cx, |log, cx| {
1098 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), None, cx)
1099 });
1100 cx.run_until_parked();
1101 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1102 }
1103
1104 #[gpui::test(iterations = 10)]
1105 async fn test_deletions(cx: &mut TestAppContext) {
1106 init_test(cx);
1107
1108 let fs = FakeFs::new(cx.executor());
1109 fs.insert_tree(
1110 path!("/dir"),
1111 json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
1112 )
1113 .await;
1114 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1115 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1116 let file_path = project
1117 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1118 .unwrap();
1119 let buffer = project
1120 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1121 .await
1122 .unwrap();
1123
1124 cx.update(|cx| {
1125 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1126 buffer.update(cx, |buffer, cx| {
1127 buffer
1128 .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
1129 .unwrap();
1130 buffer.finalize_last_transaction();
1131 });
1132 buffer.update(cx, |buffer, cx| {
1133 buffer
1134 .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
1135 .unwrap();
1136 buffer.finalize_last_transaction();
1137 });
1138 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1139 });
1140 cx.run_until_parked();
1141 assert_eq!(
1142 buffer.read_with(cx, |buffer, _| buffer.text()),
1143 "abc\nghi\njkl\npqr"
1144 );
1145 assert_eq!(
1146 unreviewed_hunks(&action_log, cx),
1147 vec![(
1148 buffer.clone(),
1149 vec![
1150 HunkStatus {
1151 range: Point::new(1, 0)..Point::new(1, 0),
1152 diff_status: DiffHunkStatusKind::Deleted,
1153 old_text: "def\n".into(),
1154 },
1155 HunkStatus {
1156 range: Point::new(3, 0)..Point::new(3, 0),
1157 diff_status: DiffHunkStatusKind::Deleted,
1158 old_text: "mno\n".into(),
1159 }
1160 ],
1161 )]
1162 );
1163
1164 buffer.update(cx, |buffer, cx| buffer.undo(cx));
1165 cx.run_until_parked();
1166 assert_eq!(
1167 buffer.read_with(cx, |buffer, _| buffer.text()),
1168 "abc\nghi\njkl\nmno\npqr"
1169 );
1170 assert_eq!(
1171 unreviewed_hunks(&action_log, cx),
1172 vec![(
1173 buffer.clone(),
1174 vec![HunkStatus {
1175 range: Point::new(1, 0)..Point::new(1, 0),
1176 diff_status: DiffHunkStatusKind::Deleted,
1177 old_text: "def\n".into(),
1178 }],
1179 )]
1180 );
1181
1182 action_log.update(cx, |log, cx| {
1183 log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), None, cx)
1184 });
1185 cx.run_until_parked();
1186 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1187 }
1188
1189 #[gpui::test(iterations = 10)]
1190 async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
1191 init_test(cx);
1192
1193 let fs = FakeFs::new(cx.executor());
1194 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1195 .await;
1196 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1197 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1198 let file_path = project
1199 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1200 .unwrap();
1201 let buffer = project
1202 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1203 .await
1204 .unwrap();
1205
1206 cx.update(|cx| {
1207 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1208 buffer.update(cx, |buffer, cx| {
1209 buffer
1210 .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
1211 .unwrap()
1212 });
1213 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1214 });
1215 cx.run_until_parked();
1216 assert_eq!(
1217 buffer.read_with(cx, |buffer, _| buffer.text()),
1218 "abc\ndeF\nGHI\njkl\nmno"
1219 );
1220 assert_eq!(
1221 unreviewed_hunks(&action_log, cx),
1222 vec![(
1223 buffer.clone(),
1224 vec![HunkStatus {
1225 range: Point::new(1, 0)..Point::new(3, 0),
1226 diff_status: DiffHunkStatusKind::Modified,
1227 old_text: "def\nghi\n".into(),
1228 }],
1229 )]
1230 );
1231
1232 buffer.update(cx, |buffer, cx| {
1233 buffer.edit(
1234 [
1235 (Point::new(0, 2)..Point::new(0, 2), "X"),
1236 (Point::new(3, 0)..Point::new(3, 0), "Y"),
1237 ],
1238 None,
1239 cx,
1240 )
1241 });
1242 cx.run_until_parked();
1243 assert_eq!(
1244 buffer.read_with(cx, |buffer, _| buffer.text()),
1245 "abXc\ndeF\nGHI\nYjkl\nmno"
1246 );
1247 assert_eq!(
1248 unreviewed_hunks(&action_log, cx),
1249 vec![(
1250 buffer.clone(),
1251 vec![HunkStatus {
1252 range: Point::new(1, 0)..Point::new(3, 0),
1253 diff_status: DiffHunkStatusKind::Modified,
1254 old_text: "def\nghi\n".into(),
1255 }],
1256 )]
1257 );
1258
1259 buffer.update(cx, |buffer, cx| {
1260 buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
1261 });
1262 cx.run_until_parked();
1263 assert_eq!(
1264 buffer.read_with(cx, |buffer, _| buffer.text()),
1265 "abXc\ndZeF\nGHI\nYjkl\nmno"
1266 );
1267 assert_eq!(
1268 unreviewed_hunks(&action_log, cx),
1269 vec![(
1270 buffer.clone(),
1271 vec![HunkStatus {
1272 range: Point::new(1, 0)..Point::new(3, 0),
1273 diff_status: DiffHunkStatusKind::Modified,
1274 old_text: "def\nghi\n".into(),
1275 }],
1276 )]
1277 );
1278
1279 action_log.update(cx, |log, cx| {
1280 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), None, cx)
1281 });
1282 cx.run_until_parked();
1283 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1284 }
1285
1286 #[gpui::test(iterations = 10)]
1287 async fn test_creating_files(cx: &mut TestAppContext) {
1288 init_test(cx);
1289
1290 let fs = FakeFs::new(cx.executor());
1291 fs.insert_tree(path!("/dir"), json!({})).await;
1292 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1293 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1294 let file_path = project
1295 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1296 .unwrap();
1297
1298 let buffer = project
1299 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1300 .await
1301 .unwrap();
1302 cx.update(|cx| {
1303 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1304 buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
1305 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1306 });
1307 project
1308 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1309 .await
1310 .unwrap();
1311 cx.run_until_parked();
1312 assert_eq!(
1313 unreviewed_hunks(&action_log, cx),
1314 vec![(
1315 buffer.clone(),
1316 vec![HunkStatus {
1317 range: Point::new(0, 0)..Point::new(0, 5),
1318 diff_status: DiffHunkStatusKind::Added,
1319 old_text: "".into(),
1320 }],
1321 )]
1322 );
1323
1324 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
1325 cx.run_until_parked();
1326 assert_eq!(
1327 unreviewed_hunks(&action_log, cx),
1328 vec![(
1329 buffer.clone(),
1330 vec![HunkStatus {
1331 range: Point::new(0, 0)..Point::new(0, 6),
1332 diff_status: DiffHunkStatusKind::Added,
1333 old_text: "".into(),
1334 }],
1335 )]
1336 );
1337
1338 action_log.update(cx, |log, cx| {
1339 log.keep_edits_in_range(buffer.clone(), 0..5, None, cx)
1340 });
1341 cx.run_until_parked();
1342 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1343 }
1344
1345 #[gpui::test(iterations = 10)]
1346 async fn test_overwriting_files(cx: &mut TestAppContext) {
1347 init_test(cx);
1348
1349 let fs = FakeFs::new(cx.executor());
1350 fs.insert_tree(
1351 path!("/dir"),
1352 json!({
1353 "file1": "Lorem ipsum dolor"
1354 }),
1355 )
1356 .await;
1357 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1358 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1359 let file_path = project
1360 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1361 .unwrap();
1362
1363 let buffer = project
1364 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1365 .await
1366 .unwrap();
1367 cx.update(|cx| {
1368 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1369 buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
1370 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1371 });
1372 project
1373 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1374 .await
1375 .unwrap();
1376 cx.run_until_parked();
1377 assert_eq!(
1378 unreviewed_hunks(&action_log, cx),
1379 vec![(
1380 buffer.clone(),
1381 vec![HunkStatus {
1382 range: Point::new(0, 0)..Point::new(0, 19),
1383 diff_status: DiffHunkStatusKind::Added,
1384 old_text: "".into(),
1385 }],
1386 )]
1387 );
1388
1389 action_log
1390 .update(cx, |log, cx| {
1391 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
1392 })
1393 .await
1394 .unwrap();
1395 cx.run_until_parked();
1396 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1397 assert_eq!(
1398 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1399 "Lorem ipsum dolor"
1400 );
1401 }
1402
1403 #[gpui::test(iterations = 10)]
1404 async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
1405 init_test(cx);
1406
1407 let fs = FakeFs::new(cx.executor());
1408 fs.insert_tree(
1409 path!("/dir"),
1410 json!({
1411 "file1": "Lorem ipsum dolor"
1412 }),
1413 )
1414 .await;
1415 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1416 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1417 let file_path = project
1418 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1419 .unwrap();
1420
1421 let buffer = project
1422 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1423 .await
1424 .unwrap();
1425 cx.update(|cx| {
1426 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1427 buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
1428 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1429 });
1430 project
1431 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1432 .await
1433 .unwrap();
1434 cx.run_until_parked();
1435 assert_eq!(
1436 unreviewed_hunks(&action_log, cx),
1437 vec![(
1438 buffer.clone(),
1439 vec![HunkStatus {
1440 range: Point::new(0, 0)..Point::new(0, 37),
1441 diff_status: DiffHunkStatusKind::Modified,
1442 old_text: "Lorem ipsum dolor".into(),
1443 }],
1444 )]
1445 );
1446
1447 cx.update(|cx| {
1448 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1449 buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
1450 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1451 });
1452 project
1453 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1454 .await
1455 .unwrap();
1456 cx.run_until_parked();
1457 assert_eq!(
1458 unreviewed_hunks(&action_log, cx),
1459 vec![(
1460 buffer.clone(),
1461 vec![HunkStatus {
1462 range: Point::new(0, 0)..Point::new(0, 9),
1463 diff_status: DiffHunkStatusKind::Added,
1464 old_text: "".into(),
1465 }],
1466 )]
1467 );
1468
1469 action_log
1470 .update(cx, |log, cx| {
1471 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
1472 })
1473 .await
1474 .unwrap();
1475 cx.run_until_parked();
1476 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1477 assert_eq!(
1478 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1479 "Lorem ipsum dolor"
1480 );
1481 }
1482
1483 #[gpui::test(iterations = 10)]
1484 async fn test_deleting_files(cx: &mut TestAppContext) {
1485 init_test(cx);
1486
1487 let fs = FakeFs::new(cx.executor());
1488 fs.insert_tree(
1489 path!("/dir"),
1490 json!({"file1": "lorem\n", "file2": "ipsum\n"}),
1491 )
1492 .await;
1493
1494 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1495 let file1_path = project
1496 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1497 .unwrap();
1498 let file2_path = project
1499 .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
1500 .unwrap();
1501
1502 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1503 let buffer1 = project
1504 .update(cx, |project, cx| {
1505 project.open_buffer(file1_path.clone(), cx)
1506 })
1507 .await
1508 .unwrap();
1509 let buffer2 = project
1510 .update(cx, |project, cx| {
1511 project.open_buffer(file2_path.clone(), cx)
1512 })
1513 .await
1514 .unwrap();
1515
1516 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
1517 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
1518 project
1519 .update(cx, |project, cx| {
1520 project.delete_file(file1_path.clone(), false, cx)
1521 })
1522 .unwrap()
1523 .await
1524 .unwrap();
1525 project
1526 .update(cx, |project, cx| {
1527 project.delete_file(file2_path.clone(), false, cx)
1528 })
1529 .unwrap()
1530 .await
1531 .unwrap();
1532 cx.run_until_parked();
1533 assert_eq!(
1534 unreviewed_hunks(&action_log, cx),
1535 vec![
1536 (
1537 buffer1.clone(),
1538 vec![HunkStatus {
1539 range: Point::new(0, 0)..Point::new(0, 0),
1540 diff_status: DiffHunkStatusKind::Deleted,
1541 old_text: "lorem\n".into(),
1542 }]
1543 ),
1544 (
1545 buffer2.clone(),
1546 vec![HunkStatus {
1547 range: Point::new(0, 0)..Point::new(0, 0),
1548 diff_status: DiffHunkStatusKind::Deleted,
1549 old_text: "ipsum\n".into(),
1550 }],
1551 )
1552 ]
1553 );
1554
1555 // Simulate file1 being recreated externally.
1556 fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
1557 .await;
1558
1559 // Simulate file2 being recreated by a tool.
1560 let buffer2 = project
1561 .update(cx, |project, cx| project.open_buffer(file2_path, cx))
1562 .await
1563 .unwrap();
1564 action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
1565 buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
1566 action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
1567 project
1568 .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
1569 .await
1570 .unwrap();
1571
1572 cx.run_until_parked();
1573 assert_eq!(
1574 unreviewed_hunks(&action_log, cx),
1575 vec![(
1576 buffer2.clone(),
1577 vec![HunkStatus {
1578 range: Point::new(0, 0)..Point::new(0, 5),
1579 diff_status: DiffHunkStatusKind::Added,
1580 old_text: "".into(),
1581 }],
1582 )]
1583 );
1584
1585 // Simulate file2 being deleted externally.
1586 fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
1587 .await
1588 .unwrap();
1589 cx.run_until_parked();
1590 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1591 }
1592
1593 #[gpui::test(iterations = 10)]
1594 async fn test_reject_edits(cx: &mut TestAppContext) {
1595 init_test(cx);
1596
1597 let fs = FakeFs::new(cx.executor());
1598 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1599 .await;
1600 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1601 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1602 let file_path = project
1603 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1604 .unwrap();
1605 let buffer = project
1606 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1607 .await
1608 .unwrap();
1609
1610 cx.update(|cx| {
1611 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1612 buffer.update(cx, |buffer, cx| {
1613 buffer
1614 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1615 .unwrap()
1616 });
1617 buffer.update(cx, |buffer, cx| {
1618 buffer
1619 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1620 .unwrap()
1621 });
1622 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1623 });
1624 cx.run_until_parked();
1625 assert_eq!(
1626 buffer.read_with(cx, |buffer, _| buffer.text()),
1627 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1628 );
1629 assert_eq!(
1630 unreviewed_hunks(&action_log, cx),
1631 vec![(
1632 buffer.clone(),
1633 vec![
1634 HunkStatus {
1635 range: Point::new(1, 0)..Point::new(3, 0),
1636 diff_status: DiffHunkStatusKind::Modified,
1637 old_text: "def\n".into(),
1638 },
1639 HunkStatus {
1640 range: Point::new(5, 0)..Point::new(5, 3),
1641 diff_status: DiffHunkStatusKind::Modified,
1642 old_text: "mno".into(),
1643 }
1644 ],
1645 )]
1646 );
1647
1648 // If the rejected range doesn't overlap with any hunk, we ignore it.
1649 action_log
1650 .update(cx, |log, cx| {
1651 log.reject_edits_in_ranges(
1652 buffer.clone(),
1653 vec![Point::new(4, 0)..Point::new(4, 0)],
1654 None,
1655 cx,
1656 )
1657 })
1658 .await
1659 .unwrap();
1660 cx.run_until_parked();
1661 assert_eq!(
1662 buffer.read_with(cx, |buffer, _| buffer.text()),
1663 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1664 );
1665 assert_eq!(
1666 unreviewed_hunks(&action_log, cx),
1667 vec![(
1668 buffer.clone(),
1669 vec![
1670 HunkStatus {
1671 range: Point::new(1, 0)..Point::new(3, 0),
1672 diff_status: DiffHunkStatusKind::Modified,
1673 old_text: "def\n".into(),
1674 },
1675 HunkStatus {
1676 range: Point::new(5, 0)..Point::new(5, 3),
1677 diff_status: DiffHunkStatusKind::Modified,
1678 old_text: "mno".into(),
1679 }
1680 ],
1681 )]
1682 );
1683
1684 action_log
1685 .update(cx, |log, cx| {
1686 log.reject_edits_in_ranges(
1687 buffer.clone(),
1688 vec![Point::new(0, 0)..Point::new(1, 0)],
1689 None,
1690 cx,
1691 )
1692 })
1693 .await
1694 .unwrap();
1695 cx.run_until_parked();
1696 assert_eq!(
1697 buffer.read_with(cx, |buffer, _| buffer.text()),
1698 "abc\ndef\nghi\njkl\nmnO"
1699 );
1700 assert_eq!(
1701 unreviewed_hunks(&action_log, cx),
1702 vec![(
1703 buffer.clone(),
1704 vec![HunkStatus {
1705 range: Point::new(4, 0)..Point::new(4, 3),
1706 diff_status: DiffHunkStatusKind::Modified,
1707 old_text: "mno".into(),
1708 }],
1709 )]
1710 );
1711
1712 action_log
1713 .update(cx, |log, cx| {
1714 log.reject_edits_in_ranges(
1715 buffer.clone(),
1716 vec![Point::new(4, 0)..Point::new(4, 0)],
1717 None,
1718 cx,
1719 )
1720 })
1721 .await
1722 .unwrap();
1723 cx.run_until_parked();
1724 assert_eq!(
1725 buffer.read_with(cx, |buffer, _| buffer.text()),
1726 "abc\ndef\nghi\njkl\nmno"
1727 );
1728 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1729 }
1730
1731 #[gpui::test(iterations = 10)]
1732 async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
1733 init_test(cx);
1734
1735 let fs = FakeFs::new(cx.executor());
1736 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1737 .await;
1738 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1739 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1740 let file_path = project
1741 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1742 .unwrap();
1743 let buffer = project
1744 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1745 .await
1746 .unwrap();
1747
1748 cx.update(|cx| {
1749 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1750 buffer.update(cx, |buffer, cx| {
1751 buffer
1752 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1753 .unwrap()
1754 });
1755 buffer.update(cx, |buffer, cx| {
1756 buffer
1757 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1758 .unwrap()
1759 });
1760 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1761 });
1762 cx.run_until_parked();
1763 assert_eq!(
1764 buffer.read_with(cx, |buffer, _| buffer.text()),
1765 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1766 );
1767 assert_eq!(
1768 unreviewed_hunks(&action_log, cx),
1769 vec![(
1770 buffer.clone(),
1771 vec![
1772 HunkStatus {
1773 range: Point::new(1, 0)..Point::new(3, 0),
1774 diff_status: DiffHunkStatusKind::Modified,
1775 old_text: "def\n".into(),
1776 },
1777 HunkStatus {
1778 range: Point::new(5, 0)..Point::new(5, 3),
1779 diff_status: DiffHunkStatusKind::Modified,
1780 old_text: "mno".into(),
1781 }
1782 ],
1783 )]
1784 );
1785
1786 action_log.update(cx, |log, cx| {
1787 let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
1788 ..buffer.read(cx).anchor_before(Point::new(1, 0));
1789 let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
1790 ..buffer.read(cx).anchor_before(Point::new(5, 3));
1791
1792 log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], None, cx)
1793 .detach();
1794 assert_eq!(
1795 buffer.read_with(cx, |buffer, _| buffer.text()),
1796 "abc\ndef\nghi\njkl\nmno"
1797 );
1798 });
1799 cx.run_until_parked();
1800 assert_eq!(
1801 buffer.read_with(cx, |buffer, _| buffer.text()),
1802 "abc\ndef\nghi\njkl\nmno"
1803 );
1804 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1805 }
1806
1807 #[gpui::test(iterations = 10)]
1808 async fn test_reject_deleted_file(cx: &mut TestAppContext) {
1809 init_test(cx);
1810
1811 let fs = FakeFs::new(cx.executor());
1812 fs.insert_tree(path!("/dir"), json!({"file": "content"}))
1813 .await;
1814 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1815 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1816 let file_path = project
1817 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1818 .unwrap();
1819 let buffer = project
1820 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
1821 .await
1822 .unwrap();
1823
1824 cx.update(|cx| {
1825 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
1826 });
1827 project
1828 .update(cx, |project, cx| {
1829 project.delete_file(file_path.clone(), false, cx)
1830 })
1831 .unwrap()
1832 .await
1833 .unwrap();
1834 cx.run_until_parked();
1835 assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
1836 assert_eq!(
1837 unreviewed_hunks(&action_log, cx),
1838 vec![(
1839 buffer.clone(),
1840 vec![HunkStatus {
1841 range: Point::new(0, 0)..Point::new(0, 0),
1842 diff_status: DiffHunkStatusKind::Deleted,
1843 old_text: "content".into(),
1844 }]
1845 )]
1846 );
1847
1848 action_log
1849 .update(cx, |log, cx| {
1850 log.reject_edits_in_ranges(
1851 buffer.clone(),
1852 vec![Point::new(0, 0)..Point::new(0, 0)],
1853 None,
1854 cx,
1855 )
1856 })
1857 .await
1858 .unwrap();
1859 cx.run_until_parked();
1860 assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
1861 assert!(fs.is_file(path!("/dir/file").as_ref()).await);
1862 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1863 }
1864
1865 #[gpui::test(iterations = 10)]
1866 async fn test_reject_created_file(cx: &mut TestAppContext) {
1867 init_test(cx);
1868
1869 let fs = FakeFs::new(cx.executor());
1870 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1871 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1872 let file_path = project
1873 .read_with(cx, |project, cx| {
1874 project.find_project_path("dir/new_file", cx)
1875 })
1876 .unwrap();
1877 let buffer = project
1878 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1879 .await
1880 .unwrap();
1881 cx.update(|cx| {
1882 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1883 buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
1884 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1885 });
1886 project
1887 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1888 .await
1889 .unwrap();
1890 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
1891 cx.run_until_parked();
1892 assert_eq!(
1893 unreviewed_hunks(&action_log, cx),
1894 vec![(
1895 buffer.clone(),
1896 vec![HunkStatus {
1897 range: Point::new(0, 0)..Point::new(0, 7),
1898 diff_status: DiffHunkStatusKind::Added,
1899 old_text: "".into(),
1900 }],
1901 )]
1902 );
1903
1904 action_log
1905 .update(cx, |log, cx| {
1906 log.reject_edits_in_ranges(
1907 buffer.clone(),
1908 vec![Point::new(0, 0)..Point::new(0, 11)],
1909 None,
1910 cx,
1911 )
1912 })
1913 .await
1914 .unwrap();
1915 cx.run_until_parked();
1916 assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
1917 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1918 }
1919
1920 #[gpui::test]
1921 async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
1922 init_test(cx);
1923
1924 let fs = FakeFs::new(cx.executor());
1925 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1926 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1927
1928 let file_path = project
1929 .read_with(cx, |project, cx| {
1930 project.find_project_path("dir/new_file", cx)
1931 })
1932 .unwrap();
1933 let buffer = project
1934 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1935 .await
1936 .unwrap();
1937
1938 // AI creates file with initial content
1939 cx.update(|cx| {
1940 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1941 buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
1942 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1943 });
1944
1945 project
1946 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1947 .await
1948 .unwrap();
1949
1950 cx.run_until_parked();
1951
1952 // User makes additional edits
1953 cx.update(|cx| {
1954 buffer.update(cx, |buffer, cx| {
1955 buffer.edit([(10..10, "\nuser added this line")], None, cx);
1956 });
1957 });
1958
1959 project
1960 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1961 .await
1962 .unwrap();
1963
1964 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
1965
1966 // Reject all
1967 action_log
1968 .update(cx, |log, cx| {
1969 log.reject_edits_in_ranges(
1970 buffer.clone(),
1971 vec![Point::new(0, 0)..Point::new(100, 0)],
1972 None,
1973 cx,
1974 )
1975 })
1976 .await
1977 .unwrap();
1978 cx.run_until_parked();
1979
        // Because the user edited the buffer after the agent created it, rejecting must
        // keep the file and preserve both the agent's and the user's content.
1981 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
1982
1983 let content = buffer.read_with(cx, |buffer, _| buffer.text());
1984 assert_eq!(content, "ai content\nuser added this line");
1985 }
1986
1987 #[gpui::test]
1988 async fn test_reject_after_accepting_hunk_on_created_file(cx: &mut TestAppContext) {
1989 init_test(cx);
1990
1991 let fs = FakeFs::new(cx.executor());
1992 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1993 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1994
1995 let file_path = project
1996 .read_with(cx, |project, cx| {
1997 project.find_project_path("dir/new_file", cx)
1998 })
1999 .unwrap();
2000 let buffer = project
2001 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
2002 .await
2003 .unwrap();
2004
2005 // AI creates file with initial content
2006 cx.update(|cx| {
2007 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2008 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
2009 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2010 });
2011 project
2012 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2013 .await
2014 .unwrap();
2015 cx.run_until_parked();
2016 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2017
2018 // User accepts the single hunk
2019 action_log.update(cx, |log, cx| {
2020 let buffer_range = Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id());
2021 log.keep_edits_in_range(buffer.clone(), buffer_range, None, cx)
2022 });
2023 cx.run_until_parked();
2024 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2025 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2026
2027 // AI modifies the file
2028 cx.update(|cx| {
2029 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
2030 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2031 });
2032 project
2033 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2034 .await
2035 .unwrap();
2036 cx.run_until_parked();
2037 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2038
2039 // User rejects the hunk
2040 action_log
2041 .update(cx, |log, cx| {
2042 log.reject_edits_in_ranges(
2043 buffer.clone(),
2044 vec![Anchor::min_max_range_for_buffer(
2045 buffer.read(cx).remote_id(),
2046 )],
2047 None,
2048 cx,
2049 )
2050 })
2051 .await
2052 .unwrap();
2053 cx.run_until_parked();
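        // Rejecting reverts the buffer to the previously accepted state (v1); the file itself
        // remains on disk because its creation was already accepted.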
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2055 assert_eq!(
2056 buffer.read_with(cx, |buffer, _| buffer.text()),
2057 "ai content v1"
2058 );
2059 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2060 }
2061
2062 #[gpui::test]
2063 async fn test_reject_edits_on_previously_accepted_created_file(cx: &mut TestAppContext) {
2064 init_test(cx);
2065
2066 let fs = FakeFs::new(cx.executor());
2067 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2068 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2069
2070 let file_path = project
2071 .read_with(cx, |project, cx| {
2072 project.find_project_path("dir/new_file", cx)
2073 })
2074 .unwrap();
2075 let buffer = project
2076 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
2077 .await
2078 .unwrap();
2079
2080 // AI creates file with initial content
2081 cx.update(|cx| {
2082 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2083 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
2084 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2085 });
2086 project
2087 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2088 .await
2089 .unwrap();
2090 cx.run_until_parked();
2091
2092 // User clicks "Accept All"
2093 action_log.update(cx, |log, cx| log.keep_all_edits(None, cx));
2094 cx.run_until_parked();
2095 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2096 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared
2097
2098 // AI modifies file again
2099 cx.update(|cx| {
2100 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
2101 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2102 });
2103 project
2104 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2105 .await
2106 .unwrap();
2107 cx.run_until_parked();
2108 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2109
2110 // User clicks "Reject All"
2111 action_log
2112 .update(cx, |log, cx| log.reject_all_edits(None, cx))
2113 .await;
2114 cx.run_until_parked();
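        // Only the unaccepted v2 edit is reverted; the accepted v1 creation stays on disk.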
2115 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2116 assert_eq!(
2117 buffer.read_with(cx, |buffer, _| buffer.text()),
2118 "ai content v1"
2119 );
2120 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2121 }
2122
2123 #[gpui::test(iterations = 100)]
2124 async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
2125 init_test(cx);
2126
2127 let operations = env::var("OPERATIONS")
2128 .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
2129 .unwrap_or(20);
2130
2131 let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
2132 let fs = FakeFs::new(cx.executor());
2133 fs.insert_tree(path!("/dir"), json!({"file": text})).await;
2134 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2135 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2136 let file_path = project
2137 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
2138 .unwrap();
2139 let buffer = project
2140 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2141 .await
2142 .unwrap();
2143
2144 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
2145
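        // Randomly interleave keep/reject operations with agent and user edits, periodically
        // checking that the tracked diff stays consistent with the buffer.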
2146 for _ in 0..operations {
2147 match rng.random_range(0..100) {
2148 0..25 => {
2149 action_log.update(cx, |log, cx| {
2150 let range = buffer.read(cx).random_byte_range(0, &mut rng);
2151 log::info!("keeping edits in range {:?}", range);
2152 log.keep_edits_in_range(buffer.clone(), range, None, cx)
2153 });
2154 }
2155 25..50 => {
2156 action_log
2157 .update(cx, |log, cx| {
2158 let range = buffer.read(cx).random_byte_range(0, &mut rng);
2159 log::info!("rejecting edits in range {:?}", range);
2160 log.reject_edits_in_ranges(buffer.clone(), vec![range], None, cx)
2161 })
2162 .await
2163 .unwrap();
2164 }
2165 _ => {
2166 let is_agent_edit = rng.random_bool(0.5);
2167 if is_agent_edit {
2168 log::info!("agent edit");
2169 } else {
2170 log::info!("user edit");
2171 }
2172 cx.update(|cx| {
2173 buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
2174 if is_agent_edit {
2175 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2176 }
2177 });
2178 }
2179 }
2180
2181 if rng.random_bool(0.2) {
2182 quiesce(&action_log, &buffer, cx);
2183 }
2184 }
2185
2186 quiesce(&action_log, &buffer, cx);
2187
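        // Waits for diff maintenance to settle, then checks that replaying the unreviewed
        // edits onto the diff base reproduces the buffer's current text.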
2188 fn quiesce(
2189 action_log: &Entity<ActionLog>,
2190 buffer: &Entity<Buffer>,
2191 cx: &mut TestAppContext,
2192 ) {
2193 log::info!("quiescing...");
2194 cx.run_until_parked();
2195 action_log.update(cx, |log, cx| {
2196 let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
2197 let mut old_text = tracked_buffer.diff_base.clone();
2198 let new_text = buffer.read(cx).as_rope();
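                // Applying the edits in order keeps `old_text` aligned with the new coordinate
                // space up to each edit, so `edit.new` rows can index into `old_text` directly.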
2199 for edit in tracked_buffer.unreviewed_edits.edits() {
2200 let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
2201 let old_end = old_text.point_to_offset(cmp::min(
2202 Point::new(edit.new.start + edit.old_len(), 0),
2203 old_text.max_point(),
2204 ));
2205 old_text.replace(
2206 old_start..old_end,
2207 &new_text.slice_rows(edit.new.clone()).to_string(),
2208 );
2209 }
2210 pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
2211 })
2212 }
2213 }
2214
2215 #[gpui::test]
2216 async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
2217 init_test(cx);
2218
2219 let fs = FakeFs::new(cx.background_executor.clone());
2220 fs.insert_tree(
2221 path!("/project"),
2222 json!({
2223 ".git": {},
2224 "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
2225 }),
2226 )
2227 .await;
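        // The initial commit matches the file on disk; later commits will selectively
        // incorporate the agent's edits.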
2228 fs.set_head_for_repo(
2229 path!("/project/.git").as_ref(),
2230 &[("file.txt", "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
2231 "0000000",
2232 );
2233 cx.run_until_parked();
2234
2235 let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
2236 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2237
2238 let file_path = project
2239 .read_with(cx, |project, cx| {
2240 project.find_project_path(path!("/project/file.txt"), cx)
2241 })
2242 .unwrap();
2243 let buffer = project
2244 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2245 .await
2246 .unwrap();
2247
2248 cx.update(|cx| {
2249 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
2250 buffer.update(cx, |buffer, cx| {
2251 buffer.edit(
2252 [
2253 // Edit at the very start: a -> A
2254 (Point::new(0, 0)..Point::new(0, 1), "A"),
2255 // Deletion in the middle: remove lines d and e
2256 (Point::new(3, 0)..Point::new(5, 0), ""),
2257 // Modification: g -> GGG
2258 (Point::new(6, 0)..Point::new(6, 1), "GGG"),
2259 // Addition: insert new line after h
2260 (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
2261 // Edit the very last character: j -> J
2262 (Point::new(9, 0)..Point::new(9, 1), "J"),
2263 ],
2264 None,
2265 cx,
2266 );
2267 });
2268 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2269 });
2270 cx.run_until_parked();
2271 assert_eq!(
2272 unreviewed_hunks(&action_log, cx),
2273 vec![(
2274 buffer.clone(),
2275 vec![
2276 HunkStatus {
2277 range: Point::new(0, 0)..Point::new(1, 0),
2278 diff_status: DiffHunkStatusKind::Modified,
2279 old_text: "a\n".into()
2280 },
2281 HunkStatus {
2282 range: Point::new(3, 0)..Point::new(3, 0),
2283 diff_status: DiffHunkStatusKind::Deleted,
2284 old_text: "d\ne\n".into()
2285 },
2286 HunkStatus {
2287 range: Point::new(4, 0)..Point::new(5, 0),
2288 diff_status: DiffHunkStatusKind::Modified,
2289 old_text: "g\n".into()
2290 },
2291 HunkStatus {
2292 range: Point::new(6, 0)..Point::new(7, 0),
2293 diff_status: DiffHunkStatusKind::Added,
2294 old_text: "".into()
2295 },
2296 HunkStatus {
2297 range: Point::new(8, 0)..Point::new(8, 1),
2298 diff_status: DiffHunkStatusKind::Modified,
2299 old_text: "j".into()
2300 }
2301 ]
2302 )]
2303 );
2304
2305 // Simulate a git commit that matches some edits but not others:
2306 // - Accepts the first edit (a -> A)
2307 // - Accepts the deletion (remove d and e)
2308 // - Makes a different change to g (g -> G instead of GGG)
2309 // - Ignores the NEW line addition
2310 // - Ignores the last line edit (j stays as j)
2311 fs.set_head_for_repo(
2312 path!("/project/.git").as_ref(),
2313 &[("file.txt", "A\nb\nc\nf\nG\nh\ni\nj".into())],
2314 "0000001",
2315 );
2316 cx.run_until_parked();
2317 assert_eq!(
2318 unreviewed_hunks(&action_log, cx),
2319 vec![(
2320 buffer.clone(),
2321 vec![
2322 HunkStatus {
2323 range: Point::new(4, 0)..Point::new(5, 0),
2324 diff_status: DiffHunkStatusKind::Modified,
2325 old_text: "g\n".into()
2326 },
2327 HunkStatus {
2328 range: Point::new(6, 0)..Point::new(7, 0),
2329 diff_status: DiffHunkStatusKind::Added,
2330 old_text: "".into()
2331 },
2332 HunkStatus {
2333 range: Point::new(8, 0)..Point::new(8, 1),
2334 diff_status: DiffHunkStatusKind::Modified,
2335 old_text: "j".into()
2336 }
2337 ]
2338 )]
2339 );
2340
2341 // Make another commit that accepts the NEW line but with different content
2342 fs.set_head_for_repo(
2343 path!("/project/.git").as_ref(),
2344 &[("file.txt", "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into())],
2345 "0000002",
2346 );
2347 cx.run_until_parked();
2348 assert_eq!(
2349 unreviewed_hunks(&action_log, cx),
2350 vec![(
2351 buffer,
2352 vec![
2353 HunkStatus {
2354 range: Point::new(6, 0)..Point::new(7, 0),
2355 diff_status: DiffHunkStatusKind::Added,
2356 old_text: "".into()
2357 },
2358 HunkStatus {
2359 range: Point::new(8, 0)..Point::new(8, 1),
2360 diff_status: DiffHunkStatusKind::Modified,
2361 old_text: "j".into()
2362 }
2363 ]
2364 )]
2365 );
2366
2367 // Final commit that accepts all remaining edits
2368 fs.set_head_for_repo(
2369 path!("/project/.git").as_ref(),
2370 &[("file.txt", "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
2371 "0000003",
2372 );
2373 cx.run_until_parked();
2374 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2375 }
2376
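    // A simplified view of a diff hunk used in assertions: its range in the buffer,
    // the kind of change, and the text it replaced.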
2377 #[derive(Debug, Clone, PartialEq, Eq)]
2378 struct HunkStatus {
2379 range: Range<Point>,
2380 diff_status: DiffHunkStatusKind,
2381 old_text: String,
2382 }
2383
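    // Collects every changed buffer reported by the action log together with its
    // unreviewed hunks, in buffer order.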
2384 fn unreviewed_hunks(
2385 action_log: &Entity<ActionLog>,
2386 cx: &TestAppContext,
2387 ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
2388 cx.read(|cx| {
2389 action_log
2390 .read(cx)
2391 .changed_buffers(cx)
2392 .into_iter()
2393 .map(|(buffer, diff)| {
2394 let snapshot = buffer.read(cx).snapshot();
2395 (
2396 buffer,
2397 diff.read(cx)
2398 .snapshot(cx)
2399 .hunks(&snapshot)
2400 .map(|hunk| HunkStatus {
2401 diff_status: hunk.status().kind,
2402 range: hunk.range,
2403 old_text: diff
2404 .read(cx)
2405 .base_text(cx)
2406 .text_for_range(hunk.diff_base_byte_range)
2407 .collect(),
2408 })
2409 .collect(),
2410 )
2411 })
2412 .collect()
2413 })
2414 }
2415}