1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use clock;
4use collections::BTreeMap;
5use futures::{FutureExt, StreamExt, channel::mpsc};
6use gpui::{
7 App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
8};
9use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
10use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
11use std::{cmp, ops::Range, sync::Arc};
12use text::{Edit, Patch, Rope};
13use util::{RangeExt, ResultExt as _};
14
15/// Tracks actions performed by tools in a thread
16pub struct ActionLog {
17 /// Buffers that we want to notify the model about when they change.
18 tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
19 /// The project this action log is associated with
20 project: Entity<Project>,
21}
22
23impl ActionLog {
24 /// Creates a new, empty action log associated with the given project.
25 pub fn new(project: Entity<Project>) -> Self {
26 Self {
27 tracked_buffers: BTreeMap::default(),
28 project,
29 }
30 }
31
32 pub fn project(&self) -> &Entity<Project> {
33 &self.project
34 }
35
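    /// Starts tracking `buffer` (or refreshes an existing entry): registers it with
    /// language servers, creates its diff, and spawns the task that keeps that diff
    /// up to date. When `is_created` is true, the buffer is diffed against an empty
    /// base and any pre-existing file content is remembered so a rejected creation
    /// can restore it.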
36 fn track_buffer_internal(
37 &mut self,
38 buffer: Entity<Buffer>,
39 is_created: bool,
40 cx: &mut Context<Self>,
41 ) -> &mut TrackedBuffer {
42 let status = if is_created {
43 if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
44 match tracked.status {
45 TrackedBufferStatus::Created {
46 existing_file_content,
47 } => TrackedBufferStatus::Created {
48 existing_file_content,
49 },
50 TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
51 TrackedBufferStatus::Created {
52 existing_file_content: Some(tracked.diff_base),
53 }
54 }
55 }
56 } else if buffer
57 .read(cx)
58 .file()
59 .is_some_and(|file| file.disk_state().exists())
60 {
61 TrackedBufferStatus::Created {
62 existing_file_content: Some(buffer.read(cx).as_rope().clone()),
63 }
64 } else {
65 TrackedBufferStatus::Created {
66 existing_file_content: None,
67 }
68 }
69 } else {
70 TrackedBufferStatus::Modified
71 };
72
73 let tracked_buffer = self
74 .tracked_buffers
75 .entry(buffer.clone())
76 .or_insert_with(|| {
77 let open_lsp_handle = self.project.update(cx, |project, cx| {
78 project.register_buffer_with_language_servers(&buffer, cx)
79 });
80
81 let text_snapshot = buffer.read(cx).text_snapshot();
82 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
83 let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
84 let diff_base;
85 let unreviewed_edits;
86 if is_created {
87 diff_base = Rope::default();
88 unreviewed_edits = Patch::new(vec![Edit {
89 old: 0..1,
90 new: 0..text_snapshot.max_point().row + 1,
91 }]);
92 } else {
93 diff_base = buffer.read(cx).as_rope().clone();
94 unreviewed_edits = Patch::default();
95 }
96 TrackedBuffer {
97 buffer: buffer.clone(),
98 diff_base,
99 unreviewed_edits,
100 snapshot: text_snapshot,
101 status,
102 version: buffer.read(cx).version(),
103 diff,
104 diff_update: diff_update_tx,
105 _open_lsp_handle: open_lsp_handle,
106 _maintain_diff: cx.spawn({
107 let buffer = buffer.clone();
108 async move |this, cx| {
109 Self::maintain_diff(this, buffer, diff_update_rx, cx)
110 .await
111 .ok();
112 }
113 }),
114 _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
115 }
116 });
117 tracked_buffer.version = buffer.read(cx).version();
118 tracked_buffer
119 }
120
121 fn handle_buffer_event(
122 &mut self,
123 buffer: Entity<Buffer>,
124 event: &BufferEvent,
125 cx: &mut Context<Self>,
126 ) {
127 match event {
128 BufferEvent::Edited => self.handle_buffer_edited(buffer, cx),
129 BufferEvent::FileHandleChanged => {
130 self.handle_buffer_file_changed(buffer, cx);
131 }
132 _ => {}
133 };
134 }
135
136 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
137 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
138 return;
139 };
140 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
141 }
142
143 fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
144 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
145 return;
146 };
147
148 match tracked_buffer.status {
149 TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
150 if buffer
151 .read(cx)
152 .file()
153 .is_some_and(|file| file.disk_state() == DiskState::Deleted)
154 {
155 // If the buffer had been edited by a tool, but it got
156 // deleted externally, we want to stop tracking it.
157 self.tracked_buffers.remove(&buffer);
158 }
159 cx.notify();
160 }
161 TrackedBufferStatus::Deleted => {
162 if buffer
163 .read(cx)
164 .file()
165 .is_some_and(|file| file.disk_state() != DiskState::Deleted)
166 {
167 // If the buffer had been deleted by a tool, but it got
168 // resurrected externally, we want to clear the edits we
169 // were tracking and reset the buffer's state.
170 self.tracked_buffers.remove(&buffer);
171 self.track_buffer_internal(buffer, false, cx);
172 }
173 cx.notify();
174 }
175 }
176 }
177
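    /// Per-buffer background task: waits for new buffer snapshots (sent via
    /// `schedule_diff_update`) and for changes to the repository's HEAD commit,
    /// recomputing the agent diff in response. Finishes once the buffer is no
    /// longer tracked and its update channel is dropped.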
178 async fn maintain_diff(
179 this: WeakEntity<Self>,
180 buffer: Entity<Buffer>,
181 mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
182 cx: &mut AsyncApp,
183 ) -> Result<()> {
184 let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
185 let git_diff = this
186 .update(cx, |this, cx| {
187 this.project.update(cx, |project, cx| {
188 project.open_uncommitted_diff(buffer.clone(), cx)
189 })
190 })?
191 .await
192 .ok();
193 let buffer_repo = git_store.read_with(cx, |git_store, cx| {
194 git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
195 })?;
196
197 let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
198 let _repo_subscription =
199 if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
200 cx.update(|cx| {
201 let mut old_head = buffer_repo.read(cx).head_commit.clone();
202 Some(cx.subscribe(git_diff, move |_, event, cx| {
203 if let buffer_diff::BufferDiffEvent::DiffChanged { .. } = event {
204 let new_head = buffer_repo.read(cx).head_commit.clone();
205 if new_head != old_head {
206 old_head = new_head;
207 git_diff_updates_tx.send(()).ok();
208 }
209 }
210 }))
211 })?
212 } else {
213 None
214 };
215
216 loop {
217 futures::select_biased! {
218 buffer_update = buffer_updates.next() => {
219 if let Some((author, buffer_snapshot)) = buffer_update {
220 Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
221 } else {
222 break;
223 }
224 }
225 _ = git_diff_updates_rx.changed().fuse() => {
226 if let Some(git_diff) = git_diff.as_ref() {
227 Self::keep_committed_edits(&this, &buffer, git_diff, cx).await?;
228 }
229 }
230 }
231 }
232
233 Ok(())
234 }
235
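    /// Processes a new snapshot for a tracked buffer. Edits made by the user are
    /// rebased into the diff base so they don't show up as unreviewed agent edits,
    /// and then the visible diff is recomputed against the updated base text.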
236 async fn track_edits(
237 this: &WeakEntity<ActionLog>,
238 buffer: &Entity<Buffer>,
239 author: ChangeAuthor,
240 buffer_snapshot: text::BufferSnapshot,
241 cx: &mut AsyncApp,
242 ) -> Result<()> {
243 let rebase = this.update(cx, |this, cx| {
244 let tracked_buffer = this
245 .tracked_buffers
246 .get_mut(buffer)
247 .context("buffer not tracked")?;
248
249 let rebase = cx.background_spawn({
250 let mut base_text = tracked_buffer.diff_base.clone();
251 let old_snapshot = tracked_buffer.snapshot.clone();
252 let new_snapshot = buffer_snapshot.clone();
253 let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
254 let edits = diff_snapshots(&old_snapshot, &new_snapshot);
255 async move {
256 if let ChangeAuthor::User = author {
257 apply_non_conflicting_edits(
258 &unreviewed_edits,
259 edits,
260 &mut base_text,
261 new_snapshot.as_rope(),
262 );
263 }
264
265 (Arc::new(base_text.to_string()), base_text)
266 }
267 });
268
269 anyhow::Ok(rebase)
270 })??;
271 let (new_base_text, new_diff_base) = rebase.await;
272
273 Self::update_diff(
274 this,
275 buffer,
276 buffer_snapshot,
277 new_base_text,
278 new_diff_base,
279 cx,
280 )
281 .await
282 }
283
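    /// Called when the repository's HEAD changes. Unreviewed edits whose content
    /// now matches what was committed are folded into the diff base, effectively
    /// treating them as accepted by the user.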
284 async fn keep_committed_edits(
285 this: &WeakEntity<ActionLog>,
286 buffer: &Entity<Buffer>,
287 git_diff: &Entity<BufferDiff>,
288 cx: &mut AsyncApp,
289 ) -> Result<()> {
290 let buffer_snapshot = this.read_with(cx, |this, _cx| {
291 let tracked_buffer = this
292 .tracked_buffers
293 .get(buffer)
294 .context("buffer not tracked")?;
295 anyhow::Ok(tracked_buffer.snapshot.clone())
296 })??;
297 let (new_base_text, new_diff_base) = this
298 .read_with(cx, |this, cx| {
299 let tracked_buffer = this
300 .tracked_buffers
301 .get(buffer)
302 .context("buffer not tracked")?;
303 let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
304 let agent_diff_base = tracked_buffer.diff_base.clone();
305 let git_diff_base = git_diff.read(cx).base_text().as_rope().clone();
306 let buffer_text = tracked_buffer.snapshot.as_rope().clone();
307 anyhow::Ok(cx.background_spawn(async move {
308 let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
309 let committed_edits = language::line_diff(
310 &agent_diff_base.to_string(),
311 &git_diff_base.to_string(),
312 )
313 .into_iter()
314 .map(|(old, new)| Edit { old, new });
315
316 let mut new_agent_diff_base = agent_diff_base.clone();
317 let mut row_delta = 0i32;
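                    // `row_delta` tracks how many rows earlier replacements have added
                    // to (or removed from) `new_agent_diff_base`, so later unreviewed
                    // edits can be mapped onto the right rows.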
318 for committed in committed_edits {
319 while let Some(unreviewed) = old_unreviewed_edits.peek() {
320 // If the committed edit matches the unreviewed
321 // edit, assume the user wants to keep it.
322 if committed.old == unreviewed.old {
323 let unreviewed_new =
324 buffer_text.slice_rows(unreviewed.new.clone()).to_string();
325 let committed_new =
326 git_diff_base.slice_rows(committed.new.clone()).to_string();
327 if unreviewed_new == committed_new {
328 let old_byte_start =
329 new_agent_diff_base.point_to_offset(Point::new(
330 (unreviewed.old.start as i32 + row_delta) as u32,
331 0,
332 ));
333 let old_byte_end =
334 new_agent_diff_base.point_to_offset(cmp::min(
335 Point::new(
336 (unreviewed.old.end as i32 + row_delta) as u32,
337 0,
338 ),
339 new_agent_diff_base.max_point(),
340 ));
341 new_agent_diff_base
342 .replace(old_byte_start..old_byte_end, &unreviewed_new);
343 row_delta +=
344 unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
345 }
346 } else if unreviewed.old.start >= committed.old.end {
347 break;
348 }
349
350 old_unreviewed_edits.next().unwrap();
351 }
352 }
353
354 (
355 Arc::new(new_agent_diff_base.to_string()),
356 new_agent_diff_base,
357 )
358 }))
359 })??
360 .await;
361
362 Self::update_diff(
363 this,
364 buffer,
365 buffer_snapshot,
366 new_base_text,
367 new_diff_base,
368 cx,
369 )
370 .await
371 }
372
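    /// Recomputes the `BufferDiff` for a tracked buffer against `new_base_text`,
    /// derives the unreviewed row edits from the resulting hunks, and stores the
    /// new base, snapshot, and edits back on the tracked buffer.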
373 async fn update_diff(
374 this: &WeakEntity<ActionLog>,
375 buffer: &Entity<Buffer>,
376 buffer_snapshot: text::BufferSnapshot,
377 new_base_text: Arc<String>,
378 new_diff_base: Rope,
379 cx: &mut AsyncApp,
380 ) -> Result<()> {
381 let (diff, language, language_registry) = this.read_with(cx, |this, cx| {
382 let tracked_buffer = this
383 .tracked_buffers
384 .get(buffer)
385 .context("buffer not tracked")?;
386 anyhow::Ok((
387 tracked_buffer.diff.clone(),
388 buffer.read(cx).language().cloned(),
389 buffer.read(cx).language_registry(),
390 ))
391 })??;
392 let diff_snapshot = BufferDiff::update_diff(
393 diff.clone(),
394 buffer_snapshot.clone(),
395 Some(new_base_text),
396 true,
397 false,
398 language,
399 language_registry,
400 cx,
401 )
402 .await;
403 let mut unreviewed_edits = Patch::default();
404 if let Ok(diff_snapshot) = diff_snapshot {
405 unreviewed_edits = cx
406 .background_spawn({
407 let diff_snapshot = diff_snapshot.clone();
408 let buffer_snapshot = buffer_snapshot.clone();
409 let new_diff_base = new_diff_base.clone();
410 async move {
411 let mut unreviewed_edits = Patch::default();
412 for hunk in diff_snapshot.hunks_intersecting_range(
413 Anchor::min_for_buffer(buffer_snapshot.remote_id())
414 ..Anchor::max_for_buffer(buffer_snapshot.remote_id()),
415 &buffer_snapshot,
416 ) {
417 let old_range = new_diff_base
418 .offset_to_point(hunk.diff_base_byte_range.start)
419 ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
420 let new_range = hunk.range.start..hunk.range.end;
421 unreviewed_edits.push(point_to_row_edit(
422 Edit {
423 old: old_range,
424 new: new_range,
425 },
426 &new_diff_base,
427 buffer_snapshot.as_rope(),
428 ));
429 }
430 unreviewed_edits
431 }
432 })
433 .await;
434
435 diff.update(cx, |diff, cx| {
436 diff.set_snapshot(diff_snapshot, &buffer_snapshot, cx);
437 })?;
438 }
439 this.update(cx, |this, cx| {
440 let tracked_buffer = this
441 .tracked_buffers
442 .get_mut(buffer)
443 .context("buffer not tracked")?;
444 tracked_buffer.diff_base = new_diff_base;
445 tracked_buffer.snapshot = buffer_snapshot;
446 tracked_buffer.unreviewed_edits = unreviewed_edits;
447 cx.notify();
448 anyhow::Ok(())
449 })?
450 }
451
452 /// Tracks a buffer as read by the agent, so we can notify the model about user edits.
453 pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
454 self.track_buffer_internal(buffer, false, cx);
455 }
456
457 /// Marks a buffer as created by the agent, so we can refresh it in the context.
458 pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
459 self.track_buffer_internal(buffer, true, cx);
460 }
461
462 /// Marks a buffer as edited by the agent, so we can refresh it in the context.
463 pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
464 let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
465 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
466 tracked_buffer.status = TrackedBufferStatus::Modified;
467 }
468 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
469 }
470
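    /// Called before the agent deletes a file. Buffers the agent itself created are
    /// simply dropped from tracking; otherwise the buffer is emptied and marked as
    /// deleted so the deletion shows up as an unreviewed change.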
471 pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
472 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
473 match tracked_buffer.status {
474 TrackedBufferStatus::Created { .. } => {
475 self.tracked_buffers.remove(&buffer);
476 cx.notify();
477 }
478 TrackedBufferStatus::Modified => {
479 buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
480 tracked_buffer.status = TrackedBufferStatus::Deleted;
481 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
482 }
483 TrackedBufferStatus::Deleted => {}
484 }
485 cx.notify();
486 }
487
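    /// Accepts ("keeps") the unreviewed edits that intersect `buffer_range`, folding
    /// them into the diff base so they no longer appear as pending changes. A buffer
    /// whose deletion is kept stops being tracked entirely.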
488 pub fn keep_edits_in_range(
489 &mut self,
490 buffer: Entity<Buffer>,
491 buffer_range: Range<impl language::ToPoint>,
492 telemetry: Option<ActionLogTelemetry>,
493 cx: &mut Context<Self>,
494 ) {
495 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
496 return;
497 };
498
499 let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
500 match tracked_buffer.status {
501 TrackedBufferStatus::Deleted => {
502 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
503 self.tracked_buffers.remove(&buffer);
504 cx.notify();
505 }
506 _ => {
507 let buffer = buffer.read(cx);
508 let buffer_range =
509 buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
510 let mut delta = 0i32;
511 tracked_buffer.unreviewed_edits.retain_mut(|edit| {
512 edit.old.start = (edit.old.start as i32 + delta) as u32;
513 edit.old.end = (edit.old.end as i32 + delta) as u32;
514
515 if buffer_range.end.row < edit.new.start
516 || buffer_range.start.row > edit.new.end
517 {
518 true
519 } else {
520 let old_range = tracked_buffer
521 .diff_base
522 .point_to_offset(Point::new(edit.old.start, 0))
523 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
524 Point::new(edit.old.end, 0),
525 tracked_buffer.diff_base.max_point(),
526 ));
527 let new_range = tracked_buffer
528 .snapshot
529 .point_to_offset(Point::new(edit.new.start, 0))
530 ..tracked_buffer.snapshot.point_to_offset(cmp::min(
531 Point::new(edit.new.end, 0),
532 tracked_buffer.snapshot.max_point(),
533 ));
534 tracked_buffer.diff_base.replace(
535 old_range,
536 &tracked_buffer
537 .snapshot
538 .text_for_range(new_range)
539 .collect::<String>(),
540 );
541 delta += edit.new_len() as i32 - edit.old_len() as i32;
542 metrics.add_edit(edit);
543 false
544 }
545 });
546 if tracked_buffer.unreviewed_edits.is_empty()
547 && let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status
548 {
549 tracked_buffer.status = TrackedBufferStatus::Modified;
550 }
551 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
552 }
553 }
554 if let Some(telemetry) = telemetry {
555 telemetry_report_accepted_edits(&telemetry, metrics);
556 }
557 }
558
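    /// Rejects unreviewed edits, restoring the previous content: a created file is
    /// deleted (or restored to the content it overwrote), a deleted file is
    /// re-created from the diff base, and for a modified file only the hunks that
    /// intersect `buffer_ranges` are reverted.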
559 pub fn reject_edits_in_ranges(
560 &mut self,
561 buffer: Entity<Buffer>,
562 buffer_ranges: Vec<Range<impl language::ToPoint>>,
563 telemetry: Option<ActionLogTelemetry>,
564 cx: &mut Context<Self>,
565 ) -> Task<Result<()>> {
566 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
567 return Task::ready(Ok(()));
568 };
569
570 let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
571 let task = match &tracked_buffer.status {
572 TrackedBufferStatus::Created {
573 existing_file_content,
574 } => {
575 let task = if let Some(existing_file_content) = existing_file_content {
576 buffer.update(cx, |buffer, cx| {
577 buffer.start_transaction();
578 buffer.set_text("", cx);
579 for chunk in existing_file_content.chunks() {
580 buffer.append(chunk, cx);
581 }
582 buffer.end_transaction(cx);
583 });
584 self.project
585 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
586 } else {
587 // For a file created by AI with no pre-existing content,
588 // only delete the file if we're certain it contains only AI content
589 // with no edits from the user.
590
591 let initial_version = tracked_buffer.version.clone();
592 let current_version = buffer.read(cx).version();
593
594 let current_content = buffer.read(cx).text();
595 let tracked_content = tracked_buffer.snapshot.text();
596
597 let is_ai_only_content =
598 initial_version == current_version && current_content == tracked_content;
599
600 if is_ai_only_content {
601 buffer
602 .read(cx)
603 .entry_id(cx)
604 .and_then(|entry_id| {
605 self.project.update(cx, |project, cx| {
606 project.delete_entry(entry_id, false, cx)
607 })
608 })
609 .unwrap_or(Task::ready(Ok(())))
610 } else {
611 // Not sure how to disentangle edits made by the user
612 // from edits made by the AI at this point.
613 // For now, preserve both to avoid data loss.
614 //
615 // TODO: Better solution (disable "Reject" after user makes some
616 // edit or find a way to differentiate between AI and user edits)
617 Task::ready(Ok(()))
618 }
619 };
620
621 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
622 self.tracked_buffers.remove(&buffer);
623 cx.notify();
624 task
625 }
626 TrackedBufferStatus::Deleted => {
627 buffer.update(cx, |buffer, cx| {
628 buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
629 });
630 let save = self
631 .project
632 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
633
634 // Clear all tracked edits for this buffer and start over as if we just read it.
635 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
636 self.tracked_buffers.remove(&buffer);
637 self.buffer_read(buffer.clone(), cx);
638 cx.notify();
639 save
640 }
641 TrackedBufferStatus::Modified => {
642 buffer.update(cx, |buffer, cx| {
643 let mut buffer_row_ranges = buffer_ranges
644 .into_iter()
645 .map(|range| {
646 range.start.to_point(buffer).row..range.end.to_point(buffer).row
647 })
648 .peekable();
649
650 let mut edits_to_revert = Vec::new();
651 for edit in tracked_buffer.unreviewed_edits.edits() {
652 let new_range = tracked_buffer
653 .snapshot
654 .anchor_before(Point::new(edit.new.start, 0))
655 ..tracked_buffer.snapshot.anchor_after(cmp::min(
656 Point::new(edit.new.end, 0),
657 tracked_buffer.snapshot.max_point(),
658 ));
659 let new_row_range = new_range.start.to_point(buffer).row
660 ..new_range.end.to_point(buffer).row;
661
662 let mut revert = false;
663 while let Some(buffer_row_range) = buffer_row_ranges.peek() {
664 if buffer_row_range.end < new_row_range.start {
665 buffer_row_ranges.next();
666 } else if buffer_row_range.start > new_row_range.end {
667 break;
668 } else {
669 revert = true;
670 break;
671 }
672 }
673
674 if revert {
675 metrics.add_edit(edit);
676 let old_range = tracked_buffer
677 .diff_base
678 .point_to_offset(Point::new(edit.old.start, 0))
679 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
680 Point::new(edit.old.end, 0),
681 tracked_buffer.diff_base.max_point(),
682 ));
683 let old_text = tracked_buffer
684 .diff_base
685 .chunks_in_range(old_range)
686 .collect::<String>();
687 edits_to_revert.push((new_range, old_text));
688 }
689 }
690
691 buffer.edit(edits_to_revert, None, cx);
692 });
693 self.project
694 .update(cx, |project, cx| project.save_buffer(buffer, cx))
695 }
696 };
697 if let Some(telemetry) = telemetry {
698 telemetry_report_rejected_edits(&telemetry, metrics);
699 }
700 task
701 }
702
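    /// Accepts every unreviewed edit in every tracked buffer: deleted buffers stop
    /// being tracked, and the remaining buffers have their diff base reset to their
    /// current content.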
703 pub fn keep_all_edits(
704 &mut self,
705 telemetry: Option<ActionLogTelemetry>,
706 cx: &mut Context<Self>,
707 ) {
708 self.tracked_buffers.retain(|buffer, tracked_buffer| {
709 let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
710 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
711 if let Some(telemetry) = telemetry.as_ref() {
712 telemetry_report_accepted_edits(telemetry, metrics);
713 }
714 match tracked_buffer.status {
715 TrackedBufferStatus::Deleted => false,
716 _ => {
717 if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
718 tracked_buffer.status = TrackedBufferStatus::Modified;
719 }
720 tracked_buffer.unreviewed_edits.clear();
721 tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
722 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
723 true
724 }
725 }
726 });
727
728 cx.notify();
729 }
730
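    /// Rejects every unreviewed edit by calling `reject_edits_in_ranges` over the
    /// full range of each changed buffer.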
731 pub fn reject_all_edits(
732 &mut self,
733 telemetry: Option<ActionLogTelemetry>,
734 cx: &mut Context<Self>,
735 ) -> Task<()> {
736 let futures = self.changed_buffers(cx).into_keys().map(|buffer| {
737 let buffer_ranges = vec![Anchor::min_max_range_for_buffer(
738 buffer.read(cx).remote_id(),
739 )];
740 let reject = self.reject_edits_in_ranges(buffer, buffer_ranges, telemetry.clone(), cx);
741
742 async move {
743 reject.await.log_err();
744 }
745 });
746
747 let task = futures::future::join_all(futures);
748 cx.background_spawn(async move {
749 task.await;
750 })
751 }
752
753 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
754 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
755 self.tracked_buffers
756 .iter()
757 .filter(|(_, tracked)| tracked.has_edits(cx))
758 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
759 .collect()
760 }
761
762 /// Iterates over the tracked buffers that have changed since the model last read or edited them.
763 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
764 self.tracked_buffers
765 .iter()
766 .filter(|(buffer, tracked)| {
767 let buffer = buffer.read(cx);
768
769 tracked.version != buffer.version
770 && buffer
771 .file()
772 .is_some_and(|file| file.disk_state() != DiskState::Deleted)
773 })
774 .map(|(buffer, _)| buffer)
775 }
776}
777
778#[derive(Clone)]
779pub struct ActionLogTelemetry {
780 pub agent_telemetry_id: &'static str,
781 pub session_id: Arc<str>,
782}
783
784struct ActionLogMetrics {
785 lines_removed: u32,
786 lines_added: u32,
787 language: Option<SharedString>,
788}
789
790impl ActionLogMetrics {
791 fn for_buffer(buffer: &Buffer) -> Self {
792 Self {
793 language: buffer.language().map(|l| l.name().0),
794 lines_removed: 0,
795 lines_added: 0,
796 }
797 }
798
799 fn add_edits(&mut self, edits: &[Edit<u32>]) {
800 for edit in edits {
801 self.add_edit(edit);
802 }
803 }
804
805 fn add_edit(&mut self, edit: &Edit<u32>) {
806 self.lines_added += edit.new_len();
807 self.lines_removed += edit.old_len();
808 }
809}
810
811fn telemetry_report_accepted_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
812 telemetry::event!(
813 "Agent Edits Accepted",
814 agent = telemetry.agent_telemetry_id,
815 session = telemetry.session_id,
816 language = metrics.language,
817 lines_added = metrics.lines_added,
818 lines_removed = metrics.lines_removed
819 );
820}
821
822fn telemetry_report_rejected_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
823 telemetry::event!(
824 "Agent Edits Rejected",
825 agent = telemetry.agent_telemetry_id,
826 session = telemetry.session_id,
827 language = metrics.language,
828 lines_added = metrics.lines_added,
829 lines_removed = metrics.lines_removed
830 );
831}
832
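/// Rebases `edits` (made by the user) onto `old_text`, the agent's diff base,
/// applying only the edits that don't overlap any unreviewed agent edit in `patch`.
/// Returns whether `old_text` was modified.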
833fn apply_non_conflicting_edits(
834 patch: &Patch<u32>,
835 edits: Vec<Edit<u32>>,
836 old_text: &mut Rope,
837 new_text: &Rope,
838) -> bool {
839 let mut old_edits = patch.edits().iter().cloned().peekable();
840 let mut new_edits = edits.into_iter().peekable();
841 let mut applied_delta = 0i32;
842 let mut rebased_delta = 0i32;
843 let mut has_made_changes = false;
844
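    // `applied_delta` accumulates row shifts from user edits already applied to
    // `old_text`, while `rebased_delta` accumulates shifts from agent edits that are
    // present in the snapshot but not in `old_text`; their difference converts a row
    // in the old snapshot into the corresponding row in `old_text`.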
845 while let Some(mut new_edit) = new_edits.next() {
846 let mut conflict = false;
847
848 // Consume all the old edits that are before this new edit or that intersect with it, flagging any intersection as a conflict.
849 while let Some(old_edit) = old_edits.peek() {
850 if new_edit.old.end < old_edit.new.start
851 || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
852 {
853 break;
854 } else if new_edit.old.start > old_edit.new.end
855 || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
856 {
857 let old_edit = old_edits.next().unwrap();
858 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
859 } else {
860 conflict = true;
861 if new_edits
862 .peek()
863 .is_some_and(|next_edit| next_edit.old.overlaps(&old_edit.new))
864 {
865 new_edit = new_edits.next().unwrap();
866 } else {
867 let old_edit = old_edits.next().unwrap();
868 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
869 }
870 }
871 }
872
873 if !conflict {
874 // This edit doesn't intersect with any old edit, so we can apply it to the old text.
875 new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
876 new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
877 let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
878 ..old_text.point_to_offset(cmp::min(
879 Point::new(new_edit.old.end, 0),
880 old_text.max_point(),
881 ));
882 let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
883 ..new_text.point_to_offset(cmp::min(
884 Point::new(new_edit.new.end, 0),
885 new_text.max_point(),
886 ));
887
888 old_text.replace(
889 old_bytes,
890 &new_text.chunks_in_range(new_bytes).collect::<String>(),
891 );
892 applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
893 has_made_changes = true;
894 }
895 }
896 has_made_changes
897}
898
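/// Computes the row-based edits that turn `old_snapshot` into `new_snapshot`,
/// merging adjacent or overlapping edits into a single hunk.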
899fn diff_snapshots(
900 old_snapshot: &text::BufferSnapshot,
901 new_snapshot: &text::BufferSnapshot,
902) -> Vec<Edit<u32>> {
903 let mut edits = new_snapshot
904 .edits_since::<Point>(&old_snapshot.version)
905 .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
906 .peekable();
907 let mut row_edits = Vec::new();
908 while let Some(mut edit) = edits.next() {
909 while let Some(next_edit) = edits.peek() {
910 if edit.old.end >= next_edit.old.start {
911 edit.old.end = next_edit.old.end;
912 edit.new.end = next_edit.new.end;
913 edits.next();
914 } else {
915 break;
916 }
917 }
918 row_edits.push(edit);
919 }
920 row_edits
921}
922
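/// Converts a point-based edit into an edit over whole rows. An insertion that
/// starts at the end of a line with a leading newline is attributed to the
/// following rows; an edit that already spans whole lines keeps its row range;
/// anything else is expanded to include the partially edited rows.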
923fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
924 if edit.old.start.column == old_text.line_len(edit.old.start.row)
925 && new_text
926 .chars_at(new_text.point_to_offset(edit.new.start))
927 .next()
928 == Some('\n')
929 && edit.old.start != old_text.max_point()
930 {
931 Edit {
932 old: edit.old.start.row + 1..edit.old.end.row + 1,
933 new: edit.new.start.row + 1..edit.new.end.row + 1,
934 }
935 } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
936 Edit {
937 old: edit.old.start.row..edit.old.end.row,
938 new: edit.new.start.row..edit.new.end.row,
939 }
940 } else {
941 Edit {
942 old: edit.old.start.row..edit.old.end.row + 1,
943 new: edit.new.start.row..edit.new.end.row + 1,
944 }
945 }
946}
947
948#[derive(Copy, Clone, Debug)]
949enum ChangeAuthor {
950 User,
951 Agent,
952}
953
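/// How a tracked buffer came to differ from the agent's diff base: created by the
/// agent (remembering any file content it overwrote), modified, or deleted.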
954enum TrackedBufferStatus {
955 Created { existing_file_content: Option<Rope> },
956 Modified,
957 Deleted,
958}
959
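/// Per-buffer bookkeeping: the agent's diff base, the unreviewed row edits on top of
/// it, the latest snapshot, and the tasks that keep the visible diff up to date.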
960struct TrackedBuffer {
961 buffer: Entity<Buffer>,
962 diff_base: Rope,
963 unreviewed_edits: Patch<u32>,
964 status: TrackedBufferStatus,
965 version: clock::Global,
966 diff: Entity<BufferDiff>,
967 snapshot: text::BufferSnapshot,
968 diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
969 _open_lsp_handle: OpenLspBufferHandle,
970 _maintain_diff: Task<()>,
971 _subscription: Subscription,
972}
973
974impl TrackedBuffer {
975 fn has_edits(&self, cx: &App) -> bool {
976 self.diff
977 .read(cx)
978 .hunks(self.buffer.read(cx), cx)
979 .next()
980 .is_some()
981 }
982
983 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
984 self.diff_update
985 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
986 .ok();
987 }
988}
989
990pub struct ChangedBuffer {
991 pub diff: Entity<BufferDiff>,
992}
993
994#[cfg(test)]
995mod tests {
996 use super::*;
997 use buffer_diff::DiffHunkStatusKind;
998 use gpui::TestAppContext;
999 use language::Point;
1000 use project::{FakeFs, Fs, Project, RemoveOptions};
1001 use rand::prelude::*;
1002 use serde_json::json;
1003 use settings::SettingsStore;
1004 use std::env;
1005 use util::{RandomCharIter, path};
1006
1007 #[ctor::ctor]
1008 fn init_logger() {
1009 zlog::init_test();
1010 }
1011
1012 fn init_test(cx: &mut TestAppContext) {
1013 cx.update(|cx| {
1014 let settings_store = SettingsStore::test(cx);
1015 cx.set_global(settings_store);
1016 });
1017 }
1018
1019 #[gpui::test(iterations = 10)]
1020 async fn test_keep_edits(cx: &mut TestAppContext) {
1021 init_test(cx);
1022
1023 let fs = FakeFs::new(cx.executor());
1024 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1025 .await;
1026 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1027 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1028 let file_path = project
1029 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1030 .unwrap();
1031 let buffer = project
1032 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1033 .await
1034 .unwrap();
1035
1036 cx.update(|cx| {
1037 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1038 buffer.update(cx, |buffer, cx| {
1039 buffer
1040 .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
1041 .unwrap()
1042 });
1043 buffer.update(cx, |buffer, cx| {
1044 buffer
1045 .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
1046 .unwrap()
1047 });
1048 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1049 });
1050 cx.run_until_parked();
1051 assert_eq!(
1052 buffer.read_with(cx, |buffer, _| buffer.text()),
1053 "abc\ndEf\nghi\njkl\nmnO"
1054 );
1055 assert_eq!(
1056 unreviewed_hunks(&action_log, cx),
1057 vec![(
1058 buffer.clone(),
1059 vec![
1060 HunkStatus {
1061 range: Point::new(1, 0)..Point::new(2, 0),
1062 diff_status: DiffHunkStatusKind::Modified,
1063 old_text: "def\n".into(),
1064 },
1065 HunkStatus {
1066 range: Point::new(4, 0)..Point::new(4, 3),
1067 diff_status: DiffHunkStatusKind::Modified,
1068 old_text: "mno".into(),
1069 }
1070 ],
1071 )]
1072 );
1073
1074 action_log.update(cx, |log, cx| {
1075 log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), None, cx)
1076 });
1077 cx.run_until_parked();
1078 assert_eq!(
1079 unreviewed_hunks(&action_log, cx),
1080 vec![(
1081 buffer.clone(),
1082 vec![HunkStatus {
1083 range: Point::new(1, 0)..Point::new(2, 0),
1084 diff_status: DiffHunkStatusKind::Modified,
1085 old_text: "def\n".into(),
1086 }],
1087 )]
1088 );
1089
1090 action_log.update(cx, |log, cx| {
1091 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), None, cx)
1092 });
1093 cx.run_until_parked();
1094 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1095 }
1096
1097 #[gpui::test(iterations = 10)]
1098 async fn test_deletions(cx: &mut TestAppContext) {
1099 init_test(cx);
1100
1101 let fs = FakeFs::new(cx.executor());
1102 fs.insert_tree(
1103 path!("/dir"),
1104 json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
1105 )
1106 .await;
1107 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1108 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1109 let file_path = project
1110 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1111 .unwrap();
1112 let buffer = project
1113 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1114 .await
1115 .unwrap();
1116
1117 cx.update(|cx| {
1118 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1119 buffer.update(cx, |buffer, cx| {
1120 buffer
1121 .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
1122 .unwrap();
1123 buffer.finalize_last_transaction();
1124 });
1125 buffer.update(cx, |buffer, cx| {
1126 buffer
1127 .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
1128 .unwrap();
1129 buffer.finalize_last_transaction();
1130 });
1131 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1132 });
1133 cx.run_until_parked();
1134 assert_eq!(
1135 buffer.read_with(cx, |buffer, _| buffer.text()),
1136 "abc\nghi\njkl\npqr"
1137 );
1138 assert_eq!(
1139 unreviewed_hunks(&action_log, cx),
1140 vec![(
1141 buffer.clone(),
1142 vec![
1143 HunkStatus {
1144 range: Point::new(1, 0)..Point::new(1, 0),
1145 diff_status: DiffHunkStatusKind::Deleted,
1146 old_text: "def\n".into(),
1147 },
1148 HunkStatus {
1149 range: Point::new(3, 0)..Point::new(3, 0),
1150 diff_status: DiffHunkStatusKind::Deleted,
1151 old_text: "mno\n".into(),
1152 }
1153 ],
1154 )]
1155 );
1156
1157 buffer.update(cx, |buffer, cx| buffer.undo(cx));
1158 cx.run_until_parked();
1159 assert_eq!(
1160 buffer.read_with(cx, |buffer, _| buffer.text()),
1161 "abc\nghi\njkl\nmno\npqr"
1162 );
1163 assert_eq!(
1164 unreviewed_hunks(&action_log, cx),
1165 vec![(
1166 buffer.clone(),
1167 vec![HunkStatus {
1168 range: Point::new(1, 0)..Point::new(1, 0),
1169 diff_status: DiffHunkStatusKind::Deleted,
1170 old_text: "def\n".into(),
1171 }],
1172 )]
1173 );
1174
1175 action_log.update(cx, |log, cx| {
1176 log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), None, cx)
1177 });
1178 cx.run_until_parked();
1179 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1180 }
1181
1182 #[gpui::test(iterations = 10)]
1183 async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
1184 init_test(cx);
1185
1186 let fs = FakeFs::new(cx.executor());
1187 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1188 .await;
1189 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1190 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1191 let file_path = project
1192 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1193 .unwrap();
1194 let buffer = project
1195 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1196 .await
1197 .unwrap();
1198
1199 cx.update(|cx| {
1200 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1201 buffer.update(cx, |buffer, cx| {
1202 buffer
1203 .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
1204 .unwrap()
1205 });
1206 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1207 });
1208 cx.run_until_parked();
1209 assert_eq!(
1210 buffer.read_with(cx, |buffer, _| buffer.text()),
1211 "abc\ndeF\nGHI\njkl\nmno"
1212 );
1213 assert_eq!(
1214 unreviewed_hunks(&action_log, cx),
1215 vec![(
1216 buffer.clone(),
1217 vec![HunkStatus {
1218 range: Point::new(1, 0)..Point::new(3, 0),
1219 diff_status: DiffHunkStatusKind::Modified,
1220 old_text: "def\nghi\n".into(),
1221 }],
1222 )]
1223 );
1224
1225 buffer.update(cx, |buffer, cx| {
1226 buffer.edit(
1227 [
1228 (Point::new(0, 2)..Point::new(0, 2), "X"),
1229 (Point::new(3, 0)..Point::new(3, 0), "Y"),
1230 ],
1231 None,
1232 cx,
1233 )
1234 });
1235 cx.run_until_parked();
1236 assert_eq!(
1237 buffer.read_with(cx, |buffer, _| buffer.text()),
1238 "abXc\ndeF\nGHI\nYjkl\nmno"
1239 );
1240 assert_eq!(
1241 unreviewed_hunks(&action_log, cx),
1242 vec![(
1243 buffer.clone(),
1244 vec![HunkStatus {
1245 range: Point::new(1, 0)..Point::new(3, 0),
1246 diff_status: DiffHunkStatusKind::Modified,
1247 old_text: "def\nghi\n".into(),
1248 }],
1249 )]
1250 );
1251
1252 buffer.update(cx, |buffer, cx| {
1253 buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
1254 });
1255 cx.run_until_parked();
1256 assert_eq!(
1257 buffer.read_with(cx, |buffer, _| buffer.text()),
1258 "abXc\ndZeF\nGHI\nYjkl\nmno"
1259 );
1260 assert_eq!(
1261 unreviewed_hunks(&action_log, cx),
1262 vec![(
1263 buffer.clone(),
1264 vec![HunkStatus {
1265 range: Point::new(1, 0)..Point::new(3, 0),
1266 diff_status: DiffHunkStatusKind::Modified,
1267 old_text: "def\nghi\n".into(),
1268 }],
1269 )]
1270 );
1271
1272 action_log.update(cx, |log, cx| {
1273 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), None, cx)
1274 });
1275 cx.run_until_parked();
1276 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1277 }
1278
1279 #[gpui::test(iterations = 10)]
1280 async fn test_creating_files(cx: &mut TestAppContext) {
1281 init_test(cx);
1282
1283 let fs = FakeFs::new(cx.executor());
1284 fs.insert_tree(path!("/dir"), json!({})).await;
1285 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1286 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1287 let file_path = project
1288 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1289 .unwrap();
1290
1291 let buffer = project
1292 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1293 .await
1294 .unwrap();
1295 cx.update(|cx| {
1296 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1297 buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
1298 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1299 });
1300 project
1301 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1302 .await
1303 .unwrap();
1304 cx.run_until_parked();
1305 assert_eq!(
1306 unreviewed_hunks(&action_log, cx),
1307 vec![(
1308 buffer.clone(),
1309 vec![HunkStatus {
1310 range: Point::new(0, 0)..Point::new(0, 5),
1311 diff_status: DiffHunkStatusKind::Added,
1312 old_text: "".into(),
1313 }],
1314 )]
1315 );
1316
1317 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
1318 cx.run_until_parked();
1319 assert_eq!(
1320 unreviewed_hunks(&action_log, cx),
1321 vec![(
1322 buffer.clone(),
1323 vec![HunkStatus {
1324 range: Point::new(0, 0)..Point::new(0, 6),
1325 diff_status: DiffHunkStatusKind::Added,
1326 old_text: "".into(),
1327 }],
1328 )]
1329 );
1330
1331 action_log.update(cx, |log, cx| {
1332 log.keep_edits_in_range(buffer.clone(), 0..5, None, cx)
1333 });
1334 cx.run_until_parked();
1335 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1336 }
1337
1338 #[gpui::test(iterations = 10)]
1339 async fn test_overwriting_files(cx: &mut TestAppContext) {
1340 init_test(cx);
1341
1342 let fs = FakeFs::new(cx.executor());
1343 fs.insert_tree(
1344 path!("/dir"),
1345 json!({
1346 "file1": "Lorem ipsum dolor"
1347 }),
1348 )
1349 .await;
1350 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1351 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1352 let file_path = project
1353 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1354 .unwrap();
1355
1356 let buffer = project
1357 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1358 .await
1359 .unwrap();
1360 cx.update(|cx| {
1361 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1362 buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
1363 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1364 });
1365 project
1366 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1367 .await
1368 .unwrap();
1369 cx.run_until_parked();
1370 assert_eq!(
1371 unreviewed_hunks(&action_log, cx),
1372 vec![(
1373 buffer.clone(),
1374 vec![HunkStatus {
1375 range: Point::new(0, 0)..Point::new(0, 19),
1376 diff_status: DiffHunkStatusKind::Added,
1377 old_text: "".into(),
1378 }],
1379 )]
1380 );
1381
1382 action_log
1383 .update(cx, |log, cx| {
1384 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
1385 })
1386 .await
1387 .unwrap();
1388 cx.run_until_parked();
1389 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1390 assert_eq!(
1391 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1392 "Lorem ipsum dolor"
1393 );
1394 }
1395
1396 #[gpui::test(iterations = 10)]
1397 async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
1398 init_test(cx);
1399
1400 let fs = FakeFs::new(cx.executor());
1401 fs.insert_tree(
1402 path!("/dir"),
1403 json!({
1404 "file1": "Lorem ipsum dolor"
1405 }),
1406 )
1407 .await;
1408 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1409 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1410 let file_path = project
1411 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1412 .unwrap();
1413
1414 let buffer = project
1415 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1416 .await
1417 .unwrap();
1418 cx.update(|cx| {
1419 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1420 buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
1421 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1422 });
1423 project
1424 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1425 .await
1426 .unwrap();
1427 cx.run_until_parked();
1428 assert_eq!(
1429 unreviewed_hunks(&action_log, cx),
1430 vec![(
1431 buffer.clone(),
1432 vec![HunkStatus {
1433 range: Point::new(0, 0)..Point::new(0, 37),
1434 diff_status: DiffHunkStatusKind::Modified,
1435 old_text: "Lorem ipsum dolor".into(),
1436 }],
1437 )]
1438 );
1439
1440 cx.update(|cx| {
1441 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1442 buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
1443 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1444 });
1445 project
1446 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1447 .await
1448 .unwrap();
1449 cx.run_until_parked();
1450 assert_eq!(
1451 unreviewed_hunks(&action_log, cx),
1452 vec![(
1453 buffer.clone(),
1454 vec![HunkStatus {
1455 range: Point::new(0, 0)..Point::new(0, 9),
1456 diff_status: DiffHunkStatusKind::Added,
1457 old_text: "".into(),
1458 }],
1459 )]
1460 );
1461
1462 action_log
1463 .update(cx, |log, cx| {
1464 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
1465 })
1466 .await
1467 .unwrap();
1468 cx.run_until_parked();
1469 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1470 assert_eq!(
1471 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1472 "Lorem ipsum dolor"
1473 );
1474 }
1475
1476 #[gpui::test(iterations = 10)]
1477 async fn test_deleting_files(cx: &mut TestAppContext) {
1478 init_test(cx);
1479
1480 let fs = FakeFs::new(cx.executor());
1481 fs.insert_tree(
1482 path!("/dir"),
1483 json!({"file1": "lorem\n", "file2": "ipsum\n"}),
1484 )
1485 .await;
1486
1487 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1488 let file1_path = project
1489 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1490 .unwrap();
1491 let file2_path = project
1492 .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
1493 .unwrap();
1494
1495 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1496 let buffer1 = project
1497 .update(cx, |project, cx| {
1498 project.open_buffer(file1_path.clone(), cx)
1499 })
1500 .await
1501 .unwrap();
1502 let buffer2 = project
1503 .update(cx, |project, cx| {
1504 project.open_buffer(file2_path.clone(), cx)
1505 })
1506 .await
1507 .unwrap();
1508
1509 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
1510 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
1511 project
1512 .update(cx, |project, cx| {
1513 project.delete_file(file1_path.clone(), false, cx)
1514 })
1515 .unwrap()
1516 .await
1517 .unwrap();
1518 project
1519 .update(cx, |project, cx| {
1520 project.delete_file(file2_path.clone(), false, cx)
1521 })
1522 .unwrap()
1523 .await
1524 .unwrap();
1525 cx.run_until_parked();
1526 assert_eq!(
1527 unreviewed_hunks(&action_log, cx),
1528 vec![
1529 (
1530 buffer1.clone(),
1531 vec![HunkStatus {
1532 range: Point::new(0, 0)..Point::new(0, 0),
1533 diff_status: DiffHunkStatusKind::Deleted,
1534 old_text: "lorem\n".into(),
1535 }]
1536 ),
1537 (
1538 buffer2.clone(),
1539 vec![HunkStatus {
1540 range: Point::new(0, 0)..Point::new(0, 0),
1541 diff_status: DiffHunkStatusKind::Deleted,
1542 old_text: "ipsum\n".into(),
1543 }],
1544 )
1545 ]
1546 );
1547
1548 // Simulate file1 being recreated externally.
1549 fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
1550 .await;
1551
1552 // Simulate file2 being recreated by a tool.
1553 let buffer2 = project
1554 .update(cx, |project, cx| project.open_buffer(file2_path, cx))
1555 .await
1556 .unwrap();
1557 action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
1558 buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
1559 action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
1560 project
1561 .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
1562 .await
1563 .unwrap();
1564
1565 cx.run_until_parked();
1566 assert_eq!(
1567 unreviewed_hunks(&action_log, cx),
1568 vec![(
1569 buffer2.clone(),
1570 vec![HunkStatus {
1571 range: Point::new(0, 0)..Point::new(0, 5),
1572 diff_status: DiffHunkStatusKind::Added,
1573 old_text: "".into(),
1574 }],
1575 )]
1576 );
1577
1578 // Simulate file2 being deleted externally.
1579 fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
1580 .await
1581 .unwrap();
1582 cx.run_until_parked();
1583 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1584 }
1585
1586 #[gpui::test(iterations = 10)]
1587 async fn test_reject_edits(cx: &mut TestAppContext) {
1588 init_test(cx);
1589
1590 let fs = FakeFs::new(cx.executor());
1591 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1592 .await;
1593 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1594 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1595 let file_path = project
1596 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1597 .unwrap();
1598 let buffer = project
1599 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1600 .await
1601 .unwrap();
1602
1603 cx.update(|cx| {
1604 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1605 buffer.update(cx, |buffer, cx| {
1606 buffer
1607 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1608 .unwrap()
1609 });
1610 buffer.update(cx, |buffer, cx| {
1611 buffer
1612 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1613 .unwrap()
1614 });
1615 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1616 });
1617 cx.run_until_parked();
1618 assert_eq!(
1619 buffer.read_with(cx, |buffer, _| buffer.text()),
1620 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1621 );
1622 assert_eq!(
1623 unreviewed_hunks(&action_log, cx),
1624 vec![(
1625 buffer.clone(),
1626 vec![
1627 HunkStatus {
1628 range: Point::new(1, 0)..Point::new(3, 0),
1629 diff_status: DiffHunkStatusKind::Modified,
1630 old_text: "def\n".into(),
1631 },
1632 HunkStatus {
1633 range: Point::new(5, 0)..Point::new(5, 3),
1634 diff_status: DiffHunkStatusKind::Modified,
1635 old_text: "mno".into(),
1636 }
1637 ],
1638 )]
1639 );
1640
1641 // If the rejected range doesn't overlap with any hunk, we ignore it.
1642 action_log
1643 .update(cx, |log, cx| {
1644 log.reject_edits_in_ranges(
1645 buffer.clone(),
1646 vec![Point::new(4, 0)..Point::new(4, 0)],
1647 None,
1648 cx,
1649 )
1650 })
1651 .await
1652 .unwrap();
1653 cx.run_until_parked();
1654 assert_eq!(
1655 buffer.read_with(cx, |buffer, _| buffer.text()),
1656 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1657 );
1658 assert_eq!(
1659 unreviewed_hunks(&action_log, cx),
1660 vec![(
1661 buffer.clone(),
1662 vec![
1663 HunkStatus {
1664 range: Point::new(1, 0)..Point::new(3, 0),
1665 diff_status: DiffHunkStatusKind::Modified,
1666 old_text: "def\n".into(),
1667 },
1668 HunkStatus {
1669 range: Point::new(5, 0)..Point::new(5, 3),
1670 diff_status: DiffHunkStatusKind::Modified,
1671 old_text: "mno".into(),
1672 }
1673 ],
1674 )]
1675 );
1676
1677 action_log
1678 .update(cx, |log, cx| {
1679 log.reject_edits_in_ranges(
1680 buffer.clone(),
1681 vec![Point::new(0, 0)..Point::new(1, 0)],
1682 None,
1683 cx,
1684 )
1685 })
1686 .await
1687 .unwrap();
1688 cx.run_until_parked();
1689 assert_eq!(
1690 buffer.read_with(cx, |buffer, _| buffer.text()),
1691 "abc\ndef\nghi\njkl\nmnO"
1692 );
1693 assert_eq!(
1694 unreviewed_hunks(&action_log, cx),
1695 vec![(
1696 buffer.clone(),
1697 vec![HunkStatus {
1698 range: Point::new(4, 0)..Point::new(4, 3),
1699 diff_status: DiffHunkStatusKind::Modified,
1700 old_text: "mno".into(),
1701 }],
1702 )]
1703 );
1704
1705 action_log
1706 .update(cx, |log, cx| {
1707 log.reject_edits_in_ranges(
1708 buffer.clone(),
1709 vec![Point::new(4, 0)..Point::new(4, 0)],
1710 None,
1711 cx,
1712 )
1713 })
1714 .await
1715 .unwrap();
1716 cx.run_until_parked();
1717 assert_eq!(
1718 buffer.read_with(cx, |buffer, _| buffer.text()),
1719 "abc\ndef\nghi\njkl\nmno"
1720 );
1721 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1722 }
1723
1724 #[gpui::test(iterations = 10)]
1725 async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
1726 init_test(cx);
1727
1728 let fs = FakeFs::new(cx.executor());
1729 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1730 .await;
1731 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1732 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1733 let file_path = project
1734 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1735 .unwrap();
1736 let buffer = project
1737 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1738 .await
1739 .unwrap();
1740
1741 cx.update(|cx| {
1742 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1743 buffer.update(cx, |buffer, cx| {
1744 buffer
1745 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1746 .unwrap()
1747 });
1748 buffer.update(cx, |buffer, cx| {
1749 buffer
1750 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1751 .unwrap()
1752 });
1753 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1754 });
1755 cx.run_until_parked();
1756 assert_eq!(
1757 buffer.read_with(cx, |buffer, _| buffer.text()),
1758 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1759 );
1760 assert_eq!(
1761 unreviewed_hunks(&action_log, cx),
1762 vec![(
1763 buffer.clone(),
1764 vec![
1765 HunkStatus {
1766 range: Point::new(1, 0)..Point::new(3, 0),
1767 diff_status: DiffHunkStatusKind::Modified,
1768 old_text: "def\n".into(),
1769 },
1770 HunkStatus {
1771 range: Point::new(5, 0)..Point::new(5, 3),
1772 diff_status: DiffHunkStatusKind::Modified,
1773 old_text: "mno".into(),
1774 }
1775 ],
1776 )]
1777 );
1778
1779 action_log.update(cx, |log, cx| {
1780 let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
1781 ..buffer.read(cx).anchor_before(Point::new(1, 0));
1782 let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
1783 ..buffer.read(cx).anchor_before(Point::new(5, 3));
1784
1785 log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], None, cx)
1786 .detach();
1787 assert_eq!(
1788 buffer.read_with(cx, |buffer, _| buffer.text()),
1789 "abc\ndef\nghi\njkl\nmno"
1790 );
1791 });
1792 cx.run_until_parked();
1793 assert_eq!(
1794 buffer.read_with(cx, |buffer, _| buffer.text()),
1795 "abc\ndef\nghi\njkl\nmno"
1796 );
1797 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1798 }
1799
1800 #[gpui::test(iterations = 10)]
1801 async fn test_reject_deleted_file(cx: &mut TestAppContext) {
1802 init_test(cx);
1803
1804 let fs = FakeFs::new(cx.executor());
1805 fs.insert_tree(path!("/dir"), json!({"file": "content"}))
1806 .await;
1807 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1808 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1809 let file_path = project
1810 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1811 .unwrap();
1812 let buffer = project
1813 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
1814 .await
1815 .unwrap();
1816
1817 cx.update(|cx| {
1818 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
1819 });
1820 project
1821 .update(cx, |project, cx| {
1822 project.delete_file(file_path.clone(), false, cx)
1823 })
1824 .unwrap()
1825 .await
1826 .unwrap();
1827 cx.run_until_parked();
1828 assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
1829 assert_eq!(
1830 unreviewed_hunks(&action_log, cx),
1831 vec![(
1832 buffer.clone(),
1833 vec![HunkStatus {
1834 range: Point::new(0, 0)..Point::new(0, 0),
1835 diff_status: DiffHunkStatusKind::Deleted,
1836 old_text: "content".into(),
1837 }]
1838 )]
1839 );
1840
1841 action_log
1842 .update(cx, |log, cx| {
1843 log.reject_edits_in_ranges(
1844 buffer.clone(),
1845 vec![Point::new(0, 0)..Point::new(0, 0)],
1846 None,
1847 cx,
1848 )
1849 })
1850 .await
1851 .unwrap();
1852 cx.run_until_parked();
1853 assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
1854 assert!(fs.is_file(path!("/dir/file").as_ref()).await);
1855 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1856 }
1857
1858 #[gpui::test(iterations = 10)]
1859 async fn test_reject_created_file(cx: &mut TestAppContext) {
1860 init_test(cx);
1861
1862 let fs = FakeFs::new(cx.executor());
1863 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1864 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1865 let file_path = project
1866 .read_with(cx, |project, cx| {
1867 project.find_project_path("dir/new_file", cx)
1868 })
1869 .unwrap();
1870 let buffer = project
1871 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1872 .await
1873 .unwrap();
1874 cx.update(|cx| {
1875 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1876 buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
1877 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1878 });
1879 project
1880 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1881 .await
1882 .unwrap();
1883 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
1884 cx.run_until_parked();
1885 assert_eq!(
1886 unreviewed_hunks(&action_log, cx),
1887 vec![(
1888 buffer.clone(),
1889 vec![HunkStatus {
1890 range: Point::new(0, 0)..Point::new(0, 7),
1891 diff_status: DiffHunkStatusKind::Added,
1892 old_text: "".into(),
1893 }],
1894 )]
1895 );
1896
1897 action_log
1898 .update(cx, |log, cx| {
1899 log.reject_edits_in_ranges(
1900 buffer.clone(),
1901 vec![Point::new(0, 0)..Point::new(0, 11)],
1902 None,
1903 cx,
1904 )
1905 })
1906 .await
1907 .unwrap();
1908 cx.run_until_parked();
1909 assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
1910 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1911 }
1912
    #[gpui::test]
    async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();

        // User makes additional edits
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| {
                buffer.edit([(10..10, "\nuser added this line")], None, cx);
            });
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // Reject all
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(100, 0)],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();

        // File should still contain all the content
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        let content = buffer.read_with(cx, |buffer, _| buffer.text());
        assert_eq!(content, "ai content\nuser added this line");
    }

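    // After the user keeps the creation hunk, rejecting a later agent edit should
    // restore the previously accepted contents instead of deleting the file.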
    #[gpui::test]
    async fn test_reject_after_accepting_hunk_on_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User accepts the single hunk
        action_log.update(cx, |log, cx| {
            let buffer_range = Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id());
            log.keep_edits_in_range(buffer.clone(), buffer_range, None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // AI modifies the file
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User rejects the hunk
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Anchor::min_max_range_for_buffer(
                        buffer.read(cx).remote_id(),
                    )],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "ai content v1"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

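    // Same scenario as above, but driven through the bulk keep_all_edits and
    // reject_all_edits entry points instead of per-range calls.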
    #[gpui::test]
    async fn test_reject_edits_on_previously_accepted_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();

        // User clicks "Accept All"
        action_log.update(cx, |log, cx| log.keep_all_edits(None, cx));
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared

        // AI modifies file again
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User clicks "Reject All"
        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "ai content v1"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

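    // Randomized stress test: interleaves agent edits, user edits, keeps, and
    // rejections, then checks that the tracked diff state stays consistent.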
    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        init_test(cx);

        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

        for _ in 0..operations {
            match rng.random_range(0..100) {
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, None, cx)
                    });
                }
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            log.reject_edits_in_ranges(buffer.clone(), vec![range], None, cx)
                        })
                        .await
                        .unwrap();
                }
                _ => {
                    let is_agent_edit = rng.random_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            if rng.random_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

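        // Waits for pending diff updates, then replays the unreviewed edits on top
        // of the tracked diff base and asserts the result matches the buffer's
        // current text.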
        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }

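    // Committing content that matches the agent's edits should mark those hunks
    // as kept, while hunks the commit did not match stay unreviewed.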
    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Make another commit that accepts the GGG edit and adds a line where NEW
        // was inserted, but with different content, so the NEW hunk stays unreviewed
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into())],
            "0000002",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer,
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

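    // Simplified, comparison-friendly view of a diff hunk used in test assertions.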
    #[derive(Debug, Clone, PartialEq, Eq)]
    struct HunkStatus {
        range: Range<Point>,
        diff_status: DiffHunkStatusKind,
        old_text: String,
    }

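    // Resolves each changed buffer's diff into `HunkStatus` values for assertions.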
    fn unreviewed_hunks(
        action_log: &Entity<ActionLog>,
        cx: &TestAppContext,
    ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
        cx.read(|cx| {
            action_log
                .read(cx)
                .changed_buffers(cx)
                .into_iter()
                .map(|(buffer, diff)| {
                    let snapshot = buffer.read(cx).snapshot();
                    (
                        buffer,
                        diff.read(cx)
                            .hunks(&snapshot, cx)
                            .map(|hunk| HunkStatus {
                                diff_status: hunk.status().kind,
                                range: hunk.range,
                                old_text: diff
                                    .read(cx)
                                    .base_text()
                                    .text_for_range(hunk.diff_base_byte_range)
                                    .collect(),
                            })
                            .collect(),
                    )
                })
                .collect()
        })
    }
}