1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use clock;
4use collections::BTreeMap;
5use futures::{FutureExt, StreamExt, channel::mpsc};
6use gpui::{
7 App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
8};
9use language::{Anchor, Buffer, BufferEvent, Point, ToPoint};
10use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
11use std::{cmp, ops::Range, sync::Arc};
12use text::{Edit, Patch, Rope};
13use util::{RangeExt, ResultExt as _};
14
15/// Tracks actions performed by tools in a thread
16pub struct ActionLog {
17 /// Buffers that we want to notify the model about when they change.
18 tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
19 /// The project this action log is associated with
20 project: Entity<Project>,
21}
22
23impl ActionLog {
24 /// Creates a new, empty action log associated with the given project.
25 pub fn new(project: Entity<Project>) -> Self {
26 Self {
27 tracked_buffers: BTreeMap::default(),
28 project,
29 }
30 }
31
32 pub fn project(&self) -> &Entity<Project> {
33 &self.project
34 }
35
36 fn track_buffer_internal(
37 &mut self,
38 buffer: Entity<Buffer>,
39 is_created: bool,
40 cx: &mut Context<Self>,
41 ) -> &mut TrackedBuffer {
42 let status = if is_created {
43 if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
44 match tracked.status {
45 TrackedBufferStatus::Created {
46 existing_file_content,
47 } => TrackedBufferStatus::Created {
48 existing_file_content,
49 },
50 TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
51 TrackedBufferStatus::Created {
52 existing_file_content: Some(tracked.diff_base),
53 }
54 }
55 }
56 } else if buffer
57 .read(cx)
58 .file()
59 .is_some_and(|file| file.disk_state().exists())
60 {
61 TrackedBufferStatus::Created {
62 existing_file_content: Some(buffer.read(cx).as_rope().clone()),
63 }
64 } else {
65 TrackedBufferStatus::Created {
66 existing_file_content: None,
67 }
68 }
69 } else {
70 TrackedBufferStatus::Modified
71 };
72
73 let tracked_buffer = self
74 .tracked_buffers
75 .entry(buffer.clone())
76 .or_insert_with(|| {
77 let open_lsp_handle = self.project.update(cx, |project, cx| {
78 project.register_buffer_with_language_servers(&buffer, cx)
79 });
80
81 let text_snapshot = buffer.read(cx).text_snapshot();
82 let language = buffer.read(cx).language().cloned();
83 let language_registry = buffer.read(cx).language_registry();
84 let diff = cx.new(|cx| {
85 let mut diff = BufferDiff::new(&text_snapshot, cx);
86 diff.language_changed(language, language_registry, cx);
87 diff
88 });
89 let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
90 let diff_base;
91 let unreviewed_edits;
92 if is_created {
93 diff_base = Rope::default();
94 unreviewed_edits = Patch::new(vec![Edit {
95 old: 0..1,
96 new: 0..text_snapshot.max_point().row + 1,
97 }])
98 } else {
99 diff_base = buffer.read(cx).as_rope().clone();
100 unreviewed_edits = Patch::default();
101 }
102 TrackedBuffer {
103 buffer: buffer.clone(),
104 diff_base,
105 unreviewed_edits,
106 snapshot: text_snapshot,
107 status,
108 version: buffer.read(cx).version(),
109 diff,
110 diff_update: diff_update_tx,
111 _open_lsp_handle: open_lsp_handle,
112 _maintain_diff: cx.spawn({
113 let buffer = buffer.clone();
114 async move |this, cx| {
115 Self::maintain_diff(this, buffer, diff_update_rx, cx)
116 .await
117 .ok();
118 }
119 }),
120 _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
121 }
122 });
123 tracked_buffer.version = buffer.read(cx).version();
124 tracked_buffer
125 }
126
127 fn handle_buffer_event(
128 &mut self,
129 buffer: Entity<Buffer>,
130 event: &BufferEvent,
131 cx: &mut Context<Self>,
132 ) {
133 match event {
134 BufferEvent::Edited => self.handle_buffer_edited(buffer, cx),
135 BufferEvent::FileHandleChanged => {
136 self.handle_buffer_file_changed(buffer, cx);
137 }
138 _ => {}
139 };
140 }
141
142 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
143 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
144 return;
145 };
146 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
147 }
148
149 fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
150 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
151 return;
152 };
153
154 match tracked_buffer.status {
155 TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
156 if buffer
157 .read(cx)
158 .file()
159 .is_some_and(|file| file.disk_state().is_deleted())
160 {
161 // If the buffer had been edited by a tool, but it got
162 // deleted externally, we want to stop tracking it.
163 self.tracked_buffers.remove(&buffer);
164 }
165 cx.notify();
166 }
167 TrackedBufferStatus::Deleted => {
168 if buffer
169 .read(cx)
170 .file()
171 .is_some_and(|file| !file.disk_state().is_deleted())
172 {
173 // If the buffer had been deleted by a tool, but it got
174 // resurrected externally, we want to clear the edits we
175 // were tracking and reset the buffer's state.
176 self.tracked_buffers.remove(&buffer);
177 self.track_buffer_internal(buffer, false, cx);
178 }
179 cx.notify();
180 }
181 }
182 }
183
184 async fn maintain_diff(
185 this: WeakEntity<Self>,
186 buffer: Entity<Buffer>,
187 mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
188 cx: &mut AsyncApp,
189 ) -> Result<()> {
190 let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
191 let git_diff = this
192 .update(cx, |this, cx| {
193 this.project.update(cx, |project, cx| {
194 project.open_uncommitted_diff(buffer.clone(), cx)
195 })
196 })?
197 .await
198 .ok();
199 let buffer_repo = git_store.read_with(cx, |git_store, cx| {
200 git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
201 });
202
203 let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
204 let _repo_subscription =
205 if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
206 cx.update(|cx| {
207 let mut old_head = buffer_repo.read(cx).head_commit.clone();
208 Some(cx.subscribe(git_diff, move |_, event, cx| {
209 if let buffer_diff::BufferDiffEvent::DiffChanged { .. } = event {
210 let new_head = buffer_repo.read(cx).head_commit.clone();
211 if new_head != old_head {
212 old_head = new_head;
213 git_diff_updates_tx.send(()).ok();
214 }
215 }
216 }))
217 })
218 } else {
219 None
220 };
221
222 loop {
223 futures::select_biased! {
224 buffer_update = buffer_updates.next() => {
225 if let Some((author, buffer_snapshot)) = buffer_update {
226 Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
227 } else {
228 break;
229 }
230 }
231 _ = git_diff_updates_rx.changed().fuse() => {
232 if let Some(git_diff) = git_diff.as_ref() {
233 Self::keep_committed_edits(&this, &buffer, git_diff, cx).await?;
234 }
235 }
236 }
237 }
238
239 Ok(())
240 }
241
242 async fn track_edits(
243 this: &WeakEntity<ActionLog>,
244 buffer: &Entity<Buffer>,
245 author: ChangeAuthor,
246 buffer_snapshot: text::BufferSnapshot,
247 cx: &mut AsyncApp,
248 ) -> Result<()> {
249 let rebase = this.update(cx, |this, cx| {
250 let tracked_buffer = this
251 .tracked_buffers
252 .get_mut(buffer)
253 .context("buffer not tracked")?;
254
255 let rebase = cx.background_spawn({
256 let mut base_text = tracked_buffer.diff_base.clone();
257 let old_snapshot = tracked_buffer.snapshot.clone();
258 let new_snapshot = buffer_snapshot.clone();
259 let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
260 let edits = diff_snapshots(&old_snapshot, &new_snapshot);
261 async move {
262 if let ChangeAuthor::User = author {
263 apply_non_conflicting_edits(
264 &unreviewed_edits,
265 edits,
266 &mut base_text,
267 new_snapshot.as_rope(),
268 );
269 }
270
271 (Arc::from(base_text.to_string().as_str()), base_text)
272 }
273 });
274
275 anyhow::Ok(rebase)
276 })??;
277 let (new_base_text, new_diff_base) = rebase.await;
278
279 Self::update_diff(
280 this,
281 buffer,
282 buffer_snapshot,
283 new_base_text,
284 new_diff_base,
285 cx,
286 )
287 .await
288 }
289
290 async fn keep_committed_edits(
291 this: &WeakEntity<ActionLog>,
292 buffer: &Entity<Buffer>,
293 git_diff: &Entity<BufferDiff>,
294 cx: &mut AsyncApp,
295 ) -> Result<()> {
296 let buffer_snapshot = this.read_with(cx, |this, _cx| {
297 let tracked_buffer = this
298 .tracked_buffers
299 .get(buffer)
300 .context("buffer not tracked")?;
301 anyhow::Ok(tracked_buffer.snapshot.clone())
302 })??;
303 let (new_base_text, new_diff_base) = this
304 .read_with(cx, |this, cx| {
305 let tracked_buffer = this
306 .tracked_buffers
307 .get(buffer)
308 .context("buffer not tracked")?;
309 let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
310 let agent_diff_base = tracked_buffer.diff_base.clone();
311 let git_diff_base = git_diff.read(cx).base_text(cx).as_rope().clone();
312 let buffer_text = tracked_buffer.snapshot.as_rope().clone();
313 anyhow::Ok(cx.background_spawn(async move {
314 let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
315 let committed_edits = language::line_diff(
316 &agent_diff_base.to_string(),
317 &git_diff_base.to_string(),
318 )
319 .into_iter()
320 .map(|(old, new)| Edit { old, new });
321
322 let mut new_agent_diff_base = agent_diff_base.clone();
323 let mut row_delta = 0i32;
324 for committed in committed_edits {
325 while let Some(unreviewed) = old_unreviewed_edits.peek() {
326 // If the committed edit matches the unreviewed
327 // edit, assume the user wants to keep it.
328 if committed.old == unreviewed.old {
329 let unreviewed_new =
330 buffer_text.slice_rows(unreviewed.new.clone()).to_string();
331 let committed_new =
332 git_diff_base.slice_rows(committed.new.clone()).to_string();
333 if unreviewed_new == committed_new {
334 let old_byte_start =
335 new_agent_diff_base.point_to_offset(Point::new(
336 (unreviewed.old.start as i32 + row_delta) as u32,
337 0,
338 ));
339 let old_byte_end =
340 new_agent_diff_base.point_to_offset(cmp::min(
341 Point::new(
342 (unreviewed.old.end as i32 + row_delta) as u32,
343 0,
344 ),
345 new_agent_diff_base.max_point(),
346 ));
347 new_agent_diff_base
348 .replace(old_byte_start..old_byte_end, &unreviewed_new);
349 row_delta +=
350 unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
351 }
352 } else if unreviewed.old.start >= committed.old.end {
353 break;
354 }
355
356 old_unreviewed_edits.next().unwrap();
357 }
358 }
359
360 (
361 Arc::from(new_agent_diff_base.to_string().as_str()),
362 new_agent_diff_base,
363 )
364 }))
365 })??
366 .await;
367
368 Self::update_diff(
369 this,
370 buffer,
371 buffer_snapshot,
372 new_base_text,
373 new_diff_base,
374 cx,
375 )
376 .await
377 }
378
379 async fn update_diff(
380 this: &WeakEntity<ActionLog>,
381 buffer: &Entity<Buffer>,
382 buffer_snapshot: text::BufferSnapshot,
383 new_base_text: Arc<str>,
384 new_diff_base: Rope,
385 cx: &mut AsyncApp,
386 ) -> Result<()> {
387 let (diff, language) = this.read_with(cx, |this, cx| {
388 let tracked_buffer = this
389 .tracked_buffers
390 .get(buffer)
391 .context("buffer not tracked")?;
392 anyhow::Ok((
393 tracked_buffer.diff.clone(),
394 buffer.read(cx).language().cloned(),
395 ))
396 })??;
397 let update = diff
398 .update(cx, |diff, cx| {
399 diff.update_diff(
400 buffer_snapshot.clone(),
401 Some(new_base_text),
402 true,
403 language,
404 cx,
405 )
406 })
407 .await;
408 diff.update(cx, |diff, cx| {
409 diff.set_snapshot(update.clone(), &buffer_snapshot, cx)
410 })
411 .await;
412 let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx));
413
414 let unreviewed_edits = cx
415 .background_spawn({
416 let buffer_snapshot = buffer_snapshot.clone();
417 let new_diff_base = new_diff_base.clone();
418 async move {
419 let mut unreviewed_edits = Patch::default();
420 for hunk in diff_snapshot.hunks_intersecting_range(
421 Anchor::min_for_buffer(buffer_snapshot.remote_id())
422 ..Anchor::max_for_buffer(buffer_snapshot.remote_id()),
423 &buffer_snapshot,
424 ) {
425 let old_range = new_diff_base
426 .offset_to_point(hunk.diff_base_byte_range.start)
427 ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
428 let new_range = hunk.range.start..hunk.range.end;
429 unreviewed_edits.push(point_to_row_edit(
430 Edit {
431 old: old_range,
432 new: new_range,
433 },
434 &new_diff_base,
435 buffer_snapshot.as_rope(),
436 ));
437 }
438 unreviewed_edits
439 }
440 })
441 .await;
442 this.update(cx, |this, cx| {
443 let tracked_buffer = this
444 .tracked_buffers
445 .get_mut(buffer)
446 .context("buffer not tracked")?;
447 tracked_buffer.diff_base = new_diff_base;
448 tracked_buffer.snapshot = buffer_snapshot;
449 tracked_buffer.unreviewed_edits = unreviewed_edits;
450 cx.notify();
451 anyhow::Ok(())
452 })?
453 }
454
455 /// Track a buffer as read by agent, so we can notify the model about user edits.
456 pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
457 self.track_buffer_internal(buffer, false, cx);
458 }
459
460 /// Mark a buffer as created by agent, so we can refresh it in the context
461 pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
462 self.track_buffer_internal(buffer, true, cx);
463 }
464
465 /// Mark a buffer as edited by agent, so we can refresh it in the context
466 pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
467 let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
468 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
469 tracked_buffer.status = TrackedBufferStatus::Modified;
470 }
471 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
472 }
473
474 pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
475 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
476 match tracked_buffer.status {
477 TrackedBufferStatus::Created { .. } => {
478 self.tracked_buffers.remove(&buffer);
479 cx.notify();
480 }
481 TrackedBufferStatus::Modified => {
482 buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
483 tracked_buffer.status = TrackedBufferStatus::Deleted;
484 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
485 }
486 TrackedBufferStatus::Deleted => {}
487 }
488 cx.notify();
489 }
490
491 pub fn keep_edits_in_range(
492 &mut self,
493 buffer: Entity<Buffer>,
494 buffer_range: Range<impl language::ToPoint>,
495 telemetry: Option<ActionLogTelemetry>,
496 cx: &mut Context<Self>,
497 ) {
498 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
499 return;
500 };
501
502 let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
503 match tracked_buffer.status {
504 TrackedBufferStatus::Deleted => {
505 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
506 self.tracked_buffers.remove(&buffer);
507 cx.notify();
508 }
509 _ => {
510 let buffer = buffer.read(cx);
511 let buffer_range =
512 buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
513 let mut delta = 0i32;
514 tracked_buffer.unreviewed_edits.retain_mut(|edit| {
515 edit.old.start = (edit.old.start as i32 + delta) as u32;
516 edit.old.end = (edit.old.end as i32 + delta) as u32;
517
518 if buffer_range.end.row < edit.new.start
519 || buffer_range.start.row > edit.new.end
520 {
521 true
522 } else {
523 let old_range = tracked_buffer
524 .diff_base
525 .point_to_offset(Point::new(edit.old.start, 0))
526 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
527 Point::new(edit.old.end, 0),
528 tracked_buffer.diff_base.max_point(),
529 ));
530 let new_range = tracked_buffer
531 .snapshot
532 .point_to_offset(Point::new(edit.new.start, 0))
533 ..tracked_buffer.snapshot.point_to_offset(cmp::min(
534 Point::new(edit.new.end, 0),
535 tracked_buffer.snapshot.max_point(),
536 ));
537 tracked_buffer.diff_base.replace(
538 old_range,
539 &tracked_buffer
540 .snapshot
541 .text_for_range(new_range)
542 .collect::<String>(),
543 );
544 delta += edit.new_len() as i32 - edit.old_len() as i32;
545 metrics.add_edit(edit);
546 false
547 }
548 });
549 if tracked_buffer.unreviewed_edits.is_empty()
550 && let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status
551 {
552 tracked_buffer.status = TrackedBufferStatus::Modified;
553 }
554 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
555 }
556 }
557 if let Some(telemetry) = telemetry {
558 telemetry_report_accepted_edits(&telemetry, metrics);
559 }
560 }
561
562 pub fn reject_edits_in_ranges(
563 &mut self,
564 buffer: Entity<Buffer>,
565 buffer_ranges: Vec<Range<impl language::ToPoint>>,
566 telemetry: Option<ActionLogTelemetry>,
567 cx: &mut Context<Self>,
568 ) -> Task<Result<()>> {
569 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
570 return Task::ready(Ok(()));
571 };
572
573 let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
574 let task = match &tracked_buffer.status {
575 TrackedBufferStatus::Created {
576 existing_file_content,
577 } => {
578 let task = if let Some(existing_file_content) = existing_file_content {
579 buffer.update(cx, |buffer, cx| {
580 buffer.start_transaction();
581 buffer.set_text("", cx);
582 for chunk in existing_file_content.chunks() {
583 buffer.append(chunk, cx);
584 }
585 buffer.end_transaction(cx);
586 });
587 self.project
588 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
589 } else {
590 // For a file created by AI with no pre-existing content,
591 // only delete the file if we're certain it contains only AI content
592 // with no edits from the user.
593
594 let initial_version = tracked_buffer.version.clone();
595 let current_version = buffer.read(cx).version();
596
597 let current_content = buffer.read(cx).text();
598 let tracked_content = tracked_buffer.snapshot.text();
599
600 let is_ai_only_content =
601 initial_version == current_version && current_content == tracked_content;
602
603 if is_ai_only_content {
604 buffer
605 .read(cx)
606 .entry_id(cx)
607 .and_then(|entry_id| {
608 self.project.update(cx, |project, cx| {
609 project.delete_entry(entry_id, false, cx)
610 })
611 })
612 .unwrap_or(Task::ready(Ok(())))
613 } else {
614 // Not sure how to disentangle edits made by the user
615 // from edits made by the AI at this point.
616 // For now, preserve both to avoid data loss.
617 //
618 // TODO: Better solution (disable "Reject" after user makes some
619 // edit or find a way to differentiate between AI and user edits)
620 Task::ready(Ok(()))
621 }
622 };
623
624 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
625 self.tracked_buffers.remove(&buffer);
626 cx.notify();
627 task
628 }
629 TrackedBufferStatus::Deleted => {
630 buffer.update(cx, |buffer, cx| {
631 buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
632 });
633 let save = self
634 .project
635 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
636
637 // Clear all tracked edits for this buffer and start over as if we just read it.
638 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
639 self.tracked_buffers.remove(&buffer);
640 self.buffer_read(buffer.clone(), cx);
641 cx.notify();
642 save
643 }
644 TrackedBufferStatus::Modified => {
645 buffer.update(cx, |buffer, cx| {
646 let mut buffer_row_ranges = buffer_ranges
647 .into_iter()
648 .map(|range| {
649 range.start.to_point(buffer).row..range.end.to_point(buffer).row
650 })
651 .peekable();
652
653 let mut edits_to_revert = Vec::new();
654 for edit in tracked_buffer.unreviewed_edits.edits() {
655 let new_range = tracked_buffer
656 .snapshot
657 .anchor_before(Point::new(edit.new.start, 0))
658 ..tracked_buffer.snapshot.anchor_after(cmp::min(
659 Point::new(edit.new.end, 0),
660 tracked_buffer.snapshot.max_point(),
661 ));
662 let new_row_range = new_range.start.to_point(buffer).row
663 ..new_range.end.to_point(buffer).row;
664
665 let mut revert = false;
666 while let Some(buffer_row_range) = buffer_row_ranges.peek() {
667 if buffer_row_range.end < new_row_range.start {
668 buffer_row_ranges.next();
669 } else if buffer_row_range.start > new_row_range.end {
670 break;
671 } else {
672 revert = true;
673 break;
674 }
675 }
676
677 if revert {
678 metrics.add_edit(edit);
679 let old_range = tracked_buffer
680 .diff_base
681 .point_to_offset(Point::new(edit.old.start, 0))
682 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
683 Point::new(edit.old.end, 0),
684 tracked_buffer.diff_base.max_point(),
685 ));
686 let old_text = tracked_buffer
687 .diff_base
688 .chunks_in_range(old_range)
689 .collect::<String>();
690 edits_to_revert.push((new_range, old_text));
691 }
692 }
693
694 buffer.edit(edits_to_revert, None, cx);
695 });
696 self.project
697 .update(cx, |project, cx| project.save_buffer(buffer, cx))
698 }
699 };
700 if let Some(telemetry) = telemetry {
701 telemetry_report_rejected_edits(&telemetry, metrics);
702 }
703 task
704 }
705
706 pub fn keep_all_edits(
707 &mut self,
708 telemetry: Option<ActionLogTelemetry>,
709 cx: &mut Context<Self>,
710 ) {
711 self.tracked_buffers.retain(|buffer, tracked_buffer| {
712 let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
713 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
714 if let Some(telemetry) = telemetry.as_ref() {
715 telemetry_report_accepted_edits(telemetry, metrics);
716 }
717 match tracked_buffer.status {
718 TrackedBufferStatus::Deleted => false,
719 _ => {
720 if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
721 tracked_buffer.status = TrackedBufferStatus::Modified;
722 }
723 tracked_buffer.unreviewed_edits.clear();
724 tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
725 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
726 true
727 }
728 }
729 });
730
731 cx.notify();
732 }
733
734 pub fn reject_all_edits(
735 &mut self,
736 telemetry: Option<ActionLogTelemetry>,
737 cx: &mut Context<Self>,
738 ) -> Task<()> {
739 let futures = self.changed_buffers(cx).into_keys().map(|buffer| {
740 let buffer_ranges = vec![Anchor::min_max_range_for_buffer(
741 buffer.read(cx).remote_id(),
742 )];
743 let reject = self.reject_edits_in_ranges(buffer, buffer_ranges, telemetry.clone(), cx);
744
745 async move {
746 reject.await.log_err();
747 }
748 });
749
750 let task = futures::future::join_all(futures);
751 cx.background_spawn(async move {
752 task.await;
753 })
754 }
755
756 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
757 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
758 self.tracked_buffers
759 .iter()
760 .filter(|(_, tracked)| tracked.has_edits(cx))
761 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
762 .collect()
763 }
764
765 /// Iterate over buffers changed since last read or edited by the model
766 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
767 self.tracked_buffers
768 .iter()
769 .filter(|(buffer, tracked)| {
770 let buffer = buffer.read(cx);
771
772 tracked.version != buffer.version
773 && buffer
774 .file()
775 .is_some_and(|file| !file.disk_state().is_deleted())
776 })
777 .map(|(buffer, _)| buffer)
778 }
779}
780
781#[derive(Clone)]
782pub struct ActionLogTelemetry {
783 pub agent_telemetry_id: SharedString,
784 pub session_id: Arc<str>,
785}
786
787struct ActionLogMetrics {
788 lines_removed: u32,
789 lines_added: u32,
790 language: Option<SharedString>,
791}
792
793impl ActionLogMetrics {
794 fn for_buffer(buffer: &Buffer) -> Self {
795 Self {
796 language: buffer.language().map(|l| l.name().0),
797 lines_removed: 0,
798 lines_added: 0,
799 }
800 }
801
802 fn add_edits(&mut self, edits: &[Edit<u32>]) {
803 for edit in edits {
804 self.add_edit(edit);
805 }
806 }
807
808 fn add_edit(&mut self, edit: &Edit<u32>) {
809 self.lines_added += edit.new_len();
810 self.lines_removed += edit.old_len();
811 }
812}
813
814fn telemetry_report_accepted_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
815 telemetry::event!(
816 "Agent Edits Accepted",
817 agent = telemetry.agent_telemetry_id,
818 session = telemetry.session_id,
819 language = metrics.language,
820 lines_added = metrics.lines_added,
821 lines_removed = metrics.lines_removed
822 );
823}
824
825fn telemetry_report_rejected_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
826 telemetry::event!(
827 "Agent Edits Rejected",
828 agent = telemetry.agent_telemetry_id,
829 session = telemetry.session_id,
830 language = metrics.language,
831 lines_added = metrics.lines_added,
832 lines_removed = metrics.lines_removed
833 );
834}
835
836fn apply_non_conflicting_edits(
837 patch: &Patch<u32>,
838 edits: Vec<Edit<u32>>,
839 old_text: &mut Rope,
840 new_text: &Rope,
841) -> bool {
842 let mut old_edits = patch.edits().iter().cloned().peekable();
843 let mut new_edits = edits.into_iter().peekable();
844 let mut applied_delta = 0i32;
845 let mut rebased_delta = 0i32;
846 let mut has_made_changes = false;
847
848 while let Some(mut new_edit) = new_edits.next() {
849 let mut conflict = false;
850
851 // Push all the old edits that are before this new edit or that intersect with it.
852 while let Some(old_edit) = old_edits.peek() {
853 if new_edit.old.end < old_edit.new.start
854 || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
855 {
856 break;
857 } else if new_edit.old.start > old_edit.new.end
858 || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
859 {
860 let old_edit = old_edits.next().unwrap();
861 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
862 } else {
863 conflict = true;
864 if new_edits
865 .peek()
866 .is_some_and(|next_edit| next_edit.old.overlaps(&old_edit.new))
867 {
868 new_edit = new_edits.next().unwrap();
869 } else {
870 let old_edit = old_edits.next().unwrap();
871 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
872 }
873 }
874 }
875
876 if !conflict {
877 // This edit doesn't intersect with any old edit, so we can apply it to the old text.
878 new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
879 new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
880 let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
881 ..old_text.point_to_offset(cmp::min(
882 Point::new(new_edit.old.end, 0),
883 old_text.max_point(),
884 ));
885 let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
886 ..new_text.point_to_offset(cmp::min(
887 Point::new(new_edit.new.end, 0),
888 new_text.max_point(),
889 ));
890
891 old_text.replace(
892 old_bytes,
893 &new_text.chunks_in_range(new_bytes).collect::<String>(),
894 );
895 applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
896 has_made_changes = true;
897 }
898 }
899 has_made_changes
900}
901
902fn diff_snapshots(
903 old_snapshot: &text::BufferSnapshot,
904 new_snapshot: &text::BufferSnapshot,
905) -> Vec<Edit<u32>> {
906 let mut edits = new_snapshot
907 .edits_since::<Point>(&old_snapshot.version)
908 .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
909 .peekable();
910 let mut row_edits = Vec::new();
911 while let Some(mut edit) = edits.next() {
912 while let Some(next_edit) = edits.peek() {
913 if edit.old.end >= next_edit.old.start {
914 edit.old.end = next_edit.old.end;
915 edit.new.end = next_edit.new.end;
916 edits.next();
917 } else {
918 break;
919 }
920 }
921 row_edits.push(edit);
922 }
923 row_edits
924}
925
926fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
927 if edit.old.start.column == old_text.line_len(edit.old.start.row)
928 && new_text
929 .chars_at(new_text.point_to_offset(edit.new.start))
930 .next()
931 == Some('\n')
932 && edit.old.start != old_text.max_point()
933 {
934 Edit {
935 old: edit.old.start.row + 1..edit.old.end.row + 1,
936 new: edit.new.start.row + 1..edit.new.end.row + 1,
937 }
938 } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
939 Edit {
940 old: edit.old.start.row..edit.old.end.row,
941 new: edit.new.start.row..edit.new.end.row,
942 }
943 } else {
944 Edit {
945 old: edit.old.start.row..edit.old.end.row + 1,
946 new: edit.new.start.row..edit.new.end.row + 1,
947 }
948 }
949}
950
951#[derive(Copy, Clone, Debug)]
952enum ChangeAuthor {
953 User,
954 Agent,
955}
956
957enum TrackedBufferStatus {
958 Created { existing_file_content: Option<Rope> },
959 Modified,
960 Deleted,
961}
962
963struct TrackedBuffer {
964 buffer: Entity<Buffer>,
965 diff_base: Rope,
966 unreviewed_edits: Patch<u32>,
967 status: TrackedBufferStatus,
968 version: clock::Global,
969 diff: Entity<BufferDiff>,
970 snapshot: text::BufferSnapshot,
971 diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
972 _open_lsp_handle: OpenLspBufferHandle,
973 _maintain_diff: Task<()>,
974 _subscription: Subscription,
975}
976
977impl TrackedBuffer {
978 fn has_edits(&self, cx: &App) -> bool {
979 self.diff
980 .read(cx)
981 .snapshot(cx)
982 .hunks(self.buffer.read(cx))
983 .next()
984 .is_some()
985 }
986
987 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
988 self.diff_update
989 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
990 .ok();
991 }
992}
993
994pub struct ChangedBuffer {
995 pub diff: Entity<BufferDiff>,
996}
997
998#[cfg(test)]
999mod tests {
1000 use super::*;
1001 use buffer_diff::DiffHunkStatusKind;
1002 use gpui::TestAppContext;
1003 use language::Point;
1004 use project::{FakeFs, Fs, Project, RemoveOptions};
1005 use rand::prelude::*;
1006 use serde_json::json;
1007 use settings::SettingsStore;
1008 use std::env;
1009 use util::{RandomCharIter, path};
1010
1011 #[ctor::ctor]
1012 fn init_logger() {
1013 zlog::init_test();
1014 }
1015
1016 fn init_test(cx: &mut TestAppContext) {
1017 cx.update(|cx| {
1018 let settings_store = SettingsStore::test(cx);
1019 cx.set_global(settings_store);
1020 });
1021 }
1022
1023 #[gpui::test(iterations = 10)]
1024 async fn test_keep_edits(cx: &mut TestAppContext) {
1025 init_test(cx);
1026
1027 let fs = FakeFs::new(cx.executor());
1028 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1029 .await;
1030 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1031 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1032 let file_path = project
1033 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1034 .unwrap();
1035 let buffer = project
1036 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1037 .await
1038 .unwrap();
1039
1040 cx.update(|cx| {
1041 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1042 buffer.update(cx, |buffer, cx| {
1043 buffer
1044 .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
1045 .unwrap()
1046 });
1047 buffer.update(cx, |buffer, cx| {
1048 buffer
1049 .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
1050 .unwrap()
1051 });
1052 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1053 });
1054 cx.run_until_parked();
1055 assert_eq!(
1056 buffer.read_with(cx, |buffer, _| buffer.text()),
1057 "abc\ndEf\nghi\njkl\nmnO"
1058 );
1059 assert_eq!(
1060 unreviewed_hunks(&action_log, cx),
1061 vec![(
1062 buffer.clone(),
1063 vec![
1064 HunkStatus {
1065 range: Point::new(1, 0)..Point::new(2, 0),
1066 diff_status: DiffHunkStatusKind::Modified,
1067 old_text: "def\n".into(),
1068 },
1069 HunkStatus {
1070 range: Point::new(4, 0)..Point::new(4, 3),
1071 diff_status: DiffHunkStatusKind::Modified,
1072 old_text: "mno".into(),
1073 }
1074 ],
1075 )]
1076 );
1077
1078 action_log.update(cx, |log, cx| {
1079 log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), None, cx)
1080 });
1081 cx.run_until_parked();
1082 assert_eq!(
1083 unreviewed_hunks(&action_log, cx),
1084 vec![(
1085 buffer.clone(),
1086 vec![HunkStatus {
1087 range: Point::new(1, 0)..Point::new(2, 0),
1088 diff_status: DiffHunkStatusKind::Modified,
1089 old_text: "def\n".into(),
1090 }],
1091 )]
1092 );
1093
1094 action_log.update(cx, |log, cx| {
1095 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), None, cx)
1096 });
1097 cx.run_until_parked();
1098 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1099 }
1100
1101 #[gpui::test(iterations = 10)]
1102 async fn test_deletions(cx: &mut TestAppContext) {
1103 init_test(cx);
1104
1105 let fs = FakeFs::new(cx.executor());
1106 fs.insert_tree(
1107 path!("/dir"),
1108 json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
1109 )
1110 .await;
1111 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1112 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1113 let file_path = project
1114 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1115 .unwrap();
1116 let buffer = project
1117 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1118 .await
1119 .unwrap();
1120
1121 cx.update(|cx| {
1122 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1123 buffer.update(cx, |buffer, cx| {
1124 buffer
1125 .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
1126 .unwrap();
1127 buffer.finalize_last_transaction();
1128 });
1129 buffer.update(cx, |buffer, cx| {
1130 buffer
1131 .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
1132 .unwrap();
1133 buffer.finalize_last_transaction();
1134 });
1135 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1136 });
1137 cx.run_until_parked();
1138 assert_eq!(
1139 buffer.read_with(cx, |buffer, _| buffer.text()),
1140 "abc\nghi\njkl\npqr"
1141 );
1142 assert_eq!(
1143 unreviewed_hunks(&action_log, cx),
1144 vec![(
1145 buffer.clone(),
1146 vec![
1147 HunkStatus {
1148 range: Point::new(1, 0)..Point::new(1, 0),
1149 diff_status: DiffHunkStatusKind::Deleted,
1150 old_text: "def\n".into(),
1151 },
1152 HunkStatus {
1153 range: Point::new(3, 0)..Point::new(3, 0),
1154 diff_status: DiffHunkStatusKind::Deleted,
1155 old_text: "mno\n".into(),
1156 }
1157 ],
1158 )]
1159 );
1160
1161 buffer.update(cx, |buffer, cx| buffer.undo(cx));
1162 cx.run_until_parked();
1163 assert_eq!(
1164 buffer.read_with(cx, |buffer, _| buffer.text()),
1165 "abc\nghi\njkl\nmno\npqr"
1166 );
1167 assert_eq!(
1168 unreviewed_hunks(&action_log, cx),
1169 vec![(
1170 buffer.clone(),
1171 vec![HunkStatus {
1172 range: Point::new(1, 0)..Point::new(1, 0),
1173 diff_status: DiffHunkStatusKind::Deleted,
1174 old_text: "def\n".into(),
1175 }],
1176 )]
1177 );
1178
1179 action_log.update(cx, |log, cx| {
1180 log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), None, cx)
1181 });
1182 cx.run_until_parked();
1183 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1184 }
1185
1186 #[gpui::test(iterations = 10)]
1187 async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
1188 init_test(cx);
1189
1190 let fs = FakeFs::new(cx.executor());
1191 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1192 .await;
1193 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1194 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1195 let file_path = project
1196 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1197 .unwrap();
1198 let buffer = project
1199 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1200 .await
1201 .unwrap();
1202
1203 cx.update(|cx| {
1204 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1205 buffer.update(cx, |buffer, cx| {
1206 buffer
1207 .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
1208 .unwrap()
1209 });
1210 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1211 });
1212 cx.run_until_parked();
1213 assert_eq!(
1214 buffer.read_with(cx, |buffer, _| buffer.text()),
1215 "abc\ndeF\nGHI\njkl\nmno"
1216 );
1217 assert_eq!(
1218 unreviewed_hunks(&action_log, cx),
1219 vec![(
1220 buffer.clone(),
1221 vec![HunkStatus {
1222 range: Point::new(1, 0)..Point::new(3, 0),
1223 diff_status: DiffHunkStatusKind::Modified,
1224 old_text: "def\nghi\n".into(),
1225 }],
1226 )]
1227 );
1228
1229 buffer.update(cx, |buffer, cx| {
1230 buffer.edit(
1231 [
1232 (Point::new(0, 2)..Point::new(0, 2), "X"),
1233 (Point::new(3, 0)..Point::new(3, 0), "Y"),
1234 ],
1235 None,
1236 cx,
1237 )
1238 });
1239 cx.run_until_parked();
1240 assert_eq!(
1241 buffer.read_with(cx, |buffer, _| buffer.text()),
1242 "abXc\ndeF\nGHI\nYjkl\nmno"
1243 );
1244 assert_eq!(
1245 unreviewed_hunks(&action_log, cx),
1246 vec![(
1247 buffer.clone(),
1248 vec![HunkStatus {
1249 range: Point::new(1, 0)..Point::new(3, 0),
1250 diff_status: DiffHunkStatusKind::Modified,
1251 old_text: "def\nghi\n".into(),
1252 }],
1253 )]
1254 );
1255
1256 buffer.update(cx, |buffer, cx| {
1257 buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
1258 });
1259 cx.run_until_parked();
1260 assert_eq!(
1261 buffer.read_with(cx, |buffer, _| buffer.text()),
1262 "abXc\ndZeF\nGHI\nYjkl\nmno"
1263 );
1264 assert_eq!(
1265 unreviewed_hunks(&action_log, cx),
1266 vec![(
1267 buffer.clone(),
1268 vec![HunkStatus {
1269 range: Point::new(1, 0)..Point::new(3, 0),
1270 diff_status: DiffHunkStatusKind::Modified,
1271 old_text: "def\nghi\n".into(),
1272 }],
1273 )]
1274 );
1275
1276 action_log.update(cx, |log, cx| {
1277 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), None, cx)
1278 });
1279 cx.run_until_parked();
1280 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1281 }
1282
1283 #[gpui::test(iterations = 10)]
1284 async fn test_creating_files(cx: &mut TestAppContext) {
1285 init_test(cx);
1286
1287 let fs = FakeFs::new(cx.executor());
1288 fs.insert_tree(path!("/dir"), json!({})).await;
1289 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1290 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1291 let file_path = project
1292 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1293 .unwrap();
1294
1295 let buffer = project
1296 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1297 .await
1298 .unwrap();
1299 cx.update(|cx| {
1300 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1301 buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
1302 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1303 });
1304 project
1305 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1306 .await
1307 .unwrap();
1308 cx.run_until_parked();
1309 assert_eq!(
1310 unreviewed_hunks(&action_log, cx),
1311 vec![(
1312 buffer.clone(),
1313 vec![HunkStatus {
1314 range: Point::new(0, 0)..Point::new(0, 5),
1315 diff_status: DiffHunkStatusKind::Added,
1316 old_text: "".into(),
1317 }],
1318 )]
1319 );
1320
1321 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
1322 cx.run_until_parked();
1323 assert_eq!(
1324 unreviewed_hunks(&action_log, cx),
1325 vec![(
1326 buffer.clone(),
1327 vec![HunkStatus {
1328 range: Point::new(0, 0)..Point::new(0, 6),
1329 diff_status: DiffHunkStatusKind::Added,
1330 old_text: "".into(),
1331 }],
1332 )]
1333 );
1334
1335 action_log.update(cx, |log, cx| {
1336 log.keep_edits_in_range(buffer.clone(), 0..5, None, cx)
1337 });
1338 cx.run_until_parked();
1339 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1340 }
1341
1342 #[gpui::test(iterations = 10)]
1343 async fn test_overwriting_files(cx: &mut TestAppContext) {
1344 init_test(cx);
1345
1346 let fs = FakeFs::new(cx.executor());
1347 fs.insert_tree(
1348 path!("/dir"),
1349 json!({
1350 "file1": "Lorem ipsum dolor"
1351 }),
1352 )
1353 .await;
1354 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1355 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1356 let file_path = project
1357 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1358 .unwrap();
1359
1360 let buffer = project
1361 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1362 .await
1363 .unwrap();
1364 cx.update(|cx| {
1365 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1366 buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
1367 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1368 });
1369 project
1370 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1371 .await
1372 .unwrap();
1373 cx.run_until_parked();
1374 assert_eq!(
1375 unreviewed_hunks(&action_log, cx),
1376 vec![(
1377 buffer.clone(),
1378 vec![HunkStatus {
1379 range: Point::new(0, 0)..Point::new(0, 19),
1380 diff_status: DiffHunkStatusKind::Added,
1381 old_text: "".into(),
1382 }],
1383 )]
1384 );
1385
1386 action_log
1387 .update(cx, |log, cx| {
1388 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
1389 })
1390 .await
1391 .unwrap();
1392 cx.run_until_parked();
1393 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1394 assert_eq!(
1395 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1396 "Lorem ipsum dolor"
1397 );
1398 }
1399
1400 #[gpui::test(iterations = 10)]
1401 async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
1402 init_test(cx);
1403
1404 let fs = FakeFs::new(cx.executor());
1405 fs.insert_tree(
1406 path!("/dir"),
1407 json!({
1408 "file1": "Lorem ipsum dolor"
1409 }),
1410 )
1411 .await;
1412 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1413 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1414 let file_path = project
1415 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1416 .unwrap();
1417
1418 let buffer = project
1419 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1420 .await
1421 .unwrap();
1422 cx.update(|cx| {
1423 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1424 buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
1425 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1426 });
1427 project
1428 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1429 .await
1430 .unwrap();
1431 cx.run_until_parked();
1432 assert_eq!(
1433 unreviewed_hunks(&action_log, cx),
1434 vec![(
1435 buffer.clone(),
1436 vec![HunkStatus {
1437 range: Point::new(0, 0)..Point::new(0, 37),
1438 diff_status: DiffHunkStatusKind::Modified,
1439 old_text: "Lorem ipsum dolor".into(),
1440 }],
1441 )]
1442 );
1443
1444 cx.update(|cx| {
1445 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1446 buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
1447 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1448 });
1449 project
1450 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1451 .await
1452 .unwrap();
1453 cx.run_until_parked();
1454 assert_eq!(
1455 unreviewed_hunks(&action_log, cx),
1456 vec![(
1457 buffer.clone(),
1458 vec![HunkStatus {
1459 range: Point::new(0, 0)..Point::new(0, 9),
1460 diff_status: DiffHunkStatusKind::Added,
1461 old_text: "".into(),
1462 }],
1463 )]
1464 );
1465
1466 action_log
1467 .update(cx, |log, cx| {
1468 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
1469 })
1470 .await
1471 .unwrap();
1472 cx.run_until_parked();
1473 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1474 assert_eq!(
1475 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1476 "Lorem ipsum dolor"
1477 );
1478 }
1479
1480 #[gpui::test(iterations = 10)]
1481 async fn test_deleting_files(cx: &mut TestAppContext) {
1482 init_test(cx);
1483
1484 let fs = FakeFs::new(cx.executor());
1485 fs.insert_tree(
1486 path!("/dir"),
1487 json!({"file1": "lorem\n", "file2": "ipsum\n"}),
1488 )
1489 .await;
1490
1491 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1492 let file1_path = project
1493 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1494 .unwrap();
1495 let file2_path = project
1496 .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
1497 .unwrap();
1498
1499 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1500 let buffer1 = project
1501 .update(cx, |project, cx| {
1502 project.open_buffer(file1_path.clone(), cx)
1503 })
1504 .await
1505 .unwrap();
1506 let buffer2 = project
1507 .update(cx, |project, cx| {
1508 project.open_buffer(file2_path.clone(), cx)
1509 })
1510 .await
1511 .unwrap();
1512
1513 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
1514 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
1515 project
1516 .update(cx, |project, cx| {
1517 project.delete_file(file1_path.clone(), false, cx)
1518 })
1519 .unwrap()
1520 .await
1521 .unwrap();
1522 project
1523 .update(cx, |project, cx| {
1524 project.delete_file(file2_path.clone(), false, cx)
1525 })
1526 .unwrap()
1527 .await
1528 .unwrap();
1529 cx.run_until_parked();
1530 assert_eq!(
1531 unreviewed_hunks(&action_log, cx),
1532 vec![
1533 (
1534 buffer1.clone(),
1535 vec![HunkStatus {
1536 range: Point::new(0, 0)..Point::new(0, 0),
1537 diff_status: DiffHunkStatusKind::Deleted,
1538 old_text: "lorem\n".into(),
1539 }]
1540 ),
1541 (
1542 buffer2.clone(),
1543 vec![HunkStatus {
1544 range: Point::new(0, 0)..Point::new(0, 0),
1545 diff_status: DiffHunkStatusKind::Deleted,
1546 old_text: "ipsum\n".into(),
1547 }],
1548 )
1549 ]
1550 );
1551
1552 // Simulate file1 being recreated externally.
1553 fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
1554 .await;
1555
1556 // Simulate file2 being recreated by a tool.
1557 let buffer2 = project
1558 .update(cx, |project, cx| project.open_buffer(file2_path, cx))
1559 .await
1560 .unwrap();
1561 action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
1562 buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
1563 action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
1564 project
1565 .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
1566 .await
1567 .unwrap();
1568
1569 cx.run_until_parked();
1570 assert_eq!(
1571 unreviewed_hunks(&action_log, cx),
1572 vec![(
1573 buffer2.clone(),
1574 vec![HunkStatus {
1575 range: Point::new(0, 0)..Point::new(0, 5),
1576 diff_status: DiffHunkStatusKind::Added,
1577 old_text: "".into(),
1578 }],
1579 )]
1580 );
1581
1582 // Simulate file2 being deleted externally.
1583 fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
1584 .await
1585 .unwrap();
1586 cx.run_until_parked();
1587 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1588 }
1589
1590 #[gpui::test(iterations = 10)]
1591 async fn test_reject_edits(cx: &mut TestAppContext) {
1592 init_test(cx);
1593
1594 let fs = FakeFs::new(cx.executor());
1595 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1596 .await;
1597 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1598 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1599 let file_path = project
1600 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1601 .unwrap();
1602 let buffer = project
1603 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1604 .await
1605 .unwrap();
1606
1607 cx.update(|cx| {
1608 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1609 buffer.update(cx, |buffer, cx| {
1610 buffer
1611 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1612 .unwrap()
1613 });
1614 buffer.update(cx, |buffer, cx| {
1615 buffer
1616 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1617 .unwrap()
1618 });
1619 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1620 });
1621 cx.run_until_parked();
1622 assert_eq!(
1623 buffer.read_with(cx, |buffer, _| buffer.text()),
1624 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1625 );
1626 assert_eq!(
1627 unreviewed_hunks(&action_log, cx),
1628 vec![(
1629 buffer.clone(),
1630 vec![
1631 HunkStatus {
1632 range: Point::new(1, 0)..Point::new(3, 0),
1633 diff_status: DiffHunkStatusKind::Modified,
1634 old_text: "def\n".into(),
1635 },
1636 HunkStatus {
1637 range: Point::new(5, 0)..Point::new(5, 3),
1638 diff_status: DiffHunkStatusKind::Modified,
1639 old_text: "mno".into(),
1640 }
1641 ],
1642 )]
1643 );
1644
1645 // If the rejected range doesn't overlap with any hunk, we ignore it.
1646 action_log
1647 .update(cx, |log, cx| {
1648 log.reject_edits_in_ranges(
1649 buffer.clone(),
1650 vec![Point::new(4, 0)..Point::new(4, 0)],
1651 None,
1652 cx,
1653 )
1654 })
1655 .await
1656 .unwrap();
1657 cx.run_until_parked();
1658 assert_eq!(
1659 buffer.read_with(cx, |buffer, _| buffer.text()),
1660 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1661 );
1662 assert_eq!(
1663 unreviewed_hunks(&action_log, cx),
1664 vec![(
1665 buffer.clone(),
1666 vec![
1667 HunkStatus {
1668 range: Point::new(1, 0)..Point::new(3, 0),
1669 diff_status: DiffHunkStatusKind::Modified,
1670 old_text: "def\n".into(),
1671 },
1672 HunkStatus {
1673 range: Point::new(5, 0)..Point::new(5, 3),
1674 diff_status: DiffHunkStatusKind::Modified,
1675 old_text: "mno".into(),
1676 }
1677 ],
1678 )]
1679 );
1680
1681 action_log
1682 .update(cx, |log, cx| {
1683 log.reject_edits_in_ranges(
1684 buffer.clone(),
1685 vec![Point::new(0, 0)..Point::new(1, 0)],
1686 None,
1687 cx,
1688 )
1689 })
1690 .await
1691 .unwrap();
1692 cx.run_until_parked();
1693 assert_eq!(
1694 buffer.read_with(cx, |buffer, _| buffer.text()),
1695 "abc\ndef\nghi\njkl\nmnO"
1696 );
1697 assert_eq!(
1698 unreviewed_hunks(&action_log, cx),
1699 vec![(
1700 buffer.clone(),
1701 vec![HunkStatus {
1702 range: Point::new(4, 0)..Point::new(4, 3),
1703 diff_status: DiffHunkStatusKind::Modified,
1704 old_text: "mno".into(),
1705 }],
1706 )]
1707 );
1708
1709 action_log
1710 .update(cx, |log, cx| {
1711 log.reject_edits_in_ranges(
1712 buffer.clone(),
1713 vec![Point::new(4, 0)..Point::new(4, 0)],
1714 None,
1715 cx,
1716 )
1717 })
1718 .await
1719 .unwrap();
1720 cx.run_until_parked();
1721 assert_eq!(
1722 buffer.read_with(cx, |buffer, _| buffer.text()),
1723 "abc\ndef\nghi\njkl\nmno"
1724 );
1725 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1726 }
1727
1728 #[gpui::test(iterations = 10)]
1729 async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
1730 init_test(cx);
1731
1732 let fs = FakeFs::new(cx.executor());
1733 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1734 .await;
1735 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1736 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1737 let file_path = project
1738 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1739 .unwrap();
1740 let buffer = project
1741 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1742 .await
1743 .unwrap();
1744
1745 cx.update(|cx| {
1746 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1747 buffer.update(cx, |buffer, cx| {
1748 buffer
1749 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1750 .unwrap()
1751 });
1752 buffer.update(cx, |buffer, cx| {
1753 buffer
1754 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1755 .unwrap()
1756 });
1757 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1758 });
1759 cx.run_until_parked();
1760 assert_eq!(
1761 buffer.read_with(cx, |buffer, _| buffer.text()),
1762 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1763 );
1764 assert_eq!(
1765 unreviewed_hunks(&action_log, cx),
1766 vec![(
1767 buffer.clone(),
1768 vec![
1769 HunkStatus {
1770 range: Point::new(1, 0)..Point::new(3, 0),
1771 diff_status: DiffHunkStatusKind::Modified,
1772 old_text: "def\n".into(),
1773 },
1774 HunkStatus {
1775 range: Point::new(5, 0)..Point::new(5, 3),
1776 diff_status: DiffHunkStatusKind::Modified,
1777 old_text: "mno".into(),
1778 }
1779 ],
1780 )]
1781 );
1782
1783 action_log.update(cx, |log, cx| {
1784 let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
1785 ..buffer.read(cx).anchor_before(Point::new(1, 0));
1786 let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
1787 ..buffer.read(cx).anchor_before(Point::new(5, 3));
1788
1789 log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], None, cx)
1790 .detach();
1791 assert_eq!(
1792 buffer.read_with(cx, |buffer, _| buffer.text()),
1793 "abc\ndef\nghi\njkl\nmno"
1794 );
1795 });
1796 cx.run_until_parked();
1797 assert_eq!(
1798 buffer.read_with(cx, |buffer, _| buffer.text()),
1799 "abc\ndef\nghi\njkl\nmno"
1800 );
1801 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1802 }
1803
1804 #[gpui::test(iterations = 10)]
1805 async fn test_reject_deleted_file(cx: &mut TestAppContext) {
1806 init_test(cx);
1807
1808 let fs = FakeFs::new(cx.executor());
1809 fs.insert_tree(path!("/dir"), json!({"file": "content"}))
1810 .await;
1811 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1812 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1813 let file_path = project
1814 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1815 .unwrap();
1816 let buffer = project
1817 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
1818 .await
1819 .unwrap();
1820
1821 cx.update(|cx| {
1822 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
1823 });
1824 project
1825 .update(cx, |project, cx| {
1826 project.delete_file(file_path.clone(), false, cx)
1827 })
1828 .unwrap()
1829 .await
1830 .unwrap();
1831 cx.run_until_parked();
1832 assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
1833 assert_eq!(
1834 unreviewed_hunks(&action_log, cx),
1835 vec![(
1836 buffer.clone(),
1837 vec![HunkStatus {
1838 range: Point::new(0, 0)..Point::new(0, 0),
1839 diff_status: DiffHunkStatusKind::Deleted,
1840 old_text: "content".into(),
1841 }]
1842 )]
1843 );
1844
1845 action_log
1846 .update(cx, |log, cx| {
1847 log.reject_edits_in_ranges(
1848 buffer.clone(),
1849 vec![Point::new(0, 0)..Point::new(0, 0)],
1850 None,
1851 cx,
1852 )
1853 })
1854 .await
1855 .unwrap();
1856 cx.run_until_parked();
1857 assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
1858 assert!(fs.is_file(path!("/dir/file").as_ref()).await);
1859 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1860 }
1861
1862 #[gpui::test(iterations = 10)]
1863 async fn test_reject_created_file(cx: &mut TestAppContext) {
1864 init_test(cx);
1865
1866 let fs = FakeFs::new(cx.executor());
1867 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1868 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1869 let file_path = project
1870 .read_with(cx, |project, cx| {
1871 project.find_project_path("dir/new_file", cx)
1872 })
1873 .unwrap();
1874 let buffer = project
1875 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1876 .await
1877 .unwrap();
1878 cx.update(|cx| {
1879 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1880 buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
1881 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1882 });
1883 project
1884 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1885 .await
1886 .unwrap();
1887 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
1888 cx.run_until_parked();
1889 assert_eq!(
1890 unreviewed_hunks(&action_log, cx),
1891 vec![(
1892 buffer.clone(),
1893 vec![HunkStatus {
1894 range: Point::new(0, 0)..Point::new(0, 7),
1895 diff_status: DiffHunkStatusKind::Added,
1896 old_text: "".into(),
1897 }],
1898 )]
1899 );
1900
1901 action_log
1902 .update(cx, |log, cx| {
1903 log.reject_edits_in_ranges(
1904 buffer.clone(),
1905 vec![Point::new(0, 0)..Point::new(0, 11)],
1906 None,
1907 cx,
1908 )
1909 })
1910 .await
1911 .unwrap();
1912 cx.run_until_parked();
1913 assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
1914 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1915 }
1916
1917 #[gpui::test]
1918 async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
1919 init_test(cx);
1920
1921 let fs = FakeFs::new(cx.executor());
1922 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1923 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1924
1925 let file_path = project
1926 .read_with(cx, |project, cx| {
1927 project.find_project_path("dir/new_file", cx)
1928 })
1929 .unwrap();
1930 let buffer = project
1931 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1932 .await
1933 .unwrap();
1934
1935 // AI creates file with initial content
1936 cx.update(|cx| {
1937 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1938 buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
1939 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1940 });
1941
1942 project
1943 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1944 .await
1945 .unwrap();
1946
1947 cx.run_until_parked();
1948
1949 // User makes additional edits
1950 cx.update(|cx| {
1951 buffer.update(cx, |buffer, cx| {
1952 buffer.edit([(10..10, "\nuser added this line")], None, cx);
1953 });
1954 });
1955
1956 project
1957 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1958 .await
1959 .unwrap();
1960
1961 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
1962
1963 // Reject all
1964 action_log
1965 .update(cx, |log, cx| {
1966 log.reject_edits_in_ranges(
1967 buffer.clone(),
1968 vec![Point::new(0, 0)..Point::new(100, 0)],
1969 None,
1970 cx,
1971 )
1972 })
1973 .await
1974 .unwrap();
1975 cx.run_until_parked();
1976
1977 // File should still contain all the content
1978 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
1979
1980 let content = buffer.read_with(cx, |buffer, _| buffer.text());
1981 assert_eq!(content, "ai content\nuser added this line");
1982 }
1983
1984 #[gpui::test]
1985 async fn test_reject_after_accepting_hunk_on_created_file(cx: &mut TestAppContext) {
1986 init_test(cx);
1987
1988 let fs = FakeFs::new(cx.executor());
1989 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1990 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1991
1992 let file_path = project
1993 .read_with(cx, |project, cx| {
1994 project.find_project_path("dir/new_file", cx)
1995 })
1996 .unwrap();
1997 let buffer = project
1998 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
1999 .await
2000 .unwrap();
2001
2002 // AI creates file with initial content
2003 cx.update(|cx| {
2004 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2005 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
2006 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2007 });
2008 project
2009 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2010 .await
2011 .unwrap();
2012 cx.run_until_parked();
2013 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2014
2015 // User accepts the single hunk
2016 action_log.update(cx, |log, cx| {
2017 let buffer_range = Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id());
2018 log.keep_edits_in_range(buffer.clone(), buffer_range, None, cx)
2019 });
2020 cx.run_until_parked();
2021 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2022 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2023
2024 // AI modifies the file
2025 cx.update(|cx| {
2026 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
2027 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2028 });
2029 project
2030 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2031 .await
2032 .unwrap();
2033 cx.run_until_parked();
2034 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2035
2036 // User rejects the hunk
2037 action_log
2038 .update(cx, |log, cx| {
2039 log.reject_edits_in_ranges(
2040 buffer.clone(),
2041 vec![Anchor::min_max_range_for_buffer(
2042 buffer.read(cx).remote_id(),
2043 )],
2044 None,
2045 cx,
2046 )
2047 })
2048 .await
2049 .unwrap();
2050 cx.run_until_parked();
2051 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await,);
2052 assert_eq!(
2053 buffer.read_with(cx, |buffer, _| buffer.text()),
2054 "ai content v1"
2055 );
2056 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2057 }
2058
2059 #[gpui::test]
2060 async fn test_reject_edits_on_previously_accepted_created_file(cx: &mut TestAppContext) {
2061 init_test(cx);
2062
2063 let fs = FakeFs::new(cx.executor());
2064 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2065 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2066
2067 let file_path = project
2068 .read_with(cx, |project, cx| {
2069 project.find_project_path("dir/new_file", cx)
2070 })
2071 .unwrap();
2072 let buffer = project
2073 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
2074 .await
2075 .unwrap();
2076
2077 // AI creates file with initial content
2078 cx.update(|cx| {
2079 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2080 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
2081 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2082 });
2083 project
2084 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2085 .await
2086 .unwrap();
2087 cx.run_until_parked();
2088
2089 // User clicks "Accept All"
2090 action_log.update(cx, |log, cx| log.keep_all_edits(None, cx));
2091 cx.run_until_parked();
2092 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2093 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared
2094
2095 // AI modifies file again
2096 cx.update(|cx| {
2097 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
2098 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2099 });
2100 project
2101 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2102 .await
2103 .unwrap();
2104 cx.run_until_parked();
2105 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2106
2107 // User clicks "Reject All"
2108 action_log
2109 .update(cx, |log, cx| log.reject_all_edits(None, cx))
2110 .await;
2111 cx.run_until_parked();
2112 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2113 assert_eq!(
2114 buffer.read_with(cx, |buffer, _| buffer.text()),
2115 "ai content v1"
2116 );
2117 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2118 }
2119
2120 #[gpui::test(iterations = 100)]
2121 async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
2122 init_test(cx);
2123
2124 let operations = env::var("OPERATIONS")
2125 .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
2126 .unwrap_or(20);
2127
2128 let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
2129 let fs = FakeFs::new(cx.executor());
2130 fs.insert_tree(path!("/dir"), json!({"file": text})).await;
2131 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2132 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2133 let file_path = project
2134 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
2135 .unwrap();
2136 let buffer = project
2137 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2138 .await
2139 .unwrap();
2140
2141 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
2142
2143 for _ in 0..operations {
2144 match rng.random_range(0..100) {
2145 0..25 => {
2146 action_log.update(cx, |log, cx| {
2147 let range = buffer.read(cx).random_byte_range(0, &mut rng);
2148 log::info!("keeping edits in range {:?}", range);
2149 log.keep_edits_in_range(buffer.clone(), range, None, cx)
2150 });
2151 }
2152 25..50 => {
2153 action_log
2154 .update(cx, |log, cx| {
2155 let range = buffer.read(cx).random_byte_range(0, &mut rng);
2156 log::info!("rejecting edits in range {:?}", range);
2157 log.reject_edits_in_ranges(buffer.clone(), vec![range], None, cx)
2158 })
2159 .await
2160 .unwrap();
2161 }
2162 _ => {
2163 let is_agent_edit = rng.random_bool(0.5);
2164 if is_agent_edit {
2165 log::info!("agent edit");
2166 } else {
2167 log::info!("user edit");
2168 }
2169 cx.update(|cx| {
2170 buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
2171 if is_agent_edit {
2172 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2173 }
2174 });
2175 }
2176 }
2177
2178 if rng.random_bool(0.2) {
2179 quiesce(&action_log, &buffer, cx);
2180 }
2181 }
2182
2183 quiesce(&action_log, &buffer, cx);
2184
2185 fn quiesce(
2186 action_log: &Entity<ActionLog>,
2187 buffer: &Entity<Buffer>,
2188 cx: &mut TestAppContext,
2189 ) {
2190 log::info!("quiescing...");
2191 cx.run_until_parked();
2192 action_log.update(cx, |log, cx| {
2193 let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
2194 let mut old_text = tracked_buffer.diff_base.clone();
2195 let new_text = buffer.read(cx).as_rope();
2196 for edit in tracked_buffer.unreviewed_edits.edits() {
2197 let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
2198 let old_end = old_text.point_to_offset(cmp::min(
2199 Point::new(edit.new.start + edit.old_len(), 0),
2200 old_text.max_point(),
2201 ));
2202 old_text.replace(
2203 old_start..old_end,
2204 &new_text.slice_rows(edit.new.clone()).to_string(),
2205 );
2206 }
2207 pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
2208 })
2209 }
2210 }
2211
2212 #[gpui::test]
2213 async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
2214 init_test(cx);
2215
2216 let fs = FakeFs::new(cx.background_executor.clone());
2217 fs.insert_tree(
2218 path!("/project"),
2219 json!({
2220 ".git": {},
2221 "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
2222 }),
2223 )
2224 .await;
2225 fs.set_head_for_repo(
2226 path!("/project/.git").as_ref(),
2227 &[("file.txt", "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
2228 "0000000",
2229 );
2230 cx.run_until_parked();
2231
2232 let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
2233 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2234
2235 let file_path = project
2236 .read_with(cx, |project, cx| {
2237 project.find_project_path(path!("/project/file.txt"), cx)
2238 })
2239 .unwrap();
2240 let buffer = project
2241 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2242 .await
2243 .unwrap();
2244
2245 cx.update(|cx| {
2246 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
2247 buffer.update(cx, |buffer, cx| {
2248 buffer.edit(
2249 [
2250 // Edit at the very start: a -> A
2251 (Point::new(0, 0)..Point::new(0, 1), "A"),
2252 // Deletion in the middle: remove lines d and e
2253 (Point::new(3, 0)..Point::new(5, 0), ""),
2254 // Modification: g -> GGG
2255 (Point::new(6, 0)..Point::new(6, 1), "GGG"),
2256 // Addition: insert new line after h
2257 (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
2258 // Edit the very last character: j -> J
2259 (Point::new(9, 0)..Point::new(9, 1), "J"),
2260 ],
2261 None,
2262 cx,
2263 );
2264 });
2265 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2266 });
2267 cx.run_until_parked();
2268 assert_eq!(
2269 unreviewed_hunks(&action_log, cx),
2270 vec![(
2271 buffer.clone(),
2272 vec![
2273 HunkStatus {
2274 range: Point::new(0, 0)..Point::new(1, 0),
2275 diff_status: DiffHunkStatusKind::Modified,
2276 old_text: "a\n".into()
2277 },
2278 HunkStatus {
2279 range: Point::new(3, 0)..Point::new(3, 0),
2280 diff_status: DiffHunkStatusKind::Deleted,
2281 old_text: "d\ne\n".into()
2282 },
2283 HunkStatus {
2284 range: Point::new(4, 0)..Point::new(5, 0),
2285 diff_status: DiffHunkStatusKind::Modified,
2286 old_text: "g\n".into()
2287 },
2288 HunkStatus {
2289 range: Point::new(6, 0)..Point::new(7, 0),
2290 diff_status: DiffHunkStatusKind::Added,
2291 old_text: "".into()
2292 },
2293 HunkStatus {
2294 range: Point::new(8, 0)..Point::new(8, 1),
2295 diff_status: DiffHunkStatusKind::Modified,
2296 old_text: "j".into()
2297 }
2298 ]
2299 )]
2300 );
2301
2302 // Simulate a git commit that matches some edits but not others:
2303 // - Accepts the first edit (a -> A)
2304 // - Accepts the deletion (remove d and e)
2305 // - Makes a different change to g (g -> G instead of GGG)
2306 // - Ignores the NEW line addition
2307 // - Ignores the last line edit (j stays as j)
2308 fs.set_head_for_repo(
2309 path!("/project/.git").as_ref(),
2310 &[("file.txt", "A\nb\nc\nf\nG\nh\ni\nj".into())],
2311 "0000001",
2312 );
2313 cx.run_until_parked();
2314 assert_eq!(
2315 unreviewed_hunks(&action_log, cx),
2316 vec![(
2317 buffer.clone(),
2318 vec![
2319 HunkStatus {
2320 range: Point::new(4, 0)..Point::new(5, 0),
2321 diff_status: DiffHunkStatusKind::Modified,
2322 old_text: "g\n".into()
2323 },
2324 HunkStatus {
2325 range: Point::new(6, 0)..Point::new(7, 0),
2326 diff_status: DiffHunkStatusKind::Added,
2327 old_text: "".into()
2328 },
2329 HunkStatus {
2330 range: Point::new(8, 0)..Point::new(8, 1),
2331 diff_status: DiffHunkStatusKind::Modified,
2332 old_text: "j".into()
2333 }
2334 ]
2335 )]
2336 );
2337
2338 // Make another commit that accepts the NEW line but with different content
2339 fs.set_head_for_repo(
2340 path!("/project/.git").as_ref(),
2341 &[("file.txt", "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into())],
2342 "0000002",
2343 );
2344 cx.run_until_parked();
2345 assert_eq!(
2346 unreviewed_hunks(&action_log, cx),
2347 vec![(
2348 buffer,
2349 vec![
2350 HunkStatus {
2351 range: Point::new(6, 0)..Point::new(7, 0),
2352 diff_status: DiffHunkStatusKind::Added,
2353 old_text: "".into()
2354 },
2355 HunkStatus {
2356 range: Point::new(8, 0)..Point::new(8, 1),
2357 diff_status: DiffHunkStatusKind::Modified,
2358 old_text: "j".into()
2359 }
2360 ]
2361 )]
2362 );
2363
2364 // Final commit that accepts all remaining edits
2365 fs.set_head_for_repo(
2366 path!("/project/.git").as_ref(),
2367 &[("file.txt", "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
2368 "0000003",
2369 );
2370 cx.run_until_parked();
2371 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2372 }
2373
2374 #[derive(Debug, Clone, PartialEq, Eq)]
2375 struct HunkStatus {
2376 range: Range<Point>,
2377 diff_status: DiffHunkStatusKind,
2378 old_text: String,
2379 }
2380
2381 fn unreviewed_hunks(
2382 action_log: &Entity<ActionLog>,
2383 cx: &TestAppContext,
2384 ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
2385 cx.read(|cx| {
2386 action_log
2387 .read(cx)
2388 .changed_buffers(cx)
2389 .into_iter()
2390 .map(|(buffer, diff)| {
2391 let snapshot = buffer.read(cx).snapshot();
2392 (
2393 buffer,
2394 diff.read(cx)
2395 .snapshot(cx)
2396 .hunks(&snapshot)
2397 .map(|hunk| HunkStatus {
2398 diff_status: hunk.status().kind,
2399 range: hunk.range,
2400 old_text: diff
2401 .read(cx)
2402 .base_text(cx)
2403 .text_for_range(hunk.diff_base_byte_range)
2404 .collect(),
2405 })
2406 .collect(),
2407 )
2408 })
2409 .collect()
2410 })
2411 }
2412}