1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use clock;
4use collections::BTreeMap;
5use futures::{FutureExt, StreamExt, channel::mpsc};
6use gpui::{
7 App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
8};
9use language::{Anchor, Buffer, BufferEvent, Point, ToPoint};
10use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
11use std::{cmp, ops::Range, sync::Arc};
12use text::{Edit, Patch, Rope};
13use util::{RangeExt, ResultExt as _};
14
/// Tracks actions performed by tools in a thread.
///
/// For every buffer the agent has read, created, edited, or deleted, this log
/// maintains a diff base and a set of unreviewed row-level edits, so agent
/// changes can be reviewed (kept/rejected) and user edits can be surfaced to
/// the model.
pub struct ActionLog {
    /// Buffers that we want to notify the model about when they change.
    tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
    /// The project this action log is associated with
    project: Entity<Project>,
}
22
23impl ActionLog {
24 /// Creates a new, empty action log associated with the given project.
25 pub fn new(project: Entity<Project>) -> Self {
26 Self {
27 tracked_buffers: BTreeMap::default(),
28 project,
29 }
30 }
31
    /// Returns the project this action log is associated with.
    pub fn project(&self) -> &Entity<Project> {
        &self.project
    }
35
    /// Begins (or refreshes) tracking of `buffer`, returning its tracking entry.
    ///
    /// `is_created` indicates the agent is creating the file rather than
    /// reading/editing an existing one; this affects both the initial status
    /// and the initial diff base. Note that `status` only takes effect when a
    /// new entry is inserted below: when the buffer is already tracked (only
    /// possible when `is_created` is false, since the entry is removed above
    /// in the created case), the existing status is kept.
    fn track_buffer_internal(
        &mut self,
        buffer: Entity<Buffer>,
        is_created: bool,
        cx: &mut Context<Self>,
    ) -> &mut TrackedBuffer {
        let status = if is_created {
            if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
                // Already tracked: carry over whatever content would need to
                // be restored if this creation is later rejected.
                match tracked.status {
                    TrackedBufferStatus::Created {
                        existing_file_content,
                    } => TrackedBufferStatus::Created {
                        existing_file_content,
                    },
                    TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
                        TrackedBufferStatus::Created {
                            existing_file_content: Some(tracked.diff_base),
                        }
                    }
                }
            } else if buffer
                .read(cx)
                .file()
                .is_some_and(|file| file.disk_state().exists())
            {
                // The file already exists on disk: remember its current
                // content so a rejected creation can restore it.
                TrackedBufferStatus::Created {
                    existing_file_content: Some(buffer.read(cx).as_rope().clone()),
                }
            } else {
                TrackedBufferStatus::Created {
                    existing_file_content: None,
                }
            }
        } else {
            TrackedBufferStatus::Modified
        };

        let tracked_buffer = self
            .tracked_buffers
            .entry(buffer.clone())
            .or_insert_with(|| {
                // Keep language servers aware of this buffer for as long as
                // it is tracked.
                let open_lsp_handle = self.project.update(cx, |project, cx| {
                    project.register_buffer_with_language_servers(&buffer, cx)
                });

                let text_snapshot = buffer.read(cx).text_snapshot();
                let language = buffer.read(cx).language().cloned();
                let language_registry = buffer.read(cx).language_registry();
                let diff = cx.new(|cx| {
                    let mut diff = BufferDiff::new(&text_snapshot, cx);
                    diff.language_changed(language, language_registry, cx);
                    diff
                });
                let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
                let diff_base;
                let unreviewed_edits;
                if is_created {
                    // A created file diffs against an empty base; the whole
                    // buffer counts as one unreviewed insertion.
                    diff_base = Rope::default();
                    unreviewed_edits = Patch::new(vec![Edit {
                        old: 0..1,
                        new: 0..text_snapshot.max_point().row + 1,
                    }])
                } else {
                    diff_base = buffer.read(cx).as_rope().clone();
                    unreviewed_edits = Patch::default();
                }
                TrackedBuffer {
                    buffer: buffer.clone(),
                    diff_base,
                    unreviewed_edits,
                    snapshot: text_snapshot,
                    status,
                    version: buffer.read(cx).version(),
                    diff,
                    diff_update: diff_update_tx,
                    _open_lsp_handle: open_lsp_handle,
                    // Background task that consumes `diff_update_rx` and keeps
                    // the diff current until tracking stops.
                    _maintain_diff: cx.spawn({
                        let buffer = buffer.clone();
                        async move |this, cx| {
                            Self::maintain_diff(this, buffer, diff_update_rx, cx)
                                .await
                                .ok();
                        }
                    }),
                    _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
                }
            });
        tracked_buffer.version = buffer.read(cx).version();
        tracked_buffer
    }
126
127 fn handle_buffer_event(
128 &mut self,
129 buffer: Entity<Buffer>,
130 event: &BufferEvent,
131 cx: &mut Context<Self>,
132 ) {
133 match event {
134 BufferEvent::Edited => {
135 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
136 return;
137 };
138 let buffer_version = buffer.read(cx).version();
139 if !buffer_version.changed_since(&tracked_buffer.version) {
140 return;
141 }
142 self.handle_buffer_edited(buffer, cx);
143 }
144 BufferEvent::FileHandleChanged => {
145 self.handle_buffer_file_changed(buffer, cx);
146 }
147 _ => {}
148 };
149 }
150
151 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
152 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
153 return;
154 };
155 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
156 }
157
    /// Reconciles tracking state when a buffer's backing file changes on disk
    /// outside the agent's control.
    fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        match tracked_buffer.status {
            TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
                if buffer
                    .read(cx)
                    .file()
                    .is_some_and(|file| file.disk_state().is_deleted())
                {
                    // If the buffer had been edited by a tool, but it got
                    // deleted externally, we want to stop tracking it.
                    self.tracked_buffers.remove(&buffer);
                }
                cx.notify();
            }
            TrackedBufferStatus::Deleted => {
                if buffer
                    .read(cx)
                    .file()
                    .is_some_and(|file| !file.disk_state().is_deleted())
                {
                    // If the buffer had been deleted by a tool, but it got
                    // resurrected externally, we want to clear the edits we
                    // were tracking and reset the buffer's state.
                    self.tracked_buffers.remove(&buffer);
                    self.track_buffer_internal(buffer, false, cx);
                }
                cx.notify();
            }
        }
    }
192
    /// Long-running task that keeps a tracked buffer's diff up to date.
    ///
    /// Reacts to two event sources until the update channel closes (i.e. the
    /// buffer stops being tracked): snapshots queued by
    /// `TrackedBuffer::schedule_diff_update`, and git HEAD changes for the
    /// buffer's repository, which allow committed agent edits to be
    /// auto-accepted.
    async fn maintain_diff(
        this: WeakEntity<Self>,
        buffer: Entity<Buffer>,
        mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
        let git_diff = this
            .update(cx, |this, cx| {
                this.project.update(cx, |project, cx| {
                    project.open_uncommitted_diff(buffer.clone(), cx)
                })
            })?
            .await
            .ok();
        let buffer_repo = git_store.read_with(cx, |git_store, cx| {
            git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        });

        // Forward a signal only when the repository's HEAD actually moves;
        // diff-changed events that leave HEAD untouched are ignored.
        let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
        let _repo_subscription =
            if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
                cx.update(|cx| {
                    let mut old_head = buffer_repo.read(cx).head_commit.clone();
                    Some(cx.subscribe(git_diff, move |_, event, cx| {
                        if let buffer_diff::BufferDiffEvent::DiffChanged { .. } = event {
                            let new_head = buffer_repo.read(cx).head_commit.clone();
                            if new_head != old_head {
                                old_head = new_head;
                                git_diff_updates_tx.send(()).ok();
                            }
                        }
                    }))
                })
            } else {
                None
            };

        loop {
            futures::select_biased! {
                buffer_update = buffer_updates.next() => {
                    if let Some((author, buffer_snapshot)) = buffer_update {
                        Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
                    } else {
                        // All senders dropped: the buffer is no longer tracked.
                        break;
                    }
                }
                _ = git_diff_updates_rx.changed().fuse() => {
                    if let Some(git_diff) = git_diff.as_ref() {
                        Self::keep_committed_edits(&this, &buffer, git_diff, cx).await?;
                    }
                }
            }
        }

        Ok(())
    }
250
    /// Incorporates the edits between the previously tracked snapshot and
    /// `buffer_snapshot` into the tracked state, then refreshes the diff.
    ///
    /// User-authored edits that don't conflict with unreviewed agent edits are
    /// applied to the diff base so they don't show up as agent changes; agent
    /// edits leave the diff base untouched and surface in the diff instead.
    async fn track_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        author: ChangeAuthor,
        buffer_snapshot: text::BufferSnapshot,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let rebase = this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;

            // The rebase runs on a background thread over clones of the
            // tracked state, so the UI thread is never blocked.
            let rebase = cx.background_spawn({
                let mut base_text = tracked_buffer.diff_base.clone();
                let old_snapshot = tracked_buffer.snapshot.clone();
                let new_snapshot = buffer_snapshot.clone();
                let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let edits = diff_snapshots(&old_snapshot, &new_snapshot);
                async move {
                    if let ChangeAuthor::User = author {
                        apply_non_conflicting_edits(
                            &unreviewed_edits,
                            edits,
                            &mut base_text,
                            new_snapshot.as_rope(),
                        );
                    }

                    (Arc::from(base_text.to_string().as_str()), base_text)
                }
            });

            anyhow::Ok(rebase)
        })??;
        let (new_base_text, new_diff_base) = rebase.await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
298
    /// Auto-accepts unreviewed agent edits that the user has since committed.
    ///
    /// Diffs the agent's base text against the git base text; whenever a
    /// committed edit lines up with an unreviewed edit and their new content
    /// matches, that edit is folded into the agent's diff base (the same
    /// effect as the user explicitly keeping it).
    async fn keep_committed_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        git_diff: &Entity<BufferDiff>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let buffer_snapshot = this.read_with(cx, |this, _cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok(tracked_buffer.snapshot.clone())
        })??;
        let (new_base_text, new_diff_base) = this
            .read_with(cx, |this, cx| {
                let tracked_buffer = this
                    .tracked_buffers
                    .get(buffer)
                    .context("buffer not tracked")?;
                let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let agent_diff_base = tracked_buffer.diff_base.clone();
                let git_diff_base = git_diff.read(cx).base_text(cx).as_rope().clone();
                let buffer_text = tracked_buffer.snapshot.as_rope().clone();
                anyhow::Ok(cx.background_spawn(async move {
                    let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
                    // Row edits transforming the agent's diff base into the
                    // committed (git) base text.
                    let committed_edits = language::line_diff(
                        &agent_diff_base.to_string(),
                        &git_diff_base.to_string(),
                    )
                    .into_iter()
                    .map(|(old, new)| Edit { old, new });

                    let mut new_agent_diff_base = agent_diff_base.clone();
                    // Row drift in `new_agent_diff_base` introduced by the
                    // splices performed in earlier loop iterations.
                    let mut row_delta = 0i32;
                    for committed in committed_edits {
                        while let Some(unreviewed) = old_unreviewed_edits.peek() {
                            // If the committed edit matches the unreviewed
                            // edit, assume the user wants to keep it.
                            if committed.old == unreviewed.old {
                                let unreviewed_new =
                                    buffer_text.slice_rows(unreviewed.new.clone()).to_string();
                                let committed_new =
                                    git_diff_base.slice_rows(committed.new.clone()).to_string();
                                if unreviewed_new == committed_new {
                                    // Splice the accepted content into the
                                    // agent's diff base, adjusting rows for
                                    // earlier splices.
                                    let old_byte_start =
                                        new_agent_diff_base.point_to_offset(Point::new(
                                            (unreviewed.old.start as i32 + row_delta) as u32,
                                            0,
                                        ));
                                    let old_byte_end =
                                        new_agent_diff_base.point_to_offset(cmp::min(
                                            Point::new(
                                                (unreviewed.old.end as i32 + row_delta) as u32,
                                                0,
                                            ),
                                            new_agent_diff_base.max_point(),
                                        ));
                                    new_agent_diff_base
                                        .replace(old_byte_start..old_byte_end, &unreviewed_new);
                                    row_delta +=
                                        unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
                                }
                            } else if unreviewed.old.start >= committed.old.end {
                                // This unreviewed edit lies past the committed
                                // edit; advance to the next committed edit.
                                break;
                            }

                            old_unreviewed_edits.next().unwrap();
                        }
                    }

                    (
                        Arc::from(new_agent_diff_base.to_string().as_str()),
                        new_agent_diff_base,
                    )
                }))
            })??
            .await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
387
    /// Recomputes the buffer's diff against `new_base_text` and stores the
    /// resulting state (diff base, snapshot, and row-level unreviewed edits)
    /// back on the tracked buffer, notifying observers.
    async fn update_diff(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        buffer_snapshot: text::BufferSnapshot,
        new_base_text: Arc<str>,
        new_diff_base: Rope,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let (diff, language) = this.read_with(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok((
                tracked_buffer.diff.clone(),
                buffer.read(cx).language().cloned(),
            ))
        })??;
        let update = diff
            .update(cx, |diff, cx| {
                diff.update_diff(
                    buffer_snapshot.clone(),
                    Some(new_base_text),
                    true,
                    language,
                    cx,
                )
            })
            .await;
        diff.update(cx, |diff, cx| {
            diff.set_snapshot(update.clone(), &buffer_snapshot, cx)
        })
        .await;
        let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx));

        // Re-derive the row-level unreviewed edits from the freshly computed
        // hunks, on a background thread.
        let unreviewed_edits = cx
            .background_spawn({
                let buffer_snapshot = buffer_snapshot.clone();
                let new_diff_base = new_diff_base.clone();
                async move {
                    let mut unreviewed_edits = Patch::default();
                    for hunk in diff_snapshot.hunks_intersecting_range(
                        Anchor::min_for_buffer(buffer_snapshot.remote_id())
                            ..Anchor::max_for_buffer(buffer_snapshot.remote_id()),
                        &buffer_snapshot,
                    ) {
                        let old_range = new_diff_base
                            .offset_to_point(hunk.diff_base_byte_range.start)
                            ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
                        let new_range = hunk.range.start..hunk.range.end;
                        unreviewed_edits.push(point_to_row_edit(
                            Edit {
                                old: old_range,
                                new: new_range,
                            },
                            &new_diff_base,
                            buffer_snapshot.as_rope(),
                        ));
                    }
                    unreviewed_edits
                }
            })
            .await;
        this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;
            tracked_buffer.diff_base = new_diff_base;
            tracked_buffer.snapshot = buffer_snapshot;
            tracked_buffer.unreviewed_edits = unreviewed_edits;
            cx.notify();
            anyhow::Ok(())
        })?
    }
463
    /// Track a buffer as read by agent, so we can notify the model about user edits.
    ///
    /// Reading is tracked with the buffer's current content as the diff base,
    /// so no hunks appear until something changes.
    pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.track_buffer_internal(buffer, false, cx);
    }
468
    /// Mark a buffer as created by agent, so we can refresh it in the context
    ///
    /// The entire buffer content becomes a single unreviewed insertion against
    /// an empty diff base.
    pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.track_buffer_internal(buffer, true, cx);
    }
473
474 /// Mark a buffer as edited by agent, so we can refresh it in the context
475 pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
476 let new_version = buffer.read(cx).version();
477 let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
478 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
479 tracked_buffer.status = TrackedBufferStatus::Modified;
480 }
481
482 tracked_buffer.version = new_version;
483 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
484 }
485
    /// Records that the agent is about to delete `buffer`, adjusting the
    /// tracking state accordingly.
    pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
        match tracked_buffer.status {
            TrackedBufferStatus::Created { .. } => {
                // Deleting a file the agent itself created cancels out: stop
                // tracking it entirely.
                // NOTE(review): `cx.notify()` fires twice on this path (here
                // and at the end of the function) — presumably harmless, but
                // worth confirming.
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            TrackedBufferStatus::Modified => {
                // Represent the deletion as "all content removed" so the user
                // can still review (and reject) it.
                buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
                tracked_buffer.status = TrackedBufferStatus::Deleted;
                tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
            }
            TrackedBufferStatus::Deleted => {}
        }
        cx.notify();
    }
502
    /// Accepts ("keeps") all unreviewed edits intersecting `buffer_range`,
    /// folding them into the diff base, and reports telemetry if provided.
    pub fn keep_edits_in_range(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_range: Range<impl language::ToPoint>,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
        match tracked_buffer.status {
            TrackedBufferStatus::Deleted => {
                // Keeping a deletion accepts it wholesale; there is nothing
                // left to review, so stop tracking the buffer.
                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            _ => {
                let buffer = buffer.read(cx);
                let buffer_range =
                    buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
                // Row drift introduced by edits accepted earlier in this pass.
                let mut delta = 0i32;
                tracked_buffer.unreviewed_edits.retain_mut(|edit| {
                    edit.old.start = (edit.old.start as i32 + delta) as u32;
                    edit.old.end = (edit.old.end as i32 + delta) as u32;

                    if buffer_range.end.row < edit.new.start
                        || buffer_range.start.row > edit.new.end
                    {
                        // Outside the requested range: leave it unreviewed.
                        true
                    } else {
                        // Accept by copying the edit's new rows over the
                        // corresponding rows of the diff base.
                        let old_range = tracked_buffer
                            .diff_base
                            .point_to_offset(Point::new(edit.old.start, 0))
                            ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                Point::new(edit.old.end, 0),
                                tracked_buffer.diff_base.max_point(),
                            ));
                        let new_range = tracked_buffer
                            .snapshot
                            .point_to_offset(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.point_to_offset(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        tracked_buffer.diff_base.replace(
                            old_range,
                            &tracked_buffer
                                .snapshot
                                .text_for_range(new_range)
                                .collect::<String>(),
                        );
                        delta += edit.new_len() as i32 - edit.old_len() as i32;
                        metrics.add_edit(edit);
                        false
                    }
                });
                // Once every creation hunk has been accepted, the file behaves
                // like a regular modified file from here on.
                if tracked_buffer.unreviewed_edits.is_empty()
                    && let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status
                {
                    tracked_buffer.status = TrackedBufferStatus::Modified;
                }
                tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
            }
        }
        if let Some(telemetry) = telemetry {
            telemetry_report_accepted_edits(&telemetry, metrics);
        }
    }
573
    /// Rejects unreviewed edits intersecting `buffer_ranges`, restoring the
    /// corresponding pre-edit content, and returns a task that resolves when
    /// any required save/delete has finished.
    ///
    /// Created files are deleted (or restored to their pre-existing content),
    /// deleted files are resurrected from the diff base, and modified files
    /// have only the intersecting hunks reverted.
    pub fn reject_edits_in_ranges(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_ranges: Vec<Range<impl language::ToPoint>>,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return Task::ready(Ok(()));
        };

        let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
        let task = match &tracked_buffer.status {
            TrackedBufferStatus::Created {
                existing_file_content,
            } => {
                let task = if let Some(existing_file_content) = existing_file_content {
                    // The agent overwrote a pre-existing file: restore its
                    // original content and save.
                    buffer.update(cx, |buffer, cx| {
                        buffer.start_transaction();
                        buffer.set_text("", cx);
                        for chunk in existing_file_content.chunks() {
                            buffer.append(chunk, cx);
                        }
                        buffer.end_transaction(cx);
                    });
                    self.project
                        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
                } else {
                    // For a file created by AI with no pre-existing content,
                    // only delete the file if we're certain it contains only AI content
                    // with no edits from the user.

                    let initial_version = tracked_buffer.version.clone();
                    let current_version = buffer.read(cx).version();

                    let current_content = buffer.read(cx).text();
                    let tracked_content = tracked_buffer.snapshot.text();

                    let is_ai_only_content =
                        initial_version == current_version && current_content == tracked_content;

                    if is_ai_only_content {
                        buffer
                            .read(cx)
                            .entry_id(cx)
                            .and_then(|entry_id| {
                                self.project.update(cx, |project, cx| {
                                    project.delete_entry(entry_id, false, cx)
                                })
                            })
                            .unwrap_or(Task::ready(Ok(())))
                    } else {
                        // Not sure how to disentangle edits made by the user
                        // from edits made by the AI at this point.
                        // For now, preserve both to avoid data loss.
                        //
                        // TODO: Better solution (disable "Reject" after user makes some
                        // edit or find a way to differentiate between AI and user edits)
                        Task::ready(Ok(()))
                    }
                };

                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                cx.notify();
                task
            }
            TrackedBufferStatus::Deleted => {
                // Resurrect the file with the content it had before the agent
                // deleted it.
                buffer.update(cx, |buffer, cx| {
                    buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
                });
                let save = self
                    .project
                    .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));

                // Clear all tracked edits for this buffer and start over as if we just read it.
                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                self.buffer_read(buffer.clone(), cx);
                cx.notify();
                save
            }
            TrackedBufferStatus::Modified => {
                buffer.update(cx, |buffer, cx| {
                    let mut buffer_row_ranges = buffer_ranges
                        .into_iter()
                        .map(|range| {
                            range.start.to_point(buffer).row..range.end.to_point(buffer).row
                        })
                        .peekable();

                    let mut edits_to_revert = Vec::new();
                    for edit in tracked_buffer.unreviewed_edits.edits() {
                        // Anchor the edit's new range so positions stay valid
                        // as reverts are applied below.
                        let new_range = tracked_buffer
                            .snapshot
                            .anchor_before(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.anchor_after(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        let new_row_range = new_range.start.to_point(buffer).row
                            ..new_range.end.to_point(buffer).row;

                        // Determine whether any requested range intersects
                        // this edit's rows.
                        let mut revert = false;
                        while let Some(buffer_row_range) = buffer_row_ranges.peek() {
                            if buffer_row_range.end < new_row_range.start {
                                buffer_row_ranges.next();
                            } else if buffer_row_range.start > new_row_range.end {
                                break;
                            } else {
                                revert = true;
                                break;
                            }
                        }

                        if revert {
                            metrics.add_edit(edit);
                            // Replace the edited rows with the corresponding
                            // rows from the diff base.
                            let old_range = tracked_buffer
                                .diff_base
                                .point_to_offset(Point::new(edit.old.start, 0))
                                ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                    Point::new(edit.old.end, 0),
                                    tracked_buffer.diff_base.max_point(),
                                ));
                            let old_text = tracked_buffer
                                .diff_base
                                .chunks_in_range(old_range)
                                .collect::<String>();
                            edits_to_revert.push((new_range, old_text));
                        }
                    }

                    buffer.edit(edits_to_revert, None, cx);
                });
                self.project
                    .update(cx, |project, cx| project.save_buffer(buffer, cx))
            }
        };
        if let Some(telemetry) = telemetry {
            telemetry_report_rejected_edits(&telemetry, metrics);
        }
        task
    }
717
    /// Accepts every unreviewed edit in every tracked buffer.
    pub fn keep_all_edits(
        &mut self,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) {
        self.tracked_buffers.retain(|buffer, tracked_buffer| {
            let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
            metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
            if let Some(telemetry) = telemetry.as_ref() {
                telemetry_report_accepted_edits(telemetry, metrics);
            }
            match tracked_buffer.status {
                // An accepted deletion needs no further review; drop the entry.
                TrackedBufferStatus::Deleted => false,
                _ => {
                    if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
                        tracked_buffer.status = TrackedBufferStatus::Modified;
                    }
                    tracked_buffer.unreviewed_edits.clear();
                    // Resetting the diff base to the current content leaves an
                    // empty diff.
                    tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
                    tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
                    true
                }
            }
        });

        cx.notify();
    }
745
    /// Rejects every unreviewed edit in every changed buffer, returning a task
    /// that resolves once all per-buffer rejections have completed.
    pub fn reject_all_edits(
        &mut self,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) -> Task<()> {
        let futures = self.changed_buffers(cx).into_keys().map(|buffer| {
            // Reject across the buffer's full extent.
            let buffer_ranges = vec![Anchor::min_max_range_for_buffer(
                buffer.read(cx).remote_id(),
            )];
            let reject = self.reject_edits_in_ranges(buffer, buffer_ranges, telemetry.clone(), cx);

            async move {
                reject.await.log_err();
            }
        });

        let task = futures::future::join_all(futures);
        cx.background_spawn(async move {
            task.await;
        })
    }
767
768 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
769 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
770 self.tracked_buffers
771 .iter()
772 .filter(|(_, tracked)| tracked.has_edits(cx))
773 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
774 .collect()
775 }
776
777 /// Iterate over buffers changed since last read or edited by the model
778 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
779 self.tracked_buffers
780 .iter()
781 .filter(|(buffer, tracked)| {
782 let buffer = buffer.read(cx);
783
784 tracked.version != buffer.version
785 && buffer
786 .file()
787 .is_some_and(|file| !file.disk_state().is_deleted())
788 })
789 .map(|(buffer, _)| buffer)
790 }
791}
792
/// Identifiers attached to accepted/rejected-edit telemetry events.
#[derive(Clone)]
pub struct ActionLogTelemetry {
    /// Telemetry identifier of the agent that produced the edits.
    pub agent_telemetry_id: SharedString,
    /// Identifier of the session the edits belong to.
    pub session_id: Arc<str>,
}
798
/// Line-count metrics accumulated for a single buffer's telemetry report.
struct ActionLogMetrics {
    /// Total rows removed across the counted edits.
    lines_removed: u32,
    /// Total rows added across the counted edits.
    lines_added: u32,
    /// Name of the buffer's language, if known.
    language: Option<SharedString>,
}
804
805impl ActionLogMetrics {
806 fn for_buffer(buffer: &Buffer) -> Self {
807 Self {
808 language: buffer.language().map(|l| l.name().0),
809 lines_removed: 0,
810 lines_added: 0,
811 }
812 }
813
814 fn add_edits(&mut self, edits: &[Edit<u32>]) {
815 for edit in edits {
816 self.add_edit(edit);
817 }
818 }
819
820 fn add_edit(&mut self, edit: &Edit<u32>) {
821 self.lines_added += edit.new_len();
822 self.lines_removed += edit.old_len();
823 }
824}
825
/// Emits an "Agent Edits Accepted" telemetry event for the given metrics.
fn telemetry_report_accepted_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
    telemetry::event!(
        "Agent Edits Accepted",
        agent = telemetry.agent_telemetry_id,
        session = telemetry.session_id,
        language = metrics.language,
        lines_added = metrics.lines_added,
        lines_removed = metrics.lines_removed
    );
}
836
/// Emits an "Agent Edits Rejected" telemetry event for the given metrics.
fn telemetry_report_rejected_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
    telemetry::event!(
        "Agent Edits Rejected",
        agent = telemetry.agent_telemetry_id,
        session = telemetry.session_id,
        language = metrics.language,
        lines_added = metrics.lines_added,
        lines_removed = metrics.lines_removed
    );
}
847
/// Applies the row edits in `edits` to `old_text` wherever they do not overlap
/// an edit in `patch`, returning whether anything changed.
///
/// `patch` holds unreviewed agent edits (old/diff-base rows -> new/buffer
/// rows); `edits` are user edits against the same new text. Conflicting user
/// edits are skipped so they remain visible in the agent's diff.
fn apply_non_conflicting_edits(
    patch: &Patch<u32>,
    edits: Vec<Edit<u32>>,
    old_text: &mut Rope,
    new_text: &Rope,
) -> bool {
    let mut old_edits = patch.edits().iter().cloned().peekable();
    let mut new_edits = edits.into_iter().peekable();
    // `applied_delta`: row drift from edits already applied to `old_text`.
    // `rebased_delta`: row drift contributed by agent edits skipped past.
    let mut applied_delta = 0i32;
    let mut rebased_delta = 0i32;
    let mut has_made_changes = false;

    while let Some(mut new_edit) = new_edits.next() {
        let mut conflict = false;

        // Push all the old edits that are before this new edit or that intersect with it.
        while let Some(old_edit) = old_edits.peek() {
            if new_edit.old.end < old_edit.new.start
                || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
            {
                break;
            } else if new_edit.old.start > old_edit.new.end
                || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
            {
                let old_edit = old_edits.next().unwrap();
                rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
            } else {
                conflict = true;
                // If the following user edit also overlaps this agent edit,
                // advance the user edit; otherwise consume the agent edit.
                if new_edits
                    .peek()
                    .is_some_and(|next_edit| next_edit.old.overlaps(&old_edit.new))
                {
                    new_edit = new_edits.next().unwrap();
                } else {
                    let old_edit = old_edits.next().unwrap();
                    rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
                }
            }
        }

        if !conflict {
            // This edit doesn't intersect with any old edit, so we can apply it to the old text.
            new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
            new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
            let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
                ..old_text.point_to_offset(cmp::min(
                    Point::new(new_edit.old.end, 0),
                    old_text.max_point(),
                ));
            let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
                ..new_text.point_to_offset(cmp::min(
                    Point::new(new_edit.new.end, 0),
                    new_text.max_point(),
                ));

            old_text.replace(
                old_bytes,
                &new_text.chunks_in_range(new_bytes).collect::<String>(),
            );
            applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
            has_made_changes = true;
        }
    }
    has_made_changes
}
913
/// Computes row-level edits between two buffer snapshots, coalescing edits
/// whose old row ranges touch or overlap into single edits.
fn diff_snapshots(
    old_snapshot: &text::BufferSnapshot,
    new_snapshot: &text::BufferSnapshot,
) -> Vec<Edit<u32>> {
    let mut edits = new_snapshot
        .edits_since::<Point>(&old_snapshot.version)
        .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
        .peekable();
    let mut row_edits = Vec::new();
    while let Some(mut edit) = edits.next() {
        // Merge any subsequent edits whose old range touches this one.
        while let Some(next_edit) = edits.peek() {
            if edit.old.end >= next_edit.old.start {
                edit.old.end = next_edit.old.end;
                edit.new.end = next_edit.new.end;
                edits.next();
            } else {
                break;
            }
        }
        row_edits.push(edit);
    }
    row_edits
}
937
/// Converts a point-based edit into a row-based edit.
///
/// Three cases: an insertion that begins at the end of a line with a leading
/// newline is shifted down one row (so the edit covers the freshly inserted
/// lines); an edit that starts and ends exactly at column 0 maps directly onto
/// those rows; anything else is widened by one row to include the partially
/// affected final line.
fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
    if edit.old.start.column == old_text.line_len(edit.old.start.row)
        && new_text
            .chars_at(new_text.point_to_offset(edit.new.start))
            .next()
            == Some('\n')
        && edit.old.start != old_text.max_point()
    {
        Edit {
            old: edit.old.start.row + 1..edit.old.end.row + 1,
            new: edit.new.start.row + 1..edit.new.end.row + 1,
        }
    } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
        Edit {
            old: edit.old.start.row..edit.old.end.row,
            new: edit.new.start.row..edit.new.end.row,
        }
    } else {
        Edit {
            old: edit.old.start.row..edit.old.end.row + 1,
            new: edit.new.start.row..edit.new.end.row + 1,
        }
    }
}
962
/// Who authored a change that triggered a diff update.
#[derive(Copy, Clone, Debug)]
enum ChangeAuthor {
    /// The human user editing the buffer.
    User,
    /// The AI agent acting through tools.
    Agent,
}
968
/// Lifecycle state of a buffer tracked by the `ActionLog`.
#[derive(Debug)]
enum TrackedBufferStatus {
    /// The agent created this file. `existing_file_content` holds the
    /// pre-existing on-disk content (if any) to restore when the creation is
    /// rejected.
    Created { existing_file_content: Option<Rope> },
    /// The agent read or modified an existing file.
    Modified,
    /// The agent deleted this file.
    Deleted,
}
975
/// Per-buffer tracking state owned by `ActionLog`.
struct TrackedBuffer {
    /// The buffer being tracked.
    buffer: Entity<Buffer>,
    /// The text the buffer is diffed against; advances as edits are reviewed.
    diff_base: Rope,
    /// Row-level edits (diff-base rows -> buffer rows) not yet reviewed.
    unreviewed_edits: Patch<u32>,
    /// Whether the agent created, modified, or deleted this buffer.
    status: TrackedBufferStatus,
    /// The buffer version last seen by the agent.
    version: clock::Global,
    /// The diff presented to the user for review.
    diff: Entity<BufferDiff>,
    /// The buffer text snapshot the tracked state was computed from.
    snapshot: text::BufferSnapshot,
    /// Sends snapshots to the diff-maintenance task.
    diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
    /// Keeps the buffer registered with language servers while tracked.
    _open_lsp_handle: OpenLspBufferHandle,
    /// Background task that keeps `diff` up to date.
    _maintain_diff: Task<()>,
    /// Subscription to this buffer's events.
    _subscription: Subscription,
}
989
990impl TrackedBuffer {
991 fn has_edits(&self, cx: &App) -> bool {
992 self.diff
993 .read(cx)
994 .snapshot(cx)
995 .hunks(self.buffer.read(cx))
996 .next()
997 .is_some()
998 }
999
1000 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
1001 self.diff_update
1002 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
1003 .ok();
1004 }
1005}
1006
/// A buffer with unreviewed changes, exposed alongside its review diff.
pub struct ChangedBuffer {
    /// The diff between the agent's base text and the buffer's content.
    pub diff: Entity<BufferDiff>,
}
1010
1011#[cfg(test)]
1012mod tests {
1013 use super::*;
1014 use buffer_diff::DiffHunkStatusKind;
1015 use gpui::TestAppContext;
1016 use language::Point;
1017 use project::{FakeFs, Fs, Project, RemoveOptions};
1018 use rand::prelude::*;
1019 use serde_json::json;
1020 use settings::SettingsStore;
1021 use std::env;
1022 use util::{RandomCharIter, path};
1023
    /// Initializes test logging once per process, before any test runs.
    #[ctor::ctor]
    fn init_logger() {
        zlog::init_test();
    }
1028
    /// Installs the global settings store required by project/buffer code.
    fn init_test(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
        });
    }
1035
    /// Verifies that agent edits show up as unreviewed hunks and that keeping
    /// a range accepts exactly the hunks intersecting it.
    #[gpui::test(iterations = 10)]
    async fn test_keep_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Simulate the agent reading the buffer and making two separate
        // single-character edits.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndEf\nghi\njkl\nmnO"
        );
        // Both edits start out unreviewed.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(2, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(4, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Keeping the range around the second hunk leaves only the first.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(2, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\n".into(),
                }],
            )]
        );

        // Keeping the remaining range clears all unreviewed hunks.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1113
/// Line deletions are tracked as Deleted hunks; undoing one deletion removes
/// only its hunk, and keeping the remaining range clears the log.
#[gpui::test(iterations = 10)]
async fn test_deletions(cx: &mut TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let action_log = cx.new(|_| ActionLog::new(project.clone()));
    let file_path = project
        .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
        .unwrap();
    let buffer = project
        .update(cx, |project, cx| project.open_buffer(file_path, cx))
        .await
        .unwrap();

    // Delete two lines in two *separate* transactions (finalized after each
    // edit) so a later `undo` reverts only the second deletion.
    cx.update(|cx| {
        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
        buffer.update(cx, |buffer, cx| {
            buffer
                .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
                .unwrap();
            buffer.finalize_last_transaction();
        });
        buffer.update(cx, |buffer, cx| {
            buffer
                .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
                .unwrap();
            buffer.finalize_last_transaction();
        });
        action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
    });
    cx.run_until_parked();
    assert_eq!(
        buffer.read_with(cx, |buffer, _| buffer.text()),
        "abc\nghi\njkl\npqr"
    );
    // Both deletions show as zero-width Deleted hunks at their positions.
    assert_eq!(
        unreviewed_hunks(&action_log, cx),
        vec![(
            buffer.clone(),
            vec![
                HunkStatus {
                    range: Point::new(1, 0)..Point::new(1, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "def\n".into(),
                },
                HunkStatus {
                    range: Point::new(3, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "mno\n".into(),
                }
            ],
        )]
    );

    // Undo reverts the most recent transaction ("mno\n" comes back), leaving
    // only the first deletion's hunk.
    buffer.update(cx, |buffer, cx| buffer.undo(cx));
    cx.run_until_parked();
    assert_eq!(
        buffer.read_with(cx, |buffer, _| buffer.text()),
        "abc\nghi\njkl\nmno\npqr"
    );
    assert_eq!(
        unreviewed_hunks(&action_log, cx),
        vec![(
            buffer.clone(),
            vec![HunkStatus {
                range: Point::new(1, 0)..Point::new(1, 0),
                diff_status: DiffHunkStatusKind::Deleted,
                old_text: "def\n".into(),
            }],
        )]
    );

    // Keeping the (empty) range at the deletion point clears the last hunk.
    action_log.update(cx, |log, cx| {
        log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), None, cx)
    });
    cx.run_until_parked();
    assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
}
1198
/// User edits — whether outside or inside an agent hunk — must not change the
/// hunk's tracked range or recorded old text.
#[gpui::test(iterations = 10)]
async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
        .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let action_log = cx.new(|_| ActionLog::new(project.clone()));
    let file_path = project
        .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
        .unwrap();
    let buffer = project
        .update(cx, |project, cx| project.open_buffer(file_path, cx))
        .await
        .unwrap();

    // Agent edit spanning two lines, reported to the log.
    cx.update(|cx| {
        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
        buffer.update(cx, |buffer, cx| {
            buffer
                .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
                .unwrap()
        });
        action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
    });
    cx.run_until_parked();
    assert_eq!(
        buffer.read_with(cx, |buffer, _| buffer.text()),
        "abc\ndeF\nGHI\njkl\nmno"
    );
    assert_eq!(
        unreviewed_hunks(&action_log, cx),
        vec![(
            buffer.clone(),
            vec![HunkStatus {
                range: Point::new(1, 0)..Point::new(3, 0),
                diff_status: DiffHunkStatusKind::Modified,
                old_text: "def\nghi\n".into(),
            }],
        )]
    );

    // User edits *outside* the hunk (not reported via `buffer_edited`): the
    // tracked hunk is unchanged.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [
                (Point::new(0, 2)..Point::new(0, 2), "X"),
                (Point::new(3, 0)..Point::new(3, 0), "Y"),
            ],
            None,
            cx,
        )
    });
    cx.run_until_parked();
    assert_eq!(
        buffer.read_with(cx, |buffer, _| buffer.text()),
        "abXc\ndeF\nGHI\nYjkl\nmno"
    );
    assert_eq!(
        unreviewed_hunks(&action_log, cx),
        vec![(
            buffer.clone(),
            vec![HunkStatus {
                range: Point::new(1, 0)..Point::new(3, 0),
                diff_status: DiffHunkStatusKind::Modified,
                old_text: "def\nghi\n".into(),
            }],
        )]
    );

    // User edit *inside* the hunk: hunk range and old text still unchanged.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
    });
    cx.run_until_parked();
    assert_eq!(
        buffer.read_with(cx, |buffer, _| buffer.text()),
        "abXc\ndZeF\nGHI\nYjkl\nmno"
    );
    assert_eq!(
        unreviewed_hunks(&action_log, cx),
        vec![(
            buffer.clone(),
            vec![HunkStatus {
                range: Point::new(1, 0)..Point::new(3, 0),
                diff_status: DiffHunkStatusKind::Modified,
                old_text: "def\nghi\n".into(),
            }],
        )]
    );

    // Keeping a range that touches the hunk's start clears it.
    action_log.update(cx, |log, cx| {
        log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), None, cx)
    });
    cx.run_until_parked();
    assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
}
1295
/// A file created via `buffer_created` is tracked as one Added hunk covering
/// the whole buffer, which grows to absorb later edits.
#[gpui::test(iterations = 10)]
async fn test_creating_files(cx: &mut TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let action_log = cx.new(|_| ActionLog::new(project.clone()));
    let file_path = project
        .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
        .unwrap();

    let buffer = project
        .update(cx, |project, cx| project.open_buffer(file_path, cx))
        .await
        .unwrap();
    cx.update(|cx| {
        action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
        buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
        action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();
    cx.run_until_parked();
    // The entire new file is a single Added hunk with empty old text.
    assert_eq!(
        unreviewed_hunks(&action_log, cx),
        vec![(
            buffer.clone(),
            vec![HunkStatus {
                range: Point::new(0, 0)..Point::new(0, 5),
                diff_status: DiffHunkStatusKind::Added,
                old_text: "".into(),
            }],
        )]
    );

    // A further edit (note: not reported via `buffer_edited`) still extends
    // the Added hunk to cover the new content.
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
    cx.run_until_parked();
    assert_eq!(
        unreviewed_hunks(&action_log, cx),
        vec![(
            buffer.clone(),
            vec![HunkStatus {
                range: Point::new(0, 0)..Point::new(0, 6),
                diff_status: DiffHunkStatusKind::Added,
                old_text: "".into(),
            }],
        )]
    );

    // Keeping a byte range that overlaps the hunk (even partially) clears it.
    action_log.update(cx, |log, cx| {
        log.keep_edits_in_range(buffer.clone(), 0..5, None, cx)
    });
    cx.run_until_parked();
    assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
}
1354
/// Overwriting an *existing* file via `buffer_created`: rejecting the Added
/// hunk restores the file's previous on-disk content instead of deleting it.
#[gpui::test(iterations = 10)]
async fn test_overwriting_files(cx: &mut TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "Lorem ipsum dolor"
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let action_log = cx.new(|_| ActionLog::new(project.clone()));
    let file_path = project
        .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
        .unwrap();

    let buffer = project
        .update(cx, |project, cx| project.open_buffer(file_path, cx))
        .await
        .unwrap();
    // The agent "creates" (overwrites) the file; the log remembers the
    // pre-existing content so it can be restored on reject.
    cx.update(|cx| {
        action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
        buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
        action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();
    cx.run_until_parked();
    assert_eq!(
        unreviewed_hunks(&action_log, cx),
        vec![(
            buffer.clone(),
            vec![HunkStatus {
                range: Point::new(0, 0)..Point::new(0, 19),
                diff_status: DiffHunkStatusKind::Added,
                old_text: "".into(),
            }],
        )]
    );

    // Rejecting any overlapping range reverts the overwrite back to the
    // original disk content.
    action_log
        .update(cx, |log, cx| {
            log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    assert_eq!(
        buffer.read_with(cx, |buffer, _cx| buffer.text()),
        "Lorem ipsum dolor"
    );
}
1412
/// Overwriting a file the agent had previously *edited*: calling
/// `buffer_created` again converts the Modified tracking into Added, and a
/// reject restores the original (pre-edit) file content.
#[gpui::test(iterations = 10)]
async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "Lorem ipsum dolor"
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let action_log = cx.new(|_| ActionLog::new(project.clone()));
    let file_path = project
        .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
        .unwrap();

    let buffer = project
        .update(cx, |project, cx| project.open_buffer(file_path, cx))
        .await
        .unwrap();
    // First, a normal tracked agent edit: produces a Modified hunk.
    cx.update(|cx| {
        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
        buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
        action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();
    cx.run_until_parked();
    assert_eq!(
        unreviewed_hunks(&action_log, cx),
        vec![(
            buffer.clone(),
            vec![HunkStatus {
                range: Point::new(0, 0)..Point::new(0, 37),
                diff_status: DiffHunkStatusKind::Modified,
                old_text: "Lorem ipsum dolor".into(),
            }],
        )]
    );

    // Then the agent overwrites the whole file: tracking becomes Added.
    cx.update(|cx| {
        action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
        buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
        action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();
    cx.run_until_parked();
    assert_eq!(
        unreviewed_hunks(&action_log, cx),
        vec![(
            buffer.clone(),
            vec![HunkStatus {
                range: Point::new(0, 0)..Point::new(0, 9),
                diff_status: DiffHunkStatusKind::Added,
                old_text: "".into(),
            }],
        )]
    );

    // Rejecting restores the original content, not the intermediate edit.
    action_log
        .update(cx, |log, cx| {
            log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    assert_eq!(
        buffer.read_with(cx, |buffer, _cx| buffer.text()),
        "Lorem ipsum dolor"
    );
}
1492
/// Deleted files appear as Deleted hunks; recreating a file (externally or by
/// a tool) replaces or clears its tracking, and an external deletion of a
/// tool-created file drops its hunks entirely.
#[gpui::test(iterations = 10)]
async fn test_deleting_files(cx: &mut TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({"file1": "lorem\n", "file2": "ipsum\n"}),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let file1_path = project
        .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
        .unwrap();
    let file2_path = project
        .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
        .unwrap();

    let action_log = cx.new(|_| ActionLog::new(project.clone()));
    let buffer1 = project
        .update(cx, |project, cx| {
            project.open_buffer(file1_path.clone(), cx)
        })
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |project, cx| {
            project.open_buffer(file2_path.clone(), cx)
        })
        .await
        .unwrap();

    // Announce both deletions to the log, then actually delete the files.
    action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
    action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
    project
        .update(cx, |project, cx| {
            project.delete_file(file1_path.clone(), false, cx)
        })
        .unwrap()
        .await
        .unwrap();
    project
        .update(cx, |project, cx| {
            project.delete_file(file2_path.clone(), false, cx)
        })
        .unwrap()
        .await
        .unwrap();
    cx.run_until_parked();
    // Each deleted file contributes one zero-width Deleted hunk holding the
    // file's former content.
    assert_eq!(
        unreviewed_hunks(&action_log, cx),
        vec![
            (
                buffer1.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "lorem\n".into(),
                }]
            ),
            (
                buffer2.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "ipsum\n".into(),
                }],
            )
        ]
    );

    // Simulate file1 being recreated externally.
    fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
        .await;

    // Simulate file2 being recreated by a tool.
    let buffer2 = project
        .update(cx, |project, cx| project.open_buffer(file2_path, cx))
        .await
        .unwrap();
    action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
    buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
    action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
    project
        .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
        .await
        .unwrap();

    cx.run_until_parked();
    // file1's external recreation cleared its entry; file2 is now tracked as
    // a newly created file (Added).
    assert_eq!(
        unreviewed_hunks(&action_log, cx),
        vec![(
            buffer2.clone(),
            vec![HunkStatus {
                range: Point::new(0, 0)..Point::new(0, 5),
                diff_status: DiffHunkStatusKind::Added,
                old_text: "".into(),
            }],
        )]
    );

    // Simulate file2 being deleted externally: its hunks are dropped.
    fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();
    assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
}
1602
/// `reject_edits_in_ranges` reverts only the hunks overlapping the given
/// ranges; non-overlapping ranges are a no-op.
#[gpui::test(iterations = 10)]
async fn test_reject_edits(cx: &mut TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
        .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let action_log = cx.new(|_| ActionLog::new(project.clone()));
    let file_path = project
        .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
        .unwrap();
    let buffer = project
        .update(cx, |project, cx| project.open_buffer(file_path, cx))
        .await
        .unwrap();

    // Two agent edits: one that inserts a new line, one on the last line.
    cx.update(|cx| {
        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
        buffer.update(cx, |buffer, cx| {
            buffer
                .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                .unwrap()
        });
        buffer.update(cx, |buffer, cx| {
            buffer
                .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                .unwrap()
        });
        action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
    });
    cx.run_until_parked();
    assert_eq!(
        buffer.read_with(cx, |buffer, _| buffer.text()),
        "abc\ndE\nXYZf\nghi\njkl\nmnO"
    );
    assert_eq!(
        unreviewed_hunks(&action_log, cx),
        vec![(
            buffer.clone(),
            vec![
                HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\n".into(),
                },
                HunkStatus {
                    range: Point::new(5, 0)..Point::new(5, 3),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "mno".into(),
                }
            ],
        )]
    );

    // If the rejected range doesn't overlap with any hunk, we ignore it.
    action_log
        .update(cx, |log, cx| {
            log.reject_edits_in_ranges(
                buffer.clone(),
                vec![Point::new(4, 0)..Point::new(4, 0)],
                None,
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    assert_eq!(
        buffer.read_with(cx, |buffer, _| buffer.text()),
        "abc\ndE\nXYZf\nghi\njkl\nmnO"
    );
    assert_eq!(
        unreviewed_hunks(&action_log, cx),
        vec![(
            buffer.clone(),
            vec![
                HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\n".into(),
                },
                HunkStatus {
                    range: Point::new(5, 0)..Point::new(5, 3),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "mno".into(),
                }
            ],
        )]
    );

    // Rejecting a range overlapping the first hunk reverts just that edit.
    action_log
        .update(cx, |log, cx| {
            log.reject_edits_in_ranges(
                buffer.clone(),
                vec![Point::new(0, 0)..Point::new(1, 0)],
                None,
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    assert_eq!(
        buffer.read_with(cx, |buffer, _| buffer.text()),
        "abc\ndef\nghi\njkl\nmnO"
    );
    assert_eq!(
        unreviewed_hunks(&action_log, cx),
        vec![(
            buffer.clone(),
            vec![HunkStatus {
                range: Point::new(4, 0)..Point::new(4, 3),
                diff_status: DiffHunkStatusKind::Modified,
                old_text: "mno".into(),
            }],
        )]
    );

    // Rejecting at the remaining hunk's row reverts the last edit too.
    action_log
        .update(cx, |log, cx| {
            log.reject_edits_in_ranges(
                buffer.clone(),
                vec![Point::new(4, 0)..Point::new(4, 0)],
                None,
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    assert_eq!(
        buffer.read_with(cx, |buffer, _| buffer.text()),
        "abc\ndef\nghi\njkl\nmno"
    );
    assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
}
1740
/// Rejecting several anchor ranges in a single call reverts all overlapping
/// hunks; the buffer text is already restored before the returned task is
/// awaited (asserted inside the same `update`).
#[gpui::test(iterations = 10)]
async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
        .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let action_log = cx.new(|_| ActionLog::new(project.clone()));
    let file_path = project
        .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
        .unwrap();
    let buffer = project
        .update(cx, |project, cx| project.open_buffer(file_path, cx))
        .await
        .unwrap();

    // Two agent edits producing two distinct hunks.
    cx.update(|cx| {
        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
        buffer.update(cx, |buffer, cx| {
            buffer
                .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                .unwrap()
        });
        buffer.update(cx, |buffer, cx| {
            buffer
                .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                .unwrap()
        });
        action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
    });
    cx.run_until_parked();
    assert_eq!(
        buffer.read_with(cx, |buffer, _| buffer.text()),
        "abc\ndE\nXYZf\nghi\njkl\nmnO"
    );
    assert_eq!(
        unreviewed_hunks(&action_log, cx),
        vec![(
            buffer.clone(),
            vec![
                HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\n".into(),
                },
                HunkStatus {
                    range: Point::new(5, 0)..Point::new(5, 3),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "mno".into(),
                }
            ],
        )]
    );

    // Reject both hunks at once via anchor ranges. The detach + immediate
    // assertion shows the revert is applied synchronously within the update.
    action_log.update(cx, |log, cx| {
        let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
            ..buffer.read(cx).anchor_before(Point::new(1, 0));
        let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
            ..buffer.read(cx).anchor_before(Point::new(5, 3));

        log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], None, cx)
            .detach();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
    });
    cx.run_until_parked();
    assert_eq!(
        buffer.read_with(cx, |buffer, _| buffer.text()),
        "abc\ndef\nghi\njkl\nmno"
    );
    assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
}
1816
/// Rejecting the Deleted hunk of a deleted file recreates the file on disk
/// with its original content.
#[gpui::test(iterations = 10)]
async fn test_reject_deleted_file(cx: &mut TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({"file": "content"}))
        .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let action_log = cx.new(|_| ActionLog::new(project.clone()));
    let file_path = project
        .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
        .unwrap();
    let buffer = project
        .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
        .await
        .unwrap();

    // Announce the deletion to the log, then delete the file for real.
    cx.update(|cx| {
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
    });
    project
        .update(cx, |project, cx| {
            project.delete_file(file_path.clone(), false, cx)
        })
        .unwrap()
        .await
        .unwrap();
    cx.run_until_parked();
    assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
    assert_eq!(
        unreviewed_hunks(&action_log, cx),
        vec![(
            buffer.clone(),
            vec![HunkStatus {
                range: Point::new(0, 0)..Point::new(0, 0),
                diff_status: DiffHunkStatusKind::Deleted,
                old_text: "content".into(),
            }]
        )]
    );

    // Rejecting the deletion restores both the buffer text and the on-disk
    // file.
    action_log
        .update(cx, |log, cx| {
            log.reject_edits_in_ranges(
                buffer.clone(),
                vec![Point::new(0, 0)..Point::new(0, 0)],
                None,
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
    assert!(fs.is_file(path!("/dir/file").as_ref()).await);
    assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
}
1874
/// Rejecting the Added hunk of a brand-new file (no prior content on disk)
/// deletes the file.
#[gpui::test(iterations = 10)]
async fn test_reject_created_file(cx: &mut TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let action_log = cx.new(|_| ActionLog::new(project.clone()));
    let file_path = project
        .read_with(cx, |project, cx| {
            project.find_project_path("dir/new_file", cx)
        })
        .unwrap();
    let buffer = project
        .update(cx, |project, cx| project.open_buffer(file_path, cx))
        .await
        .unwrap();
    // Agent creates the file and writes its content.
    cx.update(|cx| {
        action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
        buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
        action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();
    assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
    cx.run_until_parked();
    assert_eq!(
        unreviewed_hunks(&action_log, cx),
        vec![(
            buffer.clone(),
            vec![HunkStatus {
                range: Point::new(0, 0)..Point::new(0, 7),
                diff_status: DiffHunkStatusKind::Added,
                old_text: "".into(),
            }],
        )]
    );

    // Rejecting the creation removes the file from disk. Note the range end
    // (column 11) extends past the 7-character content.
    action_log
        .update(cx, |log, cx| {
            log.reject_edits_in_ranges(
                buffer.clone(),
                vec![Point::new(0, 0)..Point::new(0, 11)],
                None,
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
    assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
}
1929
/// Rejecting a created file after the *user* has edited it must not delete
/// the file or discard any content — both the AI's and the user's text stay.
#[gpui::test]
async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let action_log = cx.new(|_| ActionLog::new(project.clone()));

    let file_path = project
        .read_with(cx, |project, cx| {
            project.find_project_path("dir/new_file", cx)
        })
        .unwrap();
    let buffer = project
        .update(cx, |project, cx| project.open_buffer(file_path, cx))
        .await
        .unwrap();

    // AI creates file with initial content
    cx.update(|cx| {
        action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
        buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
        action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
    });

    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // User makes additional edits (not reported via `buffer_edited`).
    cx.update(|cx| {
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(10..10, "\nuser added this line")], None, cx);
        });
    });

    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();

    assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

    // Reject all (range deliberately larger than the buffer).
    action_log
        .update(cx, |log, cx| {
            log.reject_edits_in_ranges(
                buffer.clone(),
                vec![Point::new(0, 0)..Point::new(100, 0)],
                None,
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();

    // File should still contain all the content
    assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

    let content = buffer.read_with(cx, |buffer, _| buffer.text());
    assert_eq!(content, "ai content\nuser added this line");
}
1996
1997 #[gpui::test]
1998 async fn test_reject_after_accepting_hunk_on_created_file(cx: &mut TestAppContext) {
1999 init_test(cx);
2000
2001 let fs = FakeFs::new(cx.executor());
2002 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2003 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2004
2005 let file_path = project
2006 .read_with(cx, |project, cx| {
2007 project.find_project_path("dir/new_file", cx)
2008 })
2009 .unwrap();
2010 let buffer = project
2011 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
2012 .await
2013 .unwrap();
2014
2015 // AI creates file with initial content
2016 cx.update(|cx| {
2017 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2018 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
2019 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2020 });
2021 project
2022 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2023 .await
2024 .unwrap();
2025 cx.run_until_parked();
2026 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2027
2028 // User accepts the single hunk
2029 action_log.update(cx, |log, cx| {
2030 let buffer_range = Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id());
2031 log.keep_edits_in_range(buffer.clone(), buffer_range, None, cx)
2032 });
2033 cx.run_until_parked();
2034 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2035 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2036
2037 // AI modifies the file
2038 cx.update(|cx| {
2039 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
2040 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2041 });
2042 project
2043 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2044 .await
2045 .unwrap();
2046 cx.run_until_parked();
2047 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2048
2049 // User rejects the hunk
2050 action_log
2051 .update(cx, |log, cx| {
2052 log.reject_edits_in_ranges(
2053 buffer.clone(),
2054 vec![Anchor::min_max_range_for_buffer(
2055 buffer.read(cx).remote_id(),
2056 )],
2057 None,
2058 cx,
2059 )
2060 })
2061 .await
2062 .unwrap();
2063 cx.run_until_parked();
2064 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await,);
2065 assert_eq!(
2066 buffer.read_with(cx, |buffer, _| buffer.text()),
2067 "ai content v1"
2068 );
2069 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2070 }
2071
/// Same baseline rule via the bulk APIs: after "Accept All" on a created
/// file, a later agent edit rejected via "Reject All" reverts to the accepted
/// content ("v1") and keeps the file on disk.
#[gpui::test]
async fn test_reject_edits_on_previously_accepted_created_file(cx: &mut TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let action_log = cx.new(|_| ActionLog::new(project.clone()));

    let file_path = project
        .read_with(cx, |project, cx| {
            project.find_project_path("dir/new_file", cx)
        })
        .unwrap();
    let buffer = project
        .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
        .await
        .unwrap();

    // AI creates file with initial content
    cx.update(|cx| {
        action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
        buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
        action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();
    cx.run_until_parked();

    // User clicks "Accept All"
    action_log.update(cx, |log, cx| log.keep_all_edits(None, cx));
    cx.run_until_parked();
    assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
    assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared

    // AI modifies file again
    cx.update(|cx| {
        buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
        action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();
    cx.run_until_parked();
    assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

    // User clicks "Reject All"
    action_log
        .update(cx, |log, cx| log.reject_all_edits(None, cx))
        .await;
    cx.run_until_parked();
    assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
    assert_eq!(
        buffer.read_with(cx, |buffer, _| buffer.text()),
        "ai content v1"
    );
    assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
}
2132
    /// Fuzz test: interleaves random keep/reject/edit operations against the
    /// action log and repeatedly checks the log's internal diff invariant.
    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        init_test(cx);

        // Number of random operations per iteration; overridable via the
        // `OPERATIONS` env var for longer fuzzing runs.
        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

        for _ in 0..operations {
            // Operation mix: ~25% keep, ~25% reject, ~50% buffer edit.
            match rng.random_range(0..100) {
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, None, cx)
                    });
                }
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            log.reject_edits_in_ranges(buffer.clone(), vec![range], None, cx)
                        })
                        .await
                        .unwrap();
                }
                _ => {
                    // Randomly attribute the edit to the agent or the user;
                    // only agent edits are reported to the action log.
                    let is_agent_edit = rng.random_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            // Occasionally verify mid-run, not just at the end.
            if rng.random_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

        /// Asserts the log's core invariant: replaying every unreviewed edit
        /// on top of the tracked diff base must reproduce the buffer's
        /// current contents exactly.
        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
                // Edits are ordered, and `old_text` is rewritten as we go, so
                // each edit's `new` row range addresses both the region to
                // replace in the partially-updated old text and the
                // replacement rows to copy from the new text.
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    // Clamp the end: an edit touching the final line can
                    // extend past `old_text`'s max point (no trailing newline).
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }
2224
    /// Verifies that committing to git automatically "keeps" (clears from
    /// review) exactly those agent edits whose content matches the new HEAD,
    /// while edits the commit didn't adopt stay unreviewed.
    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        // Initial HEAD matches the working copy, so there is no git diff yet.
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Apply one agent edit of each flavor (modify, delete, add) spread
        // across the file, including both extremes of the buffer.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        // All five edits should initially be unreviewed.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        // Only the two edits the commit adopted verbatim are cleared.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Make another commit that accepts the NEW line but with different content
        // (GGG is now adopted; DIFFERENT != NEW, so the addition stays).
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into())],
            "0000002",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer,
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2386
    /// Snapshot of a single diff hunk, used for assertions in these tests.
    #[derive(Debug, Clone, PartialEq, Eq)]
    struct HunkStatus {
        /// Range (in points) the hunk covers in the current buffer text.
        range: Range<Point>,
        /// Whether the hunk is an addition, deletion, or modification.
        diff_status: DiffHunkStatusKind,
        /// The text this hunk replaced in the diff base ("" for additions).
        old_text: String,
    }
2393
2394 fn unreviewed_hunks(
2395 action_log: &Entity<ActionLog>,
2396 cx: &TestAppContext,
2397 ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
2398 cx.read(|cx| {
2399 action_log
2400 .read(cx)
2401 .changed_buffers(cx)
2402 .into_iter()
2403 .map(|(buffer, diff)| {
2404 let snapshot = buffer.read(cx).snapshot();
2405 (
2406 buffer,
2407 diff.read(cx)
2408 .snapshot(cx)
2409 .hunks(&snapshot)
2410 .map(|hunk| HunkStatus {
2411 diff_status: hunk.status().kind,
2412 range: hunk.range,
2413 old_text: diff
2414 .read(cx)
2415 .base_text(cx)
2416 .text_for_range(hunk.diff_base_byte_range)
2417 .collect(),
2418 })
2419 .collect(),
2420 )
2421 })
2422 .collect()
2423 })
2424 }
2425}