1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use clock;
4use collections::BTreeMap;
5use futures::{FutureExt, StreamExt, channel::mpsc};
6use gpui::{
7 App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
8};
9use language::{Anchor, Buffer, BufferEvent, Point, ToPoint};
10use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
11use std::{cmp, ops::Range, sync::Arc};
12use text::{Edit, Patch, Rope};
13use util::{RangeExt, ResultExt as _};
14
15/// Tracks actions performed by tools in a thread
16pub struct ActionLog {
17 /// Buffers that we want to notify the model about when they change.
18 tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
19 /// The project this action log is associated with
20 project: Entity<Project>,
21}
22
23impl ActionLog {
24 /// Creates a new, empty action log associated with the given project.
25 pub fn new(project: Entity<Project>) -> Self {
26 Self {
27 tracked_buffers: BTreeMap::default(),
28 project,
29 }
30 }
31
32 pub fn project(&self) -> &Entity<Project> {
33 &self.project
34 }
35
36 fn track_buffer_internal(
37 &mut self,
38 buffer: Entity<Buffer>,
39 is_created: bool,
40 cx: &mut Context<Self>,
41 ) -> &mut TrackedBuffer {
42 let status = if is_created {
43 if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
44 match tracked.status {
45 TrackedBufferStatus::Created {
46 existing_file_content,
47 } => TrackedBufferStatus::Created {
48 existing_file_content,
49 },
50 TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
51 TrackedBufferStatus::Created {
52 existing_file_content: Some(tracked.diff_base),
53 }
54 }
55 }
56 } else if buffer
57 .read(cx)
58 .file()
59 .is_some_and(|file| file.disk_state().exists())
60 {
61 TrackedBufferStatus::Created {
62 existing_file_content: Some(buffer.read(cx).as_rope().clone()),
63 }
64 } else {
65 TrackedBufferStatus::Created {
66 existing_file_content: None,
67 }
68 }
69 } else {
70 TrackedBufferStatus::Modified
71 };
72
73 let tracked_buffer = self
74 .tracked_buffers
75 .entry(buffer.clone())
76 .or_insert_with(|| {
77 let open_lsp_handle = self.project.update(cx, |project, cx| {
78 project.register_buffer_with_language_servers(&buffer, cx)
79 });
80
81 let text_snapshot = buffer.read(cx).text_snapshot();
82 let language = buffer.read(cx).language().cloned();
83 let language_registry = buffer.read(cx).language_registry();
84 let diff = cx.new(|cx| {
85 let mut diff = BufferDiff::new(&text_snapshot, cx);
86 diff.language_changed(language, language_registry, cx);
87 diff
88 });
89 let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
90 let diff_base;
91 let unreviewed_edits;
92 if is_created {
93 diff_base = Rope::default();
94 unreviewed_edits = Patch::new(vec![Edit {
95 old: 0..1,
96 new: 0..text_snapshot.max_point().row + 1,
97 }])
98 } else {
99 diff_base = buffer.read(cx).as_rope().clone();
100 unreviewed_edits = Patch::default();
101 }
102 TrackedBuffer {
103 buffer: buffer.clone(),
104 diff_base,
105 unreviewed_edits,
106 snapshot: text_snapshot,
107 status,
108 version: buffer.read(cx).version(),
109 diff,
110 diff_update: diff_update_tx,
111 _open_lsp_handle: open_lsp_handle,
112 _maintain_diff: cx.spawn({
113 let buffer = buffer.clone();
114 async move |this, cx| {
115 Self::maintain_diff(this, buffer, diff_update_rx, cx)
116 .await
117 .ok();
118 }
119 }),
120 _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
121 }
122 });
123 tracked_buffer.version = buffer.read(cx).version();
124 tracked_buffer
125 }
126
127 fn handle_buffer_event(
128 &mut self,
129 buffer: Entity<Buffer>,
130 event: &BufferEvent,
131 cx: &mut Context<Self>,
132 ) {
133 match event {
134 BufferEvent::Edited => {
135 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
136 return;
137 };
138 let buffer_version = buffer.read(cx).version();
139 if !buffer_version.changed_since(&tracked_buffer.version) {
140 return;
141 }
142 self.handle_buffer_edited(buffer, cx);
143 }
144 BufferEvent::FileHandleChanged => {
145 self.handle_buffer_file_changed(buffer, cx);
146 }
147 _ => {}
148 };
149 }
150
151 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
152 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
153 return;
154 };
155 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
156 }
157
158 fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
159 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
160 return;
161 };
162
163 match tracked_buffer.status {
164 TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
165 if buffer
166 .read(cx)
167 .file()
168 .is_some_and(|file| file.disk_state().is_deleted())
169 {
170 // If the buffer had been edited by a tool, but it got
171 // deleted externally, we want to stop tracking it.
172 self.tracked_buffers.remove(&buffer);
173 }
174 cx.notify();
175 }
176 TrackedBufferStatus::Deleted => {
177 if buffer
178 .read(cx)
179 .file()
180 .is_some_and(|file| !file.disk_state().is_deleted())
181 {
182 // If the buffer had been deleted by a tool, but it got
183 // resurrected externally, we want to clear the edits we
184 // were tracking and reset the buffer's state.
185 self.tracked_buffers.remove(&buffer);
186 self.track_buffer_internal(buffer, false, cx);
187 }
188 cx.notify();
189 }
190 }
191 }
192
193 async fn maintain_diff(
194 this: WeakEntity<Self>,
195 buffer: Entity<Buffer>,
196 mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
197 cx: &mut AsyncApp,
198 ) -> Result<()> {
199 let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
200 let git_diff = this
201 .update(cx, |this, cx| {
202 this.project.update(cx, |project, cx| {
203 project.open_uncommitted_diff(buffer.clone(), cx)
204 })
205 })?
206 .await
207 .ok();
208 let buffer_repo = git_store.read_with(cx, |git_store, cx| {
209 git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
210 });
211
212 let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
213 let _repo_subscription =
214 if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
215 cx.update(|cx| {
216 let mut old_head = buffer_repo.read(cx).head_commit.clone();
217 Some(cx.subscribe(git_diff, move |_, event, cx| {
218 if let buffer_diff::BufferDiffEvent::DiffChanged { .. } = event {
219 let new_head = buffer_repo.read(cx).head_commit.clone();
220 if new_head != old_head {
221 old_head = new_head;
222 git_diff_updates_tx.send(()).ok();
223 }
224 }
225 }))
226 })
227 } else {
228 None
229 };
230
231 loop {
232 futures::select_biased! {
233 buffer_update = buffer_updates.next() => {
234 if let Some((author, buffer_snapshot)) = buffer_update {
235 Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
236 } else {
237 break;
238 }
239 }
240 _ = git_diff_updates_rx.changed().fuse() => {
241 if let Some(git_diff) = git_diff.as_ref() {
242 Self::keep_committed_edits(&this, &buffer, git_diff, cx).await?;
243 }
244 }
245 }
246 }
247
248 Ok(())
249 }
250
251 async fn track_edits(
252 this: &WeakEntity<ActionLog>,
253 buffer: &Entity<Buffer>,
254 author: ChangeAuthor,
255 buffer_snapshot: text::BufferSnapshot,
256 cx: &mut AsyncApp,
257 ) -> Result<()> {
258 let rebase = this.update(cx, |this, cx| {
259 let tracked_buffer = this
260 .tracked_buffers
261 .get_mut(buffer)
262 .context("buffer not tracked")?;
263
264 let rebase = cx.background_spawn({
265 let mut base_text = tracked_buffer.diff_base.clone();
266 let old_snapshot = tracked_buffer.snapshot.clone();
267 let new_snapshot = buffer_snapshot.clone();
268 let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
269 let edits = diff_snapshots(&old_snapshot, &new_snapshot);
270 async move {
271 if let ChangeAuthor::User = author {
272 apply_non_conflicting_edits(
273 &unreviewed_edits,
274 edits,
275 &mut base_text,
276 new_snapshot.as_rope(),
277 );
278 }
279
280 (Arc::from(base_text.to_string().as_str()), base_text)
281 }
282 });
283
284 anyhow::Ok(rebase)
285 })??;
286 let (new_base_text, new_diff_base) = rebase.await;
287
288 Self::update_diff(
289 this,
290 buffer,
291 buffer_snapshot,
292 new_base_text,
293 new_diff_base,
294 cx,
295 )
296 .await
297 }
298
299 async fn keep_committed_edits(
300 this: &WeakEntity<ActionLog>,
301 buffer: &Entity<Buffer>,
302 git_diff: &Entity<BufferDiff>,
303 cx: &mut AsyncApp,
304 ) -> Result<()> {
305 let buffer_snapshot = this.read_with(cx, |this, _cx| {
306 let tracked_buffer = this
307 .tracked_buffers
308 .get(buffer)
309 .context("buffer not tracked")?;
310 anyhow::Ok(tracked_buffer.snapshot.clone())
311 })??;
312 let (new_base_text, new_diff_base) = this
313 .read_with(cx, |this, cx| {
314 let tracked_buffer = this
315 .tracked_buffers
316 .get(buffer)
317 .context("buffer not tracked")?;
318 let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
319 let agent_diff_base = tracked_buffer.diff_base.clone();
320 let git_diff_base = git_diff.read(cx).base_text(cx).as_rope().clone();
321 let buffer_text = tracked_buffer.snapshot.as_rope().clone();
322 anyhow::Ok(cx.background_spawn(async move {
323 let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
324 let committed_edits = language::line_diff(
325 &agent_diff_base.to_string(),
326 &git_diff_base.to_string(),
327 )
328 .into_iter()
329 .map(|(old, new)| Edit { old, new });
330
331 let mut new_agent_diff_base = agent_diff_base.clone();
332 let mut row_delta = 0i32;
333 for committed in committed_edits {
334 while let Some(unreviewed) = old_unreviewed_edits.peek() {
335 // If the committed edit matches the unreviewed
336 // edit, assume the user wants to keep it.
337 if committed.old == unreviewed.old {
338 let unreviewed_new =
339 buffer_text.slice_rows(unreviewed.new.clone()).to_string();
340 let committed_new =
341 git_diff_base.slice_rows(committed.new.clone()).to_string();
342 if unreviewed_new == committed_new {
343 let old_byte_start =
344 new_agent_diff_base.point_to_offset(Point::new(
345 (unreviewed.old.start as i32 + row_delta) as u32,
346 0,
347 ));
348 let old_byte_end =
349 new_agent_diff_base.point_to_offset(cmp::min(
350 Point::new(
351 (unreviewed.old.end as i32 + row_delta) as u32,
352 0,
353 ),
354 new_agent_diff_base.max_point(),
355 ));
356 new_agent_diff_base
357 .replace(old_byte_start..old_byte_end, &unreviewed_new);
358 row_delta +=
359 unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
360 }
361 } else if unreviewed.old.start >= committed.old.end {
362 break;
363 }
364
365 old_unreviewed_edits.next().unwrap();
366 }
367 }
368
369 (
370 Arc::from(new_agent_diff_base.to_string().as_str()),
371 new_agent_diff_base,
372 )
373 }))
374 })??
375 .await;
376
377 Self::update_diff(
378 this,
379 buffer,
380 buffer_snapshot,
381 new_base_text,
382 new_diff_base,
383 cx,
384 )
385 .await
386 }
387
388 async fn update_diff(
389 this: &WeakEntity<ActionLog>,
390 buffer: &Entity<Buffer>,
391 buffer_snapshot: text::BufferSnapshot,
392 new_base_text: Arc<str>,
393 new_diff_base: Rope,
394 cx: &mut AsyncApp,
395 ) -> Result<()> {
396 let (diff, language) = this.read_with(cx, |this, cx| {
397 let tracked_buffer = this
398 .tracked_buffers
399 .get(buffer)
400 .context("buffer not tracked")?;
401 anyhow::Ok((
402 tracked_buffer.diff.clone(),
403 buffer.read(cx).language().cloned(),
404 ))
405 })??;
406 let update = diff
407 .update(cx, |diff, cx| {
408 diff.update_diff(
409 buffer_snapshot.clone(),
410 Some(new_base_text),
411 true,
412 language,
413 cx,
414 )
415 })
416 .await;
417 diff.update(cx, |diff, cx| {
418 diff.set_snapshot(update.clone(), &buffer_snapshot, cx)
419 })
420 .await;
421 let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx));
422
423 let unreviewed_edits = cx
424 .background_spawn({
425 let buffer_snapshot = buffer_snapshot.clone();
426 let new_diff_base = new_diff_base.clone();
427 async move {
428 let mut unreviewed_edits = Patch::default();
429 for hunk in diff_snapshot.hunks_intersecting_range(
430 Anchor::min_for_buffer(buffer_snapshot.remote_id())
431 ..Anchor::max_for_buffer(buffer_snapshot.remote_id()),
432 &buffer_snapshot,
433 ) {
434 let old_range = new_diff_base
435 .offset_to_point(hunk.diff_base_byte_range.start)
436 ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
437 let new_range = hunk.range.start..hunk.range.end;
438 unreviewed_edits.push(point_to_row_edit(
439 Edit {
440 old: old_range,
441 new: new_range,
442 },
443 &new_diff_base,
444 buffer_snapshot.as_rope(),
445 ));
446 }
447 unreviewed_edits
448 }
449 })
450 .await;
451 this.update(cx, |this, cx| {
452 let tracked_buffer = this
453 .tracked_buffers
454 .get_mut(buffer)
455 .context("buffer not tracked")?;
456 tracked_buffer.diff_base = new_diff_base;
457 tracked_buffer.snapshot = buffer_snapshot;
458 tracked_buffer.unreviewed_edits = unreviewed_edits;
459 cx.notify();
460 anyhow::Ok(())
461 })?
462 }
463
464 /// Track a buffer as read by agent, so we can notify the model about user edits.
465 pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
466 self.track_buffer_internal(buffer, false, cx);
467 }
468
469 /// Mark a buffer as created by agent, so we can refresh it in the context
470 pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
471 self.track_buffer_internal(buffer, true, cx);
472 }
473
474 /// Mark a buffer as edited by agent, so we can refresh it in the context
475 pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
476 let new_version = buffer.read(cx).version();
477 let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
478 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
479 tracked_buffer.status = TrackedBufferStatus::Modified;
480 }
481
482 tracked_buffer.version = new_version;
483 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
484 }
485
486 pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
487 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
488 match tracked_buffer.status {
489 TrackedBufferStatus::Created { .. } => {
490 self.tracked_buffers.remove(&buffer);
491 cx.notify();
492 }
493 TrackedBufferStatus::Modified => {
494 buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
495 tracked_buffer.status = TrackedBufferStatus::Deleted;
496 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
497 }
498 TrackedBufferStatus::Deleted => {}
499 }
500 cx.notify();
501 }
502
503 pub fn keep_edits_in_range(
504 &mut self,
505 buffer: Entity<Buffer>,
506 buffer_range: Range<impl language::ToPoint>,
507 telemetry: Option<ActionLogTelemetry>,
508 cx: &mut Context<Self>,
509 ) {
510 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
511 return;
512 };
513
514 let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
515 match tracked_buffer.status {
516 TrackedBufferStatus::Deleted => {
517 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
518 self.tracked_buffers.remove(&buffer);
519 cx.notify();
520 }
521 _ => {
522 let buffer = buffer.read(cx);
523 let buffer_range =
524 buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
525 let mut delta = 0i32;
526 tracked_buffer.unreviewed_edits.retain_mut(|edit| {
527 edit.old.start = (edit.old.start as i32 + delta) as u32;
528 edit.old.end = (edit.old.end as i32 + delta) as u32;
529
530 if buffer_range.end.row < edit.new.start
531 || buffer_range.start.row > edit.new.end
532 {
533 true
534 } else {
535 let old_range = tracked_buffer
536 .diff_base
537 .point_to_offset(Point::new(edit.old.start, 0))
538 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
539 Point::new(edit.old.end, 0),
540 tracked_buffer.diff_base.max_point(),
541 ));
542 let new_range = tracked_buffer
543 .snapshot
544 .point_to_offset(Point::new(edit.new.start, 0))
545 ..tracked_buffer.snapshot.point_to_offset(cmp::min(
546 Point::new(edit.new.end, 0),
547 tracked_buffer.snapshot.max_point(),
548 ));
549 tracked_buffer.diff_base.replace(
550 old_range,
551 &tracked_buffer
552 .snapshot
553 .text_for_range(new_range)
554 .collect::<String>(),
555 );
556 delta += edit.new_len() as i32 - edit.old_len() as i32;
557 metrics.add_edit(edit);
558 false
559 }
560 });
561 if tracked_buffer.unreviewed_edits.is_empty()
562 && let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status
563 {
564 tracked_buffer.status = TrackedBufferStatus::Modified;
565 }
566 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
567 }
568 }
569 if let Some(telemetry) = telemetry {
570 telemetry_report_accepted_edits(&telemetry, metrics);
571 }
572 }
573
574 pub fn reject_edits_in_ranges(
575 &mut self,
576 buffer: Entity<Buffer>,
577 buffer_ranges: Vec<Range<impl language::ToPoint>>,
578 telemetry: Option<ActionLogTelemetry>,
579 cx: &mut Context<Self>,
580 ) -> Task<Result<()>> {
581 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
582 return Task::ready(Ok(()));
583 };
584
585 let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
586 let task = match &tracked_buffer.status {
587 TrackedBufferStatus::Created {
588 existing_file_content,
589 } => {
590 let task = if let Some(existing_file_content) = existing_file_content {
591 buffer.update(cx, |buffer, cx| {
592 buffer.start_transaction();
593 buffer.set_text("", cx);
594 for chunk in existing_file_content.chunks() {
595 buffer.append(chunk, cx);
596 }
597 buffer.end_transaction(cx);
598 });
599 self.project
600 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
601 } else {
602 // For a file created by AI with no pre-existing content,
603 // only delete the file if we're certain it contains only AI content
604 // with no edits from the user.
605
606 let initial_version = tracked_buffer.version.clone();
607 let current_version = buffer.read(cx).version();
608
609 let current_content = buffer.read(cx).text();
610 let tracked_content = tracked_buffer.snapshot.text();
611
612 let is_ai_only_content =
613 initial_version == current_version && current_content == tracked_content;
614
615 if is_ai_only_content {
616 buffer
617 .read(cx)
618 .entry_id(cx)
619 .and_then(|entry_id| {
620 self.project.update(cx, |project, cx| {
621 project.delete_entry(entry_id, false, cx)
622 })
623 })
624 .unwrap_or(Task::ready(Ok(())))
625 } else {
626 // Not sure how to disentangle edits made by the user
627 // from edits made by the AI at this point.
628 // For now, preserve both to avoid data loss.
629 //
630 // TODO: Better solution (disable "Reject" after user makes some
631 // edit or find a way to differentiate between AI and user edits)
632 Task::ready(Ok(()))
633 }
634 };
635
636 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
637 self.tracked_buffers.remove(&buffer);
638 cx.notify();
639 task
640 }
641 TrackedBufferStatus::Deleted => {
642 buffer.update(cx, |buffer, cx| {
643 buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
644 });
645 let save = self
646 .project
647 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
648
649 // Clear all tracked edits for this buffer and start over as if we just read it.
650 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
651 self.tracked_buffers.remove(&buffer);
652 self.buffer_read(buffer.clone(), cx);
653 cx.notify();
654 save
655 }
656 TrackedBufferStatus::Modified => {
657 buffer.update(cx, |buffer, cx| {
658 let mut buffer_row_ranges = buffer_ranges
659 .into_iter()
660 .map(|range| {
661 range.start.to_point(buffer).row..range.end.to_point(buffer).row
662 })
663 .peekable();
664
665 let mut edits_to_revert = Vec::new();
666 for edit in tracked_buffer.unreviewed_edits.edits() {
667 let new_range = tracked_buffer
668 .snapshot
669 .anchor_before(Point::new(edit.new.start, 0))
670 ..tracked_buffer.snapshot.anchor_after(cmp::min(
671 Point::new(edit.new.end, 0),
672 tracked_buffer.snapshot.max_point(),
673 ));
674 let new_row_range = new_range.start.to_point(buffer).row
675 ..new_range.end.to_point(buffer).row;
676
677 let mut revert = false;
678 while let Some(buffer_row_range) = buffer_row_ranges.peek() {
679 if buffer_row_range.end < new_row_range.start {
680 buffer_row_ranges.next();
681 } else if buffer_row_range.start > new_row_range.end {
682 break;
683 } else {
684 revert = true;
685 break;
686 }
687 }
688
689 if revert {
690 metrics.add_edit(edit);
691 let old_range = tracked_buffer
692 .diff_base
693 .point_to_offset(Point::new(edit.old.start, 0))
694 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
695 Point::new(edit.old.end, 0),
696 tracked_buffer.diff_base.max_point(),
697 ));
698 let old_text = tracked_buffer
699 .diff_base
700 .chunks_in_range(old_range)
701 .collect::<String>();
702 edits_to_revert.push((new_range, old_text));
703 }
704 }
705
706 buffer.edit(edits_to_revert, None, cx);
707 });
708 self.project
709 .update(cx, |project, cx| project.save_buffer(buffer, cx))
710 }
711 };
712 if let Some(telemetry) = telemetry {
713 telemetry_report_rejected_edits(&telemetry, metrics);
714 }
715 task
716 }
717
718 pub fn keep_all_edits(
719 &mut self,
720 telemetry: Option<ActionLogTelemetry>,
721 cx: &mut Context<Self>,
722 ) {
723 self.tracked_buffers.retain(|buffer, tracked_buffer| {
724 let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
725 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
726 if let Some(telemetry) = telemetry.as_ref() {
727 telemetry_report_accepted_edits(telemetry, metrics);
728 }
729 match tracked_buffer.status {
730 TrackedBufferStatus::Deleted => false,
731 _ => {
732 if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
733 tracked_buffer.status = TrackedBufferStatus::Modified;
734 }
735 tracked_buffer.unreviewed_edits.clear();
736 tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
737 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
738 true
739 }
740 }
741 });
742
743 cx.notify();
744 }
745
746 pub fn reject_all_edits(
747 &mut self,
748 telemetry: Option<ActionLogTelemetry>,
749 cx: &mut Context<Self>,
750 ) -> Task<()> {
751 let futures = self.changed_buffers(cx).into_keys().map(|buffer| {
752 let buffer_ranges = vec![Anchor::min_max_range_for_buffer(
753 buffer.read(cx).remote_id(),
754 )];
755 let reject = self.reject_edits_in_ranges(buffer, buffer_ranges, telemetry.clone(), cx);
756
757 async move {
758 reject.await.log_err();
759 }
760 });
761
762 let task = futures::future::join_all(futures);
763 cx.background_spawn(async move {
764 task.await;
765 })
766 }
767
768 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
769 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
770 self.tracked_buffers
771 .iter()
772 .filter(|(_, tracked)| tracked.has_edits(cx))
773 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
774 .collect()
775 }
776
777 /// Returns all tracked buffers for debugging purposes
778 #[cfg(any(test, feature = "test-support"))]
779 pub fn tracked_buffers_for_debug(
780 &self,
781 _cx: &App,
782 ) -> impl Iterator<Item = (&Entity<Buffer>, &TrackedBuffer)> {
783 self.tracked_buffers.iter()
784 }
785
786 /// Iterate over buffers changed since last read or edited by the model
787 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
788 self.tracked_buffers
789 .iter()
790 .filter(|(buffer, tracked)| {
791 let buffer = buffer.read(cx);
792
793 tracked.version != buffer.version
794 && buffer
795 .file()
796 .is_some_and(|file| !file.disk_state().is_deleted())
797 })
798 .map(|(buffer, _)| buffer)
799 }
800}
801
802#[derive(Clone)]
803pub struct ActionLogTelemetry {
804 pub agent_telemetry_id: SharedString,
805 pub session_id: Arc<str>,
806}
807
808struct ActionLogMetrics {
809 lines_removed: u32,
810 lines_added: u32,
811 language: Option<SharedString>,
812}
813
814impl ActionLogMetrics {
815 fn for_buffer(buffer: &Buffer) -> Self {
816 Self {
817 language: buffer.language().map(|l| l.name().0),
818 lines_removed: 0,
819 lines_added: 0,
820 }
821 }
822
823 fn add_edits(&mut self, edits: &[Edit<u32>]) {
824 for edit in edits {
825 self.add_edit(edit);
826 }
827 }
828
829 fn add_edit(&mut self, edit: &Edit<u32>) {
830 self.lines_added += edit.new_len();
831 self.lines_removed += edit.old_len();
832 }
833}
834
835fn telemetry_report_accepted_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
836 telemetry::event!(
837 "Agent Edits Accepted",
838 agent = telemetry.agent_telemetry_id,
839 session = telemetry.session_id,
840 language = metrics.language,
841 lines_added = metrics.lines_added,
842 lines_removed = metrics.lines_removed
843 );
844}
845
846fn telemetry_report_rejected_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
847 telemetry::event!(
848 "Agent Edits Rejected",
849 agent = telemetry.agent_telemetry_id,
850 session = telemetry.session_id,
851 language = metrics.language,
852 lines_added = metrics.lines_added,
853 lines_removed = metrics.lines_removed
854 );
855}
856
857fn apply_non_conflicting_edits(
858 patch: &Patch<u32>,
859 edits: Vec<Edit<u32>>,
860 old_text: &mut Rope,
861 new_text: &Rope,
862) -> bool {
863 let mut old_edits = patch.edits().iter().cloned().peekable();
864 let mut new_edits = edits.into_iter().peekable();
865 let mut applied_delta = 0i32;
866 let mut rebased_delta = 0i32;
867 let mut has_made_changes = false;
868
869 while let Some(mut new_edit) = new_edits.next() {
870 let mut conflict = false;
871
872 // Push all the old edits that are before this new edit or that intersect with it.
873 while let Some(old_edit) = old_edits.peek() {
874 if new_edit.old.end < old_edit.new.start
875 || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
876 {
877 break;
878 } else if new_edit.old.start > old_edit.new.end
879 || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
880 {
881 let old_edit = old_edits.next().unwrap();
882 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
883 } else {
884 conflict = true;
885 if new_edits
886 .peek()
887 .is_some_and(|next_edit| next_edit.old.overlaps(&old_edit.new))
888 {
889 new_edit = new_edits.next().unwrap();
890 } else {
891 let old_edit = old_edits.next().unwrap();
892 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
893 }
894 }
895 }
896
897 if !conflict {
898 // This edit doesn't intersect with any old edit, so we can apply it to the old text.
899 new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
900 new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
901 let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
902 ..old_text.point_to_offset(cmp::min(
903 Point::new(new_edit.old.end, 0),
904 old_text.max_point(),
905 ));
906 let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
907 ..new_text.point_to_offset(cmp::min(
908 Point::new(new_edit.new.end, 0),
909 new_text.max_point(),
910 ));
911
912 old_text.replace(
913 old_bytes,
914 &new_text.chunks_in_range(new_bytes).collect::<String>(),
915 );
916 applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
917 has_made_changes = true;
918 }
919 }
920 has_made_changes
921}
922
923fn diff_snapshots(
924 old_snapshot: &text::BufferSnapshot,
925 new_snapshot: &text::BufferSnapshot,
926) -> Vec<Edit<u32>> {
927 let mut edits = new_snapshot
928 .edits_since::<Point>(&old_snapshot.version)
929 .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
930 .peekable();
931 let mut row_edits = Vec::new();
932 while let Some(mut edit) = edits.next() {
933 while let Some(next_edit) = edits.peek() {
934 if edit.old.end >= next_edit.old.start {
935 edit.old.end = next_edit.old.end;
936 edit.new.end = next_edit.new.end;
937 edits.next();
938 } else {
939 break;
940 }
941 }
942 row_edits.push(edit);
943 }
944 row_edits
945}
946
947fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
948 if edit.old.start.column == old_text.line_len(edit.old.start.row)
949 && new_text
950 .chars_at(new_text.point_to_offset(edit.new.start))
951 .next()
952 == Some('\n')
953 && edit.old.start != old_text.max_point()
954 {
955 Edit {
956 old: edit.old.start.row + 1..edit.old.end.row + 1,
957 new: edit.new.start.row + 1..edit.new.end.row + 1,
958 }
959 } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
960 Edit {
961 old: edit.old.start.row..edit.old.end.row,
962 new: edit.new.start.row..edit.new.end.row,
963 }
964 } else {
965 Edit {
966 old: edit.old.start.row..edit.old.end.row + 1,
967 new: edit.new.start.row..edit.new.end.row + 1,
968 }
969 }
970}
971
972#[derive(Copy, Clone, Debug)]
973enum ChangeAuthor {
974 User,
975 Agent,
976}
977
978#[derive(Debug)]
979enum TrackedBufferStatus {
980 Created { existing_file_content: Option<Rope> },
981 Modified,
982 Deleted,
983}
984
985pub struct TrackedBuffer {
986 buffer: Entity<Buffer>,
987 diff_base: Rope,
988 unreviewed_edits: Patch<u32>,
989 status: TrackedBufferStatus,
990 version: clock::Global,
991 diff: Entity<BufferDiff>,
992 snapshot: text::BufferSnapshot,
993 diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
994 _open_lsp_handle: OpenLspBufferHandle,
995 _maintain_diff: Task<()>,
996 _subscription: Subscription,
997}
998
999impl TrackedBuffer {
1000 #[cfg(any(test, feature = "test-support"))]
1001 pub fn diff(&self) -> &Entity<BufferDiff> {
1002 &self.diff
1003 }
1004
1005 #[cfg(any(test, feature = "test-support"))]
1006 pub fn diff_base_len(&self) -> usize {
1007 self.diff_base.len()
1008 }
1009
1010 fn has_edits(&self, cx: &App) -> bool {
1011 self.diff
1012 .read(cx)
1013 .snapshot(cx)
1014 .hunks(self.buffer.read(cx))
1015 .next()
1016 .is_some()
1017 }
1018
1019 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
1020 self.diff_update
1021 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
1022 .ok();
1023 }
1024}
1025
1026pub struct ChangedBuffer {
1027 pub diff: Entity<BufferDiff>,
1028}
1029
1030#[cfg(test)]
1031mod tests {
1032 use super::*;
1033 use buffer_diff::DiffHunkStatusKind;
1034 use gpui::TestAppContext;
1035 use language::Point;
1036 use project::{FakeFs, Fs, Project, RemoveOptions};
1037 use rand::prelude::*;
1038 use serde_json::json;
1039 use settings::SettingsStore;
1040 use std::env;
1041 use util::{RandomCharIter, path};
1042
1043 #[ctor::ctor]
1044 fn init_logger() {
1045 zlog::init_test();
1046 }
1047
1048 fn init_test(cx: &mut TestAppContext) {
1049 cx.update(|cx| {
1050 let settings_store = SettingsStore::test(cx);
1051 cx.set_global(settings_store);
1052 });
1053 }
1054
1055 #[gpui::test(iterations = 10)]
1056 async fn test_keep_edits(cx: &mut TestAppContext) {
1057 init_test(cx);
1058
1059 let fs = FakeFs::new(cx.executor());
1060 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1061 .await;
1062 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1063 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1064 let file_path = project
1065 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1066 .unwrap();
1067 let buffer = project
1068 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1069 .await
1070 .unwrap();
1071
1072 cx.update(|cx| {
1073 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1074 buffer.update(cx, |buffer, cx| {
1075 buffer
1076 .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
1077 .unwrap()
1078 });
1079 buffer.update(cx, |buffer, cx| {
1080 buffer
1081 .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
1082 .unwrap()
1083 });
1084 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1085 });
1086 cx.run_until_parked();
1087 assert_eq!(
1088 buffer.read_with(cx, |buffer, _| buffer.text()),
1089 "abc\ndEf\nghi\njkl\nmnO"
1090 );
1091 assert_eq!(
1092 unreviewed_hunks(&action_log, cx),
1093 vec![(
1094 buffer.clone(),
1095 vec![
1096 HunkStatus {
1097 range: Point::new(1, 0)..Point::new(2, 0),
1098 diff_status: DiffHunkStatusKind::Modified,
1099 old_text: "def\n".into(),
1100 },
1101 HunkStatus {
1102 range: Point::new(4, 0)..Point::new(4, 3),
1103 diff_status: DiffHunkStatusKind::Modified,
1104 old_text: "mno".into(),
1105 }
1106 ],
1107 )]
1108 );
1109
1110 action_log.update(cx, |log, cx| {
1111 log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), None, cx)
1112 });
1113 cx.run_until_parked();
1114 assert_eq!(
1115 unreviewed_hunks(&action_log, cx),
1116 vec![(
1117 buffer.clone(),
1118 vec![HunkStatus {
1119 range: Point::new(1, 0)..Point::new(2, 0),
1120 diff_status: DiffHunkStatusKind::Modified,
1121 old_text: "def\n".into(),
1122 }],
1123 )]
1124 );
1125
1126 action_log.update(cx, |log, cx| {
1127 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), None, cx)
1128 });
1129 cx.run_until_parked();
1130 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1131 }
1132
1133 #[gpui::test(iterations = 10)]
1134 async fn test_deletions(cx: &mut TestAppContext) {
1135 init_test(cx);
1136
1137 let fs = FakeFs::new(cx.executor());
1138 fs.insert_tree(
1139 path!("/dir"),
1140 json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
1141 )
1142 .await;
1143 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1144 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1145 let file_path = project
1146 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1147 .unwrap();
1148 let buffer = project
1149 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1150 .await
1151 .unwrap();
1152
1153 cx.update(|cx| {
1154 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1155 buffer.update(cx, |buffer, cx| {
1156 buffer
1157 .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
1158 .unwrap();
1159 buffer.finalize_last_transaction();
1160 });
1161 buffer.update(cx, |buffer, cx| {
1162 buffer
1163 .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
1164 .unwrap();
1165 buffer.finalize_last_transaction();
1166 });
1167 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1168 });
1169 cx.run_until_parked();
1170 assert_eq!(
1171 buffer.read_with(cx, |buffer, _| buffer.text()),
1172 "abc\nghi\njkl\npqr"
1173 );
1174 assert_eq!(
1175 unreviewed_hunks(&action_log, cx),
1176 vec![(
1177 buffer.clone(),
1178 vec![
1179 HunkStatus {
1180 range: Point::new(1, 0)..Point::new(1, 0),
1181 diff_status: DiffHunkStatusKind::Deleted,
1182 old_text: "def\n".into(),
1183 },
1184 HunkStatus {
1185 range: Point::new(3, 0)..Point::new(3, 0),
1186 diff_status: DiffHunkStatusKind::Deleted,
1187 old_text: "mno\n".into(),
1188 }
1189 ],
1190 )]
1191 );
1192
1193 buffer.update(cx, |buffer, cx| buffer.undo(cx));
1194 cx.run_until_parked();
1195 assert_eq!(
1196 buffer.read_with(cx, |buffer, _| buffer.text()),
1197 "abc\nghi\njkl\nmno\npqr"
1198 );
1199 assert_eq!(
1200 unreviewed_hunks(&action_log, cx),
1201 vec![(
1202 buffer.clone(),
1203 vec![HunkStatus {
1204 range: Point::new(1, 0)..Point::new(1, 0),
1205 diff_status: DiffHunkStatusKind::Deleted,
1206 old_text: "def\n".into(),
1207 }],
1208 )]
1209 );
1210
1211 action_log.update(cx, |log, cx| {
1212 log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), None, cx)
1213 });
1214 cx.run_until_parked();
1215 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1216 }
1217
1218 #[gpui::test(iterations = 10)]
1219 async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
1220 init_test(cx);
1221
1222 let fs = FakeFs::new(cx.executor());
1223 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1224 .await;
1225 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1226 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1227 let file_path = project
1228 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1229 .unwrap();
1230 let buffer = project
1231 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1232 .await
1233 .unwrap();
1234
1235 cx.update(|cx| {
1236 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1237 buffer.update(cx, |buffer, cx| {
1238 buffer
1239 .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
1240 .unwrap()
1241 });
1242 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1243 });
1244 cx.run_until_parked();
1245 assert_eq!(
1246 buffer.read_with(cx, |buffer, _| buffer.text()),
1247 "abc\ndeF\nGHI\njkl\nmno"
1248 );
1249 assert_eq!(
1250 unreviewed_hunks(&action_log, cx),
1251 vec![(
1252 buffer.clone(),
1253 vec![HunkStatus {
1254 range: Point::new(1, 0)..Point::new(3, 0),
1255 diff_status: DiffHunkStatusKind::Modified,
1256 old_text: "def\nghi\n".into(),
1257 }],
1258 )]
1259 );
1260
1261 buffer.update(cx, |buffer, cx| {
1262 buffer.edit(
1263 [
1264 (Point::new(0, 2)..Point::new(0, 2), "X"),
1265 (Point::new(3, 0)..Point::new(3, 0), "Y"),
1266 ],
1267 None,
1268 cx,
1269 )
1270 });
1271 cx.run_until_parked();
1272 assert_eq!(
1273 buffer.read_with(cx, |buffer, _| buffer.text()),
1274 "abXc\ndeF\nGHI\nYjkl\nmno"
1275 );
1276 assert_eq!(
1277 unreviewed_hunks(&action_log, cx),
1278 vec![(
1279 buffer.clone(),
1280 vec![HunkStatus {
1281 range: Point::new(1, 0)..Point::new(3, 0),
1282 diff_status: DiffHunkStatusKind::Modified,
1283 old_text: "def\nghi\n".into(),
1284 }],
1285 )]
1286 );
1287
1288 buffer.update(cx, |buffer, cx| {
1289 buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
1290 });
1291 cx.run_until_parked();
1292 assert_eq!(
1293 buffer.read_with(cx, |buffer, _| buffer.text()),
1294 "abXc\ndZeF\nGHI\nYjkl\nmno"
1295 );
1296 assert_eq!(
1297 unreviewed_hunks(&action_log, cx),
1298 vec![(
1299 buffer.clone(),
1300 vec![HunkStatus {
1301 range: Point::new(1, 0)..Point::new(3, 0),
1302 diff_status: DiffHunkStatusKind::Modified,
1303 old_text: "def\nghi\n".into(),
1304 }],
1305 )]
1306 );
1307
1308 action_log.update(cx, |log, cx| {
1309 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), None, cx)
1310 });
1311 cx.run_until_parked();
1312 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1313 }
1314
1315 #[gpui::test(iterations = 10)]
1316 async fn test_creating_files(cx: &mut TestAppContext) {
1317 init_test(cx);
1318
1319 let fs = FakeFs::new(cx.executor());
1320 fs.insert_tree(path!("/dir"), json!({})).await;
1321 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1322 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1323 let file_path = project
1324 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1325 .unwrap();
1326
1327 let buffer = project
1328 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1329 .await
1330 .unwrap();
1331 cx.update(|cx| {
1332 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1333 buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
1334 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1335 });
1336 project
1337 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1338 .await
1339 .unwrap();
1340 cx.run_until_parked();
1341 assert_eq!(
1342 unreviewed_hunks(&action_log, cx),
1343 vec![(
1344 buffer.clone(),
1345 vec![HunkStatus {
1346 range: Point::new(0, 0)..Point::new(0, 5),
1347 diff_status: DiffHunkStatusKind::Added,
1348 old_text: "".into(),
1349 }],
1350 )]
1351 );
1352
1353 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
1354 cx.run_until_parked();
1355 assert_eq!(
1356 unreviewed_hunks(&action_log, cx),
1357 vec![(
1358 buffer.clone(),
1359 vec![HunkStatus {
1360 range: Point::new(0, 0)..Point::new(0, 6),
1361 diff_status: DiffHunkStatusKind::Added,
1362 old_text: "".into(),
1363 }],
1364 )]
1365 );
1366
1367 action_log.update(cx, |log, cx| {
1368 log.keep_edits_in_range(buffer.clone(), 0..5, None, cx)
1369 });
1370 cx.run_until_parked();
1371 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1372 }
1373
1374 #[gpui::test(iterations = 10)]
1375 async fn test_overwriting_files(cx: &mut TestAppContext) {
1376 init_test(cx);
1377
1378 let fs = FakeFs::new(cx.executor());
1379 fs.insert_tree(
1380 path!("/dir"),
1381 json!({
1382 "file1": "Lorem ipsum dolor"
1383 }),
1384 )
1385 .await;
1386 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1387 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1388 let file_path = project
1389 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1390 .unwrap();
1391
1392 let buffer = project
1393 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1394 .await
1395 .unwrap();
1396 cx.update(|cx| {
1397 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1398 buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
1399 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1400 });
1401 project
1402 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1403 .await
1404 .unwrap();
1405 cx.run_until_parked();
1406 assert_eq!(
1407 unreviewed_hunks(&action_log, cx),
1408 vec![(
1409 buffer.clone(),
1410 vec![HunkStatus {
1411 range: Point::new(0, 0)..Point::new(0, 19),
1412 diff_status: DiffHunkStatusKind::Added,
1413 old_text: "".into(),
1414 }],
1415 )]
1416 );
1417
1418 action_log
1419 .update(cx, |log, cx| {
1420 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
1421 })
1422 .await
1423 .unwrap();
1424 cx.run_until_parked();
1425 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1426 assert_eq!(
1427 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1428 "Lorem ipsum dolor"
1429 );
1430 }
1431
1432 #[gpui::test(iterations = 10)]
1433 async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
1434 init_test(cx);
1435
1436 let fs = FakeFs::new(cx.executor());
1437 fs.insert_tree(
1438 path!("/dir"),
1439 json!({
1440 "file1": "Lorem ipsum dolor"
1441 }),
1442 )
1443 .await;
1444 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1445 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1446 let file_path = project
1447 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1448 .unwrap();
1449
1450 let buffer = project
1451 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1452 .await
1453 .unwrap();
1454 cx.update(|cx| {
1455 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1456 buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
1457 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1458 });
1459 project
1460 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1461 .await
1462 .unwrap();
1463 cx.run_until_parked();
1464 assert_eq!(
1465 unreviewed_hunks(&action_log, cx),
1466 vec![(
1467 buffer.clone(),
1468 vec![HunkStatus {
1469 range: Point::new(0, 0)..Point::new(0, 37),
1470 diff_status: DiffHunkStatusKind::Modified,
1471 old_text: "Lorem ipsum dolor".into(),
1472 }],
1473 )]
1474 );
1475
1476 cx.update(|cx| {
1477 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1478 buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
1479 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1480 });
1481 project
1482 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1483 .await
1484 .unwrap();
1485 cx.run_until_parked();
1486 assert_eq!(
1487 unreviewed_hunks(&action_log, cx),
1488 vec![(
1489 buffer.clone(),
1490 vec![HunkStatus {
1491 range: Point::new(0, 0)..Point::new(0, 9),
1492 diff_status: DiffHunkStatusKind::Added,
1493 old_text: "".into(),
1494 }],
1495 )]
1496 );
1497
1498 action_log
1499 .update(cx, |log, cx| {
1500 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
1501 })
1502 .await
1503 .unwrap();
1504 cx.run_until_parked();
1505 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1506 assert_eq!(
1507 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1508 "Lorem ipsum dolor"
1509 );
1510 }
1511
1512 #[gpui::test(iterations = 10)]
1513 async fn test_deleting_files(cx: &mut TestAppContext) {
1514 init_test(cx);
1515
1516 let fs = FakeFs::new(cx.executor());
1517 fs.insert_tree(
1518 path!("/dir"),
1519 json!({"file1": "lorem\n", "file2": "ipsum\n"}),
1520 )
1521 .await;
1522
1523 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1524 let file1_path = project
1525 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1526 .unwrap();
1527 let file2_path = project
1528 .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
1529 .unwrap();
1530
1531 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1532 let buffer1 = project
1533 .update(cx, |project, cx| {
1534 project.open_buffer(file1_path.clone(), cx)
1535 })
1536 .await
1537 .unwrap();
1538 let buffer2 = project
1539 .update(cx, |project, cx| {
1540 project.open_buffer(file2_path.clone(), cx)
1541 })
1542 .await
1543 .unwrap();
1544
1545 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
1546 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
1547 project
1548 .update(cx, |project, cx| {
1549 project.delete_file(file1_path.clone(), false, cx)
1550 })
1551 .unwrap()
1552 .await
1553 .unwrap();
1554 project
1555 .update(cx, |project, cx| {
1556 project.delete_file(file2_path.clone(), false, cx)
1557 })
1558 .unwrap()
1559 .await
1560 .unwrap();
1561 cx.run_until_parked();
1562 assert_eq!(
1563 unreviewed_hunks(&action_log, cx),
1564 vec![
1565 (
1566 buffer1.clone(),
1567 vec![HunkStatus {
1568 range: Point::new(0, 0)..Point::new(0, 0),
1569 diff_status: DiffHunkStatusKind::Deleted,
1570 old_text: "lorem\n".into(),
1571 }]
1572 ),
1573 (
1574 buffer2.clone(),
1575 vec![HunkStatus {
1576 range: Point::new(0, 0)..Point::new(0, 0),
1577 diff_status: DiffHunkStatusKind::Deleted,
1578 old_text: "ipsum\n".into(),
1579 }],
1580 )
1581 ]
1582 );
1583
1584 // Simulate file1 being recreated externally.
1585 fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
1586 .await;
1587
1588 // Simulate file2 being recreated by a tool.
1589 let buffer2 = project
1590 .update(cx, |project, cx| project.open_buffer(file2_path, cx))
1591 .await
1592 .unwrap();
1593 action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
1594 buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
1595 action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
1596 project
1597 .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
1598 .await
1599 .unwrap();
1600
1601 cx.run_until_parked();
1602 assert_eq!(
1603 unreviewed_hunks(&action_log, cx),
1604 vec![(
1605 buffer2.clone(),
1606 vec![HunkStatus {
1607 range: Point::new(0, 0)..Point::new(0, 5),
1608 diff_status: DiffHunkStatusKind::Added,
1609 old_text: "".into(),
1610 }],
1611 )]
1612 );
1613
1614 // Simulate file2 being deleted externally.
1615 fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
1616 .await
1617 .unwrap();
1618 cx.run_until_parked();
1619 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1620 }
1621
1622 #[gpui::test(iterations = 10)]
1623 async fn test_reject_edits(cx: &mut TestAppContext) {
1624 init_test(cx);
1625
1626 let fs = FakeFs::new(cx.executor());
1627 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1628 .await;
1629 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1630 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1631 let file_path = project
1632 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1633 .unwrap();
1634 let buffer = project
1635 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1636 .await
1637 .unwrap();
1638
1639 cx.update(|cx| {
1640 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1641 buffer.update(cx, |buffer, cx| {
1642 buffer
1643 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1644 .unwrap()
1645 });
1646 buffer.update(cx, |buffer, cx| {
1647 buffer
1648 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1649 .unwrap()
1650 });
1651 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1652 });
1653 cx.run_until_parked();
1654 assert_eq!(
1655 buffer.read_with(cx, |buffer, _| buffer.text()),
1656 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1657 );
1658 assert_eq!(
1659 unreviewed_hunks(&action_log, cx),
1660 vec![(
1661 buffer.clone(),
1662 vec![
1663 HunkStatus {
1664 range: Point::new(1, 0)..Point::new(3, 0),
1665 diff_status: DiffHunkStatusKind::Modified,
1666 old_text: "def\n".into(),
1667 },
1668 HunkStatus {
1669 range: Point::new(5, 0)..Point::new(5, 3),
1670 diff_status: DiffHunkStatusKind::Modified,
1671 old_text: "mno".into(),
1672 }
1673 ],
1674 )]
1675 );
1676
1677 // If the rejected range doesn't overlap with any hunk, we ignore it.
1678 action_log
1679 .update(cx, |log, cx| {
1680 log.reject_edits_in_ranges(
1681 buffer.clone(),
1682 vec![Point::new(4, 0)..Point::new(4, 0)],
1683 None,
1684 cx,
1685 )
1686 })
1687 .await
1688 .unwrap();
1689 cx.run_until_parked();
1690 assert_eq!(
1691 buffer.read_with(cx, |buffer, _| buffer.text()),
1692 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1693 );
1694 assert_eq!(
1695 unreviewed_hunks(&action_log, cx),
1696 vec![(
1697 buffer.clone(),
1698 vec![
1699 HunkStatus {
1700 range: Point::new(1, 0)..Point::new(3, 0),
1701 diff_status: DiffHunkStatusKind::Modified,
1702 old_text: "def\n".into(),
1703 },
1704 HunkStatus {
1705 range: Point::new(5, 0)..Point::new(5, 3),
1706 diff_status: DiffHunkStatusKind::Modified,
1707 old_text: "mno".into(),
1708 }
1709 ],
1710 )]
1711 );
1712
1713 action_log
1714 .update(cx, |log, cx| {
1715 log.reject_edits_in_ranges(
1716 buffer.clone(),
1717 vec![Point::new(0, 0)..Point::new(1, 0)],
1718 None,
1719 cx,
1720 )
1721 })
1722 .await
1723 .unwrap();
1724 cx.run_until_parked();
1725 assert_eq!(
1726 buffer.read_with(cx, |buffer, _| buffer.text()),
1727 "abc\ndef\nghi\njkl\nmnO"
1728 );
1729 assert_eq!(
1730 unreviewed_hunks(&action_log, cx),
1731 vec![(
1732 buffer.clone(),
1733 vec![HunkStatus {
1734 range: Point::new(4, 0)..Point::new(4, 3),
1735 diff_status: DiffHunkStatusKind::Modified,
1736 old_text: "mno".into(),
1737 }],
1738 )]
1739 );
1740
1741 action_log
1742 .update(cx, |log, cx| {
1743 log.reject_edits_in_ranges(
1744 buffer.clone(),
1745 vec![Point::new(4, 0)..Point::new(4, 0)],
1746 None,
1747 cx,
1748 )
1749 })
1750 .await
1751 .unwrap();
1752 cx.run_until_parked();
1753 assert_eq!(
1754 buffer.read_with(cx, |buffer, _| buffer.text()),
1755 "abc\ndef\nghi\njkl\nmno"
1756 );
1757 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1758 }
1759
1760 #[gpui::test(iterations = 10)]
1761 async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
1762 init_test(cx);
1763
1764 let fs = FakeFs::new(cx.executor());
1765 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1766 .await;
1767 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1768 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1769 let file_path = project
1770 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1771 .unwrap();
1772 let buffer = project
1773 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1774 .await
1775 .unwrap();
1776
1777 cx.update(|cx| {
1778 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1779 buffer.update(cx, |buffer, cx| {
1780 buffer
1781 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1782 .unwrap()
1783 });
1784 buffer.update(cx, |buffer, cx| {
1785 buffer
1786 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1787 .unwrap()
1788 });
1789 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1790 });
1791 cx.run_until_parked();
1792 assert_eq!(
1793 buffer.read_with(cx, |buffer, _| buffer.text()),
1794 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1795 );
1796 assert_eq!(
1797 unreviewed_hunks(&action_log, cx),
1798 vec![(
1799 buffer.clone(),
1800 vec![
1801 HunkStatus {
1802 range: Point::new(1, 0)..Point::new(3, 0),
1803 diff_status: DiffHunkStatusKind::Modified,
1804 old_text: "def\n".into(),
1805 },
1806 HunkStatus {
1807 range: Point::new(5, 0)..Point::new(5, 3),
1808 diff_status: DiffHunkStatusKind::Modified,
1809 old_text: "mno".into(),
1810 }
1811 ],
1812 )]
1813 );
1814
1815 action_log.update(cx, |log, cx| {
1816 let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
1817 ..buffer.read(cx).anchor_before(Point::new(1, 0));
1818 let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
1819 ..buffer.read(cx).anchor_before(Point::new(5, 3));
1820
1821 log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], None, cx)
1822 .detach();
1823 assert_eq!(
1824 buffer.read_with(cx, |buffer, _| buffer.text()),
1825 "abc\ndef\nghi\njkl\nmno"
1826 );
1827 });
1828 cx.run_until_parked();
1829 assert_eq!(
1830 buffer.read_with(cx, |buffer, _| buffer.text()),
1831 "abc\ndef\nghi\njkl\nmno"
1832 );
1833 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1834 }
1835
1836 #[gpui::test(iterations = 10)]
1837 async fn test_reject_deleted_file(cx: &mut TestAppContext) {
1838 init_test(cx);
1839
1840 let fs = FakeFs::new(cx.executor());
1841 fs.insert_tree(path!("/dir"), json!({"file": "content"}))
1842 .await;
1843 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1844 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1845 let file_path = project
1846 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1847 .unwrap();
1848 let buffer = project
1849 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
1850 .await
1851 .unwrap();
1852
1853 cx.update(|cx| {
1854 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
1855 });
1856 project
1857 .update(cx, |project, cx| {
1858 project.delete_file(file_path.clone(), false, cx)
1859 })
1860 .unwrap()
1861 .await
1862 .unwrap();
1863 cx.run_until_parked();
1864 assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
1865 assert_eq!(
1866 unreviewed_hunks(&action_log, cx),
1867 vec![(
1868 buffer.clone(),
1869 vec![HunkStatus {
1870 range: Point::new(0, 0)..Point::new(0, 0),
1871 diff_status: DiffHunkStatusKind::Deleted,
1872 old_text: "content".into(),
1873 }]
1874 )]
1875 );
1876
1877 action_log
1878 .update(cx, |log, cx| {
1879 log.reject_edits_in_ranges(
1880 buffer.clone(),
1881 vec![Point::new(0, 0)..Point::new(0, 0)],
1882 None,
1883 cx,
1884 )
1885 })
1886 .await
1887 .unwrap();
1888 cx.run_until_parked();
1889 assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
1890 assert!(fs.is_file(path!("/dir/file").as_ref()).await);
1891 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1892 }
1893
    #[gpui::test(iterations = 10)]
    async fn test_reject_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 7),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 11)],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

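    // When the user has edited an agent-created file, rejecting the agent's edits
    // should preserve the file and its full contents rather than deleting it.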
    #[gpui::test]
    async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();

        // User makes additional edits
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| {
                buffer.edit([(10..10, "\nuser added this line")], None, cx);
            });
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // Reject all
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(100, 0)],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();

        // File should still contain all the content
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        let content = buffer.read_with(cx, |buffer, _| buffer.text());
        assert_eq!(content, "ai content\nuser added this line");
    }

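    // Once the hunk for a created file has been accepted, rejecting a later agent
    // edit should roll back to the accepted contents instead of deleting the file.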
    #[gpui::test]
    async fn test_reject_after_accepting_hunk_on_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User accepts the single hunk
        action_log.update(cx, |log, cx| {
            let buffer_range = Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id());
            log.keep_edits_in_range(buffer.clone(), buffer_range, None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // AI modifies the file
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User rejects the hunk
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Anchor::min_max_range_for_buffer(
                        buffer.read(cx).remote_id(),
                    )],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "ai content v1"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

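    // "Reject All" after a previous "Accept All" on a created file should restore
    // the accepted contents, not delete the file.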
    #[gpui::test]
    async fn test_reject_edits_on_previously_accepted_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();

        // User clicks "Accept All"
        action_log.update(cx, |log, cx| log.keep_all_edits(None, cx));
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared

        // AI modifies file again
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User clicks "Reject All"
        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "ai content v1"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

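    // Randomized test: interleaves agent edits, user edits, keeps, and rejects,
    // then checks that replaying the unreviewed edits onto the diff base
    // reproduces the current buffer contents.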
    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        init_test(cx);

        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

        for _ in 0..operations {
            match rng.random_range(0..100) {
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, None, cx)
                    });
                }
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            log.reject_edits_in_ranges(buffer.clone(), vec![range], None, cx)
                        })
                        .await
                        .unwrap();
                }
                _ => {
                    let is_agent_edit = rng.random_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            if rng.random_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }

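    // Committing to git should implicitly keep the unreviewed hunks whose edits
    // appear verbatim in the new HEAD, while leaving the rest pending.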
    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Make another commit that accepts the GGG edit but commits different
        // content where the NEW line was added, so that hunk stays unreviewed
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into())],
            "0000002",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer,
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

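    // Simplified view of a diff hunk used by the assertions in these tests.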
    #[derive(Debug, Clone, PartialEq, Eq)]
    struct HunkStatus {
        range: Range<Point>,
        diff_status: DiffHunkStatusKind,
        old_text: String,
    }

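    // Collects every changed buffer tracked by the action log along with its
    // unreviewed hunks, expressed as `HunkStatus` values for easy comparison.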
    fn unreviewed_hunks(
        action_log: &Entity<ActionLog>,
        cx: &TestAppContext,
    ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
        cx.read(|cx| {
            action_log
                .read(cx)
                .changed_buffers(cx)
                .into_iter()
                .map(|(buffer, diff)| {
                    let snapshot = buffer.read(cx).snapshot();
                    (
                        buffer,
                        diff.read(cx)
                            .snapshot(cx)
                            .hunks(&snapshot)
                            .map(|hunk| HunkStatus {
                                diff_status: hunk.status().kind,
                                range: hunk.range,
                                old_text: diff
                                    .read(cx)
                                    .base_text(cx)
                                    .text_for_range(hunk.diff_base_byte_range)
                                    .collect(),
                            })
                            .collect(),
                    )
                })
                .collect()
        })
    }
}