1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use clock;
4use collections::BTreeMap;
5use futures::{FutureExt, StreamExt, channel::mpsc};
6use gpui::{
7 App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
8};
9use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
10use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
11use std::{cmp, ops::Range, sync::Arc};
12use text::{Edit, Patch, Rope};
13use util::{RangeExt, ResultExt as _};
14
/// Tracks actions performed by tools in an agent thread.
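///
/// Typical flow (a sketch: `action_log`, `buffer`, and `cx` stand in for an
/// existing [`ActionLog`], an open [`Buffer`] entity, and a `&mut Context<ActionLog>`):
///
/// ```ignore
/// // The agent reads the file, so user edits from now on should be reported.
/// action_log.buffer_read(buffer.clone(), cx);
/// // ...the agent edits `buffer`...
/// action_log.buffer_edited(buffer.clone(), cx);
/// // The user accepts the agent's edits intersecting offsets 0..5.
/// action_log.keep_edits_in_range(buffer, 0..5, None, cx);
/// ```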
16pub struct ActionLog {
17 /// Buffers that we want to notify the model about when they change.
18 tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
    /// The project this action log is associated with.
20 project: Entity<Project>,
21}
22
23impl ActionLog {
24 /// Creates a new, empty action log associated with the given project.
25 pub fn new(project: Entity<Project>) -> Self {
26 Self {
27 tracked_buffers: BTreeMap::default(),
28 project,
29 }
30 }
31
32 pub fn project(&self) -> &Entity<Project> {
33 &self.project
34 }
35
36 pub fn latest_snapshot(&self, buffer: &Entity<Buffer>) -> Option<text::BufferSnapshot> {
37 Some(self.tracked_buffers.get(buffer)?.snapshot.clone())
38 }
39
    /// Returns a unified diff containing the user edits made since the last read or notification.
41 pub fn unnotified_user_edits(&self, cx: &Context<Self>) -> Option<String> {
42 let diffs = self
43 .tracked_buffers
44 .values()
45 .filter_map(|tracked| {
46 if !tracked.may_have_unnotified_user_edits {
47 return None;
48 }
49
50 let text_with_latest_user_edits = tracked.diff_base.to_string();
51 let text_with_last_seen_user_edits = tracked.last_seen_base.to_string();
52 if text_with_latest_user_edits == text_with_last_seen_user_edits {
53 return None;
54 }
55 let patch = language::unified_diff(
56 &text_with_last_seen_user_edits,
57 &text_with_latest_user_edits,
58 );
59
60 let buffer = tracked.buffer.clone();
61 let file_path = buffer
62 .read(cx)
63 .file()
64 .map(|file| {
65 let mut path = file.full_path(cx).to_string_lossy().into_owned();
66 if file.path_style(cx).is_windows() {
67 path = path.replace('\\', "/");
68 }
69 path
70 })
71 .unwrap_or_else(|| format!("buffer_{}", buffer.entity_id()));
72
73 let mut result = String::new();
74 result.push_str(&format!("--- a/{}\n", file_path));
75 result.push_str(&format!("+++ b/{}\n", file_path));
76 result.push_str(&patch);
77
78 Some(result)
79 })
80 .collect::<Vec<_>>();
81
82 if diffs.is_empty() {
83 return None;
84 }
85
86 let unified_diff = diffs.join("\n\n");
87 Some(unified_diff)
88 }
89
    /// Returns a unified diff containing the user edits made since the last read or
    /// notification, and marks those edits as notified.
92 pub fn flush_unnotified_user_edits(&mut self, cx: &Context<Self>) -> Option<String> {
93 let patch = self.unnotified_user_edits(cx);
94 self.tracked_buffers.values_mut().for_each(|tracked| {
95 tracked.may_have_unnotified_user_edits = false;
96 tracked.last_seen_base = tracked.diff_base.clone();
97 });
98 patch
99 }
100
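    /// Starts tracking `buffer` if it isn't tracked yet, registering it with the
    /// language servers and spawning a task that keeps its diff up to date.
    /// `is_created` marks the buffer as newly created (or recreated) by the agent.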
101 fn track_buffer_internal(
102 &mut self,
103 buffer: Entity<Buffer>,
104 is_created: bool,
105 cx: &mut Context<Self>,
106 ) -> &mut TrackedBuffer {
107 let status = if is_created {
108 if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
109 match tracked.status {
110 TrackedBufferStatus::Created {
111 existing_file_content,
112 } => TrackedBufferStatus::Created {
113 existing_file_content,
114 },
115 TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
116 TrackedBufferStatus::Created {
117 existing_file_content: Some(tracked.diff_base),
118 }
119 }
120 }
121 } else if buffer
122 .read(cx)
123 .file()
124 .is_some_and(|file| file.disk_state().exists())
125 {
126 TrackedBufferStatus::Created {
127 existing_file_content: Some(buffer.read(cx).as_rope().clone()),
128 }
129 } else {
130 TrackedBufferStatus::Created {
131 existing_file_content: None,
132 }
133 }
134 } else {
135 TrackedBufferStatus::Modified
136 };
137
138 let tracked_buffer = self
139 .tracked_buffers
140 .entry(buffer.clone())
141 .or_insert_with(|| {
142 let open_lsp_handle = self.project.update(cx, |project, cx| {
143 project.register_buffer_with_language_servers(&buffer, cx)
144 });
145
146 let text_snapshot = buffer.read(cx).text_snapshot();
147 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
148 let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
149 let diff_base;
150 let last_seen_base;
151 let unreviewed_edits;
152 if is_created {
153 diff_base = Rope::default();
154 last_seen_base = Rope::default();
155 unreviewed_edits = Patch::new(vec![Edit {
156 old: 0..1,
157 new: 0..text_snapshot.max_point().row + 1,
158 }])
159 } else {
160 diff_base = buffer.read(cx).as_rope().clone();
161 last_seen_base = diff_base.clone();
162 unreviewed_edits = Patch::default();
163 }
164 TrackedBuffer {
165 buffer: buffer.clone(),
166 diff_base,
167 last_seen_base,
168 unreviewed_edits,
169 snapshot: text_snapshot,
170 status,
171 version: buffer.read(cx).version(),
172 diff,
173 diff_update: diff_update_tx,
174 may_have_unnotified_user_edits: false,
175 _open_lsp_handle: open_lsp_handle,
176 _maintain_diff: cx.spawn({
177 let buffer = buffer.clone();
178 async move |this, cx| {
179 Self::maintain_diff(this, buffer, diff_update_rx, cx)
180 .await
181 .ok();
182 }
183 }),
184 _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
185 }
186 });
187 tracked_buffer.version = buffer.read(cx).version();
188 tracked_buffer
189 }
190
191 fn handle_buffer_event(
192 &mut self,
193 buffer: Entity<Buffer>,
194 event: &BufferEvent,
195 cx: &mut Context<Self>,
196 ) {
197 match event {
198 BufferEvent::Edited => self.handle_buffer_edited(buffer, cx),
199 BufferEvent::FileHandleChanged => {
200 self.handle_buffer_file_changed(buffer, cx);
201 }
202 _ => {}
203 };
204 }
205
206 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
207 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
208 return;
209 };
210 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
211 }
212
213 fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
214 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
215 return;
216 };
217
218 match tracked_buffer.status {
219 TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
220 if buffer
221 .read(cx)
222 .file()
223 .is_some_and(|file| file.disk_state() == DiskState::Deleted)
224 {
225 // If the buffer had been edited by a tool, but it got
226 // deleted externally, we want to stop tracking it.
227 self.tracked_buffers.remove(&buffer);
228 }
229 cx.notify();
230 }
231 TrackedBufferStatus::Deleted => {
232 if buffer
233 .read(cx)
234 .file()
235 .is_some_and(|file| file.disk_state() != DiskState::Deleted)
236 {
237 // If the buffer had been deleted by a tool, but it got
238 // resurrected externally, we want to clear the edits we
239 // were tracking and reset the buffer's state.
240 self.tracked_buffers.remove(&buffer);
241 self.track_buffer_internal(buffer, false, cx);
242 }
243 cx.notify();
244 }
245 }
246 }
247
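    /// Long-running task that keeps a tracked buffer's diff up to date, reacting
    /// both to buffer edits and to changes of the repository's HEAD commit (so
    /// that agent edits which get committed are treated as reviewed).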
248 async fn maintain_diff(
249 this: WeakEntity<Self>,
250 buffer: Entity<Buffer>,
251 mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
252 cx: &mut AsyncApp,
253 ) -> Result<()> {
254 let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
255 let git_diff = this
256 .update(cx, |this, cx| {
257 this.project.update(cx, |project, cx| {
258 project.open_uncommitted_diff(buffer.clone(), cx)
259 })
260 })?
261 .await
262 .ok();
263 let buffer_repo = git_store.read_with(cx, |git_store, cx| {
264 git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
265 })?;
266
267 let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
268 let _repo_subscription =
269 if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
270 cx.update(|cx| {
271 let mut old_head = buffer_repo.read(cx).head_commit.clone();
272 Some(cx.subscribe(git_diff, move |_, event, cx| {
273 if let buffer_diff::BufferDiffEvent::DiffChanged { .. } = event {
274 let new_head = buffer_repo.read(cx).head_commit.clone();
275 if new_head != old_head {
276 old_head = new_head;
277 git_diff_updates_tx.send(()).ok();
278 }
279 }
280 }))
281 })?
282 } else {
283 None
284 };
285
286 loop {
287 futures::select_biased! {
288 buffer_update = buffer_updates.next() => {
289 if let Some((author, buffer_snapshot)) = buffer_update {
290 Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
291 } else {
292 break;
293 }
294 }
295 _ = git_diff_updates_rx.changed().fuse() => {
296 if let Some(git_diff) = git_diff.as_ref() {
297 Self::keep_committed_edits(&this, &buffer, git_diff, cx).await?;
298 }
299 }
300 }
301 }
302
303 Ok(())
304 }
305
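    /// Incorporates the edits that produced `buffer_snapshot` into the tracked
    /// state. User edits that don't conflict with unreviewed agent edits are
    /// folded into the diff base, so they won't be shown as agent changes.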
306 async fn track_edits(
307 this: &WeakEntity<ActionLog>,
308 buffer: &Entity<Buffer>,
309 author: ChangeAuthor,
310 buffer_snapshot: text::BufferSnapshot,
311 cx: &mut AsyncApp,
312 ) -> Result<()> {
313 let rebase = this.update(cx, |this, cx| {
314 let tracked_buffer = this
315 .tracked_buffers
316 .get_mut(buffer)
317 .context("buffer not tracked")?;
318
319 let rebase = cx.background_spawn({
320 let mut base_text = tracked_buffer.diff_base.clone();
321 let old_snapshot = tracked_buffer.snapshot.clone();
322 let new_snapshot = buffer_snapshot.clone();
323 let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
324 let edits = diff_snapshots(&old_snapshot, &new_snapshot);
325 let mut has_user_changes = false;
326 async move {
327 if let ChangeAuthor::User = author {
328 has_user_changes = apply_non_conflicting_edits(
329 &unreviewed_edits,
330 edits,
331 &mut base_text,
332 new_snapshot.as_rope(),
333 );
334 }
335
336 (Arc::new(base_text.to_string()), base_text, has_user_changes)
337 }
338 });
339
340 anyhow::Ok(rebase)
341 })??;
342 let (new_base_text, new_diff_base, has_user_changes) = rebase.await;
343
344 this.update(cx, |this, _| {
345 let tracked_buffer = this
346 .tracked_buffers
347 .get_mut(buffer)
348 .context("buffer not tracked")
349 .unwrap();
350 tracked_buffer.may_have_unnotified_user_edits |= has_user_changes;
351 })?;
352
353 Self::update_diff(
354 this,
355 buffer,
356 buffer_snapshot,
357 new_base_text,
358 new_diff_base,
359 cx,
360 )
361 .await
362 }
363
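    /// Called when the repository's HEAD changes: any unreviewed agent edit whose
    /// content matches the newly committed text is folded into the diff base,
    /// effectively marking it as kept.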
364 async fn keep_committed_edits(
365 this: &WeakEntity<ActionLog>,
366 buffer: &Entity<Buffer>,
367 git_diff: &Entity<BufferDiff>,
368 cx: &mut AsyncApp,
369 ) -> Result<()> {
370 let buffer_snapshot = this.read_with(cx, |this, _cx| {
371 let tracked_buffer = this
372 .tracked_buffers
373 .get(buffer)
374 .context("buffer not tracked")?;
375 anyhow::Ok(tracked_buffer.snapshot.clone())
376 })??;
377 let (new_base_text, new_diff_base) = this
378 .read_with(cx, |this, cx| {
379 let tracked_buffer = this
380 .tracked_buffers
381 .get(buffer)
382 .context("buffer not tracked")?;
383 let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
384 let agent_diff_base = tracked_buffer.diff_base.clone();
385 let git_diff_base = git_diff.read(cx).base_text().as_rope().clone();
386 let buffer_text = tracked_buffer.snapshot.as_rope().clone();
387 anyhow::Ok(cx.background_spawn(async move {
388 let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
389 let committed_edits = language::line_diff(
390 &agent_diff_base.to_string(),
391 &git_diff_base.to_string(),
392 )
393 .into_iter()
394 .map(|(old, new)| Edit { old, new });
395
396 let mut new_agent_diff_base = agent_diff_base.clone();
397 let mut row_delta = 0i32;
398 for committed in committed_edits {
399 while let Some(unreviewed) = old_unreviewed_edits.peek() {
400 // If the committed edit matches the unreviewed
401 // edit, assume the user wants to keep it.
402 if committed.old == unreviewed.old {
403 let unreviewed_new =
404 buffer_text.slice_rows(unreviewed.new.clone()).to_string();
405 let committed_new =
406 git_diff_base.slice_rows(committed.new.clone()).to_string();
407 if unreviewed_new == committed_new {
408 let old_byte_start =
409 new_agent_diff_base.point_to_offset(Point::new(
410 (unreviewed.old.start as i32 + row_delta) as u32,
411 0,
412 ));
413 let old_byte_end =
414 new_agent_diff_base.point_to_offset(cmp::min(
415 Point::new(
416 (unreviewed.old.end as i32 + row_delta) as u32,
417 0,
418 ),
419 new_agent_diff_base.max_point(),
420 ));
421 new_agent_diff_base
422 .replace(old_byte_start..old_byte_end, &unreviewed_new);
423 row_delta +=
424 unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
425 }
426 } else if unreviewed.old.start >= committed.old.end {
427 break;
428 }
429
430 old_unreviewed_edits.next().unwrap();
431 }
432 }
433
434 (
435 Arc::new(new_agent_diff_base.to_string()),
436 new_agent_diff_base,
437 )
438 }))
439 })??
440 .await;
441
442 Self::update_diff(
443 this,
444 buffer,
445 buffer_snapshot,
446 new_base_text,
447 new_diff_base,
448 cx,
449 )
450 .await
451 }
452
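    /// Recomputes the buffer's diff against `new_base_text` and stores the
    /// resulting hunks as row-based unreviewed edits on the tracked buffer.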
453 async fn update_diff(
454 this: &WeakEntity<ActionLog>,
455 buffer: &Entity<Buffer>,
456 buffer_snapshot: text::BufferSnapshot,
457 new_base_text: Arc<String>,
458 new_diff_base: Rope,
459 cx: &mut AsyncApp,
460 ) -> Result<()> {
461 let (diff, language, language_registry) = this.read_with(cx, |this, cx| {
462 let tracked_buffer = this
463 .tracked_buffers
464 .get(buffer)
465 .context("buffer not tracked")?;
466 anyhow::Ok((
467 tracked_buffer.diff.clone(),
468 buffer.read(cx).language().cloned(),
469 buffer.read(cx).language_registry(),
470 ))
471 })??;
472 let diff_snapshot = BufferDiff::update_diff(
473 diff.clone(),
474 buffer_snapshot.clone(),
475 Some(new_base_text),
476 true,
477 false,
478 language,
479 language_registry,
480 cx,
481 )
482 .await;
483 let mut unreviewed_edits = Patch::default();
484 if let Ok(diff_snapshot) = diff_snapshot {
485 unreviewed_edits = cx
486 .background_spawn({
487 let diff_snapshot = diff_snapshot.clone();
488 let buffer_snapshot = buffer_snapshot.clone();
489 let new_diff_base = new_diff_base.clone();
490 async move {
491 let mut unreviewed_edits = Patch::default();
492 for hunk in diff_snapshot
493 .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer_snapshot)
494 {
495 let old_range = new_diff_base
496 .offset_to_point(hunk.diff_base_byte_range.start)
497 ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
498 let new_range = hunk.range.start..hunk.range.end;
499 unreviewed_edits.push(point_to_row_edit(
500 Edit {
501 old: old_range,
502 new: new_range,
503 },
504 &new_diff_base,
505 buffer_snapshot.as_rope(),
506 ));
507 }
508 unreviewed_edits
509 }
510 })
511 .await;
512
513 diff.update(cx, |diff, cx| {
514 diff.set_snapshot(diff_snapshot, &buffer_snapshot, cx);
515 })?;
516 }
517 this.update(cx, |this, cx| {
518 let tracked_buffer = this
519 .tracked_buffers
520 .get_mut(buffer)
521 .context("buffer not tracked")?;
522 tracked_buffer.diff_base = new_diff_base;
523 tracked_buffer.snapshot = buffer_snapshot;
524 tracked_buffer.unreviewed_edits = unreviewed_edits;
525 cx.notify();
526 anyhow::Ok(())
527 })?
528 }
529
    /// Tracks a buffer as read by the agent, so that we can notify the model about subsequent user edits.
531 pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
532 self.track_buffer_internal(buffer, false, cx);
533 }
534
    /// Marks a buffer as created by the agent, so that we can refresh it in the context.
536 pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
537 self.track_buffer_internal(buffer, true, cx);
538 }
539
    /// Marks a buffer as edited by the agent, so that we can refresh it in the context.
541 pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
542 let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
543 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
544 tracked_buffer.status = TrackedBufferStatus::Modified;
545 }
546 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
547 }
548
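    /// Records that the agent is about to delete `buffer`: buffers the agent
    /// created are simply untracked, while previously existing buffers are
    /// emptied and marked as deleted.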
549 pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
550 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
551 match tracked_buffer.status {
552 TrackedBufferStatus::Created { .. } => {
553 self.tracked_buffers.remove(&buffer);
554 cx.notify();
555 }
556 TrackedBufferStatus::Modified => {
557 buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
558 tracked_buffer.status = TrackedBufferStatus::Deleted;
559 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
560 }
561 TrackedBufferStatus::Deleted => {}
562 }
563 cx.notify();
564 }
565
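    /// Accepts ("keeps") the unreviewed agent edits that intersect `buffer_range`,
    /// folding them into the diff base so they no longer require review.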
566 pub fn keep_edits_in_range(
567 &mut self,
568 buffer: Entity<Buffer>,
569 buffer_range: Range<impl language::ToPoint>,
570 telemetry: Option<ActionLogTelemetry>,
571 cx: &mut Context<Self>,
572 ) {
573 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
574 return;
575 };
576
577 let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
578 match tracked_buffer.status {
579 TrackedBufferStatus::Deleted => {
580 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
581 self.tracked_buffers.remove(&buffer);
582 cx.notify();
583 }
584 _ => {
585 let buffer = buffer.read(cx);
586 let buffer_range =
587 buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
588 let mut delta = 0i32;
589 tracked_buffer.unreviewed_edits.retain_mut(|edit| {
590 edit.old.start = (edit.old.start as i32 + delta) as u32;
591 edit.old.end = (edit.old.end as i32 + delta) as u32;
592
593 if buffer_range.end.row < edit.new.start
594 || buffer_range.start.row > edit.new.end
595 {
596 true
597 } else {
598 let old_range = tracked_buffer
599 .diff_base
600 .point_to_offset(Point::new(edit.old.start, 0))
601 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
602 Point::new(edit.old.end, 0),
603 tracked_buffer.diff_base.max_point(),
604 ));
605 let new_range = tracked_buffer
606 .snapshot
607 .point_to_offset(Point::new(edit.new.start, 0))
608 ..tracked_buffer.snapshot.point_to_offset(cmp::min(
609 Point::new(edit.new.end, 0),
610 tracked_buffer.snapshot.max_point(),
611 ));
612 tracked_buffer.diff_base.replace(
613 old_range,
614 &tracked_buffer
615 .snapshot
616 .text_for_range(new_range)
617 .collect::<String>(),
618 );
619 delta += edit.new_len() as i32 - edit.old_len() as i32;
620 metrics.add_edit(edit);
621 false
622 }
623 });
624 if tracked_buffer.unreviewed_edits.is_empty()
625 && let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status
626 {
627 tracked_buffer.status = TrackedBufferStatus::Modified;
628 }
629 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
630 }
631 }
632 if let Some(telemetry) = telemetry {
633 telemetry_report_accepted_edits(&telemetry, metrics);
634 }
635 }
636
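    /// Rejects the unreviewed agent edits that intersect `buffer_ranges`,
    /// restoring the previous content and saving the buffer, or deleting the
    /// file entirely if the agent created it from scratch.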
637 pub fn reject_edits_in_ranges(
638 &mut self,
639 buffer: Entity<Buffer>,
640 buffer_ranges: Vec<Range<impl language::ToPoint>>,
641 telemetry: Option<ActionLogTelemetry>,
642 cx: &mut Context<Self>,
643 ) -> Task<Result<()>> {
644 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
645 return Task::ready(Ok(()));
646 };
647
648 let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
649 let task = match &tracked_buffer.status {
650 TrackedBufferStatus::Created {
651 existing_file_content,
652 } => {
653 let task = if let Some(existing_file_content) = existing_file_content {
654 buffer.update(cx, |buffer, cx| {
655 buffer.start_transaction();
656 buffer.set_text("", cx);
657 for chunk in existing_file_content.chunks() {
658 buffer.append(chunk, cx);
659 }
660 buffer.end_transaction(cx);
661 });
662 self.project
663 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
664 } else {
665 // For a file created by AI with no pre-existing content,
666 // only delete the file if we're certain it contains only AI content
667 // with no edits from the user.
668
669 let initial_version = tracked_buffer.version.clone();
670 let current_version = buffer.read(cx).version();
671
672 let current_content = buffer.read(cx).text();
673 let tracked_content = tracked_buffer.snapshot.text();
674
675 let is_ai_only_content =
676 initial_version == current_version && current_content == tracked_content;
677
678 if is_ai_only_content {
679 buffer
680 .read(cx)
681 .entry_id(cx)
682 .and_then(|entry_id| {
683 self.project.update(cx, |project, cx| {
684 project.delete_entry(entry_id, false, cx)
685 })
686 })
687 .unwrap_or(Task::ready(Ok(())))
688 } else {
689 // Not sure how to disentangle edits made by the user
690 // from edits made by the AI at this point.
691 // For now, preserve both to avoid data loss.
692 //
693 // TODO: Better solution (disable "Reject" after user makes some
694 // edit or find a way to differentiate between AI and user edits)
695 Task::ready(Ok(()))
696 }
697 };
698
699 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
700 self.tracked_buffers.remove(&buffer);
701 cx.notify();
702 task
703 }
704 TrackedBufferStatus::Deleted => {
705 buffer.update(cx, |buffer, cx| {
706 buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
707 });
708 let save = self
709 .project
710 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
711
712 // Clear all tracked edits for this buffer and start over as if we just read it.
713 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
714 self.tracked_buffers.remove(&buffer);
715 self.buffer_read(buffer.clone(), cx);
716 cx.notify();
717 save
718 }
719 TrackedBufferStatus::Modified => {
720 buffer.update(cx, |buffer, cx| {
721 let mut buffer_row_ranges = buffer_ranges
722 .into_iter()
723 .map(|range| {
724 range.start.to_point(buffer).row..range.end.to_point(buffer).row
725 })
726 .peekable();
727
728 let mut edits_to_revert = Vec::new();
729 for edit in tracked_buffer.unreviewed_edits.edits() {
730 let new_range = tracked_buffer
731 .snapshot
732 .anchor_before(Point::new(edit.new.start, 0))
733 ..tracked_buffer.snapshot.anchor_after(cmp::min(
734 Point::new(edit.new.end, 0),
735 tracked_buffer.snapshot.max_point(),
736 ));
737 let new_row_range = new_range.start.to_point(buffer).row
738 ..new_range.end.to_point(buffer).row;
739
740 let mut revert = false;
741 while let Some(buffer_row_range) = buffer_row_ranges.peek() {
742 if buffer_row_range.end < new_row_range.start {
743 buffer_row_ranges.next();
744 } else if buffer_row_range.start > new_row_range.end {
745 break;
746 } else {
747 revert = true;
748 break;
749 }
750 }
751
752 if revert {
753 metrics.add_edit(edit);
754 let old_range = tracked_buffer
755 .diff_base
756 .point_to_offset(Point::new(edit.old.start, 0))
757 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
758 Point::new(edit.old.end, 0),
759 tracked_buffer.diff_base.max_point(),
760 ));
761 let old_text = tracked_buffer
762 .diff_base
763 .chunks_in_range(old_range)
764 .collect::<String>();
765 edits_to_revert.push((new_range, old_text));
766 }
767 }
768
769 buffer.edit(edits_to_revert, None, cx);
770 });
771 self.project
772 .update(cx, |project, cx| project.save_buffer(buffer, cx))
773 }
774 };
775 if let Some(telemetry) = telemetry {
776 telemetry_report_rejected_edits(&telemetry, metrics);
777 }
778 task
779 }
780
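    /// Accepts every unreviewed edit in every tracked buffer.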
781 pub fn keep_all_edits(
782 &mut self,
783 telemetry: Option<ActionLogTelemetry>,
784 cx: &mut Context<Self>,
785 ) {
786 self.tracked_buffers.retain(|buffer, tracked_buffer| {
787 let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
788 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
789 if let Some(telemetry) = telemetry.as_ref() {
790 telemetry_report_accepted_edits(telemetry, metrics);
791 }
792 match tracked_buffer.status {
793 TrackedBufferStatus::Deleted => false,
794 _ => {
795 if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
796 tracked_buffer.status = TrackedBufferStatus::Modified;
797 }
798 tracked_buffer.unreviewed_edits.clear();
799 tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
800 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
801 true
802 }
803 }
804 });
805
806 cx.notify();
807 }
808
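    /// Rejects every unreviewed edit in every tracked buffer, returning a task
    /// that resolves once all of them have been restored.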
809 pub fn reject_all_edits(
810 &mut self,
811 telemetry: Option<ActionLogTelemetry>,
812 cx: &mut Context<Self>,
813 ) -> Task<()> {
814 let futures = self.changed_buffers(cx).into_keys().map(|buffer| {
815 let reject = self.reject_edits_in_ranges(
816 buffer,
817 vec![Anchor::MIN..Anchor::MAX],
818 telemetry.clone(),
819 cx,
820 );
821
822 async move {
823 reject.await.log_err();
824 }
825 });
826
827 let task = futures::future::join_all(futures);
828 cx.background_spawn(async move {
829 task.await;
830 })
831 }
832
833 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
834 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
835 self.tracked_buffers
836 .iter()
837 .filter(|(_, tracked)| tracked.has_edits(cx))
838 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
839 .collect()
840 }
841
    /// Iterates over buffers that have changed since the model last read or edited them.
843 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
844 self.tracked_buffers
845 .iter()
846 .filter(|(buffer, tracked)| {
847 let buffer = buffer.read(cx);
848
849 tracked.version != buffer.version
850 && buffer
851 .file()
852 .is_some_and(|file| file.disk_state() != DiskState::Deleted)
853 })
854 .map(|(buffer, _)| buffer)
855 }
856}
857
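/// Identifiers attached to the telemetry events reported when edits are
/// accepted or rejected.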
858#[derive(Clone)]
859pub struct ActionLogTelemetry {
860 pub agent_telemetry_id: &'static str,
861 pub session_id: Arc<str>,
862}
863
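/// Per-buffer line counts accumulated while accepting or rejecting edits,
/// reported via telemetry.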
864struct ActionLogMetrics {
865 lines_removed: u32,
866 lines_added: u32,
867 language: Option<SharedString>,
868}
869
870impl ActionLogMetrics {
871 fn for_buffer(buffer: &Buffer) -> Self {
872 Self {
873 language: buffer.language().map(|l| l.name().0),
874 lines_removed: 0,
875 lines_added: 0,
876 }
877 }
878
879 fn add_edits(&mut self, edits: &[Edit<u32>]) {
880 for edit in edits {
881 self.add_edit(edit);
882 }
883 }
884
885 fn add_edit(&mut self, edit: &Edit<u32>) {
886 self.lines_added += edit.new_len();
887 self.lines_removed += edit.old_len();
888 }
889}
890
891fn telemetry_report_accepted_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
892 telemetry::event!(
893 "Agent Edits Accepted",
894 agent = telemetry.agent_telemetry_id,
895 session = telemetry.session_id,
896 language = metrics.language,
897 lines_added = metrics.lines_added,
898 lines_removed = metrics.lines_removed
899 );
900}
901
902fn telemetry_report_rejected_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
903 telemetry::event!(
904 "Agent Edits Rejected",
905 agent = telemetry.agent_telemetry_id,
906 session = telemetry.session_id,
907 language = metrics.language,
908 lines_added = metrics.lines_added,
909 lines_removed = metrics.lines_removed
910 );
911}
912
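/// Applies `edits` to `old_text`, skipping any edit that conflicts with an
/// unreviewed edit in `patch`. Returns true if at least one edit was applied.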
913fn apply_non_conflicting_edits(
914 patch: &Patch<u32>,
915 edits: Vec<Edit<u32>>,
916 old_text: &mut Rope,
917 new_text: &Rope,
918) -> bool {
919 let mut old_edits = patch.edits().iter().cloned().peekable();
920 let mut new_edits = edits.into_iter().peekable();
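    // `applied_delta` tracks rows added/removed in `old_text` by edits we've already
    // applied, while `rebased_delta` tracks rows contributed by unreviewed edits we've
    // skipped; their difference converts buffer rows into `old_text` rows.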
921 let mut applied_delta = 0i32;
922 let mut rebased_delta = 0i32;
923 let mut has_made_changes = false;
924
925 while let Some(mut new_edit) = new_edits.next() {
926 let mut conflict = false;
927
        // Skip past the old edits that end before this new edit, flagging the
        // new edit as conflicting if it intersects any of them.
929 while let Some(old_edit) = old_edits.peek() {
930 if new_edit.old.end < old_edit.new.start
931 || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
932 {
933 break;
934 } else if new_edit.old.start > old_edit.new.end
935 || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
936 {
937 let old_edit = old_edits.next().unwrap();
938 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
939 } else {
940 conflict = true;
941 if new_edits
942 .peek()
943 .is_some_and(|next_edit| next_edit.old.overlaps(&old_edit.new))
944 {
945 new_edit = new_edits.next().unwrap();
946 } else {
947 let old_edit = old_edits.next().unwrap();
948 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
949 }
950 }
951 }
952
953 if !conflict {
954 // This edit doesn't intersect with any old edit, so we can apply it to the old text.
955 new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
956 new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
957 let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
958 ..old_text.point_to_offset(cmp::min(
959 Point::new(new_edit.old.end, 0),
960 old_text.max_point(),
961 ));
962 let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
963 ..new_text.point_to_offset(cmp::min(
964 Point::new(new_edit.new.end, 0),
965 new_text.max_point(),
966 ));
967
968 old_text.replace(
969 old_bytes,
970 &new_text.chunks_in_range(new_bytes).collect::<String>(),
971 );
972 applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
973 has_made_changes = true;
974 }
975 }
976 has_made_changes
977}
978
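/// Computes the row-based edits between two snapshots of the same buffer,
/// merging adjacent or overlapping edits into a single row range.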
979fn diff_snapshots(
980 old_snapshot: &text::BufferSnapshot,
981 new_snapshot: &text::BufferSnapshot,
982) -> Vec<Edit<u32>> {
983 let mut edits = new_snapshot
984 .edits_since::<Point>(&old_snapshot.version)
985 .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
986 .peekable();
987 let mut row_edits = Vec::new();
988 while let Some(mut edit) = edits.next() {
989 while let Some(next_edit) = edits.peek() {
990 if edit.old.end >= next_edit.old.start {
991 edit.old.end = next_edit.old.end;
992 edit.new.end = next_edit.new.end;
993 edits.next();
994 } else {
995 break;
996 }
997 }
998 row_edits.push(edit);
999 }
1000 row_edits
1001}
1002
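/// Converts a point-based edit into a whole-row edit, snapping an insertion
/// that starts at the end of a line to the following row.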
1003fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
1004 if edit.old.start.column == old_text.line_len(edit.old.start.row)
1005 && new_text
1006 .chars_at(new_text.point_to_offset(edit.new.start))
1007 .next()
1008 == Some('\n')
1009 && edit.old.start != old_text.max_point()
1010 {
1011 Edit {
1012 old: edit.old.start.row + 1..edit.old.end.row + 1,
1013 new: edit.new.start.row + 1..edit.new.end.row + 1,
1014 }
1015 } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
1016 Edit {
1017 old: edit.old.start.row..edit.old.end.row,
1018 new: edit.new.start.row..edit.new.end.row,
1019 }
1020 } else {
1021 Edit {
1022 old: edit.old.start.row..edit.old.end.row + 1,
1023 new: edit.new.start.row..edit.new.end.row + 1,
1024 }
1025 }
1026}
1027
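/// Who authored a change to a tracked buffer.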
1028#[derive(Copy, Clone, Debug)]
1029enum ChangeAuthor {
1030 User,
1031 Agent,
1032}
1033
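/// What kind of change the agent made to a tracked buffer.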
1034enum TrackedBufferStatus {
1035 Created { existing_file_content: Option<Rope> },
1036 Modified,
1037 Deleted,
1038}
1039
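/// Per-buffer state kept by the [`ActionLog`], including the diff base the
/// agent's edits are compared against and the edits still awaiting review.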
1040struct TrackedBuffer {
1041 buffer: Entity<Buffer>,
1042 diff_base: Rope,
1043 last_seen_base: Rope,
1044 unreviewed_edits: Patch<u32>,
1045 status: TrackedBufferStatus,
1046 version: clock::Global,
1047 diff: Entity<BufferDiff>,
1048 snapshot: text::BufferSnapshot,
1049 diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
1050 may_have_unnotified_user_edits: bool,
1051 _open_lsp_handle: OpenLspBufferHandle,
1052 _maintain_diff: Task<()>,
1053 _subscription: Subscription,
1054}
1055
1056impl TrackedBuffer {
1057 fn has_edits(&self, cx: &App) -> bool {
1058 self.diff
1059 .read(cx)
1060 .hunks(self.buffer.read(cx), cx)
1061 .next()
1062 .is_some()
1063 }
1064
1065 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
1066 self.diff_update
1067 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
1068 .ok();
1069 }
1070}
1071
1072pub struct ChangedBuffer {
1073 pub diff: Entity<BufferDiff>,
1074}
1075
1076#[cfg(test)]
1077mod tests {
1078 use super::*;
1079 use buffer_diff::DiffHunkStatusKind;
1080 use gpui::TestAppContext;
1081 use indoc::indoc;
1082 use language::Point;
1083 use project::{FakeFs, Fs, Project, RemoveOptions};
1084 use rand::prelude::*;
1085 use serde_json::json;
1086 use settings::SettingsStore;
1087 use std::env;
1088 use util::{RandomCharIter, path};
1089
1090 #[ctor::ctor]
1091 fn init_logger() {
1092 zlog::init_test();
1093 }
1094
1095 fn init_test(cx: &mut TestAppContext) {
1096 cx.update(|cx| {
1097 let settings_store = SettingsStore::test(cx);
1098 cx.set_global(settings_store);
1099 language::init(cx);
1100 Project::init_settings(cx);
1101 });
1102 }
1103
1104 #[gpui::test(iterations = 10)]
1105 async fn test_keep_edits(cx: &mut TestAppContext) {
1106 init_test(cx);
1107
1108 let fs = FakeFs::new(cx.executor());
1109 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1110 .await;
1111 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1112 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1113 let file_path = project
1114 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1115 .unwrap();
1116 let buffer = project
1117 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1118 .await
1119 .unwrap();
1120
1121 cx.update(|cx| {
1122 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1123 buffer.update(cx, |buffer, cx| {
1124 buffer
1125 .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
1126 .unwrap()
1127 });
1128 buffer.update(cx, |buffer, cx| {
1129 buffer
1130 .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
1131 .unwrap()
1132 });
1133 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1134 });
1135 cx.run_until_parked();
1136 assert_eq!(
1137 buffer.read_with(cx, |buffer, _| buffer.text()),
1138 "abc\ndEf\nghi\njkl\nmnO"
1139 );
1140 assert_eq!(
1141 unreviewed_hunks(&action_log, cx),
1142 vec![(
1143 buffer.clone(),
1144 vec![
1145 HunkStatus {
1146 range: Point::new(1, 0)..Point::new(2, 0),
1147 diff_status: DiffHunkStatusKind::Modified,
1148 old_text: "def\n".into(),
1149 },
1150 HunkStatus {
1151 range: Point::new(4, 0)..Point::new(4, 3),
1152 diff_status: DiffHunkStatusKind::Modified,
1153 old_text: "mno".into(),
1154 }
1155 ],
1156 )]
1157 );
1158
1159 action_log.update(cx, |log, cx| {
1160 log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), None, cx)
1161 });
1162 cx.run_until_parked();
1163 assert_eq!(
1164 unreviewed_hunks(&action_log, cx),
1165 vec![(
1166 buffer.clone(),
1167 vec![HunkStatus {
1168 range: Point::new(1, 0)..Point::new(2, 0),
1169 diff_status: DiffHunkStatusKind::Modified,
1170 old_text: "def\n".into(),
1171 }],
1172 )]
1173 );
1174
1175 action_log.update(cx, |log, cx| {
1176 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), None, cx)
1177 });
1178 cx.run_until_parked();
1179 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1180 }
1181
1182 #[gpui::test(iterations = 10)]
1183 async fn test_deletions(cx: &mut TestAppContext) {
1184 init_test(cx);
1185
1186 let fs = FakeFs::new(cx.executor());
1187 fs.insert_tree(
1188 path!("/dir"),
1189 json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
1190 )
1191 .await;
1192 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1193 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1194 let file_path = project
1195 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1196 .unwrap();
1197 let buffer = project
1198 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1199 .await
1200 .unwrap();
1201
1202 cx.update(|cx| {
1203 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1204 buffer.update(cx, |buffer, cx| {
1205 buffer
1206 .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
1207 .unwrap();
1208 buffer.finalize_last_transaction();
1209 });
1210 buffer.update(cx, |buffer, cx| {
1211 buffer
1212 .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
1213 .unwrap();
1214 buffer.finalize_last_transaction();
1215 });
1216 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1217 });
1218 cx.run_until_parked();
1219 assert_eq!(
1220 buffer.read_with(cx, |buffer, _| buffer.text()),
1221 "abc\nghi\njkl\npqr"
1222 );
1223 assert_eq!(
1224 unreviewed_hunks(&action_log, cx),
1225 vec![(
1226 buffer.clone(),
1227 vec![
1228 HunkStatus {
1229 range: Point::new(1, 0)..Point::new(1, 0),
1230 diff_status: DiffHunkStatusKind::Deleted,
1231 old_text: "def\n".into(),
1232 },
1233 HunkStatus {
1234 range: Point::new(3, 0)..Point::new(3, 0),
1235 diff_status: DiffHunkStatusKind::Deleted,
1236 old_text: "mno\n".into(),
1237 }
1238 ],
1239 )]
1240 );
1241
1242 buffer.update(cx, |buffer, cx| buffer.undo(cx));
1243 cx.run_until_parked();
1244 assert_eq!(
1245 buffer.read_with(cx, |buffer, _| buffer.text()),
1246 "abc\nghi\njkl\nmno\npqr"
1247 );
1248 assert_eq!(
1249 unreviewed_hunks(&action_log, cx),
1250 vec![(
1251 buffer.clone(),
1252 vec![HunkStatus {
1253 range: Point::new(1, 0)..Point::new(1, 0),
1254 diff_status: DiffHunkStatusKind::Deleted,
1255 old_text: "def\n".into(),
1256 }],
1257 )]
1258 );
1259
1260 action_log.update(cx, |log, cx| {
1261 log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), None, cx)
1262 });
1263 cx.run_until_parked();
1264 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1265 }
1266
1267 #[gpui::test(iterations = 10)]
1268 async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
1269 init_test(cx);
1270
1271 let fs = FakeFs::new(cx.executor());
1272 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1273 .await;
1274 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1275 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1276 let file_path = project
1277 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1278 .unwrap();
1279 let buffer = project
1280 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1281 .await
1282 .unwrap();
1283
1284 cx.update(|cx| {
1285 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1286 buffer.update(cx, |buffer, cx| {
1287 buffer
1288 .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
1289 .unwrap()
1290 });
1291 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1292 });
1293 cx.run_until_parked();
1294 assert_eq!(
1295 buffer.read_with(cx, |buffer, _| buffer.text()),
1296 "abc\ndeF\nGHI\njkl\nmno"
1297 );
1298 assert_eq!(
1299 unreviewed_hunks(&action_log, cx),
1300 vec![(
1301 buffer.clone(),
1302 vec![HunkStatus {
1303 range: Point::new(1, 0)..Point::new(3, 0),
1304 diff_status: DiffHunkStatusKind::Modified,
1305 old_text: "def\nghi\n".into(),
1306 }],
1307 )]
1308 );
1309
1310 buffer.update(cx, |buffer, cx| {
1311 buffer.edit(
1312 [
1313 (Point::new(0, 2)..Point::new(0, 2), "X"),
1314 (Point::new(3, 0)..Point::new(3, 0), "Y"),
1315 ],
1316 None,
1317 cx,
1318 )
1319 });
1320 cx.run_until_parked();
1321 assert_eq!(
1322 buffer.read_with(cx, |buffer, _| buffer.text()),
1323 "abXc\ndeF\nGHI\nYjkl\nmno"
1324 );
1325 assert_eq!(
1326 unreviewed_hunks(&action_log, cx),
1327 vec![(
1328 buffer.clone(),
1329 vec![HunkStatus {
1330 range: Point::new(1, 0)..Point::new(3, 0),
1331 diff_status: DiffHunkStatusKind::Modified,
1332 old_text: "def\nghi\n".into(),
1333 }],
1334 )]
1335 );
1336
1337 buffer.update(cx, |buffer, cx| {
1338 buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
1339 });
1340 cx.run_until_parked();
1341 assert_eq!(
1342 buffer.read_with(cx, |buffer, _| buffer.text()),
1343 "abXc\ndZeF\nGHI\nYjkl\nmno"
1344 );
1345 assert_eq!(
1346 unreviewed_hunks(&action_log, cx),
1347 vec![(
1348 buffer.clone(),
1349 vec![HunkStatus {
1350 range: Point::new(1, 0)..Point::new(3, 0),
1351 diff_status: DiffHunkStatusKind::Modified,
1352 old_text: "def\nghi\n".into(),
1353 }],
1354 )]
1355 );
1356
1357 action_log.update(cx, |log, cx| {
1358 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), None, cx)
1359 });
1360 cx.run_until_parked();
1361 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1362 }
1363
1364 #[gpui::test(iterations = 10)]
1365 async fn test_user_edits_notifications(cx: &mut TestAppContext) {
1366 init_test(cx);
1367
1368 let fs = FakeFs::new(cx.executor());
1369 fs.insert_tree(
1370 path!("/dir"),
1371 json!({"file": indoc! {"
1372 abc
1373 def
1374 ghi
1375 jkl
1376 mno"}}),
1377 )
1378 .await;
1379 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1380 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1381 let file_path = project
1382 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1383 .unwrap();
1384 let buffer = project
1385 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1386 .await
1387 .unwrap();
1388
1389 // Agent edits
1390 cx.update(|cx| {
1391 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1392 buffer.update(cx, |buffer, cx| {
1393 buffer
1394 .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
1395 .unwrap()
1396 });
1397 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1398 });
1399 cx.run_until_parked();
1400 assert_eq!(
1401 buffer.read_with(cx, |buffer, _| buffer.text()),
1402 indoc! {"
1403 abc
1404 deF
1405 GHI
1406 jkl
1407 mno"}
1408 );
1409 assert_eq!(
1410 unreviewed_hunks(&action_log, cx),
1411 vec![(
1412 buffer.clone(),
1413 vec![HunkStatus {
1414 range: Point::new(1, 0)..Point::new(3, 0),
1415 diff_status: DiffHunkStatusKind::Modified,
1416 old_text: "def\nghi\n".into(),
1417 }],
1418 )]
1419 );
1420
1421 // User edits
1422 buffer.update(cx, |buffer, cx| {
1423 buffer.edit(
1424 [
1425 (Point::new(0, 2)..Point::new(0, 2), "X"),
1426 (Point::new(3, 0)..Point::new(3, 0), "Y"),
1427 ],
1428 None,
1429 cx,
1430 )
1431 });
1432 cx.run_until_parked();
1433 assert_eq!(
1434 buffer.read_with(cx, |buffer, _| buffer.text()),
1435 indoc! {"
1436 abXc
1437 deF
1438 GHI
1439 Yjkl
1440 mno"}
1441 );
1442
        // User edits should be stored separately from the agent's.
1444 let user_edits = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));
1445 assert_eq!(
1446 user_edits.expect("should have some user edits"),
1447 indoc! {"
1448 --- a/dir/file
1449 +++ b/dir/file
1450 @@ -1,5 +1,5 @@
1451 -abc
1452 +abXc
1453 def
1454 ghi
1455 -jkl
1456 +Yjkl
1457 mno
1458 "}
1459 );
1460
1461 action_log.update(cx, |log, cx| {
1462 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), None, cx)
1463 });
1464 cx.run_until_parked();
1465 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1466 }
1467
1468 #[gpui::test(iterations = 10)]
1469 async fn test_creating_files(cx: &mut TestAppContext) {
1470 init_test(cx);
1471
1472 let fs = FakeFs::new(cx.executor());
1473 fs.insert_tree(path!("/dir"), json!({})).await;
1474 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1475 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1476 let file_path = project
1477 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1478 .unwrap();
1479
1480 let buffer = project
1481 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1482 .await
1483 .unwrap();
1484 cx.update(|cx| {
1485 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1486 buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
1487 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1488 });
1489 project
1490 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1491 .await
1492 .unwrap();
1493 cx.run_until_parked();
1494 assert_eq!(
1495 unreviewed_hunks(&action_log, cx),
1496 vec![(
1497 buffer.clone(),
1498 vec![HunkStatus {
1499 range: Point::new(0, 0)..Point::new(0, 5),
1500 diff_status: DiffHunkStatusKind::Added,
1501 old_text: "".into(),
1502 }],
1503 )]
1504 );
1505
1506 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
1507 cx.run_until_parked();
1508 assert_eq!(
1509 unreviewed_hunks(&action_log, cx),
1510 vec![(
1511 buffer.clone(),
1512 vec![HunkStatus {
1513 range: Point::new(0, 0)..Point::new(0, 6),
1514 diff_status: DiffHunkStatusKind::Added,
1515 old_text: "".into(),
1516 }],
1517 )]
1518 );
1519
1520 action_log.update(cx, |log, cx| {
1521 log.keep_edits_in_range(buffer.clone(), 0..5, None, cx)
1522 });
1523 cx.run_until_parked();
1524 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1525 }
1526
1527 #[gpui::test(iterations = 10)]
1528 async fn test_overwriting_files(cx: &mut TestAppContext) {
1529 init_test(cx);
1530
1531 let fs = FakeFs::new(cx.executor());
1532 fs.insert_tree(
1533 path!("/dir"),
1534 json!({
1535 "file1": "Lorem ipsum dolor"
1536 }),
1537 )
1538 .await;
1539 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1540 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1541 let file_path = project
1542 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1543 .unwrap();
1544
1545 let buffer = project
1546 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1547 .await
1548 .unwrap();
1549 cx.update(|cx| {
1550 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1551 buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
1552 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1553 });
1554 project
1555 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1556 .await
1557 .unwrap();
1558 cx.run_until_parked();
1559 assert_eq!(
1560 unreviewed_hunks(&action_log, cx),
1561 vec![(
1562 buffer.clone(),
1563 vec![HunkStatus {
1564 range: Point::new(0, 0)..Point::new(0, 19),
1565 diff_status: DiffHunkStatusKind::Added,
1566 old_text: "".into(),
1567 }],
1568 )]
1569 );
1570
1571 action_log
1572 .update(cx, |log, cx| {
1573 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
1574 })
1575 .await
1576 .unwrap();
1577 cx.run_until_parked();
1578 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1579 assert_eq!(
1580 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1581 "Lorem ipsum dolor"
1582 );
1583 }
1584
1585 #[gpui::test(iterations = 10)]
1586 async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
1587 init_test(cx);
1588
1589 let fs = FakeFs::new(cx.executor());
1590 fs.insert_tree(
1591 path!("/dir"),
1592 json!({
1593 "file1": "Lorem ipsum dolor"
1594 }),
1595 )
1596 .await;
1597 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1598 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1599 let file_path = project
1600 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1601 .unwrap();
1602
1603 let buffer = project
1604 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1605 .await
1606 .unwrap();
1607 cx.update(|cx| {
1608 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1609 buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
1610 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1611 });
1612 project
1613 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1614 .await
1615 .unwrap();
1616 cx.run_until_parked();
1617 assert_eq!(
1618 unreviewed_hunks(&action_log, cx),
1619 vec![(
1620 buffer.clone(),
1621 vec![HunkStatus {
1622 range: Point::new(0, 0)..Point::new(0, 37),
1623 diff_status: DiffHunkStatusKind::Modified,
1624 old_text: "Lorem ipsum dolor".into(),
1625 }],
1626 )]
1627 );
1628
1629 cx.update(|cx| {
1630 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1631 buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
1632 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1633 });
1634 project
1635 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1636 .await
1637 .unwrap();
1638 cx.run_until_parked();
1639 assert_eq!(
1640 unreviewed_hunks(&action_log, cx),
1641 vec![(
1642 buffer.clone(),
1643 vec![HunkStatus {
1644 range: Point::new(0, 0)..Point::new(0, 9),
1645 diff_status: DiffHunkStatusKind::Added,
1646 old_text: "".into(),
1647 }],
1648 )]
1649 );
1650
1651 action_log
1652 .update(cx, |log, cx| {
1653 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
1654 })
1655 .await
1656 .unwrap();
1657 cx.run_until_parked();
1658 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1659 assert_eq!(
1660 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1661 "Lorem ipsum dolor"
1662 );
1663 }
1664
1665 #[gpui::test(iterations = 10)]
1666 async fn test_deleting_files(cx: &mut TestAppContext) {
1667 init_test(cx);
1668
1669 let fs = FakeFs::new(cx.executor());
1670 fs.insert_tree(
1671 path!("/dir"),
1672 json!({"file1": "lorem\n", "file2": "ipsum\n"}),
1673 )
1674 .await;
1675
1676 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1677 let file1_path = project
1678 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1679 .unwrap();
1680 let file2_path = project
1681 .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
1682 .unwrap();
1683
1684 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1685 let buffer1 = project
1686 .update(cx, |project, cx| {
1687 project.open_buffer(file1_path.clone(), cx)
1688 })
1689 .await
1690 .unwrap();
1691 let buffer2 = project
1692 .update(cx, |project, cx| {
1693 project.open_buffer(file2_path.clone(), cx)
1694 })
1695 .await
1696 .unwrap();
1697
1698 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
1699 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
1700 project
1701 .update(cx, |project, cx| {
1702 project.delete_file(file1_path.clone(), false, cx)
1703 })
1704 .unwrap()
1705 .await
1706 .unwrap();
1707 project
1708 .update(cx, |project, cx| {
1709 project.delete_file(file2_path.clone(), false, cx)
1710 })
1711 .unwrap()
1712 .await
1713 .unwrap();
1714 cx.run_until_parked();
1715 assert_eq!(
1716 unreviewed_hunks(&action_log, cx),
1717 vec![
1718 (
1719 buffer1.clone(),
1720 vec![HunkStatus {
1721 range: Point::new(0, 0)..Point::new(0, 0),
1722 diff_status: DiffHunkStatusKind::Deleted,
1723 old_text: "lorem\n".into(),
1724 }]
1725 ),
1726 (
1727 buffer2.clone(),
1728 vec![HunkStatus {
1729 range: Point::new(0, 0)..Point::new(0, 0),
1730 diff_status: DiffHunkStatusKind::Deleted,
1731 old_text: "ipsum\n".into(),
1732 }],
1733 )
1734 ]
1735 );
1736
1737 // Simulate file1 being recreated externally.
1738 fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
1739 .await;
1740
1741 // Simulate file2 being recreated by a tool.
1742 let buffer2 = project
1743 .update(cx, |project, cx| project.open_buffer(file2_path, cx))
1744 .await
1745 .unwrap();
1746 action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
1747 buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
1748 action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
1749 project
1750 .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
1751 .await
1752 .unwrap();
1753
1754 cx.run_until_parked();
1755 assert_eq!(
1756 unreviewed_hunks(&action_log, cx),
1757 vec![(
1758 buffer2.clone(),
1759 vec![HunkStatus {
1760 range: Point::new(0, 0)..Point::new(0, 5),
1761 diff_status: DiffHunkStatusKind::Added,
1762 old_text: "".into(),
1763 }],
1764 )]
1765 );
1766
1767 // Simulate file2 being deleted externally.
1768 fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
1769 .await
1770 .unwrap();
1771 cx.run_until_parked();
1772 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1773 }
1774
1775 #[gpui::test(iterations = 10)]
1776 async fn test_reject_edits(cx: &mut TestAppContext) {
1777 init_test(cx);
1778
1779 let fs = FakeFs::new(cx.executor());
1780 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1781 .await;
1782 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1783 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1784 let file_path = project
1785 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1786 .unwrap();
1787 let buffer = project
1788 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1789 .await
1790 .unwrap();
1791
1792 cx.update(|cx| {
1793 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1794 buffer.update(cx, |buffer, cx| {
1795 buffer
1796 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1797 .unwrap()
1798 });
1799 buffer.update(cx, |buffer, cx| {
1800 buffer
1801 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1802 .unwrap()
1803 });
1804 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1805 });
1806 cx.run_until_parked();
1807 assert_eq!(
1808 buffer.read_with(cx, |buffer, _| buffer.text()),
1809 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1810 );
1811 assert_eq!(
1812 unreviewed_hunks(&action_log, cx),
1813 vec![(
1814 buffer.clone(),
1815 vec![
1816 HunkStatus {
1817 range: Point::new(1, 0)..Point::new(3, 0),
1818 diff_status: DiffHunkStatusKind::Modified,
1819 old_text: "def\n".into(),
1820 },
1821 HunkStatus {
1822 range: Point::new(5, 0)..Point::new(5, 3),
1823 diff_status: DiffHunkStatusKind::Modified,
1824 old_text: "mno".into(),
1825 }
1826 ],
1827 )]
1828 );
1829
1830 // If the rejected range doesn't overlap with any hunk, we ignore it.
1831 action_log
1832 .update(cx, |log, cx| {
1833 log.reject_edits_in_ranges(
1834 buffer.clone(),
1835 vec![Point::new(4, 0)..Point::new(4, 0)],
1836 None,
1837 cx,
1838 )
1839 })
1840 .await
1841 .unwrap();
1842 cx.run_until_parked();
1843 assert_eq!(
1844 buffer.read_with(cx, |buffer, _| buffer.text()),
1845 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1846 );
1847 assert_eq!(
1848 unreviewed_hunks(&action_log, cx),
1849 vec![(
1850 buffer.clone(),
1851 vec![
1852 HunkStatus {
1853 range: Point::new(1, 0)..Point::new(3, 0),
1854 diff_status: DiffHunkStatusKind::Modified,
1855 old_text: "def\n".into(),
1856 },
1857 HunkStatus {
1858 range: Point::new(5, 0)..Point::new(5, 3),
1859 diff_status: DiffHunkStatusKind::Modified,
1860 old_text: "mno".into(),
1861 }
1862 ],
1863 )]
1864 );
1865
1866 action_log
1867 .update(cx, |log, cx| {
1868 log.reject_edits_in_ranges(
1869 buffer.clone(),
1870 vec![Point::new(0, 0)..Point::new(1, 0)],
1871 None,
1872 cx,
1873 )
1874 })
1875 .await
1876 .unwrap();
1877 cx.run_until_parked();
1878 assert_eq!(
1879 buffer.read_with(cx, |buffer, _| buffer.text()),
1880 "abc\ndef\nghi\njkl\nmnO"
1881 );
1882 assert_eq!(
1883 unreviewed_hunks(&action_log, cx),
1884 vec![(
1885 buffer.clone(),
1886 vec![HunkStatus {
1887 range: Point::new(4, 0)..Point::new(4, 3),
1888 diff_status: DiffHunkStatusKind::Modified,
1889 old_text: "mno".into(),
1890 }],
1891 )]
1892 );
1893
1894 action_log
1895 .update(cx, |log, cx| {
1896 log.reject_edits_in_ranges(
1897 buffer.clone(),
1898 vec![Point::new(4, 0)..Point::new(4, 0)],
1899 None,
1900 cx,
1901 )
1902 })
1903 .await
1904 .unwrap();
1905 cx.run_until_parked();
1906 assert_eq!(
1907 buffer.read_with(cx, |buffer, _| buffer.text()),
1908 "abc\ndef\nghi\njkl\nmno"
1909 );
1910 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1911 }
1912
    #[gpui::test(iterations = 10)]
    async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        action_log.update(cx, |log, cx| {
            let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
                ..buffer.read(cx).anchor_before(Point::new(1, 0));
            let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
                ..buffer.read(cx).anchor_before(Point::new(5, 3));

            log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], None, cx)
                .detach();
            assert_eq!(
                buffer.read_with(cx, |buffer, _| buffer.text()),
                "abc\ndef\nghi\njkl\nmno"
            );
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

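    // Rejecting the hunk produced by an agent deletion should recreate the file on disk
    // with its original contents.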
    #[gpui::test(iterations = 10)]
    async fn test_reject_deleted_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "content"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| {
                project.delete_file(file_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "content".into(),
                }]
            )]
        );

        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 0)],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
        assert!(fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

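    // Rejecting the hunk produced by an agent-created file should delete that file from
    // disk again.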
    #[gpui::test(iterations = 10)]
    async fn test_reject_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 7),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 11)],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

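    // If the user has edited an agent-created file, rejecting must keep the file and its
    // full contents rather than discarding the user's work.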
    #[gpui::test]
    async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();

        // User makes additional edits
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| {
                buffer.edit([(10..10, "\nuser added this line")], None, cx);
            });
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // Reject all
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(100, 0)],
                    None,
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();

        // File should still contain all the content
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        let content = buffer.read_with(cx, |buffer, _| buffer.text());
        assert_eq!(content, "ai content\nuser added this line");
    }

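    // Once the creation hunk has been accepted, rejecting a later agent edit should only
    // roll back to the accepted content instead of deleting the file.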
    #[gpui::test]
    async fn test_reject_after_accepting_hunk_on_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User accepts the single hunk
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Anchor::MIN..Anchor::MAX, None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // AI modifies the file
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User rejects the hunk
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(buffer.clone(), vec![Anchor::MIN..Anchor::MAX], None, cx)
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "ai content v1"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

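    // "Reject All" issued after a previous "Accept All" should restore the previously
    // accepted content rather than removing the created file.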
    #[gpui::test]
    async fn test_reject_edits_on_previously_accepted_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();

        // User clicks "Accept All"
        action_log.update(cx, |log, cx| log.keep_all_edits(None, cx));
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared

        // AI modifies file again
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User clicks "Reject All"
        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "ai content v1"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

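    // Fuzz test: interleaves agent edits, user edits, keeps, and rejections, then checks
    // (via `quiesce` below) that the tracked diff base stays consistent with the buffer.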
    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        init_test(cx);

        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

        for _ in 0..operations {
            match rng.random_range(0..100) {
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, None, cx)
                    });
                }
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            log.reject_edits_in_ranges(buffer.clone(), vec![range], None, cx)
                        })
                        .await
                        .unwrap();
                }
                _ => {
                    let is_agent_edit = rng.random_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            if rng.random_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

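        // Replays the unreviewed edits on top of the diff base and asserts that the
        // result matches the buffer, i.e. the tracked state has converged.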
        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }

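    // Committing to git should mark agent edits that match the new HEAD as kept, leaving
    // only the hunks the commit didn't incorporate.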
    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Make another commit that accepts the GGG change and commits a different line
        // in place of the NEW addition, so that hunk stays unreviewed
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into())],
            "0000002",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer,
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

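    // Simplified view of a diff hunk, used for assertions in these tests.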
    #[derive(Debug, Clone, PartialEq, Eq)]
    struct HunkStatus {
        range: Range<Point>,
        diff_status: DiffHunkStatusKind,
        old_text: String,
    }

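    // Collects, for every buffer the action log reports as changed, its unreviewed hunks
    // as `HunkStatus` values.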
    fn unreviewed_hunks(
        action_log: &Entity<ActionLog>,
        cx: &TestAppContext,
    ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
        cx.read(|cx| {
            action_log
                .read(cx)
                .changed_buffers(cx)
                .into_iter()
                .map(|(buffer, diff)| {
                    let snapshot = buffer.read(cx).snapshot();
                    (
                        buffer,
                        diff.read(cx)
                            .hunks(&snapshot, cx)
                            .map(|hunk| HunkStatus {
                                diff_status: hunk.status().kind,
                                range: hunk.range,
                                old_text: diff
                                    .read(cx)
                                    .base_text()
                                    .text_for_range(hunk.diff_base_byte_range)
                                    .collect(),
                            })
                            .collect(),
                    )
                })
                .collect()
        })
    }

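    // `unnotified_user_edits` should render user edits as a unified diff with a/ and b/
    // file headers.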
    #[gpui::test]
    async fn test_format_patch(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"test.txt": "line 1\nline 2\nline 3\n"}),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/test.txt", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            // Track the buffer and mark it as read first
            action_log.update(cx, |log, cx| {
                log.buffer_read(buffer.clone(), cx);
            });

            // Make some edits to create a patch
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 0)..Point::new(1, 6), "CHANGED")], None, cx)
                    .unwrap(); // Replace "line 2" with "CHANGED"
            });
        });

        cx.run_until_parked();

        // Get the patch
        let patch = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));

        // Verify the patch is formatted as the expected unified diff
        assert_eq!(
            patch.unwrap(),
            indoc! {"
                --- a/dir/test.txt
                +++ b/dir/test.txt
                @@ -1,3 +1,3 @@
                 line 1
                -line 2
                +CHANGED
                 line 3
            "}
        );
    }
}