1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use clock;
4use collections::BTreeMap;
5use futures::{FutureExt, StreamExt, channel::mpsc};
6use gpui::{
7 App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
8};
9use language::{Anchor, Buffer, BufferEvent, Point, ToOffset, ToPoint};
10use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
11use std::{cmp, ops::Range, sync::Arc};
12use text::{Edit, Patch, Rope};
13use util::{RangeExt, ResultExt as _};
14
/// Stores undo information for a single buffer's rejected edits.
///
/// Captured while rejecting edits so that [`ActionLog::undo_last_reject`] can
/// later re-apply the agent-authored text that was reverted.
#[derive(Clone)]
pub struct PerBufferUndo {
    /// Weak handle to the buffer; undo skips buffers that were dropped.
    pub buffer: WeakEntity<Buffer>,
    /// Anchor ranges paired with the agent-authored text to re-insert on undo.
    pub edits_to_restore: Vec<(Range<Anchor>, String)>,
    /// How the agent had affected the buffer (modified vs. created).
    pub status: UndoBufferStatus,
}
22
/// Tracks the buffer status for undo purposes.
#[derive(Clone, Debug)]
pub enum UndoBufferStatus {
    /// Buffer existed before and was modified by the agent.
    Modified,
    /// Buffer was created by the agent.
    /// - `had_existing_content: true` - Agent overwrote an existing file. On reject, the
    ///   original content was restored. Undo is supported: we restore the agent's content.
    /// - `had_existing_content: false` - Agent created a new file that didn't exist before.
    ///   On reject, the file was deleted. Undo is NOT currently supported (would require
    ///   recreating the file). Future TODO.
    Created {
        had_existing_content: bool,
    },
}
37
/// Stores undo information for the most recent reject operation.
///
/// Only the latest rejection is retained; starting a new reject clears any
/// previously stored undo state.
#[derive(Clone)]
pub struct LastRejectUndo {
    /// Per-buffer undo information, one entry per buffer that produced
    /// restorable edits during the rejection.
    pub buffers: Vec<PerBufferUndo>,
}
44
/// Tracks actions performed by tools in a thread.
pub struct ActionLog {
    /// Buffers that we want to notify the model about when they change.
    tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
    /// The project this action log is associated with
    project: Entity<Project>,
    /// An action log to forward all public methods to
    /// Useful in cases like subagents, where we want to track individual diffs for this subagent,
    /// but also want to associate the reads/writes with a parent review experience
    linked_action_log: Option<Entity<ActionLog>>,
    /// Stores undo information for the most recent reject operation
    last_reject_undo: Option<LastRejectUndo>,
}
58
59impl ActionLog {
60 /// Creates a new, empty action log associated with the given project.
61 pub fn new(project: Entity<Project>) -> Self {
62 Self {
63 tracked_buffers: BTreeMap::default(),
64 project,
65 linked_action_log: None,
66 last_reject_undo: None,
67 }
68 }
69
70 pub fn with_linked_action_log(mut self, linked_action_log: Entity<ActionLog>) -> Self {
71 self.linked_action_log = Some(linked_action_log);
72 self
73 }
74
    /// Returns the project this action log is associated with.
    pub fn project(&self) -> &Entity<Project> {
        &self.project
    }
78
    /// Starts (or refreshes) tracking for `buffer`, returning its entry.
    ///
    /// When `is_created` is true, the buffer is recorded as agent-created and
    /// any pre-existing on-disk content is captured so a reject can restore it.
    /// Otherwise the buffer's current content becomes the diff base.
    fn track_buffer_internal(
        &mut self,
        buffer: Entity<Buffer>,
        is_created: bool,
        cx: &mut Context<Self>,
    ) -> &mut TrackedBuffer {
        let status = if is_created {
            if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
                // Already tracked: preserve previously captured file content;
                // for modified/deleted buffers, the diff base is what should
                // be restored on reject.
                match tracked.status {
                    TrackedBufferStatus::Created {
                        existing_file_content,
                    } => TrackedBufferStatus::Created {
                        existing_file_content,
                    },
                    TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
                        TrackedBufferStatus::Created {
                            existing_file_content: Some(tracked.diff_base),
                        }
                    }
                }
            } else if buffer
                .read(cx)
                .file()
                .is_some_and(|file| file.disk_state().exists())
            {
                // The agent is overwriting a file that exists on disk:
                // snapshot its current content so reject can restore it.
                TrackedBufferStatus::Created {
                    existing_file_content: Some(buffer.read(cx).as_rope().clone()),
                }
            } else {
                // Brand-new file: nothing to restore on reject.
                TrackedBufferStatus::Created {
                    existing_file_content: None,
                }
            }
        } else {
            TrackedBufferStatus::Modified
        };

        let tracked_buffer = self
            .tracked_buffers
            .entry(buffer.clone())
            .or_insert_with(|| {
                // Keep language servers aware of this buffer while we track it.
                let open_lsp_handle = self.project.update(cx, |project, cx| {
                    project.register_buffer_with_language_servers(&buffer, cx)
                });

                let text_snapshot = buffer.read(cx).text_snapshot();
                let language = buffer.read(cx).language().cloned();
                let language_registry = buffer.read(cx).language_registry();
                let diff = cx.new(|cx| {
                    let mut diff = BufferDiff::new(&text_snapshot, cx);
                    diff.language_changed(language, language_registry, cx);
                    diff
                });
                let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
                let diff_base;
                let unreviewed_edits;
                if is_created {
                    // A created buffer diffs against empty content: the whole
                    // buffer is one unreviewed insertion.
                    diff_base = Rope::default();
                    unreviewed_edits = Patch::new(vec![Edit {
                        old: 0..1,
                        new: 0..text_snapshot.max_point().row + 1,
                    }])
                } else {
                    diff_base = buffer.read(cx).as_rope().clone();
                    unreviewed_edits = Patch::default();
                }
                TrackedBuffer {
                    buffer: buffer.clone(),
                    diff_base,
                    unreviewed_edits,
                    snapshot: text_snapshot,
                    status,
                    version: buffer.read(cx).version(),
                    diff,
                    diff_update: diff_update_tx,
                    _open_lsp_handle: open_lsp_handle,
                    // Long-lived task that applies queued snapshots to the diff.
                    _maintain_diff: cx.spawn({
                        let buffer = buffer.clone();
                        async move |this, cx| {
                            Self::maintain_diff(this, buffer, diff_update_rx, cx)
                                .await
                                .ok();
                        }
                    }),
                    _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
                }
            });
        // Record the version we've seen so stale-buffer detection and event
        // handling can recognize later edits.
        tracked_buffer.version = buffer.read(cx).version();
        tracked_buffer
    }
169
170 fn handle_buffer_event(
171 &mut self,
172 buffer: Entity<Buffer>,
173 event: &BufferEvent,
174 cx: &mut Context<Self>,
175 ) {
176 match event {
177 BufferEvent::Edited => {
178 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
179 return;
180 };
181 let buffer_version = buffer.read(cx).version();
182 if !buffer_version.changed_since(&tracked_buffer.version) {
183 return;
184 }
185 self.handle_buffer_edited(buffer, cx);
186 }
187 BufferEvent::FileHandleChanged => {
188 self.handle_buffer_file_changed(buffer, cx);
189 }
190 _ => {}
191 };
192 }
193
194 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
195 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
196 return;
197 };
198 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
199 }
200
    /// Reconciles tracking state when the buffer's underlying file changes on
    /// disk outside the agent's control (deleted or restored externally).
    fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        match tracked_buffer.status {
            TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
                if buffer
                    .read(cx)
                    .file()
                    .is_some_and(|file| file.disk_state().is_deleted())
                {
                    // If the buffer had been edited by a tool, but it got
                    // deleted externally, we want to stop tracking it.
                    self.tracked_buffers.remove(&buffer);
                }
                cx.notify();
            }
            TrackedBufferStatus::Deleted => {
                if buffer
                    .read(cx)
                    .file()
                    .is_some_and(|file| !file.disk_state().is_deleted())
                {
                    // If the buffer had been deleted by a tool, but it got
                    // resurrected externally, we want to clear the edits we
                    // were tracking and reset the buffer's state.
                    self.tracked_buffers.remove(&buffer);
                    self.track_buffer_internal(buffer, false, cx);
                }
                cx.notify();
            }
        }
    }
235
236 async fn maintain_diff(
237 this: WeakEntity<Self>,
238 buffer: Entity<Buffer>,
239 mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
240 cx: &mut AsyncApp,
241 ) -> Result<()> {
242 let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
243 let git_diff = this
244 .update(cx, |this, cx| {
245 this.project.update(cx, |project, cx| {
246 project.open_uncommitted_diff(buffer.clone(), cx)
247 })
248 })?
249 .await
250 .ok();
251 let buffer_repo = git_store.read_with(cx, |git_store, cx| {
252 git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
253 });
254
255 let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
256 let _repo_subscription =
257 if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
258 cx.update(|cx| {
259 let mut old_head = buffer_repo.read(cx).head_commit.clone();
260 Some(cx.subscribe(git_diff, move |_, event, cx| {
261 if let buffer_diff::BufferDiffEvent::DiffChanged { .. } = event {
262 let new_head = buffer_repo.read(cx).head_commit.clone();
263 if new_head != old_head {
264 old_head = new_head;
265 git_diff_updates_tx.send(()).ok();
266 }
267 }
268 }))
269 })
270 } else {
271 None
272 };
273
274 loop {
275 futures::select_biased! {
276 buffer_update = buffer_updates.next() => {
277 if let Some((author, buffer_snapshot)) = buffer_update {
278 Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
279 } else {
280 break;
281 }
282 }
283 _ = git_diff_updates_rx.changed().fuse() => {
284 if let Some(git_diff) = git_diff.as_ref() {
285 Self::keep_committed_edits(&this, &buffer, git_diff, cx).await?;
286 }
287 }
288 }
289 }
290
291 Ok(())
292 }
293
    /// Handles a new buffer snapshot: diffs it against the last tracked
    /// snapshot and, for user-authored changes, rebases the diff base so the
    /// user's non-conflicting edits don't show up as unreviewed agent edits.
    async fn track_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        author: ChangeAuthor,
        buffer_snapshot: text::BufferSnapshot,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let rebase = this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;

            // Rebase on a background thread; the diff base can be large.
            let rebase = cx.background_spawn({
                let mut base_text = tracked_buffer.diff_base.clone();
                let old_snapshot = tracked_buffer.snapshot.clone();
                let new_snapshot = buffer_snapshot.clone();
                let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let edits = diff_snapshots(&old_snapshot, &new_snapshot);
                async move {
                    if let ChangeAuthor::User = author {
                        // Fold the user's edits into the diff base wherever
                        // they don't conflict with unreviewed agent edits.
                        apply_non_conflicting_edits(
                            &unreviewed_edits,
                            edits,
                            &mut base_text,
                            new_snapshot.as_rope(),
                        );
                    }

                    (Arc::from(base_text.to_string().as_str()), base_text)
                }
            });

            anyhow::Ok(rebase)
        })??;
        let (new_base_text, new_diff_base) = rebase.await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
341
    /// After the repository's HEAD changes, marks agent edits that were
    /// committed as reviewed by folding them into the agent's diff base.
    ///
    /// An unreviewed edit counts as committed when a committed hunk covers the
    /// same old rows and produces identical text.
    async fn keep_committed_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        git_diff: &Entity<BufferDiff>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let buffer_snapshot = this.read_with(cx, |this, _cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok(tracked_buffer.snapshot.clone())
        })??;
        let (new_base_text, new_diff_base) = this
            .read_with(cx, |this, cx| {
                let tracked_buffer = this
                    .tracked_buffers
                    .get(buffer)
                    .context("buffer not tracked")?;
                let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let agent_diff_base = tracked_buffer.diff_base.clone();
                let git_diff_base = git_diff.read(cx).base_text(cx).as_rope().clone();
                let buffer_text = tracked_buffer.snapshot.as_rope().clone();
                anyhow::Ok(cx.background_spawn(async move {
                    let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
                    // Row edits that transform the agent's diff base into the
                    // new git base, i.e. what got committed.
                    let committed_edits = language::line_diff(
                        &agent_diff_base.to_string(),
                        &git_diff_base.to_string(),
                    )
                    .into_iter()
                    .map(|(old, new)| Edit { old, new });

                    let mut new_agent_diff_base = agent_diff_base.clone();
                    // Row offset accumulated as accepted edits grow/shrink the
                    // new diff base relative to the old one.
                    let mut row_delta = 0i32;
                    for committed in committed_edits {
                        while let Some(unreviewed) = old_unreviewed_edits.peek() {
                            // If the committed edit matches the unreviewed
                            // edit, assume the user wants to keep it.
                            if committed.old == unreviewed.old {
                                let unreviewed_new =
                                    buffer_text.slice_rows(unreviewed.new.clone()).to_string();
                                let committed_new =
                                    git_diff_base.slice_rows(committed.new.clone()).to_string();
                                if unreviewed_new == committed_new {
                                    let old_byte_start =
                                        new_agent_diff_base.point_to_offset(Point::new(
                                            (unreviewed.old.start as i32 + row_delta) as u32,
                                            0,
                                        ));
                                    let old_byte_end =
                                        new_agent_diff_base.point_to_offset(cmp::min(
                                            Point::new(
                                                (unreviewed.old.end as i32 + row_delta) as u32,
                                                0,
                                            ),
                                            new_agent_diff_base.max_point(),
                                        ));
                                    new_agent_diff_base
                                        .replace(old_byte_start..old_byte_end, &unreviewed_new);
                                    row_delta +=
                                        unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
                                }
                            } else if unreviewed.old.start >= committed.old.end {
                                // Unreviewed edit lies beyond this committed
                                // hunk; advance to the next committed hunk.
                                break;
                            }

                            old_unreviewed_edits.next().unwrap();
                        }
                    }

                    (
                        Arc::from(new_agent_diff_base.to_string().as_str()),
                        new_agent_diff_base,
                    )
                }))
            })??
            .await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
430
431 async fn update_diff(
432 this: &WeakEntity<ActionLog>,
433 buffer: &Entity<Buffer>,
434 buffer_snapshot: text::BufferSnapshot,
435 new_base_text: Arc<str>,
436 new_diff_base: Rope,
437 cx: &mut AsyncApp,
438 ) -> Result<()> {
439 let (diff, language) = this.read_with(cx, |this, cx| {
440 let tracked_buffer = this
441 .tracked_buffers
442 .get(buffer)
443 .context("buffer not tracked")?;
444 anyhow::Ok((
445 tracked_buffer.diff.clone(),
446 buffer.read(cx).language().cloned(),
447 ))
448 })??;
449 let update = diff
450 .update(cx, |diff, cx| {
451 diff.update_diff(
452 buffer_snapshot.clone(),
453 Some(new_base_text),
454 Some(true),
455 language,
456 cx,
457 )
458 })
459 .await;
460 diff.update(cx, |diff, cx| {
461 diff.set_snapshot(update.clone(), &buffer_snapshot, cx)
462 })
463 .await;
464 let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx));
465
466 let unreviewed_edits = cx
467 .background_spawn({
468 let buffer_snapshot = buffer_snapshot.clone();
469 let new_diff_base = new_diff_base.clone();
470 async move {
471 let mut unreviewed_edits = Patch::default();
472 for hunk in diff_snapshot.hunks_intersecting_range(
473 Anchor::min_for_buffer(buffer_snapshot.remote_id())
474 ..Anchor::max_for_buffer(buffer_snapshot.remote_id()),
475 &buffer_snapshot,
476 ) {
477 let old_range = new_diff_base
478 .offset_to_point(hunk.diff_base_byte_range.start)
479 ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
480 let new_range = hunk.range.start..hunk.range.end;
481 unreviewed_edits.push(point_to_row_edit(
482 Edit {
483 old: old_range,
484 new: new_range,
485 },
486 &new_diff_base,
487 buffer_snapshot.as_rope(),
488 ));
489 }
490 unreviewed_edits
491 }
492 })
493 .await;
494 this.update(cx, |this, cx| {
495 let tracked_buffer = this
496 .tracked_buffers
497 .get_mut(buffer)
498 .context("buffer not tracked")?;
499 tracked_buffer.diff_base = new_diff_base;
500 tracked_buffer.snapshot = buffer_snapshot;
501 tracked_buffer.unreviewed_edits = unreviewed_edits;
502 cx.notify();
503 anyhow::Ok(())
504 })?
505 }
506
507 /// Track a buffer as read by agent, so we can notify the model about user edits.
508 pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
509 if let Some(linked_action_log) = &mut self.linked_action_log {
510 linked_action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
511 }
512 self.track_buffer_internal(buffer, false, cx);
513 }
514
515 /// Mark a buffer as created by agent, so we can refresh it in the context
516 pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
517 if let Some(linked_action_log) = &mut self.linked_action_log {
518 linked_action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
519 }
520 self.track_buffer_internal(buffer, true, cx);
521 }
522
523 /// Mark a buffer as edited by agent, so we can refresh it in the context
524 pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
525 if let Some(linked_action_log) = &mut self.linked_action_log {
526 linked_action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
527 }
528 let new_version = buffer.read(cx).version();
529 let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
530 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
531 tracked_buffer.status = TrackedBufferStatus::Modified;
532 }
533
534 tracked_buffer.version = new_version;
535 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
536 }
537
    /// Records that the agent is about to delete `buffer`.
    ///
    /// Agent-created buffers are simply untracked (creation and deletion
    /// cancel out). Modified buffers transition to `Deleted`; without a
    /// linked log the buffer text is cleared so the deletion shows in review.
    pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        let has_linked_action_log = self.linked_action_log.is_some();
        let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
        match tracked_buffer.status {
            TrackedBufferStatus::Created { .. } => {
                // The agent created it and is now deleting it: nothing left
                // to review.
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            TrackedBufferStatus::Modified => {
                tracked_buffer.status = TrackedBufferStatus::Deleted;
                if !has_linked_action_log {
                    buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
                    tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
                }
            }

            TrackedBufferStatus::Deleted => {}
        }

        if let Some(linked_action_log) = &mut self.linked_action_log {
            linked_action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        }

        // When linked, schedule our own diff refresh only after forwarding the
        // deletion to the linked log.
        if has_linked_action_log && let Some(tracked_buffer) = self.tracked_buffers.get(&buffer) {
            tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
        }

        cx.notify();
    }
567
    /// Marks the agent edits intersecting `buffer_range` as reviewed/accepted.
    ///
    /// Accepted edits are folded into the diff base so they no longer show as
    /// unreviewed; edits outside the range are kept, with their old rows
    /// shifted by the accumulated row delta.
    pub fn keep_edits_in_range(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_range: Range<impl language::ToPoint>,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
        match tracked_buffer.status {
            TrackedBufferStatus::Deleted => {
                // Keeping a deletion means accepting it: stop tracking entirely.
                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            _ => {
                let buffer = buffer.read(cx);
                let buffer_range =
                    buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
                // Row delta introduced in the diff base by the accepted edits
                // folded in so far.
                let mut delta = 0i32;
                tracked_buffer.unreviewed_edits.retain_mut(|edit| {
                    edit.old.start = (edit.old.start as i32 + delta) as u32;
                    edit.old.end = (edit.old.end as i32 + delta) as u32;

                    if buffer_range.end.row < edit.new.start
                        || buffer_range.start.row > edit.new.end
                    {
                        // No intersection with the accepted range; keep the
                        // edit as unreviewed.
                        true
                    } else {
                        // Accept the edit: replace its old rows in the diff
                        // base with the buffer's new rows.
                        let old_range = tracked_buffer
                            .diff_base
                            .point_to_offset(Point::new(edit.old.start, 0))
                            ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                Point::new(edit.old.end, 0),
                                tracked_buffer.diff_base.max_point(),
                            ));
                        let new_range = tracked_buffer
                            .snapshot
                            .point_to_offset(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.point_to_offset(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        tracked_buffer.diff_base.replace(
                            old_range,
                            &tracked_buffer
                                .snapshot
                                .text_for_range(new_range)
                                .collect::<String>(),
                        );
                        delta += edit.new_len() as i32 - edit.old_len() as i32;
                        metrics.add_edit(edit);
                        false
                    }
                });
                if tracked_buffer.unreviewed_edits.is_empty()
                    && let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status
                {
                    // Everything reviewed: treat as ordinary modified state
                    // from now on.
                    tracked_buffer.status = TrackedBufferStatus::Modified;
                }
                tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
            }
        }
        if let Some(telemetry) = telemetry {
            telemetry_report_accepted_edits(&telemetry, metrics);
        }
    }
638
    /// Reverts the agent's unreviewed edits that intersect `buffer_ranges`,
    /// saving the buffer afterwards.
    ///
    /// Returns the save/delete task plus optional per-buffer undo information
    /// callers can stash to reverse the rejection later. Undo info is produced
    /// when restoring an overwritten file's original content or when reverting
    /// individual modifications; it is not produced when an agent-created file
    /// is deleted.
    pub fn reject_edits_in_ranges(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_ranges: Vec<Range<impl language::ToPoint>>,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) -> (Task<Result<()>>, Option<PerBufferUndo>) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return (Task::ready(Ok(())), None);
        };

        let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
        let mut undo_info: Option<PerBufferUndo> = None;
        let task = match &tracked_buffer.status {
            TrackedBufferStatus::Created {
                existing_file_content,
            } => {
                let task = if let Some(existing_file_content) = existing_file_content {
                    // Capture the agent's content before restoring existing file content
                    let agent_content = buffer.read(cx).text();

                    // Restore the pre-existing file content in one transaction.
                    buffer.update(cx, |buffer, cx| {
                        buffer.start_transaction();
                        buffer.set_text("", cx);
                        for chunk in existing_file_content.chunks() {
                            buffer.append(chunk, cx);
                        }
                        buffer.end_transaction(cx);
                    });

                    undo_info = Some(PerBufferUndo {
                        buffer: buffer.downgrade(),
                        edits_to_restore: vec![(Anchor::MIN..Anchor::MAX, agent_content)],
                        status: UndoBufferStatus::Created {
                            had_existing_content: true,
                        },
                    });

                    self.project
                        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
                } else {
                    // For a file created by AI with no pre-existing content,
                    // only delete the file if we're certain it contains only AI content
                    // with no edits from the user.

                    let initial_version = tracked_buffer.version.clone();
                    let current_version = buffer.read(cx).version();

                    let current_content = buffer.read(cx).text();
                    let tracked_content = tracked_buffer.snapshot.text();

                    let is_ai_only_content =
                        initial_version == current_version && current_content == tracked_content;

                    if is_ai_only_content {
                        buffer
                            .read(cx)
                            .entry_id(cx)
                            .and_then(|entry_id| {
                                self.project.update(cx, |project, cx| {
                                    project.delete_entry(entry_id, false, cx)
                                })
                            })
                            .unwrap_or(Task::ready(Ok(())))
                    } else {
                        // Not sure how to disentangle edits made by the user
                        // from edits made by the AI at this point.
                        // For now, preserve both to avoid data loss.
                        //
                        // TODO: Better solution (disable "Reject" after user makes some
                        // edit or find a way to differentiate between AI and user edits)
                        Task::ready(Ok(()))
                    }
                };

                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                cx.notify();
                task
            }
            TrackedBufferStatus::Deleted => {
                // Rejecting a deletion: restore the content from before the
                // agent deleted it.
                buffer.update(cx, |buffer, cx| {
                    buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
                });
                let save = self
                    .project
                    .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));

                // Clear all tracked edits for this buffer and start over as if we just read it.
                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                self.buffer_read(buffer.clone(), cx);
                cx.notify();
                save
            }
            TrackedBufferStatus::Modified => {
                let edits_to_restore = buffer.update(cx, |buffer, cx| {
                    let mut buffer_row_ranges = buffer_ranges
                        .into_iter()
                        .map(|range| {
                            range.start.to_point(buffer).row..range.end.to_point(buffer).row
                        })
                        .peekable();

                    let mut edits_to_revert = Vec::new();
                    let mut edits_for_undo = Vec::new();
                    for edit in tracked_buffer.unreviewed_edits.edits() {
                        let new_range = tracked_buffer
                            .snapshot
                            .anchor_before(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.anchor_after(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        let new_row_range = new_range.start.to_point(buffer).row
                            ..new_range.end.to_point(buffer).row;

                        // Determine whether this edit intersects any of the
                        // requested row ranges (both lists are in row order).
                        let mut revert = false;
                        while let Some(buffer_row_range) = buffer_row_ranges.peek() {
                            if buffer_row_range.end < new_row_range.start {
                                buffer_row_ranges.next();
                            } else if buffer_row_range.start > new_row_range.end {
                                break;
                            } else {
                                revert = true;
                                break;
                            }
                        }

                        if revert {
                            metrics.add_edit(edit);
                            // Text this edit replaced, taken from the diff base.
                            let old_range = tracked_buffer
                                .diff_base
                                .point_to_offset(Point::new(edit.old.start, 0))
                                ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                    Point::new(edit.old.end, 0),
                                    tracked_buffer.diff_base.max_point(),
                                ));
                            let old_text = tracked_buffer
                                .diff_base
                                .chunks_in_range(old_range)
                                .collect::<String>();

                            // Capture the agent's text before we revert it (for undo)
                            let new_range_offset =
                                new_range.start.to_offset(buffer)..new_range.end.to_offset(buffer);
                            let agent_text =
                                buffer.text_for_range(new_range_offset).collect::<String>();
                            edits_for_undo.push((new_range.clone(), agent_text));

                            edits_to_revert.push((new_range, old_text));
                        }
                    }

                    buffer.edit(edits_to_revert, None, cx);
                    edits_for_undo
                });

                if !edits_to_restore.is_empty() {
                    undo_info = Some(PerBufferUndo {
                        buffer: buffer.downgrade(),
                        edits_to_restore,
                        status: UndoBufferStatus::Modified,
                    });
                }

                self.project
                    .update(cx, |project, cx| project.save_buffer(buffer, cx))
            }
        };
        if let Some(telemetry) = telemetry {
            telemetry_report_rejected_edits(&telemetry, metrics);
        }
        (task, undo_info)
    }
814
    /// Accepts every unreviewed edit in every tracked buffer.
    pub fn keep_all_edits(
        &mut self,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) {
        self.tracked_buffers.retain(|buffer, tracked_buffer| {
            let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
            metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
            if let Some(telemetry) = telemetry.as_ref() {
                telemetry_report_accepted_edits(telemetry, metrics);
            }
            match tracked_buffer.status {
                // Accepting a deletion: drop the buffer from tracking.
                TrackedBufferStatus::Deleted => false,
                _ => {
                    if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
                        tracked_buffer.status = TrackedBufferStatus::Modified;
                    }
                    // Fold everything into the diff base so nothing remains
                    // to review.
                    tracked_buffer.unreviewed_edits.clear();
                    tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
                    tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
                    true
                }
            }
        });

        cx.notify();
    }
842
    /// Rejects the agent's edits in every changed buffer, capturing undo
    /// information so the rejection can be reversed via
    /// [`Self::undo_last_reject`].
    pub fn reject_all_edits(
        &mut self,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) -> Task<()> {
        // Clear any previous undo state before starting a new reject operation
        self.last_reject_undo = None;

        let mut undo_buffers = Vec::new();
        let mut futures = Vec::new();

        for buffer in self.changed_buffers(cx).into_keys() {
            // Reject everything: the whole buffer range.
            let buffer_ranges = vec![Anchor::min_max_range_for_buffer(
                buffer.read(cx).remote_id(),
            )];
            let (reject_task, undo_info) =
                self.reject_edits_in_ranges(buffer, buffer_ranges, telemetry.clone(), cx);

            if let Some(undo) = undo_info {
                undo_buffers.push(undo);
            }

            futures.push(async move {
                // Per-buffer rejection failures are logged, not propagated.
                reject_task.await.log_err();
            });
        }

        // Store the undo information if we have any
        if !undo_buffers.is_empty() {
            self.last_reject_undo = Some(LastRejectUndo {
                buffers: undo_buffers,
            });
        }

        let task = futures::future::join_all(futures);
        cx.background_spawn(async move {
            task.await;
        })
    }
882
    /// Returns whether a reject operation is currently available to undo.
    pub fn has_pending_undo(&self) -> bool {
        self.last_reject_undo.is_some()
    }
886
    /// Replaces the stored undo state for the most recent reject operation.
    pub fn set_last_reject_undo(&mut self, undo: LastRejectUndo) {
        self.last_reject_undo = Some(undo);
    }
890
    /// Undoes the most recent reject operation, restoring the rejected agent changes.
    /// This is a best-effort operation: if buffers have been closed or modified externally,
    /// those buffers will be skipped.
    pub fn undo_last_reject(&mut self, cx: &mut Context<Self>) -> Task<()> {
        let Some(undo) = self.last_reject_undo.take() else {
            return Task::ready(());
        };

        let mut save_tasks = Vec::with_capacity(undo.buffers.len());

        for per_buffer_undo in undo.buffers {
            // Skip if the buffer entity has been deallocated
            let Some(buffer) = per_buffer_undo.buffer.upgrade() else {
                continue;
            };

            buffer.update(cx, |buffer, cx| {
                let mut valid_edits = Vec::new();

                // Only re-apply anchors that still belong to this buffer.
                for (anchor_range, text_to_restore) in per_buffer_undo.edits_to_restore {
                    if anchor_range.start.buffer_id == Some(buffer.remote_id())
                        && anchor_range.end.buffer_id == Some(buffer.remote_id())
                    {
                        valid_edits.push((anchor_range, text_to_restore));
                    }
                }

                if !valid_edits.is_empty() {
                    buffer.edit(valid_edits, None, cx);
                }
            });

            // Resume tracking so the restored agent edits show up for review.
            if !self.tracked_buffers.contains_key(&buffer) {
                self.buffer_edited(buffer.clone(), cx);
            }

            let save = self
                .project
                .update(cx, |project, cx| project.save_buffer(buffer, cx));
            save_tasks.push(save);
        }

        cx.notify();

        cx.background_spawn(async move {
            futures::future::join_all(save_tasks).await;
        })
    }
939
940 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
941 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
942 self.tracked_buffers
943 .iter()
944 .filter(|(_, tracked)| tracked.has_edits(cx))
945 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
946 .collect()
947 }
948
949 /// Iterate over buffers changed since last read or edited by the model
950 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
951 self.tracked_buffers
952 .iter()
953 .filter(|(buffer, tracked)| {
954 let buffer = buffer.read(cx);
955
956 tracked.version != buffer.version
957 && buffer
958 .file()
959 .is_some_and(|file| !file.disk_state().is_deleted())
960 })
961 .map(|(buffer, _)| buffer)
962 }
963}
964
/// Identifiers attached to accept/reject telemetry events.
#[derive(Clone)]
pub struct ActionLogTelemetry {
    /// Telemetry identifier of the agent performing the edits.
    pub agent_telemetry_id: SharedString,
    /// Identifier of the agent session the edits belong to.
    pub session_id: Arc<str>,
}
970
/// Line-count metrics accumulated per buffer while accepting or rejecting
/// edits, reported via telemetry.
struct ActionLogMetrics {
    /// Total old-row count of the accumulated edits.
    lines_removed: u32,
    /// Total new-row count of the accumulated edits.
    lines_added: u32,
    /// Language name of the buffer, if known.
    language: Option<SharedString>,
}
976
977impl ActionLogMetrics {
978 fn for_buffer(buffer: &Buffer) -> Self {
979 Self {
980 language: buffer.language().map(|l| l.name().0),
981 lines_removed: 0,
982 lines_added: 0,
983 }
984 }
985
986 fn add_edits(&mut self, edits: &[Edit<u32>]) {
987 for edit in edits {
988 self.add_edit(edit);
989 }
990 }
991
992 fn add_edit(&mut self, edit: &Edit<u32>) {
993 self.lines_added += edit.new_len();
994 self.lines_removed += edit.old_len();
995 }
996}
997
/// Emits a telemetry event recording that agent edits were accepted.
fn telemetry_report_accepted_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
    telemetry::event!(
        "Agent Edits Accepted",
        agent = telemetry.agent_telemetry_id,
        session = telemetry.session_id,
        language = metrics.language,
        lines_added = metrics.lines_added,
        lines_removed = metrics.lines_removed
    );
}
1008
/// Emits a telemetry event recording that agent edits were rejected.
fn telemetry_report_rejected_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
    telemetry::event!(
        "Agent Edits Rejected",
        agent = telemetry.agent_telemetry_id,
        session = telemetry.session_id,
        language = metrics.language,
        lines_added = metrics.lines_added,
        lines_removed = metrics.lines_removed
    );
}
1019
/// Applies `edits` (mapping old buffer rows to `new_text` rows) onto
/// `old_text`, skipping any edit that conflicts (overlaps) with an edit in
/// `patch`.
///
/// Used to rebase the agent's diff base over user edits: user changes that
/// don't touch unreviewed agent edits are folded into the base so they don't
/// appear as agent changes. Returns whether any edit was applied.
fn apply_non_conflicting_edits(
    patch: &Patch<u32>,
    edits: Vec<Edit<u32>>,
    old_text: &mut Rope,
    new_text: &Rope,
) -> bool {
    let mut old_edits = patch.edits().iter().cloned().peekable();
    let mut new_edits = edits.into_iter().peekable();
    // Row delta introduced by the edits already applied to `old_text`.
    let mut applied_delta = 0i32;
    // Row delta introduced by `patch` edits we've skipped past.
    let mut rebased_delta = 0i32;
    let mut has_made_changes = false;

    while let Some(mut new_edit) = new_edits.next() {
        let mut conflict = false;

        // Push all the old edits that are before this new edit or that intersect with it.
        while let Some(old_edit) = old_edits.peek() {
            if new_edit.old.end < old_edit.new.start
                || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
            {
                break;
            } else if new_edit.old.start > old_edit.new.end
                || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
            {
                let old_edit = old_edits.next().unwrap();
                rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
            } else {
                conflict = true;
                // Consume whichever side lets us make progress: the next new
                // edit if it also overlaps this old edit, otherwise the old edit.
                if new_edits
                    .peek()
                    .is_some_and(|next_edit| next_edit.old.overlaps(&old_edit.new))
                {
                    new_edit = new_edits.next().unwrap();
                } else {
                    let old_edit = old_edits.next().unwrap();
                    rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
                }
            }
        }

        if !conflict {
            // This edit doesn't intersect with any old edit, so we can apply it to the old text.
            new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
            new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
            let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
                ..old_text.point_to_offset(cmp::min(
                    Point::new(new_edit.old.end, 0),
                    old_text.max_point(),
                ));
            let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
                ..new_text.point_to_offset(cmp::min(
                    Point::new(new_edit.new.end, 0),
                    new_text.max_point(),
                ));

            old_text.replace(
                old_bytes,
                &new_text.chunks_in_range(new_bytes).collect::<String>(),
            );
            applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
            has_made_changes = true;
        }
    }
    has_made_changes
}
1085
1086fn diff_snapshots(
1087 old_snapshot: &text::BufferSnapshot,
1088 new_snapshot: &text::BufferSnapshot,
1089) -> Vec<Edit<u32>> {
1090 let mut edits = new_snapshot
1091 .edits_since::<Point>(&old_snapshot.version)
1092 .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
1093 .peekable();
1094 let mut row_edits = Vec::new();
1095 while let Some(mut edit) = edits.next() {
1096 while let Some(next_edit) = edits.peek() {
1097 if edit.old.end >= next_edit.old.start {
1098 edit.old.end = next_edit.old.end;
1099 edit.new.end = next_edit.new.end;
1100 edits.next();
1101 } else {
1102 break;
1103 }
1104 }
1105 row_edits.push(edit);
1106 }
1107 row_edits
1108}
1109
/// Converts a point-based edit into a whole-row edit.
///
/// Three cases:
/// - The edit starts at the end of a line, the inserted text begins with a
///   newline, and the edit doesn't start at the very end of `old_text`:
///   attribute the change to the rows *after* the insertion point.
/// - The edit starts and ends at column 0 on both sides: it already spans
///   whole rows, so the row numbers are used as-is.
/// - Otherwise the edit touches a partial line, so both row ranges are
///   extended to include the full final row.
fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
    if edit.old.start.column == old_text.line_len(edit.old.start.row)
        && new_text
            .chars_at(new_text.point_to_offset(edit.new.start))
            .next()
            == Some('\n')
        && edit.old.start != old_text.max_point()
    {
        Edit {
            old: edit.old.start.row + 1..edit.old.end.row + 1,
            new: edit.new.start.row + 1..edit.new.end.row + 1,
        }
    } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
        Edit {
            old: edit.old.start.row..edit.old.end.row,
            new: edit.new.start.row..edit.new.end.row,
        }
    } else {
        Edit {
            old: edit.old.start.row..edit.old.end.row + 1,
            new: edit.new.start.row..edit.new.end.row + 1,
        }
    }
}
1134
/// Identifies who authored a buffer change; sent alongside each snapshot
/// through the diff-update channel (see `TrackedBuffer::schedule_diff_update`).
#[derive(Copy, Clone, Debug)]
enum ChangeAuthor {
    /// The change came from the user.
    User,
    /// The change came from the agent.
    Agent,
}
1140
/// Lifecycle state of a buffer tracked by the action log.
#[derive(Debug)]
enum TrackedBufferStatus {
    /// The buffer was created by the agent. `existing_file_content` holds the
    /// file's prior content when the creation overwrote an existing file.
    Created { existing_file_content: Option<Rope> },
    /// The buffer existed before and was modified.
    Modified,
    /// The buffer's file was deleted.
    Deleted,
}
1147
/// Per-buffer state kept by the action log for a buffer whose changes are
/// being tracked.
pub struct TrackedBuffer {
    // The buffer being tracked.
    buffer: Entity<Buffer>,
    // Baseline text the diff is computed against.
    diff_base: Rope,
    // Row-based edits relative to `diff_base` that have not been reviewed yet.
    unreviewed_edits: Patch<u32>,
    // Whether the buffer was created, modified, or deleted.
    status: TrackedBufferStatus,
    // Buffer version this tracker was last synchronized with.
    version: clock::Global,
    // Diff entity exposed to consumers (see `ChangedBuffer`).
    diff: Entity<BufferDiff>,
    // Last text snapshot observed for this buffer.
    snapshot: text::BufferSnapshot,
    // Channel feeding (author, snapshot) pairs to the diff-maintenance task;
    // written by `schedule_diff_update`.
    diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
    // Held to keep the LSP-side buffer handle alive while tracked.
    _open_lsp_handle: OpenLspBufferHandle,
    // Background task that presumably consumes `diff_update` and refreshes
    // `diff`; held only to keep it alive. TODO confirm against constructor.
    _maintain_diff: Task<()>,
    // Event subscription kept alive for the lifetime of this tracker.
    _subscription: Subscription,
}
1161
impl TrackedBuffer {
    /// Test-only accessor for the underlying diff entity.
    #[cfg(any(test, feature = "test-support"))]
    pub fn diff(&self) -> &Entity<BufferDiff> {
        &self.diff
    }

    /// Test-only accessor for the length (in bytes) of the diff base text.
    #[cfg(any(test, feature = "test-support"))]
    pub fn diff_base_len(&self) -> usize {
        self.diff_base.len()
    }

    /// Returns `true` if the diff currently contains at least one hunk, i.e.
    /// the buffer still differs from its diff base.
    fn has_edits(&self, cx: &App) -> bool {
        self.diff
            .read(cx)
            .snapshot(cx)
            .hunks(self.buffer.read(cx))
            .next()
            .is_some()
    }

    /// Queues an asynchronous diff update for the buffer's current text,
    /// tagged with the author of the change. A send failure (receiver gone)
    /// is deliberately ignored.
    fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
        self.diff_update
            .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
            .ok();
    }
}
1188
/// Public wrapper exposing the diff of a tracked buffer that has changes.
pub struct ChangedBuffer {
    pub diff: Entity<BufferDiff>,
}
1192
1193#[cfg(test)]
1194mod tests {
1195 use super::*;
1196 use buffer_diff::DiffHunkStatusKind;
1197 use gpui::TestAppContext;
1198 use language::Point;
1199 use project::{FakeFs, Fs, Project, RemoveOptions};
1200 use rand::prelude::*;
1201 use serde_json::json;
1202 use settings::SettingsStore;
1203 use std::env;
1204 use util::{RandomCharIter, path};
1205
    /// Runs once at process start (via `ctor`) so every test has logging set up.
    #[ctor::ctor]
    fn init_logger() {
        zlog::init_test();
    }
1210
    /// Installs a test `SettingsStore` as a global, which the project/test
    /// infrastructure reads; every test calls this first.
    fn init_test(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
        });
    }
1217
    /// Two agent edits produce two unreviewed hunks; keeping a range removes
    /// only the hunks that range covers, and keeping everything clears all.
    #[gpui::test(iterations = 10)]
    async fn test_keep_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Track the buffer, then make two agent edits on rows 1 and 4.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndEf\nghi\njkl\nmnO"
        );
        // Both edits should be reported as separate unreviewed hunks.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(2, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(4, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Keeping a range covering only the second hunk leaves the first.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(2, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\n".into(),
                }],
            )]
        );

        // Keeping the full range clears all remaining hunks.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1295
    /// Line deletions produce `Deleted` hunks; undoing one deletion removes its
    /// hunk, and keeping the remaining range clears the log.
    #[gpui::test(iterations = 10)]
    async fn test_deletions(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Delete two separate lines, each in its own finalized transaction so
        // they can be undone independently.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
                    .unwrap();
                buffer.finalize_last_transaction();
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
                    .unwrap();
                buffer.finalize_last_transaction();
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nghi\njkl\npqr"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "mno\n".into(),
                    }
                ],
            )]
        );

        // Undoing the last transaction restores "mno" and drops its hunk.
        buffer.update(cx, |buffer, cx| buffer.undo(cx));
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nghi\njkl\nmno\npqr"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(1, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "def\n".into(),
                }],
            )]
        );

        // Keeping the (empty) range at the deletion point clears the last hunk.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1380
    /// User edits made after an agent edit don't create new hunks; a user edit
    /// inside the agent hunk leaves its range intact, and keeping part of the
    /// hunk range clears it.
    #[gpui::test(iterations = 10)]
    async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent edit spanning rows 1-2.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndeF\nGHI\njkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // User edits outside the hunk (no `buffer_edited` call): the tracked
        // hunk stays the same.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(
                [
                    (Point::new(0, 2)..Point::new(0, 2), "X"),
                    (Point::new(3, 0)..Point::new(3, 0), "Y"),
                ],
                None,
                cx,
            )
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // User edit *inside* the agent hunk also leaves the hunk unchanged.
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndZeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // Keeping a range that touches the hunk's start clears it.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1477
    /// An agent-created file shows a single `Added` hunk that grows with
    /// further edits; keeping the range clears it.
    #[gpui::test(iterations = 10)]
    async fn test_creating_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        // Agent creates the file with "lorem" and saves it.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // A further edit widens the single Added hunk rather than adding one.
        buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 6),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Keeping (an offset range over) the content clears the hunk.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), 0..5, None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1536
    /// When the agent overwrites an existing file (`buffer_created` on a file
    /// with prior content), rejecting the edits restores the original content.
    #[gpui::test(iterations = 10)]
    async fn test_overwriting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        // Agent replaces the whole file content and saves.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        // The overwrite is tracked as a single Added hunk over the new content.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 19),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting any overlapping range restores the pre-overwrite content.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx);
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }
1595
    /// If the agent first modifies a file and then overwrites it via
    /// `buffer_created`, rejecting restores the *original* on-disk content,
    /// not the intermediate modified state.
    #[gpui::test(iterations = 10)]
    async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        // First: an ordinary agent modification (append), tracked as Modified.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 37),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "Lorem ipsum dolor".into(),
                }],
            )]
        );

        // Then: the agent overwrites the file entirely.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 9),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting restores the original file content from before both steps.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx);
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }
1676
    /// Agent-deleted files appear as Deleted hunks; external recreation clears
    /// one, tool recreation replaces the other with an Added hunk, and an
    /// external delete of the recreated file clears everything.
    #[gpui::test(iterations = 10)]
    async fn test_deleting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file1": "lorem\n", "file2": "ipsum\n"}),
        )
        .await;

        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let file1_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();
        let file2_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
            .unwrap();

        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let buffer1 = project
            .update(cx, |project, cx| {
                project.open_buffer(file1_path.clone(), cx)
            })
            .await
            .unwrap();
        let buffer2 = project
            .update(cx, |project, cx| {
                project.open_buffer(file2_path.clone(), cx)
            })
            .await
            .unwrap();

        // Agent deletes both files.
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| {
                project.delete_file(file1_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        project
            .update(cx, |project, cx| {
                project.delete_file(file2_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![
                (
                    buffer1.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "lorem\n".into(),
                    }]
                ),
                (
                    buffer2.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "ipsum\n".into(),
                    }],
                )
            ]
        );

        // Simulate file1 being recreated externally.
        fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
            .await;

        // Simulate file2 being recreated by a tool.
        let buffer2 = project
            .update(cx, |project, cx| project.open_buffer(file2_path, cx))
            .await
            .unwrap();
        action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
        buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
        action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
            .await
            .unwrap();

        // file1's hunk is gone; file2 now has an Added hunk for "IPSUM".
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer2.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Simulate file2 being deleted externally.
        fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1786
    /// Rejecting edits reverts only the hunks overlapping the given ranges;
    /// ranges touching no hunk are ignored.
    #[gpui::test(iterations = 10)]
    async fn test_reject_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Two agent edits: a multi-line replacement on row 1 and a one-char
        // change on the (shifted) last row.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // If the rejected range doesn't overlap with any hunk, we ignore it.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Rejecting a range overlapping the first hunk reverts only that hunk;
        // the second hunk's range shifts up with the restored text.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(1, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(4, 0)..Point::new(4, 3),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "mno".into(),
                }],
            )]
        );

        // Rejecting at the remaining hunk reverts it, restoring the original.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1927
    /// Rejecting multiple anchor ranges in one call reverts all covered hunks,
    /// and the buffer text is already reverted synchronously before the
    /// returned task completes.
    #[gpui::test(iterations = 10)]
    async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Two agent edits producing two hunks.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Reject both hunks at once using anchor ranges; the buffer text is
        // reverted immediately, inside this same update.
        action_log.update(cx, |log, cx| {
            let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
                ..buffer.read(cx).anchor_before(Point::new(1, 0));
            let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
                ..buffer.read(cx).anchor_before(Point::new(5, 3));

            let (task, _) =
                log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], None, cx);
            task.detach();
            assert_eq!(
                buffer.read_with(cx, |buffer, _| buffer.text()),
                "abc\ndef\nghi\njkl\nmno"
            );
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2004
    /// Rejecting a file deletion recreates the file on disk with its original
    /// content and clears the Deleted hunk.
    #[gpui::test(iterations = 10)]
    async fn test_reject_deleted_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "content"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // Agent deletes the file.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| {
                project.delete_file(file_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "content".into(),
                }]
            )]
        );

        // Rejecting the deletion restores both buffer text and the on-disk file.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
        assert!(fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2063
    /// Rejecting an agent-created file (that did not exist before) deletes the
    /// file from disk and clears its hunks.
    #[gpui::test(iterations = 10)]
    async fn test_reject_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        // Agent creates the file with "content" and saves it.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 7),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting a range covering the Added hunk removes the file entirely.
        // (The end point extends past the content; ranges are clamped.)
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 11)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2119
    /// If the user edits an agent-created file before the reject, rejecting
    /// does NOT delete the file: the user's contribution (and the file) are
    /// preserved.
    #[gpui::test]
    async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();

        // User makes additional edits
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| {
                buffer.edit([(10..10, "\nuser added this line")], None, cx);
            });
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // Reject all
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(100, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();

        // File should still contain all the content
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        let content = buffer.read_with(cx, |buffer, _| buffer.text());
        assert_eq!(content, "ai content\nuser added this line");
    }
2187
2188 #[gpui::test]
2189 async fn test_reject_after_accepting_hunk_on_created_file(cx: &mut TestAppContext) {
2190 init_test(cx);
2191
2192 let fs = FakeFs::new(cx.executor());
2193 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2194 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2195
2196 let file_path = project
2197 .read_with(cx, |project, cx| {
2198 project.find_project_path("dir/new_file", cx)
2199 })
2200 .unwrap();
2201 let buffer = project
2202 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
2203 .await
2204 .unwrap();
2205
2206 // AI creates file with initial content
2207 cx.update(|cx| {
2208 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2209 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
2210 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2211 });
2212 project
2213 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2214 .await
2215 .unwrap();
2216 cx.run_until_parked();
2217 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2218
2219 // User accepts the single hunk
2220 action_log.update(cx, |log, cx| {
2221 let buffer_range = Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id());
2222 log.keep_edits_in_range(buffer.clone(), buffer_range, None, cx)
2223 });
2224 cx.run_until_parked();
2225 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2226 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2227
2228 // AI modifies the file
2229 cx.update(|cx| {
2230 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
2231 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2232 });
2233 project
2234 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2235 .await
2236 .unwrap();
2237 cx.run_until_parked();
2238 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2239
2240 // User rejects the hunk
2241 action_log
2242 .update(cx, |log, cx| {
2243 let (task, _) = log.reject_edits_in_ranges(
2244 buffer.clone(),
2245 vec![Anchor::min_max_range_for_buffer(
2246 buffer.read(cx).remote_id(),
2247 )],
2248 None,
2249 cx,
2250 );
2251 task
2252 })
2253 .await
2254 .unwrap();
2255 cx.run_until_parked();
2256 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await,);
2257 assert_eq!(
2258 buffer.read_with(cx, |buffer, _| buffer.text()),
2259 "ai content v1"
2260 );
2261 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2262 }
2263
    #[gpui::test]
    async fn test_reject_edits_on_previously_accepted_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();

        // User clicks "Accept All", which should make "ai content v1" the new
        // baseline and clear all unreviewed hunks.
        action_log.update(cx, |log, cx| log.keep_all_edits(None, cx));
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared

        // AI modifies file again
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User clicks "Reject All". Since the creation was already accepted,
        // the file must survive and revert to the accepted v1 content rather
        // than being deleted.
        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "ai content v1"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2324
    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        init_test(cx);

        // Number of random operations per test iteration; override with the
        // `OPERATIONS` environment variable when reproducing a failure.
        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

        for _ in 0..operations {
            match rng.random_range(0..100) {
                // 25% of the time: accept (keep) edits in a random byte range.
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, None, cx)
                    });
                }
                // 25% of the time: reject edits in a random byte range.
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            let (task, _) =
                                log.reject_edits_in_ranges(buffer.clone(), vec![range], None, cx);
                            task
                        })
                        .await
                        .unwrap();
                }
                // Remaining 50%: make a random edit, attributed to the agent
                // (reported via buffer_edited) or to the user (unreported)
                // with equal probability.
                _ => {
                    let is_agent_edit = rng.random_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            // Occasionally settle and validate mid-run, not just at the end.
            if rng.random_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

        // Invariant check: after all pending work settles, replaying the
        // tracked unreviewed edits on top of the diff base must reproduce the
        // buffer's current contents exactly.
        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
                // Apply each row-based edit to the diff base, replacing the
                // old rows with the corresponding rows from the new text.
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }
2418
    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        // HEAD starts out identical to the working copy.
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // The agent makes five distinct edits covering every hunk kind and
        // both ends of the file.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        // All five edits show up as unreviewed hunks (ranges are in post-edit
        // buffer coordinates).
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        // Only the hunks whose committed content matches the agent's edit are
        // auto-kept; the other three remain unreviewed.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Make another commit that accepts the NEW line but with different content
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into())],
            "0000002",
        );
        cx.run_until_parked();
        // The GGG edit now matches the commit and is kept; the NEW addition
        // does not match "DIFFERENT" and stays unreviewed, as does "J".
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer,
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2580
    #[gpui::test]
    async fn test_undo_last_reject(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "abc\ndef\nghi"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Track the buffer and make an agent edit (replace the middle line)
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit(
                        [(Point::new(1, 0)..Point::new(1, 3), "AGENT_EDIT")],
                        None,
                        cx,
                    )
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();

        // Verify the agent edit is there
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nAGENT_EDIT\nghi"
        );
        assert!(!unreviewed_hunks(&action_log, cx).is_empty());

        // Reject all edits; this should also record undo state for the reject
        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();

        // Verify the buffer is back to original
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi"
        );
        assert!(unreviewed_hunks(&action_log, cx).is_empty());

        // Verify undo state is available
        assert!(action_log.read_with(cx, |log, _| log.has_pending_undo()));

        // Undo the reject
        action_log
            .update(cx, |log, cx| log.undo_last_reject(cx))
            .await;

        cx.run_until_parked();

        // Verify the agent edit is restored
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nAGENT_EDIT\nghi"
        );

        // Verify undo state is cleared (undo is single-shot, not a stack)
        assert!(!action_log.read_with(cx, |log, _| log.has_pending_undo()));
    }
2659
2660 #[gpui::test]
2661 async fn test_linked_action_log_buffer_read(cx: &mut TestAppContext) {
2662 init_test(cx);
2663
2664 let fs = FakeFs::new(cx.executor());
2665 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
2666 .await;
2667 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2668 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
2669 let child_log =
2670 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
2671
2672 let file_path = project
2673 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
2674 .unwrap();
2675 let buffer = project
2676 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2677 .await
2678 .unwrap();
2679
2680 cx.update(|cx| {
2681 child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
2682 });
2683
2684 // Neither log considers the buffer stale immediately after reading it.
2685 let child_stale = cx.read(|cx| {
2686 child_log
2687 .read(cx)
2688 .stale_buffers(cx)
2689 .cloned()
2690 .collect::<Vec<_>>()
2691 });
2692 let parent_stale = cx.read(|cx| {
2693 parent_log
2694 .read(cx)
2695 .stale_buffers(cx)
2696 .cloned()
2697 .collect::<Vec<_>>()
2698 });
2699 assert!(child_stale.is_empty());
2700 assert!(parent_stale.is_empty());
2701
2702 // Simulate a user edit after the agent read the file.
2703 cx.update(|cx| {
2704 buffer.update(cx, |buffer, cx| {
2705 buffer.edit([(0..5, "goodbye")], None, cx).unwrap();
2706 });
2707 });
2708 cx.run_until_parked();
2709
2710 // Both child and parent should see the buffer as stale because both tracked
2711 // it at the pre-edit version via buffer_read forwarding.
2712 let child_stale = cx.read(|cx| {
2713 child_log
2714 .read(cx)
2715 .stale_buffers(cx)
2716 .cloned()
2717 .collect::<Vec<_>>()
2718 });
2719 let parent_stale = cx.read(|cx| {
2720 parent_log
2721 .read(cx)
2722 .stale_buffers(cx)
2723 .cloned()
2724 .collect::<Vec<_>>()
2725 });
2726 assert_eq!(child_stale, vec![buffer.clone()]);
2727 assert_eq!(parent_stale, vec![buffer]);
2728 }
2729
2730 #[gpui::test]
2731 async fn test_linked_action_log_buffer_edited(cx: &mut TestAppContext) {
2732 init_test(cx);
2733
2734 let fs = FakeFs::new(cx.executor());
2735 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi"}))
2736 .await;
2737 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2738 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
2739 let child_log =
2740 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
2741
2742 let file_path = project
2743 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
2744 .unwrap();
2745 let buffer = project
2746 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2747 .await
2748 .unwrap();
2749
2750 cx.update(|cx| {
2751 child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
2752 buffer.update(cx, |buffer, cx| {
2753 buffer
2754 .edit([(Point::new(1, 0)..Point::new(1, 3), "DEF")], None, cx)
2755 .unwrap();
2756 });
2757 child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2758 });
2759 cx.run_until_parked();
2760
2761 let expected_hunks = vec![(
2762 buffer,
2763 vec![HunkStatus {
2764 range: Point::new(1, 0)..Point::new(2, 0),
2765 diff_status: DiffHunkStatusKind::Modified,
2766 old_text: "def\n".into(),
2767 }],
2768 )];
2769 assert_eq!(
2770 unreviewed_hunks(&child_log, cx),
2771 expected_hunks,
2772 "child should track the agent edit"
2773 );
2774 assert_eq!(
2775 unreviewed_hunks(&parent_log, cx),
2776 expected_hunks,
2777 "parent should also track the agent edit via linked log forwarding"
2778 );
2779 }
2780
2781 #[gpui::test]
2782 async fn test_linked_action_log_buffer_created(cx: &mut TestAppContext) {
2783 init_test(cx);
2784
2785 let fs = FakeFs::new(cx.executor());
2786 fs.insert_tree(path!("/dir"), json!({})).await;
2787 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2788 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
2789 let child_log =
2790 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
2791
2792 let file_path = project
2793 .read_with(cx, |project, cx| {
2794 project.find_project_path("dir/new_file", cx)
2795 })
2796 .unwrap();
2797 let buffer = project
2798 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2799 .await
2800 .unwrap();
2801
2802 cx.update(|cx| {
2803 child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2804 buffer.update(cx, |buffer, cx| buffer.set_text("hello", cx));
2805 child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2806 });
2807 project
2808 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2809 .await
2810 .unwrap();
2811 cx.run_until_parked();
2812
2813 let expected_hunks = vec![(
2814 buffer.clone(),
2815 vec![HunkStatus {
2816 range: Point::new(0, 0)..Point::new(0, 5),
2817 diff_status: DiffHunkStatusKind::Added,
2818 old_text: "".into(),
2819 }],
2820 )];
2821 assert_eq!(
2822 unreviewed_hunks(&child_log, cx),
2823 expected_hunks,
2824 "child should track the created file"
2825 );
2826 assert_eq!(
2827 unreviewed_hunks(&parent_log, cx),
2828 expected_hunks,
2829 "parent should also track the created file via linked log forwarding"
2830 );
2831 }
2832
2833 #[gpui::test]
2834 async fn test_linked_action_log_will_delete_buffer(cx: &mut TestAppContext) {
2835 init_test(cx);
2836
2837 let fs = FakeFs::new(cx.executor());
2838 fs.insert_tree(path!("/dir"), json!({"file": "hello\n"}))
2839 .await;
2840 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2841 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
2842 let child_log =
2843 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
2844
2845 let file_path = project
2846 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
2847 .unwrap();
2848 let buffer = project
2849 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
2850 .await
2851 .unwrap();
2852
2853 cx.update(|cx| {
2854 child_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
2855 });
2856 project
2857 .update(cx, |project, cx| project.delete_file(file_path, false, cx))
2858 .unwrap()
2859 .await
2860 .unwrap();
2861 cx.run_until_parked();
2862
2863 let expected_hunks = vec![(
2864 buffer.clone(),
2865 vec![HunkStatus {
2866 range: Point::new(0, 0)..Point::new(0, 0),
2867 diff_status: DiffHunkStatusKind::Deleted,
2868 old_text: "hello\n".into(),
2869 }],
2870 )];
2871 assert_eq!(
2872 unreviewed_hunks(&child_log, cx),
2873 expected_hunks,
2874 "child should track the deleted file"
2875 );
2876 assert_eq!(
2877 unreviewed_hunks(&parent_log, cx),
2878 expected_hunks,
2879 "parent should also track the deleted file via linked log forwarding"
2880 );
2881 }
2882
    /// Simulates the subagent scenario: two child logs linked to the same parent, each
    /// editing a different file. The parent accumulates all edits while each child
    /// only sees its own.
    #[gpui::test]
    async fn test_linked_action_log_independent_tracking(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file_a": "content of a",
                "file_b": "content of b",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log_1 =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
        let child_log_2 =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_a_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/file_a", cx)
            })
            .unwrap();
        let file_b_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/file_b", cx)
            })
            .unwrap();
        let buffer_a = project
            .update(cx, |project, cx| project.open_buffer(file_a_path, cx))
            .await
            .unwrap();
        let buffer_b = project
            .update(cx, |project, cx| project.open_buffer(file_b_path, cx))
            .await
            .unwrap();

        // Each child (subagent) reads and edits its own file; neither touches
        // the other's buffer.
        cx.update(|cx| {
            child_log_1.update(cx, |log, cx| log.buffer_read(buffer_a.clone(), cx));
            buffer_a.update(cx, |buffer, cx| {
                buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap();
            });
            child_log_1.update(cx, |log, cx| log.buffer_edited(buffer_a.clone(), cx));

            child_log_2.update(cx, |log, cx| log.buffer_read(buffer_b.clone(), cx));
            buffer_b.update(cx, |buffer, cx| {
                buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap();
            });
            child_log_2.update(cx, |log, cx| log.buffer_edited(buffer_b.clone(), cx));
        });
        cx.run_until_parked();

        // Collect the set of changed buffers each log reports.
        let child_1_changed: Vec<_> = cx.read(|cx| {
            child_log_1
                .read(cx)
                .changed_buffers(cx)
                .into_keys()
                .collect()
        });
        let child_2_changed: Vec<_> = cx.read(|cx| {
            child_log_2
                .read(cx)
                .changed_buffers(cx)
                .into_keys()
                .collect()
        });
        let parent_changed: Vec<_> = cx.read(|cx| {
            parent_log
                .read(cx)
                .changed_buffers(cx)
                .into_keys()
                .collect()
        });

        assert_eq!(
            child_1_changed,
            vec![buffer_a.clone()],
            "child 1 should only track file_a"
        );
        assert_eq!(
            child_2_changed,
            vec![buffer_b.clone()],
            "child 2 should only track file_b"
        );
        assert_eq!(parent_changed.len(), 2, "parent should track both files");
        assert!(
            parent_changed.contains(&buffer_a) && parent_changed.contains(&buffer_b),
            "parent should contain both buffer_a and buffer_b"
        );
    }
2978
    /// Plain snapshot of a single diff hunk, used by tests to compare against
    /// the hunks reported through `ActionLog::changed_buffers`.
    #[derive(Debug, PartialEq)]
    struct HunkStatus {
        // The hunk's range in the current buffer contents, in points.
        range: Range<Point>,
        // Whether the hunk is an addition, deletion, or modification.
        diff_status: DiffHunkStatusKind,
        // The text this hunk replaced in the diff base ("" for additions).
        old_text: String,
    }
2985
2986 fn unreviewed_hunks(
2987 action_log: &Entity<ActionLog>,
2988 cx: &TestAppContext,
2989 ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
2990 cx.read(|cx| {
2991 action_log
2992 .read(cx)
2993 .changed_buffers(cx)
2994 .into_iter()
2995 .map(|(buffer, diff)| {
2996 let snapshot = buffer.read(cx).snapshot();
2997 (
2998 buffer,
2999 diff.read(cx)
3000 .snapshot(cx)
3001 .hunks(&snapshot)
3002 .map(|hunk| HunkStatus {
3003 diff_status: hunk.status().kind,
3004 range: hunk.range,
3005 old_text: diff
3006 .read(cx)
3007 .base_text(cx)
3008 .text_for_range(hunk.diff_base_byte_range)
3009 .collect(),
3010 })
3011 .collect(),
3012 )
3013 })
3014 .collect()
3015 })
3016 }
3017}