1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use clock;
4use collections::{BTreeMap, HashMap};
5use fs::MTime;
6use futures::{FutureExt, StreamExt, channel::mpsc};
7use gpui::{
8 App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
9};
10use language::{Anchor, Buffer, BufferEvent, Point, ToOffset, ToPoint};
11use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
12use std::{
13 cmp,
14 ops::Range,
15 path::{Path, PathBuf},
16 sync::Arc,
17};
18use text::{Edit, Patch, Rope};
19use util::{RangeExt, ResultExt as _};
20
/// Stores undo information for a single buffer's rejected edits.
///
/// Captured when a reject reverts agent edits, so `undo_last_reject` can
/// re-apply the agent's content later.
#[derive(Clone)]
pub struct PerBufferUndo {
    /// The buffer the rejected edits belonged to. Held weakly so pending undo
    /// data does not keep a dropped buffer alive.
    pub buffer: WeakEntity<Buffer>,
    /// Anchor ranges paired with the agent-authored text to re-insert there
    /// when the reject is undone.
    pub edits_to_restore: Vec<(Range<Anchor>, String)>,
    /// How the buffer was tracked at the time of the reject.
    pub status: UndoBufferStatus,
}
28
/// Tracks the buffer status for undo purposes.
#[derive(Clone, Debug)]
pub enum UndoBufferStatus {
    /// Buffer existed before the agent touched it and was modified in place.
    Modified,
    /// Buffer was created by the agent.
    /// - `had_existing_content: true` - Agent overwrote an existing file. On reject, the
    ///   original content was restored. Undo is supported: we restore the agent's content.
    /// - `had_existing_content: false` - Agent created a new file that didn't exist before.
    ///   On reject, the file was deleted. Undo is NOT currently supported (would require
    ///   recreating the file). Future TODO.
    Created {
        had_existing_content: bool,
    },
}
43
/// Stores undo information for the most recent reject operation.
#[derive(Clone)]
pub struct LastRejectUndo {
    /// Per-buffer undo information; only buffers that produced undo data
    /// during the reject are present.
    pub buffers: Vec<PerBufferUndo>,
}
50
/// Tracks actions performed by tools in a thread.
pub struct ActionLog {
    /// Buffers that we want to notify the model about when they change.
    tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
    /// The project this action log is associated with.
    project: Entity<Project>,
    /// An action log to forward all public methods to.
    /// Useful in cases like subagents, where we want to track individual diffs for this subagent,
    /// but also want to associate the reads/writes with a parent review experience.
    linked_action_log: Option<Entity<ActionLog>>,
    /// Stores undo information for the most recent reject operation.
    last_reject_undo: Option<LastRejectUndo>,
    /// Tracks the last time files were read by the agent (keyed by absolute
    /// path), used to detect external modifications.
    file_read_times: HashMap<PathBuf, MTime>,
}
66
67impl ActionLog {
68 /// Creates a new, empty action log associated with the given project.
69 pub fn new(project: Entity<Project>) -> Self {
70 Self {
71 tracked_buffers: BTreeMap::default(),
72 project,
73 linked_action_log: None,
74 last_reject_undo: None,
75 file_read_times: HashMap::default(),
76 }
77 }
78
79 pub fn with_linked_action_log(mut self, linked_action_log: Entity<ActionLog>) -> Self {
80 self.linked_action_log = Some(linked_action_log);
81 self
82 }
83
    /// The project this action log is associated with.
    pub fn project(&self) -> &Entity<Project> {
        &self.project
    }
87
88 pub fn file_read_time(&self, path: &Path) -> Option<MTime> {
89 self.file_read_times.get(path).copied()
90 }
91
92 fn update_file_read_time(&mut self, buffer: &Entity<Buffer>, cx: &App) {
93 let buffer = buffer.read(cx);
94 if let Some(file) = buffer.file() {
95 if let Some(local_file) = file.as_local() {
96 if let Some(mtime) = file.disk_state().mtime() {
97 let abs_path = local_file.abs_path(cx);
98 self.file_read_times.insert(abs_path, mtime);
99 }
100 }
101 }
102 }
103
104 fn remove_file_read_time(&mut self, buffer: &Entity<Buffer>, cx: &App) {
105 let buffer = buffer.read(cx);
106 if let Some(file) = buffer.file() {
107 if let Some(local_file) = file.as_local() {
108 let abs_path = local_file.abs_path(cx);
109 self.file_read_times.remove(&abs_path);
110 }
111 }
112 }
113
    /// Starts (or refreshes) tracking of `buffer` and returns its tracked
    /// state.
    ///
    /// `is_created` means the agent created/overwrote the file rather than
    /// merely reading or editing it; in that case the whole buffer is treated
    /// as one unreviewed insertion and any pre-existing content is remembered
    /// so a reject can restore it.
    fn track_buffer_internal(
        &mut self,
        buffer: Entity<Buffer>,
        is_created: bool,
        cx: &mut Context<Self>,
    ) -> &mut TrackedBuffer {
        let status = if is_created {
            if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
                // Already tracked: carry over (or synthesize) a record of the
                // file content that existed before this creation.
                match tracked.status {
                    TrackedBufferStatus::Created {
                        existing_file_content,
                    } => TrackedBufferStatus::Created {
                        existing_file_content,
                    },
                    TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
                        // The previous diff base is the last known pre-agent
                        // content of this file.
                        TrackedBufferStatus::Created {
                            existing_file_content: Some(tracked.diff_base),
                        }
                    }
                }
            } else if buffer
                .read(cx)
                .file()
                .is_some_and(|file| file.disk_state().exists())
            {
                // Creating over a file that already exists on disk: snapshot
                // its current content so a reject can restore it.
                TrackedBufferStatus::Created {
                    existing_file_content: Some(buffer.read(cx).as_rope().clone()),
                }
            } else {
                // Brand-new file with no prior content.
                TrackedBufferStatus::Created {
                    existing_file_content: None,
                }
            }
        } else {
            TrackedBufferStatus::Modified
        };

        // Note: `status` is only consumed when a new entry is inserted below;
        // an existing (non-created) entry keeps its current status.
        let tracked_buffer = self
            .tracked_buffers
            .entry(buffer.clone())
            .or_insert_with(|| {
                // Keep language servers aware of this buffer while tracked.
                let open_lsp_handle = self.project.update(cx, |project, cx| {
                    project.register_buffer_with_language_servers(&buffer, cx)
                });

                let text_snapshot = buffer.read(cx).text_snapshot();
                let language = buffer.read(cx).language().cloned();
                let language_registry = buffer.read(cx).language_registry();
                let diff = cx.new(|cx| {
                    let mut diff = BufferDiff::new(&text_snapshot, cx);
                    diff.language_changed(language, language_registry, cx);
                    diff
                });
                let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
                let diff_base;
                let unreviewed_edits;
                if is_created {
                    // Treat the entire buffer as one unreviewed insertion
                    // against an empty base.
                    diff_base = Rope::default();
                    unreviewed_edits = Patch::new(vec![Edit {
                        old: 0..1,
                        new: 0..text_snapshot.max_point().row + 1,
                    }])
                } else {
                    // Base off the buffer's current content: nothing is
                    // unreviewed yet.
                    diff_base = buffer.read(cx).as_rope().clone();
                    unreviewed_edits = Patch::default();
                }
                TrackedBuffer {
                    buffer: buffer.clone(),
                    diff_base,
                    unreviewed_edits,
                    snapshot: text_snapshot,
                    status,
                    version: buffer.read(cx).version(),
                    diff,
                    diff_update: diff_update_tx,
                    _open_lsp_handle: open_lsp_handle,
                    // Long-lived task that consumes `diff_update_rx` and keeps
                    // the review diff current.
                    _maintain_diff: cx.spawn({
                        let buffer = buffer.clone();
                        async move |this, cx| {
                            Self::maintain_diff(this, buffer, diff_update_rx, cx)
                                .await
                                .ok();
                        }
                    }),
                    _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
                }
            });
        // Record the buffer version at the time of this interaction.
        tracked_buffer.version = buffer.read(cx).version();
        tracked_buffer
    }
204
205 fn handle_buffer_event(
206 &mut self,
207 buffer: Entity<Buffer>,
208 event: &BufferEvent,
209 cx: &mut Context<Self>,
210 ) {
211 match event {
212 BufferEvent::Edited => {
213 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
214 return;
215 };
216 let buffer_version = buffer.read(cx).version();
217 if !buffer_version.changed_since(&tracked_buffer.version) {
218 return;
219 }
220 self.handle_buffer_edited(buffer, cx);
221 }
222 BufferEvent::FileHandleChanged => {
223 self.handle_buffer_file_changed(buffer, cx);
224 }
225 _ => {}
226 };
227 }
228
229 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
230 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
231 return;
232 };
233 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
234 }
235
236 fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
237 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
238 return;
239 };
240
241 match tracked_buffer.status {
242 TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
243 if buffer
244 .read(cx)
245 .file()
246 .is_some_and(|file| file.disk_state().is_deleted())
247 {
248 // If the buffer had been edited by a tool, but it got
249 // deleted externally, we want to stop tracking it.
250 self.tracked_buffers.remove(&buffer);
251 }
252 cx.notify();
253 }
254 TrackedBufferStatus::Deleted => {
255 if buffer
256 .read(cx)
257 .file()
258 .is_some_and(|file| !file.disk_state().is_deleted())
259 {
260 // If the buffer had been deleted by a tool, but it got
261 // resurrected externally, we want to clear the edits we
262 // were tracking and reset the buffer's state.
263 self.tracked_buffers.remove(&buffer);
264 self.track_buffer_internal(buffer, false, cx);
265 }
266 cx.notify();
267 }
268 }
269 }
270
    /// Long-running task that keeps a tracked buffer's review diff current.
    ///
    /// Listens on two sources until the update channel closes (i.e. the
    /// buffer stops being tracked):
    /// - snapshots sent by the tracked buffer's diff-update channel, folded
    ///   into the unreviewed-edit state via `track_edits`, and
    /// - git diff changes that coincide with a HEAD move, which trigger
    ///   `keep_committed_edits` to auto-accept edits the user committed.
    async fn maintain_diff(
        this: WeakEntity<Self>,
        buffer: Entity<Buffer>,
        mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
        let git_diff = this
            .update(cx, |this, cx| {
                this.project.update(cx, |project, cx| {
                    project.open_uncommitted_diff(buffer.clone(), cx)
                })
            })?
            .await
            .ok();
        let buffer_repo = git_store.read_with(cx, |git_store, cx| {
            git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        });

        let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
        // Only signal when a diff change coincides with a HEAD move (a commit
        // landed), not on every uncommitted-diff recalculation.
        let _repo_subscription =
            if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
                cx.update(|cx| {
                    let mut old_head = buffer_repo.read(cx).head_commit.clone();
                    Some(cx.subscribe(git_diff, move |_, event, cx| {
                        if let buffer_diff::BufferDiffEvent::DiffChanged { .. } = event {
                            let new_head = buffer_repo.read(cx).head_commit.clone();
                            if new_head != old_head {
                                old_head = new_head;
                                git_diff_updates_tx.send(()).ok();
                            }
                        }
                    }))
                })
            } else {
                None
            };

        loop {
            futures::select_biased! {
                buffer_update = buffer_updates.next() => {
                    if let Some((author, buffer_snapshot)) = buffer_update {
                        Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
                    } else {
                        // Sender dropped: the buffer is no longer tracked.
                        break;
                    }
                }
                _ = git_diff_updates_rx.changed().fuse() => {
                    if let Some(git_diff) = git_diff.as_ref() {
                        Self::keep_committed_edits(&this, &buffer, git_diff, cx).await?;
                    }
                }
            }
        }

        Ok(())
    }
328
    /// Folds a new buffer snapshot into the tracked diff state.
    ///
    /// Non-conflicting user edits are also applied to the diff base so they
    /// disappear from the review diff; agent edits leave the base untouched
    /// and therefore remain visible as unreviewed.
    async fn track_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        author: ChangeAuthor,
        buffer_snapshot: text::BufferSnapshot,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let rebase = this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;

            // Diff old vs. new snapshot off the main thread and, for user
            // edits, rebase the diff base over the non-conflicting ones.
            let rebase = cx.background_spawn({
                let mut base_text = tracked_buffer.diff_base.clone();
                let old_snapshot = tracked_buffer.snapshot.clone();
                let new_snapshot = buffer_snapshot.clone();
                let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let edits = diff_snapshots(&old_snapshot, &new_snapshot);
                async move {
                    if let ChangeAuthor::User = author {
                        apply_non_conflicting_edits(
                            &unreviewed_edits,
                            edits,
                            &mut base_text,
                            new_snapshot.as_rope(),
                        );
                    }

                    // Return the base both as a string (for the diff) and as
                    // the rope that becomes the new diff base.
                    (Arc::from(base_text.to_string().as_str()), base_text)
                }
            });

            anyhow::Ok(rebase)
        })??;
        let (new_base_text, new_diff_base) = rebase.await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
376
    /// Auto-accepts unreviewed agent edits that the user has since committed.
    ///
    /// Line-diffs the agent's diff base against the git (HEAD) base; any
    /// unreviewed edit whose committed replacement text matches the buffer's
    /// current text is folded into the agent diff base, removing it from the
    /// review diff.
    async fn keep_committed_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        git_diff: &Entity<BufferDiff>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let buffer_snapshot = this.read_with(cx, |this, _cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok(tracked_buffer.snapshot.clone())
        })??;
        let (new_base_text, new_diff_base) = this
            .read_with(cx, |this, cx| {
                let tracked_buffer = this
                    .tracked_buffers
                    .get(buffer)
                    .context("buffer not tracked")?;
                let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let agent_diff_base = tracked_buffer.diff_base.clone();
                let git_diff_base = git_diff.read(cx).base_text(cx).as_rope().clone();
                let buffer_text = tracked_buffer.snapshot.as_rope().clone();
                anyhow::Ok(cx.background_spawn(async move {
                    let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
                    // Row-level edits that transform the agent's base into the
                    // committed (HEAD) base.
                    let committed_edits = language::line_diff(
                        &agent_diff_base.to_string(),
                        &git_diff_base.to_string(),
                    )
                    .into_iter()
                    .map(|(old, new)| Edit { old, new });

                    let mut new_agent_diff_base = agent_diff_base.clone();
                    // How much earlier replacements have shifted rows in
                    // `new_agent_diff_base` relative to the original base.
                    let mut row_delta = 0i32;
                    for committed in committed_edits {
                        while let Some(unreviewed) = old_unreviewed_edits.peek() {
                            // If the committed edit matches the unreviewed
                            // edit, assume the user wants to keep it.
                            if committed.old == unreviewed.old {
                                let unreviewed_new =
                                    buffer_text.slice_rows(unreviewed.new.clone()).to_string();
                                let committed_new =
                                    git_diff_base.slice_rows(committed.new.clone()).to_string();
                                if unreviewed_new == committed_new {
                                    // Splice the kept edit's new text into the
                                    // agent base at its delta-adjusted rows.
                                    let old_byte_start =
                                        new_agent_diff_base.point_to_offset(Point::new(
                                            (unreviewed.old.start as i32 + row_delta) as u32,
                                            0,
                                        ));
                                    let old_byte_end =
                                        new_agent_diff_base.point_to_offset(cmp::min(
                                            Point::new(
                                                (unreviewed.old.end as i32 + row_delta) as u32,
                                                0,
                                            ),
                                            new_agent_diff_base.max_point(),
                                        ));
                                    new_agent_diff_base
                                        .replace(old_byte_start..old_byte_end, &unreviewed_new);
                                    row_delta +=
                                        unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
                                }
                            } else if unreviewed.old.start >= committed.old.end {
                                // This unreviewed edit lies past the current
                                // committed edit; revisit it on the next one.
                                break;
                            }

                            old_unreviewed_edits.next().unwrap();
                        }
                    }

                    (
                        Arc::from(new_agent_diff_base.to_string().as_str()),
                        new_agent_diff_base,
                    )
                }))
            })??
            .await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
465
    /// Recomputes the review diff for `buffer` against `new_diff_base` and
    /// stores the resulting base, snapshot, and unreviewed-edit patch back on
    /// the tracked buffer.
    async fn update_diff(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        buffer_snapshot: text::BufferSnapshot,
        new_base_text: Arc<str>,
        new_diff_base: Rope,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let (diff, language) = this.read_with(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok((
                tracked_buffer.diff.clone(),
                buffer.read(cx).language().cloned(),
            ))
        })??;
        let update = diff
            .update(cx, |diff, cx| {
                diff.update_diff(
                    buffer_snapshot.clone(),
                    Some(new_base_text),
                    Some(true),
                    language,
                    cx,
                )
            })
            .await;
        diff.update(cx, |diff, cx| {
            diff.set_snapshot(update.clone(), &buffer_snapshot, cx)
        })
        .await;
        let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx));

        // Derive the row-level unreviewed-edit patch from the diff hunks on a
        // background thread.
        let unreviewed_edits = cx
            .background_spawn({
                let buffer_snapshot = buffer_snapshot.clone();
                let new_diff_base = new_diff_base.clone();
                async move {
                    let mut unreviewed_edits = Patch::default();
                    for hunk in diff_snapshot.hunks_intersecting_range(
                        Anchor::min_for_buffer(buffer_snapshot.remote_id())
                            ..Anchor::max_for_buffer(buffer_snapshot.remote_id()),
                        &buffer_snapshot,
                    ) {
                        let old_range = new_diff_base
                            .offset_to_point(hunk.diff_base_byte_range.start)
                            ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
                        let new_range = hunk.range.start..hunk.range.end;
                        unreviewed_edits.push(point_to_row_edit(
                            Edit {
                                old: old_range,
                                new: new_range,
                            },
                            &new_diff_base,
                            buffer_snapshot.as_rope(),
                        ));
                    }
                    unreviewed_edits
                }
            })
            .await;
        // Commit the new state atomically on the main thread.
        this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;
            tracked_buffer.diff_base = new_diff_base;
            tracked_buffer.snapshot = buffer_snapshot;
            tracked_buffer.unreviewed_edits = unreviewed_edits;
            cx.notify();
            anyhow::Ok(())
        })?
    }
541
    /// Track a buffer as read by agent, so we can notify the model about user edits.
    /// Also records the file's read time for external-modification detection.
    pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.buffer_read_impl(buffer, true, cx);
    }
546
547 fn buffer_read_impl(
548 &mut self,
549 buffer: Entity<Buffer>,
550 record_file_read_time: bool,
551 cx: &mut Context<Self>,
552 ) {
553 if let Some(linked_action_log) = &self.linked_action_log {
554 // We don't want to share read times since the other agent hasn't read it necessarily
555 linked_action_log.update(cx, |log, cx| {
556 log.buffer_read_impl(buffer.clone(), false, cx);
557 });
558 }
559 if record_file_read_time {
560 self.update_file_read_time(&buffer, cx);
561 }
562 self.track_buffer_internal(buffer, false, cx);
563 }
564
    /// Mark a buffer as created by agent, so we can refresh it in the context.
    /// Also records the file's read time for external-modification detection.
    pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.buffer_created_impl(buffer, true, cx);
    }
569
570 fn buffer_created_impl(
571 &mut self,
572 buffer: Entity<Buffer>,
573 record_file_read_time: bool,
574 cx: &mut Context<Self>,
575 ) {
576 if let Some(linked_action_log) = &self.linked_action_log {
577 // We don't want to share read times since the other agent hasn't read it necessarily
578 linked_action_log.update(cx, |log, cx| {
579 log.buffer_created_impl(buffer.clone(), false, cx);
580 });
581 }
582 if record_file_read_time {
583 self.update_file_read_time(&buffer, cx);
584 }
585 self.track_buffer_internal(buffer, true, cx);
586 }
587
    /// Mark a buffer as edited by agent, so we can refresh it in the context.
    /// Also records the file's read time for external-modification detection.
    pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.buffer_edited_impl(buffer, true, cx);
    }
592
593 fn buffer_edited_impl(
594 &mut self,
595 buffer: Entity<Buffer>,
596 record_file_read_time: bool,
597 cx: &mut Context<Self>,
598 ) {
599 if let Some(linked_action_log) = &self.linked_action_log {
600 // We don't want to share read times since the other agent hasn't read it necessarily
601 linked_action_log.update(cx, |log, cx| {
602 log.buffer_edited_impl(buffer.clone(), false, cx);
603 });
604 }
605 if record_file_read_time {
606 self.update_file_read_time(&buffer, cx);
607 }
608 let new_version = buffer.read(cx).version();
609 let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
610 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
611 tracked_buffer.status = TrackedBufferStatus::Modified;
612 }
613
614 tracked_buffer.version = new_version;
615 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
616 }
617
    /// Records that the agent is about to delete `buffer`, updating tracking
    /// so the deletion can be reviewed (and forwarding to any linked log).
    pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        // Ok to propagate file read time removal to linked action log
        self.remove_file_read_time(&buffer, cx);
        let has_linked_action_log = self.linked_action_log.is_some();
        let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
        match tracked_buffer.status {
            TrackedBufferStatus::Created { .. } => {
                // The agent created it and is now deleting it: net effect is
                // nothing to review, so just stop tracking.
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            TrackedBufferStatus::Modified => {
                tracked_buffer.status = TrackedBufferStatus::Deleted;
                if !has_linked_action_log {
                    // Represent the deletion as an agent edit that empties the
                    // buffer, so it shows up in the review diff.
                    buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
                    tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
                }
            }

            TrackedBufferStatus::Deleted => {}
        }

        if let Some(linked_action_log) = &mut self.linked_action_log {
            linked_action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        }

        // With a linked log, the forwarded call presumably performed the
        // buffer-emptying edit; schedule our own diff update afterwards.
        // NOTE(review): relies on the linked log not having its own link —
        // verify for deeper chains.
        if has_linked_action_log && let Some(tracked_buffer) = self.tracked_buffers.get(&buffer) {
            tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
        }

        cx.notify();
    }
649
    /// Accepts ("keeps") every unreviewed edit that intersects `buffer_range`.
    ///
    /// Kept edits are folded into the diff base so they stop appearing as
    /// unreviewed; a buffer in `Deleted` status is simply dropped from
    /// tracking. Accepted edits are reported to `telemetry` if provided.
    pub fn keep_edits_in_range(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_range: Range<impl language::ToPoint>,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
        match tracked_buffer.status {
            TrackedBufferStatus::Deleted => {
                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            _ => {
                let buffer = buffer.read(cx);
                let buffer_range =
                    buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
                // Rows the previously-kept edits have shifted the diff base by
                // as we fold edits in, front to back.
                let mut delta = 0i32;
                tracked_buffer.unreviewed_edits.retain_mut(|edit| {
                    // Re-express this edit's old range in the coordinates of
                    // the diff base as already updated by earlier iterations.
                    edit.old.start = (edit.old.start as i32 + delta) as u32;
                    edit.old.end = (edit.old.end as i32 + delta) as u32;

                    if buffer_range.end.row < edit.new.start
                        || buffer_range.start.row > edit.new.end
                    {
                        // Outside the kept range: leave it unreviewed.
                        true
                    } else {
                        // Inside the kept range: copy the buffer's new text
                        // over the corresponding span of the diff base.
                        let old_range = tracked_buffer
                            .diff_base
                            .point_to_offset(Point::new(edit.old.start, 0))
                            ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                Point::new(edit.old.end, 0),
                                tracked_buffer.diff_base.max_point(),
                            ));
                        let new_range = tracked_buffer
                            .snapshot
                            .point_to_offset(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.point_to_offset(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        tracked_buffer.diff_base.replace(
                            old_range,
                            &tracked_buffer
                                .snapshot
                                .text_for_range(new_range)
                                .collect::<String>(),
                        );
                        delta += edit.new_len() as i32 - edit.old_len() as i32;
                        metrics.add_edit(edit);
                        false
                    }
                });
                // Once everything is reviewed, a created buffer behaves like a
                // plain modified one from here on.
                if tracked_buffer.unreviewed_edits.is_empty()
                    && let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status
                {
                    tracked_buffer.status = TrackedBufferStatus::Modified;
                }
                tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
            }
        }
        if let Some(telemetry) = telemetry {
            telemetry_report_accepted_edits(&telemetry, metrics);
        }
    }
720
    /// Rejects unreviewed agent edits intersecting `buffer_ranges`, restoring
    /// the pre-edit text.
    ///
    /// Returns a task that saves (or deletes) the affected file, plus — when
    /// the operation supports it — the undo data needed to re-apply the
    /// agent's changes later.
    pub fn reject_edits_in_ranges(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_ranges: Vec<Range<impl language::ToPoint>>,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) -> (Task<Result<()>>, Option<PerBufferUndo>) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return (Task::ready(Ok(())), None);
        };

        let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
        let mut undo_info: Option<PerBufferUndo> = None;
        let task = match &tracked_buffer.status {
            TrackedBufferStatus::Created {
                existing_file_content,
            } => {
                let task = if let Some(existing_file_content) = existing_file_content {
                    // Capture the agent's content before restoring existing file content
                    let agent_content = buffer.read(cx).text();

                    // Swap the buffer back to the pre-agent file content in a
                    // single transaction.
                    buffer.update(cx, |buffer, cx| {
                        buffer.start_transaction();
                        buffer.set_text("", cx);
                        for chunk in existing_file_content.chunks() {
                            buffer.append(chunk, cx);
                        }
                        buffer.end_transaction(cx);
                    });

                    undo_info = Some(PerBufferUndo {
                        buffer: buffer.downgrade(),
                        edits_to_restore: vec![(Anchor::MIN..Anchor::MAX, agent_content)],
                        status: UndoBufferStatus::Created {
                            had_existing_content: true,
                        },
                    });

                    self.project
                        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
                } else {
                    // For a file created by AI with no pre-existing content,
                    // only delete the file if we're certain it contains only AI content
                    // with no edits from the user.

                    let initial_version = tracked_buffer.version.clone();
                    let current_version = buffer.read(cx).version();

                    let current_content = buffer.read(cx).text();
                    let tracked_content = tracked_buffer.snapshot.text();

                    let is_ai_only_content =
                        initial_version == current_version && current_content == tracked_content;

                    if is_ai_only_content {
                        buffer
                            .read(cx)
                            .entry_id(cx)
                            .and_then(|entry_id| {
                                self.project.update(cx, |project, cx| {
                                    project.delete_entry(entry_id, false, cx)
                                })
                            })
                            .unwrap_or(Task::ready(Ok(())))
                    } else {
                        // Not sure how to disentangle edits made by the user
                        // from edits made by the AI at this point.
                        // For now, preserve both to avoid data loss.
                        //
                        // TODO: Better solution (disable "Reject" after user makes some
                        // edit or find a way to differentiate between AI and user edits)
                        Task::ready(Ok(()))
                    }
                };

                // Either way, the created buffer stops being tracked.
                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                cx.notify();
                task
            }
            TrackedBufferStatus::Deleted => {
                // Rejecting a deletion restores the file from the diff base.
                buffer.update(cx, |buffer, cx| {
                    buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
                });
                let save = self
                    .project
                    .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));

                // Clear all tracked edits for this buffer and start over as if we just read it.
                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                self.buffer_read(buffer.clone(), cx);
                cx.notify();
                save
            }
            TrackedBufferStatus::Modified => {
                let edits_to_restore = buffer.update(cx, |buffer, cx| {
                    // Requested ranges, as row ranges, in ascending order.
                    let mut buffer_row_ranges = buffer_ranges
                        .into_iter()
                        .map(|range| {
                            range.start.to_point(buffer).row..range.end.to_point(buffer).row
                        })
                        .peekable();

                    let mut edits_to_revert = Vec::new();
                    let mut edits_for_undo = Vec::new();
                    for edit in tracked_buffer.unreviewed_edits.edits() {
                        let new_range = tracked_buffer
                            .snapshot
                            .anchor_before(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.anchor_after(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        let new_row_range = new_range.start.to_point(buffer).row
                            ..new_range.end.to_point(buffer).row;

                        // Does this edit intersect any requested row range?
                        let mut revert = false;
                        while let Some(buffer_row_range) = buffer_row_ranges.peek() {
                            if buffer_row_range.end < new_row_range.start {
                                buffer_row_ranges.next();
                            } else if buffer_row_range.start > new_row_range.end {
                                break;
                            } else {
                                revert = true;
                                break;
                            }
                        }

                        if revert {
                            metrics.add_edit(edit);
                            // Pull the original text for this edit back out of
                            // the diff base.
                            let old_range = tracked_buffer
                                .diff_base
                                .point_to_offset(Point::new(edit.old.start, 0))
                                ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                    Point::new(edit.old.end, 0),
                                    tracked_buffer.diff_base.max_point(),
                                ));
                            let old_text = tracked_buffer
                                .diff_base
                                .chunks_in_range(old_range)
                                .collect::<String>();

                            // Capture the agent's text before we revert it (for undo)
                            let new_range_offset =
                                new_range.start.to_offset(buffer)..new_range.end.to_offset(buffer);
                            let agent_text =
                                buffer.text_for_range(new_range_offset).collect::<String>();
                            edits_for_undo.push((new_range.clone(), agent_text));

                            edits_to_revert.push((new_range, old_text));
                        }
                    }

                    buffer.edit(edits_to_revert, None, cx);
                    edits_for_undo
                });

                if !edits_to_restore.is_empty() {
                    undo_info = Some(PerBufferUndo {
                        buffer: buffer.downgrade(),
                        edits_to_restore,
                        status: UndoBufferStatus::Modified,
                    });
                }

                self.project
                    .update(cx, |project, cx| project.save_buffer(buffer, cx))
            }
        };
        if let Some(telemetry) = telemetry {
            telemetry_report_rejected_edits(&telemetry, metrics);
        }
        (task, undo_info)
    }
896
897 pub fn keep_all_edits(
898 &mut self,
899 telemetry: Option<ActionLogTelemetry>,
900 cx: &mut Context<Self>,
901 ) {
902 self.tracked_buffers.retain(|buffer, tracked_buffer| {
903 let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
904 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
905 if let Some(telemetry) = telemetry.as_ref() {
906 telemetry_report_accepted_edits(telemetry, metrics);
907 }
908 match tracked_buffer.status {
909 TrackedBufferStatus::Deleted => false,
910 _ => {
911 if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
912 tracked_buffer.status = TrackedBufferStatus::Modified;
913 }
914 tracked_buffer.unreviewed_edits.clear();
915 tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
916 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
917 true
918 }
919 }
920 });
921
922 cx.notify();
923 }
924
925 pub fn reject_all_edits(
926 &mut self,
927 telemetry: Option<ActionLogTelemetry>,
928 cx: &mut Context<Self>,
929 ) -> Task<()> {
930 // Clear any previous undo state before starting a new reject operation
931 self.last_reject_undo = None;
932
933 let mut undo_buffers = Vec::new();
934 let mut futures = Vec::new();
935
936 for buffer in self.changed_buffers(cx).into_keys() {
937 let buffer_ranges = vec![Anchor::min_max_range_for_buffer(
938 buffer.read(cx).remote_id(),
939 )];
940 let (reject_task, undo_info) =
941 self.reject_edits_in_ranges(buffer, buffer_ranges, telemetry.clone(), cx);
942
943 if let Some(undo) = undo_info {
944 undo_buffers.push(undo);
945 }
946
947 futures.push(async move {
948 reject_task.await.log_err();
949 });
950 }
951
952 // Store the undo information if we have any
953 if !undo_buffers.is_empty() {
954 self.last_reject_undo = Some(LastRejectUndo {
955 buffers: undo_buffers,
956 });
957 }
958
959 let task = futures::future::join_all(futures);
960 cx.background_spawn(async move {
961 task.await;
962 })
963 }
964
    /// Whether undo information from a previous reject operation is available.
    pub fn has_pending_undo(&self) -> bool {
        self.last_reject_undo.is_some()
    }
968
    /// Replaces the stored undo information for the most recent reject.
    pub fn set_last_reject_undo(&mut self, undo: LastRejectUndo) {
        self.last_reject_undo = Some(undo);
    }
972
    /// Undoes the most recent reject operation, restoring the rejected agent changes.
    /// This is a best-effort operation: if buffers have been closed or modified externally,
    /// those buffers will be skipped.
    pub fn undo_last_reject(&mut self, cx: &mut Context<Self>) -> Task<()> {
        // Consuming the state means undo is single-shot.
        let Some(undo) = self.last_reject_undo.take() else {
            return Task::ready(());
        };

        let mut save_tasks = Vec::with_capacity(undo.buffers.len());

        for per_buffer_undo in undo.buffers {
            // Skip if the buffer entity has been deallocated
            let Some(buffer) = per_buffer_undo.buffer.upgrade() else {
                continue;
            };

            buffer.update(cx, |buffer, cx| {
                let mut valid_edits = Vec::new();

                // Only re-apply edits whose anchors still belong to this
                // buffer; anything else is silently skipped.
                // NOTE(review): the created-file undo path stores
                // Anchor::MIN..Anchor::MAX, which may carry no buffer_id and
                // would then be filtered out here, making that undo a no-op —
                // verify against the Anchor definition.
                for (anchor_range, text_to_restore) in per_buffer_undo.edits_to_restore {
                    if anchor_range.start.buffer_id == Some(buffer.remote_id())
                        && anchor_range.end.buffer_id == Some(buffer.remote_id())
                    {
                        valid_edits.push((anchor_range, text_to_restore));
                    }
                }

                if !valid_edits.is_empty() {
                    buffer.edit(valid_edits, None, cx);
                }
            });

            // Re-track the buffer so the restored agent content shows up as
            // unreviewed again.
            if !self.tracked_buffers.contains_key(&buffer) {
                self.buffer_edited(buffer.clone(), cx);
            }

            let save = self
                .project
                .update(cx, |project, cx| project.save_buffer(buffer, cx));
            save_tasks.push(save);
        }

        cx.notify();

        cx.background_spawn(async move {
            futures::future::join_all(save_tasks).await;
        })
    }
1021
1022 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
1023 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
1024 self.tracked_buffers
1025 .iter()
1026 .filter(|(_, tracked)| tracked.has_edits(cx))
1027 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
1028 .collect()
1029 }
1030
1031 /// Iterate over buffers changed since last read or edited by the model
1032 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
1033 self.tracked_buffers
1034 .iter()
1035 .filter(|(buffer, tracked)| {
1036 let buffer = buffer.read(cx);
1037
1038 tracked.version != buffer.version
1039 && buffer
1040 .file()
1041 .is_some_and(|file| !file.disk_state().is_deleted())
1042 })
1043 .map(|(buffer, _)| buffer)
1044 }
1045}
1046
/// Identifiers attached to the telemetry events emitted by the action log.
#[derive(Clone)]
pub struct ActionLogTelemetry {
    /// Telemetry identifier of the agent performing the edits.
    pub agent_telemetry_id: SharedString,
    /// Identifier of the session the edits belong to.
    pub session_id: Arc<str>,
}
1052
/// Line-count metrics accumulated over a batch of accepted or rejected edits.
struct ActionLogMetrics {
    /// Total lines removed across the counted edits.
    lines_removed: u32,
    /// Total lines added across the counted edits.
    lines_added: u32,
    /// Language name of the buffer the edits belong to, if known.
    language: Option<SharedString>,
}
1058
1059impl ActionLogMetrics {
1060 fn for_buffer(buffer: &Buffer) -> Self {
1061 Self {
1062 language: buffer.language().map(|l| l.name().0),
1063 lines_removed: 0,
1064 lines_added: 0,
1065 }
1066 }
1067
1068 fn add_edits(&mut self, edits: &[Edit<u32>]) {
1069 for edit in edits {
1070 self.add_edit(edit);
1071 }
1072 }
1073
1074 fn add_edit(&mut self, edit: &Edit<u32>) {
1075 self.lines_added += edit.new_len();
1076 self.lines_removed += edit.old_len();
1077 }
1078}
1079
/// Emits an "Agent Edits Accepted" telemetry event carrying the agent/session
/// identifiers and the accumulated line metrics.
fn telemetry_report_accepted_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
    telemetry::event!(
        "Agent Edits Accepted",
        agent = telemetry.agent_telemetry_id,
        session = telemetry.session_id,
        language = metrics.language,
        lines_added = metrics.lines_added,
        lines_removed = metrics.lines_removed
    );
}
1090
/// Emits an "Agent Edits Rejected" telemetry event carrying the agent/session
/// identifiers and the accumulated line metrics.
fn telemetry_report_rejected_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
    telemetry::event!(
        "Agent Edits Rejected",
        agent = telemetry.agent_telemetry_id,
        session = telemetry.session_id,
        language = metrics.language,
        lines_added = metrics.lines_added,
        lines_removed = metrics.lines_removed
    );
}
1101
/// Applies the subset of row-based `edits` that do not intersect any edit in
/// `patch`, rewriting the corresponding row ranges of `old_text` with text
/// taken from `new_text`. Edits that overlap an existing edit in `patch` are
/// skipped entirely (they "conflict"). Returns true if at least one edit was
/// applied to `old_text`.
fn apply_non_conflicting_edits(
    patch: &Patch<u32>,
    edits: Vec<Edit<u32>>,
    old_text: &mut Rope,
    new_text: &Rope,
) -> bool {
    let mut old_edits = patch.edits().iter().cloned().peekable();
    let mut new_edits = edits.into_iter().peekable();
    // Net row delta from new edits we have already applied to `old_text`.
    let mut applied_delta = 0i32;
    // Net row delta from old (patch) edits we have advanced past.
    let mut rebased_delta = 0i32;
    let mut has_made_changes = false;

    while let Some(mut new_edit) = new_edits.next() {
        let mut conflict = false;

        // Push all the old edits that are before this new edit or that intersect with it.
        while let Some(old_edit) = old_edits.peek() {
            if new_edit.old.end < old_edit.new.start
                || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
            {
                // The old edit starts after this new edit; stop scanning.
                break;
            } else if new_edit.old.start > old_edit.new.end
                || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
            {
                // The old edit ends before this new edit; consume it and
                // fold its size change into the rebase delta.
                let old_edit = old_edits.next().unwrap();
                rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
            } else {
                // The ranges intersect: this new edit conflicts and won't be applied.
                conflict = true;
                if new_edits
                    .peek()
                    .is_some_and(|next_edit| next_edit.old.overlaps(&old_edit.new))
                {
                    // The following new edit overlaps the same old edit, so
                    // advance to it while keeping the old edit peeked — it
                    // will be marked conflicting on the next pass too.
                    new_edit = new_edits.next().unwrap();
                } else {
                    let old_edit = old_edits.next().unwrap();
                    rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
                }
            }
        }

        if !conflict {
            // This edit doesn't intersect with any old edit, so we can apply it to the old text.
            // Translate the edit's rows into `old_text`'s current row space.
            new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
            new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
            // Convert row ranges to byte ranges, clamping to each rope's end.
            let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
                ..old_text.point_to_offset(cmp::min(
                    Point::new(new_edit.old.end, 0),
                    old_text.max_point(),
                ));
            let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
                ..new_text.point_to_offset(cmp::min(
                    Point::new(new_edit.new.end, 0),
                    new_text.max_point(),
                ));

            old_text.replace(
                old_bytes,
                &new_text.chunks_in_range(new_bytes).collect::<String>(),
            );
            applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
            has_made_changes = true;
        }
    }
    has_made_changes
}
1167
1168fn diff_snapshots(
1169 old_snapshot: &text::BufferSnapshot,
1170 new_snapshot: &text::BufferSnapshot,
1171) -> Vec<Edit<u32>> {
1172 let mut edits = new_snapshot
1173 .edits_since::<Point>(&old_snapshot.version)
1174 .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
1175 .peekable();
1176 let mut row_edits = Vec::new();
1177 while let Some(mut edit) = edits.next() {
1178 while let Some(next_edit) = edits.peek() {
1179 if edit.old.end >= next_edit.old.start {
1180 edit.old.end = next_edit.old.end;
1181 edit.new.end = next_edit.new.end;
1182 edits.next();
1183 } else {
1184 break;
1185 }
1186 }
1187 row_edits.push(edit);
1188 }
1189 row_edits
1190}
1191
1192fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
1193 if edit.old.start.column == old_text.line_len(edit.old.start.row)
1194 && new_text
1195 .chars_at(new_text.point_to_offset(edit.new.start))
1196 .next()
1197 == Some('\n')
1198 && edit.old.start != old_text.max_point()
1199 {
1200 Edit {
1201 old: edit.old.start.row + 1..edit.old.end.row + 1,
1202 new: edit.new.start.row + 1..edit.new.end.row + 1,
1203 }
1204 } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
1205 Edit {
1206 old: edit.old.start.row..edit.old.end.row,
1207 new: edit.new.start.row..edit.new.end.row,
1208 }
1209 } else {
1210 Edit {
1211 old: edit.old.start.row..edit.old.end.row + 1,
1212 new: edit.new.start.row..edit.new.end.row + 1,
1213 }
1214 }
1215}
1216
/// Who authored a change to a tracked buffer.
#[derive(Copy, Clone, Debug)]
enum ChangeAuthor {
    /// The change was made by the user.
    User,
    /// The change was made by the agent.
    Agent,
}
1222
/// Lifecycle state of a buffer tracked by the action log.
#[derive(Debug)]
enum TrackedBufferStatus {
    /// The file was created; `existing_file_content` holds the prior contents
    /// when a pre-existing file was overwritten, or `None` for a brand-new file.
    Created { existing_file_content: Option<Rope> },
    /// The file existed before and was modified.
    Modified,
    /// The file was deleted.
    Deleted,
}
1229
/// Per-buffer state kept by the action log to track and review edits.
pub struct TrackedBuffer {
    /// The buffer being tracked.
    buffer: Entity<Buffer>,
    /// The base text that unreviewed edits are diffed against.
    diff_base: Rope,
    /// Row-based edits that haven't been reviewed by the user yet.
    unreviewed_edits: Patch<u32>,
    /// Whether the buffer was created, modified, or deleted.
    status: TrackedBufferStatus,
    /// The buffer version the log last observed (used to detect staleness).
    version: clock::Global,
    /// The diff presented to the user for review.
    diff: Entity<BufferDiff>,
    // NOTE(review): presumably the most recently processed text snapshot —
    // the code that updates it is outside this view; confirm against callers.
    snapshot: text::BufferSnapshot,
    /// Channel used by `schedule_diff_update` to request diff recomputation.
    diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
    // Held to keep the LSP-aware buffer handle alive while tracked.
    _open_lsp_handle: OpenLspBufferHandle,
    // Background task that services `diff_update`; held for its lifetime.
    _maintain_diff: Task<()>,
    // Subscription kept alive for the duration of tracking.
    _subscription: Subscription,
}
1243
1244impl TrackedBuffer {
1245 #[cfg(any(test, feature = "test-support"))]
1246 pub fn diff(&self) -> &Entity<BufferDiff> {
1247 &self.diff
1248 }
1249
1250 #[cfg(any(test, feature = "test-support"))]
1251 pub fn diff_base_len(&self) -> usize {
1252 self.diff_base.len()
1253 }
1254
1255 fn has_edits(&self, cx: &App) -> bool {
1256 self.diff
1257 .read(cx)
1258 .snapshot(cx)
1259 .hunks(self.buffer.read(cx))
1260 .next()
1261 .is_some()
1262 }
1263
1264 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
1265 self.diff_update
1266 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
1267 .ok();
1268 }
1269}
1270
/// Wrapper holding the review diff for a buffer that has changed.
pub struct ChangedBuffer {
    /// The diff describing the buffer's changes.
    pub diff: Entity<BufferDiff>,
}
1274
1275#[cfg(test)]
1276mod tests {
1277 use super::*;
1278 use buffer_diff::DiffHunkStatusKind;
1279 use gpui::TestAppContext;
1280 use language::Point;
1281 use project::{FakeFs, Fs, Project, RemoveOptions};
1282 use rand::prelude::*;
1283 use serde_json::json;
1284 use settings::SettingsStore;
1285 use std::env;
1286 use util::{RandomCharIter, path};
1287
    // Runs once at process start (before any test) to set up logging.
    #[ctor::ctor]
    fn init_logger() {
        zlog::init_test();
    }
1292
    // Installs the test settings store; call at the start of every test.
    fn init_test(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
        });
    }
1299
    /// Keeping edits in a range marks only the intersecting hunks as reviewed.
    #[gpui::test(iterations = 10)]
    async fn test_keep_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Two tracked edits on separate lines produce two unreviewed hunks.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndEf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(2, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(4, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Keeping a range that covers only the second hunk clears just that one.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(2, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\n".into(),
                }],
            )]
        );

        // Keeping the full range leaves no unreviewed hunks.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1377
    /// Deleted lines appear as deletion hunks; undoing a deletion removes its hunk.
    #[gpui::test(iterations = 10)]
    async fn test_deletions(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Delete two lines, each in its own finalized transaction so they
        // can be undone independently.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
                    .unwrap();
                buffer.finalize_last_transaction();
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
                    .unwrap();
                buffer.finalize_last_transaction();
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nghi\njkl\npqr"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "mno\n".into(),
                    }
                ],
            )]
        );

        // Undoing the most recent deletion removes only its hunk.
        buffer.update(cx, |buffer, cx| buffer.undo(cx));
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nghi\njkl\nmno\npqr"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(1, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "def\n".into(),
                }],
            )]
        );

        // Keeping the remaining deletion clears all hunks.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1462
    /// User edits around and inside a tracked hunk don't change the hunk's
    /// recorded old text, and keeping the hunk's range clears it.
    #[gpui::test(iterations = 10)]
    async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // A tracked multi-line edit produces a single modified hunk.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndeF\nGHI\njkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // Untracked user edits adjacent to the hunk leave it untouched.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(
                [
                    (Point::new(0, 2)..Point::new(0, 2), "X"),
                    (Point::new(3, 0)..Point::new(3, 0), "Y"),
                ],
                None,
                cx,
            )
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // A user edit inside the hunk still preserves the recorded old text.
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndZeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // Keeping a range that intersects the hunk clears it.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1559
    /// A file created by the agent shows a single "Added" hunk covering its
    /// content, which grows with further edits and clears when kept.
    #[gpui::test(iterations = 10)]
    async fn test_creating_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // A later edit extends the same Added hunk.
        buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 6),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), 0..5, None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1618
    /// Overwriting an existing file via `buffer_created` shows an Added hunk;
    /// rejecting restores the file's original content.
    #[gpui::test(iterations = 10)]
    async fn test_overwriting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 19),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting any part of the overwrite restores the pre-existing content.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx);
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }
1677
    /// Overwriting a file the agent had already edited resets the tracked
    /// state; rejecting restores the original on-disk content, not the
    /// intermediate edit.
    #[gpui::test(iterations = 10)]
    async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        // First, a regular tracked modification.
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 37),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "Lorem ipsum dolor".into(),
                }],
            )]
        );

        // Then the agent overwrites the whole file; the hunk becomes Added.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 9),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting restores the original file content from before both edits.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx);
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }
1758
    /// Deleted files show Deleted hunks; external recreation clears tracking,
    /// tool recreation re-tracks as Added, and external deletion of a created
    /// file clears its hunks.
    #[gpui::test(iterations = 10)]
    async fn test_deleting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file1": "lorem\n", "file2": "ipsum\n"}),
        )
        .await;

        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let file1_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();
        let file2_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
            .unwrap();

        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let buffer1 = project
            .update(cx, |project, cx| {
                project.open_buffer(file1_path.clone(), cx)
            })
            .await
            .unwrap();
        let buffer2 = project
            .update(cx, |project, cx| {
                project.open_buffer(file2_path.clone(), cx)
            })
            .await
            .unwrap();

        // Delete both files through the project after notifying the log.
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| {
                project.delete_file(file1_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        project
            .update(cx, |project, cx| {
                project.delete_file(file2_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![
                (
                    buffer1.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "lorem\n".into(),
                    }]
                ),
                (
                    buffer2.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "ipsum\n".into(),
                    }],
                )
            ]
        );

        // Simulate file1 being recreated externally.
        fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
            .await;

        // Simulate file2 being recreated by a tool.
        let buffer2 = project
            .update(cx, |project, cx| project.open_buffer(file2_path, cx))
            .await
            .unwrap();
        action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
        buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
        action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
            .await
            .unwrap();

        // Only the tool-recreated file remains tracked, now as Added.
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer2.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Simulate file2 being deleted externally.
        fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1868
    /// Rejecting a range reverts only the hunks it overlaps; ranges that touch
    /// no hunk are ignored, and adjacent ranges revert the neighboring hunk.
    #[gpui::test(iterations = 10)]
    async fn test_reject_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Make two tracked edits: a multi-line replacement and a one-char change.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // If the rejected range doesn't overlap with any hunk, we ignore it.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Rejecting a range overlapping the first hunk reverts only that hunk.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(1, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(4, 0)..Point::new(4, 3),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "mno".into(),
                }],
            )]
        );

        // Rejecting a range adjacent to the remaining hunk reverts it too.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2009
    /// Rejecting multiple anchor ranges in a single call reverts every
    /// overlapped hunk synchronously (the buffer is restored before the
    /// returned task completes).
    #[gpui::test(iterations = 10)]
    async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Reject both hunks at once via anchor ranges; the revert is visible
        // immediately, before the task is awaited.
        action_log.update(cx, |log, cx| {
            let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
                ..buffer.read(cx).anchor_before(Point::new(1, 0));
            let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
                ..buffer.read(cx).anchor_before(Point::new(5, 3));

            let (task, _) =
                log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], None, cx);
            task.detach();
            assert_eq!(
                buffer.read_with(cx, |buffer, _| buffer.text()),
                "abc\ndef\nghi\njkl\nmno"
            );
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2086
    /// Rejecting the deletion of a file restores it on disk with its
    /// original content.
    #[gpui::test(iterations = 10)]
    async fn test_reject_deleted_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "content"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // Track and perform the deletion.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| {
                project.delete_file(file_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "content".into(),
                }]
            )]
        );

        // Rejecting the deletion hunk brings the file back.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
        assert!(fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2145
    /// Rejecting a file the agent created (and that didn't exist before)
    /// deletes it from disk.
    #[gpui::test(iterations = 10)]
    async fn test_reject_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 7),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting the Added hunk removes the file from disk.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 11)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2201
    // Rejecting the creation of a file the user has since edited must NOT
    // delete the file: the user's work (and the surrounding content) is kept.
    #[gpui::test]
    async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();

        // User makes additional edits (note: not reported via buffer_edited,
        // so the action log treats them as user edits, not agent edits)
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| {
                buffer.edit([(10..10, "\nuser added this line")], None, cx);
            });
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // Reject all
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(100, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();

        // File should still contain all the content
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        let content = buffer.read_with(cx, |buffer, _| buffer.text());
        assert_eq!(content, "ai content\nuser added this line");
    }
2269
2270 #[gpui::test]
2271 async fn test_reject_after_accepting_hunk_on_created_file(cx: &mut TestAppContext) {
2272 init_test(cx);
2273
2274 let fs = FakeFs::new(cx.executor());
2275 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2276 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2277
2278 let file_path = project
2279 .read_with(cx, |project, cx| {
2280 project.find_project_path("dir/new_file", cx)
2281 })
2282 .unwrap();
2283 let buffer = project
2284 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
2285 .await
2286 .unwrap();
2287
2288 // AI creates file with initial content
2289 cx.update(|cx| {
2290 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2291 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
2292 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2293 });
2294 project
2295 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2296 .await
2297 .unwrap();
2298 cx.run_until_parked();
2299 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2300
2301 // User accepts the single hunk
2302 action_log.update(cx, |log, cx| {
2303 let buffer_range = Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id());
2304 log.keep_edits_in_range(buffer.clone(), buffer_range, None, cx)
2305 });
2306 cx.run_until_parked();
2307 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2308 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2309
2310 // AI modifies the file
2311 cx.update(|cx| {
2312 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
2313 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2314 });
2315 project
2316 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2317 .await
2318 .unwrap();
2319 cx.run_until_parked();
2320 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2321
2322 // User rejects the hunk
2323 action_log
2324 .update(cx, |log, cx| {
2325 let (task, _) = log.reject_edits_in_ranges(
2326 buffer.clone(),
2327 vec![Anchor::min_max_range_for_buffer(
2328 buffer.read(cx).remote_id(),
2329 )],
2330 None,
2331 cx,
2332 );
2333 task
2334 })
2335 .await
2336 .unwrap();
2337 cx.run_until_parked();
2338 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await,);
2339 assert_eq!(
2340 buffer.read_with(cx, |buffer, _| buffer.text()),
2341 "ai content v1"
2342 );
2343 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2344 }
2345
2346 #[gpui::test]
2347 async fn test_reject_edits_on_previously_accepted_created_file(cx: &mut TestAppContext) {
2348 init_test(cx);
2349
2350 let fs = FakeFs::new(cx.executor());
2351 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2352 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2353
2354 let file_path = project
2355 .read_with(cx, |project, cx| {
2356 project.find_project_path("dir/new_file", cx)
2357 })
2358 .unwrap();
2359 let buffer = project
2360 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
2361 .await
2362 .unwrap();
2363
2364 // AI creates file with initial content
2365 cx.update(|cx| {
2366 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2367 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
2368 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2369 });
2370 project
2371 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2372 .await
2373 .unwrap();
2374 cx.run_until_parked();
2375
2376 // User clicks "Accept All"
2377 action_log.update(cx, |log, cx| log.keep_all_edits(None, cx));
2378 cx.run_until_parked();
2379 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2380 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared
2381
2382 // AI modifies file again
2383 cx.update(|cx| {
2384 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
2385 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2386 });
2387 project
2388 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2389 .await
2390 .unwrap();
2391 cx.run_until_parked();
2392 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2393
2394 // User clicks "Reject All"
2395 action_log
2396 .update(cx, |log, cx| log.reject_all_edits(None, cx))
2397 .await;
2398 cx.run_until_parked();
2399 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2400 assert_eq!(
2401 buffer.read_with(cx, |buffer, _| buffer.text()),
2402 "ai content v1"
2403 );
2404 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2405 }
2406
    // Fuzz test: interleaves random keep/reject operations with random agent
    // and user edits, then verifies the action log's internal diff state stays
    // consistent with the buffer contents.
    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        init_test(cx);

        // Number of random operations per run; overridable via the
        // `OPERATIONS` environment variable for longer local fuzzing.
        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

        for _ in 0..operations {
            // 25% keep, 25% reject, 50% edit (half agent, half user).
            match rng.random_range(0..100) {
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, None, cx)
                    });
                }
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            let (task, _) =
                                log.reject_edits_in_ranges(buffer.clone(), vec![range], None, cx);
                            task
                        })
                        .await
                        .unwrap();
                }
                _ => {
                    // User edits are NOT reported to the action log; agent
                    // edits are, which is what distinguishes the two here.
                    let is_agent_edit = rng.random_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            // Occasionally settle and check invariants mid-run.
            if rng.random_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

        // Lets all pending work settle, then replays the tracked unreviewed
        // edits onto the stored diff base and asserts the result reproduces
        // the current buffer text exactly.
        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }
2500
    // When a git commit lands, any unreviewed agent hunks whose content now
    // matches HEAD are treated as implicitly kept; hunks that differ from the
    // committed content remain pending review.
    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        // HEAD starts out identical to the working copy.
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        // All five agent edits are initially unreviewed.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        // Only the three edits the commit did NOT match remain unreviewed.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Make another commit that accepts the NEW line but with different content
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into())],
            "0000002",
        );
        cx.run_until_parked();
        // GGG now matches HEAD and is kept; the NEW line still differs
        // (DIFFERENT was committed instead) so it stays pending.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer,
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2662
    // Rejecting agent edits records undo state; `undo_last_reject` restores the
    // rejected edits and clears the pending-undo flag.
    #[gpui::test]
    async fn test_undo_last_reject(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "abc\ndef\nghi"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Track the buffer and make an agent edit
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit(
                        [(Point::new(1, 0)..Point::new(1, 3), "AGENT_EDIT")],
                        None,
                        cx,
                    )
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();

        // Verify the agent edit is there
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nAGENT_EDIT\nghi"
        );
        assert!(!unreviewed_hunks(&action_log, cx).is_empty());

        // Reject all edits
        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();

        // Verify the buffer is back to original
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi"
        );
        assert!(unreviewed_hunks(&action_log, cx).is_empty());

        // Verify undo state is available
        assert!(action_log.read_with(cx, |log, _| log.has_pending_undo()));

        // Undo the reject
        action_log
            .update(cx, |log, cx| log.undo_last_reject(cx))
            .await;

        cx.run_until_parked();

        // Verify the agent edit is restored
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nAGENT_EDIT\nghi"
        );

        // Verify undo state is cleared (undo is single-shot)
        assert!(!action_log.read_with(cx, |log, _| log.has_pending_undo()));
    }
2741
2742 #[gpui::test]
2743 async fn test_linked_action_log_buffer_read(cx: &mut TestAppContext) {
2744 init_test(cx);
2745
2746 let fs = FakeFs::new(cx.executor());
2747 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
2748 .await;
2749 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2750 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
2751 let child_log =
2752 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
2753
2754 let file_path = project
2755 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
2756 .unwrap();
2757 let buffer = project
2758 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2759 .await
2760 .unwrap();
2761
2762 cx.update(|cx| {
2763 child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
2764 });
2765
2766 // Neither log considers the buffer stale immediately after reading it.
2767 let child_stale = cx.read(|cx| {
2768 child_log
2769 .read(cx)
2770 .stale_buffers(cx)
2771 .cloned()
2772 .collect::<Vec<_>>()
2773 });
2774 let parent_stale = cx.read(|cx| {
2775 parent_log
2776 .read(cx)
2777 .stale_buffers(cx)
2778 .cloned()
2779 .collect::<Vec<_>>()
2780 });
2781 assert!(child_stale.is_empty());
2782 assert!(parent_stale.is_empty());
2783
2784 // Simulate a user edit after the agent read the file.
2785 cx.update(|cx| {
2786 buffer.update(cx, |buffer, cx| {
2787 buffer.edit([(0..5, "goodbye")], None, cx).unwrap();
2788 });
2789 });
2790 cx.run_until_parked();
2791
2792 // Both child and parent should see the buffer as stale because both tracked
2793 // it at the pre-edit version via buffer_read forwarding.
2794 let child_stale = cx.read(|cx| {
2795 child_log
2796 .read(cx)
2797 .stale_buffers(cx)
2798 .cloned()
2799 .collect::<Vec<_>>()
2800 });
2801 let parent_stale = cx.read(|cx| {
2802 parent_log
2803 .read(cx)
2804 .stale_buffers(cx)
2805 .cloned()
2806 .collect::<Vec<_>>()
2807 });
2808 assert_eq!(child_stale, vec![buffer.clone()]);
2809 assert_eq!(parent_stale, vec![buffer]);
2810 }
2811
    // An agent edit reported to a child log is forwarded to its linked parent,
    // so both logs report the same unreviewed hunks.
    #[gpui::test]
    async fn test_linked_action_log_buffer_edited(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent reads, edits the middle line, and reports the edit to the child only.
        cx.update(|cx| {
            child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 0)..Point::new(1, 3), "DEF")], None, cx)
                    .unwrap();
            });
            child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();

        let expected_hunks = vec![(
            buffer,
            vec![HunkStatus {
                range: Point::new(1, 0)..Point::new(2, 0),
                diff_status: DiffHunkStatusKind::Modified,
                old_text: "def\n".into(),
            }],
        )];
        assert_eq!(
            unreviewed_hunks(&child_log, cx),
            expected_hunks,
            "child should track the agent edit"
        );
        assert_eq!(
            unreviewed_hunks(&parent_log, cx),
            expected_hunks,
            "parent should also track the agent edit via linked log forwarding"
        );
    }
2862
    // A file creation reported to a child log is forwarded to its linked
    // parent, so both logs show the new file's "added" hunk.
    #[gpui::test]
    async fn test_linked_action_log_buffer_created(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent creates and fills the file, reporting only to the child log.
        cx.update(|cx| {
            child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("hello", cx));
            child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();

        let expected_hunks = vec![(
            buffer.clone(),
            vec![HunkStatus {
                range: Point::new(0, 0)..Point::new(0, 5),
                diff_status: DiffHunkStatusKind::Added,
                old_text: "".into(),
            }],
        )];
        assert_eq!(
            unreviewed_hunks(&child_log, cx),
            expected_hunks,
            "child should track the created file"
        );
        assert_eq!(
            unreviewed_hunks(&parent_log, cx),
            expected_hunks,
            "parent should also track the created file via linked log forwarding"
        );
    }
2914
    // A deletion reported to a child log is forwarded to its linked parent,
    // so both logs show the "deleted" hunk.
    #[gpui::test]
    async fn test_linked_action_log_will_delete_buffer(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "hello\n"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // Agent announces the deletion to the child log, then deletes the file.
        cx.update(|cx| {
            child_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.delete_file(file_path, false, cx))
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();

        let expected_hunks = vec![(
            buffer.clone(),
            vec![HunkStatus {
                range: Point::new(0, 0)..Point::new(0, 0),
                diff_status: DiffHunkStatusKind::Deleted,
                old_text: "hello\n".into(),
            }],
        )];
        assert_eq!(
            unreviewed_hunks(&child_log, cx),
            expected_hunks,
            "child should track the deleted file"
        );
        assert_eq!(
            unreviewed_hunks(&parent_log, cx),
            expected_hunks,
            "parent should also track the deleted file via linked log forwarding"
        );
    }
2964
    /// Simulates the subagent scenario: two child logs linked to the same parent, each
    /// editing a different file. The parent accumulates all edits while each child
    /// only sees its own.
    #[gpui::test]
    async fn test_linked_action_log_independent_tracking(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file_a": "content of a",
                "file_b": "content of b",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        // Two independent children sharing one parent.
        let child_log_1 =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
        let child_log_2 =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_a_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/file_a", cx)
            })
            .unwrap();
        let file_b_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/file_b", cx)
            })
            .unwrap();
        let buffer_a = project
            .update(cx, |project, cx| project.open_buffer(file_a_path, cx))
            .await
            .unwrap();
        let buffer_b = project
            .update(cx, |project, cx| project.open_buffer(file_b_path, cx))
            .await
            .unwrap();

        // Subagent 1 edits file_a; subagent 2 edits file_b.
        cx.update(|cx| {
            child_log_1.update(cx, |log, cx| log.buffer_read(buffer_a.clone(), cx));
            buffer_a.update(cx, |buffer, cx| {
                buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap();
            });
            child_log_1.update(cx, |log, cx| log.buffer_edited(buffer_a.clone(), cx));

            child_log_2.update(cx, |log, cx| log.buffer_read(buffer_b.clone(), cx));
            buffer_b.update(cx, |buffer, cx| {
                buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap();
            });
            child_log_2.update(cx, |log, cx| log.buffer_edited(buffer_b.clone(), cx));
        });
        cx.run_until_parked();

        let child_1_changed: Vec<_> = cx.read(|cx| {
            child_log_1
                .read(cx)
                .changed_buffers(cx)
                .into_keys()
                .collect()
        });
        let child_2_changed: Vec<_> = cx.read(|cx| {
            child_log_2
                .read(cx)
                .changed_buffers(cx)
                .into_keys()
                .collect()
        });
        let parent_changed: Vec<_> = cx.read(|cx| {
            parent_log
                .read(cx)
                .changed_buffers(cx)
                .into_keys()
                .collect()
        });

        assert_eq!(
            child_1_changed,
            vec![buffer_a.clone()],
            "child 1 should only track file_a"
        );
        assert_eq!(
            child_2_changed,
            vec![buffer_b.clone()],
            "child 2 should only track file_b"
        );
        assert_eq!(parent_changed.len(), 2, "parent should track both files");
        assert!(
            parent_changed.contains(&buffer_a) && parent_changed.contains(&buffer_b),
            "parent should contain both buffer_a and buffer_b"
        );
    }
3060
3061 #[gpui::test]
3062 async fn test_file_read_time_recorded_on_buffer_read(cx: &mut TestAppContext) {
3063 init_test(cx);
3064
3065 let fs = FakeFs::new(cx.executor());
3066 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3067 .await;
3068 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3069 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3070
3071 let file_path = project
3072 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3073 .unwrap();
3074 let buffer = project
3075 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3076 .await
3077 .unwrap();
3078
3079 let abs_path = PathBuf::from(path!("/dir/file"));
3080 assert!(
3081 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3082 "file_read_time should be None before buffer_read"
3083 );
3084
3085 cx.update(|cx| {
3086 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
3087 });
3088
3089 assert!(
3090 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3091 "file_read_time should be recorded after buffer_read"
3092 );
3093 }
3094
3095 #[gpui::test]
3096 async fn test_file_read_time_recorded_on_buffer_edited(cx: &mut TestAppContext) {
3097 init_test(cx);
3098
3099 let fs = FakeFs::new(cx.executor());
3100 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3101 .await;
3102 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3103 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3104
3105 let file_path = project
3106 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3107 .unwrap();
3108 let buffer = project
3109 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3110 .await
3111 .unwrap();
3112
3113 let abs_path = PathBuf::from(path!("/dir/file"));
3114 assert!(
3115 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3116 "file_read_time should be None before buffer_edited"
3117 );
3118
3119 cx.update(|cx| {
3120 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
3121 });
3122
3123 assert!(
3124 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3125 "file_read_time should be recorded after buffer_edited"
3126 );
3127 }
3128
3129 #[gpui::test]
3130 async fn test_file_read_time_recorded_on_buffer_created(cx: &mut TestAppContext) {
3131 init_test(cx);
3132
3133 let fs = FakeFs::new(cx.executor());
3134 fs.insert_tree(path!("/dir"), json!({"file": "existing content"}))
3135 .await;
3136 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3137 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3138
3139 let file_path = project
3140 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3141 .unwrap();
3142 let buffer = project
3143 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3144 .await
3145 .unwrap();
3146
3147 let abs_path = PathBuf::from(path!("/dir/file"));
3148 assert!(
3149 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3150 "file_read_time should be None before buffer_created"
3151 );
3152
3153 cx.update(|cx| {
3154 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
3155 });
3156
3157 assert!(
3158 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3159 "file_read_time should be recorded after buffer_created"
3160 );
3161 }
3162
3163 #[gpui::test]
3164 async fn test_file_read_time_removed_on_delete(cx: &mut TestAppContext) {
3165 init_test(cx);
3166
3167 let fs = FakeFs::new(cx.executor());
3168 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3169 .await;
3170 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3171 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3172
3173 let file_path = project
3174 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3175 .unwrap();
3176 let buffer = project
3177 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3178 .await
3179 .unwrap();
3180
3181 let abs_path = PathBuf::from(path!("/dir/file"));
3182
3183 cx.update(|cx| {
3184 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
3185 });
3186 assert!(
3187 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3188 "file_read_time should exist after buffer_read"
3189 );
3190
3191 cx.update(|cx| {
3192 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
3193 });
3194 assert!(
3195 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3196 "file_read_time should be removed after will_delete_buffer"
3197 );
3198 }
3199
3200 #[gpui::test]
3201 async fn test_file_read_time_not_forwarded_to_linked_action_log(cx: &mut TestAppContext) {
3202 init_test(cx);
3203
3204 let fs = FakeFs::new(cx.executor());
3205 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3206 .await;
3207 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3208 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
3209 let child_log =
3210 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
3211
3212 let file_path = project
3213 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3214 .unwrap();
3215 let buffer = project
3216 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3217 .await
3218 .unwrap();
3219
3220 let abs_path = PathBuf::from(path!("/dir/file"));
3221
3222 cx.update(|cx| {
3223 child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
3224 });
3225 assert!(
3226 child_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3227 "child should record file_read_time on buffer_read"
3228 );
3229 assert!(
3230 parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3231 "parent should NOT get file_read_time from child's buffer_read"
3232 );
3233
3234 cx.update(|cx| {
3235 child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
3236 });
3237 assert!(
3238 parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3239 "parent should NOT get file_read_time from child's buffer_edited"
3240 );
3241
3242 cx.update(|cx| {
3243 child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
3244 });
3245 assert!(
3246 parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3247 "parent should NOT get file_read_time from child's buffer_created"
3248 );
3249 }
3250
    /// Snapshot of a single diff hunk used for assertions in these tests:
    /// the hunk's point range in the current buffer, its status kind, and
    /// the text it replaced in the diff base.
    #[derive(Debug, PartialEq)]
    struct HunkStatus {
        // Point range the hunk covers in the buffer snapshot.
        range: Range<Point>,
        // Kind of change (as reported by the hunk's status).
        diff_status: DiffHunkStatusKind,
        // Base text the hunk replaced (from the diff's base-text range).
        old_text: String,
    }
3257
3258 fn unreviewed_hunks(
3259 action_log: &Entity<ActionLog>,
3260 cx: &TestAppContext,
3261 ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
3262 cx.read(|cx| {
3263 action_log
3264 .read(cx)
3265 .changed_buffers(cx)
3266 .into_iter()
3267 .map(|(buffer, diff)| {
3268 let snapshot = buffer.read(cx).snapshot();
3269 (
3270 buffer,
3271 diff.read(cx)
3272 .snapshot(cx)
3273 .hunks(&snapshot)
3274 .map(|hunk| HunkStatus {
3275 diff_status: hunk.status().kind,
3276 range: hunk.range,
3277 old_text: diff
3278 .read(cx)
3279 .base_text(cx)
3280 .text_for_range(hunk.diff_base_byte_range)
3281 .collect(),
3282 })
3283 .collect(),
3284 )
3285 })
3286 .collect()
3287 })
3288 }
3289}