1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use clock;
4use collections::{BTreeMap, HashMap};
5use fs::MTime;
6use futures::{FutureExt, StreamExt, channel::mpsc};
7use gpui::{
8 App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
9};
10use language::{Anchor, Buffer, BufferEvent, Point, ToOffset, ToPoint};
11use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
12use std::{
13 cmp,
14 ops::Range,
15 path::{Path, PathBuf},
16 sync::Arc,
17};
18use text::{Edit, Patch, Rope};
19use util::{RangeExt, ResultExt as _};
20
/// Stores undo information for a single buffer's rejected edits
#[derive(Clone)]
pub struct PerBufferUndo {
    /// Weak handle to the buffer the rejected edits belonged to; the undo is
    /// skipped if the buffer has been dropped by the time it runs.
    pub buffer: WeakEntity<Buffer>,
    /// The agent-authored text to re-apply, keyed by the anchor range that was
    /// reverted when the user rejected the edits.
    pub edits_to_restore: Vec<(Range<Anchor>, String)>,
    /// What kind of change the reject undid (plain modification vs. file creation).
    pub status: UndoBufferStatus,
}
28
/// Tracks the buffer status for undo purposes
#[derive(Clone, Debug)]
pub enum UndoBufferStatus {
    /// Buffer existed before the agent touched it; the reject reverted a set of
    /// agent edits which the undo re-applies.
    Modified,
    /// Buffer was created by the agent.
    /// - `had_existing_content: true` - Agent overwrote an existing file. On reject, the
    ///   original content was restored. Undo is supported: we restore the agent's content.
    /// - `had_existing_content: false` - Agent created a new file that didn't exist before.
    ///   On reject, the file was deleted. Undo is NOT currently supported (would require
    ///   recreating the file). Future TODO.
    Created {
        had_existing_content: bool,
    },
}
43
/// Stores undo information for the most recent reject operation
#[derive(Clone)]
pub struct LastRejectUndo {
    /// Per-buffer undo information, one entry for each buffer whose rejection
    /// produced restorable edits.
    pub buffers: Vec<PerBufferUndo>,
}
50
/// Tracks actions performed by tools in a thread
pub struct ActionLog {
    /// Buffers that we want to notify the model about when they change.
    tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
    /// The project this action log is associated with
    project: Entity<Project>,
    /// An action log to forward all public methods to
    /// Useful in cases like subagents, where we want to track individual diffs for this subagent,
    /// but also want to associate the reads/writes with a parent review experience
    linked_action_log: Option<Entity<ActionLog>>,
    /// Stores undo information for the most recent reject operation
    /// (cleared whenever a new reject starts or the undo is consumed).
    last_reject_undo: Option<LastRejectUndo>,
    /// Tracks the last time files were read by the agent, to detect external modifications
    /// (keyed by absolute path; only local files with a known mtime are recorded).
    file_read_times: HashMap<PathBuf, MTime>,
}
66
67impl ActionLog {
68 /// Creates a new, empty action log associated with the given project.
    /// Creates a new, empty action log associated with the given project.
    pub fn new(project: Entity<Project>) -> Self {
        Self {
            tracked_buffers: BTreeMap::default(),
            project,
            linked_action_log: None,
            last_reject_undo: None,
            file_read_times: HashMap::default(),
        }
    }

    /// Builder-style extension: also mirror reads/writes/deletes into
    /// `linked_action_log` (used by subagents to surface their changes in a
    /// parent review experience).
    pub fn with_linked_action_log(mut self, linked_action_log: Entity<ActionLog>) -> Self {
        self.linked_action_log = Some(linked_action_log);
        self
    }

    /// The project this action log is associated with.
    pub fn project(&self) -> &Entity<Project> {
        &self.project
    }

    /// Returns the on-disk mtime recorded the last time the agent read `path`,
    /// if any. Used to detect external modifications since that read.
    pub fn file_read_time(&self, path: &Path) -> Option<MTime> {
        self.file_read_times.get(path).copied()
    }
91
92 fn update_file_read_time(&mut self, buffer: &Entity<Buffer>, cx: &App) {
93 let buffer = buffer.read(cx);
94 if let Some(file) = buffer.file() {
95 if let Some(local_file) = file.as_local() {
96 if let Some(mtime) = file.disk_state().mtime() {
97 let abs_path = local_file.abs_path(cx);
98 self.file_read_times.insert(abs_path, mtime);
99 }
100 }
101 }
102 }
103
104 fn remove_file_read_time(&mut self, buffer: &Entity<Buffer>, cx: &App) {
105 let buffer = buffer.read(cx);
106 if let Some(file) = buffer.file() {
107 if let Some(local_file) = file.as_local() {
108 let abs_path = local_file.abs_path(cx);
109 self.file_read_times.remove(&abs_path);
110 }
111 }
112 }
113
    /// Begins (or refreshes) tracking of `buffer`, returning its tracked state.
    ///
    /// `is_created` means the agent is creating the file's content from scratch
    /// (as opposed to reading or editing it); it determines both the recorded
    /// status and the initial diff base (empty for created files, so the whole
    /// buffer shows as an unreviewed addition).
    fn track_buffer_internal(
        &mut self,
        buffer: Entity<Buffer>,
        is_created: bool,
        cx: &mut Context<Self>,
    ) -> &mut TrackedBuffer {
        let status = if is_created {
            // If we were already tracking this buffer, drop the old entry but
            // preserve the pre-agent file content so a later reject can still
            // restore what was on disk before the agent took over.
            if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
                match tracked.status {
                    TrackedBufferStatus::Created {
                        existing_file_content,
                    } => TrackedBufferStatus::Created {
                        existing_file_content,
                    },
                    TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
                        // The old diff base is the content that predated the agent's edits.
                        TrackedBufferStatus::Created {
                            existing_file_content: Some(tracked.diff_base),
                        }
                    }
                }
            } else if buffer
                .read(cx)
                .file()
                .is_some_and(|file| file.disk_state().exists())
            {
                // The file already exists on disk: capture its current content
                // so rejecting the "creation" can restore it.
                TrackedBufferStatus::Created {
                    existing_file_content: Some(buffer.read(cx).as_rope().clone()),
                }
            } else {
                // Brand-new file: nothing to restore if the creation is rejected.
                TrackedBufferStatus::Created {
                    existing_file_content: None,
                }
            }
        } else {
            TrackedBufferStatus::Modified
        };

        // NOTE: `status` is only consumed when a new entry is inserted below.
        // For `is_created`, the entry was removed above, so insertion always happens.
        let tracked_buffer = self
            .tracked_buffers
            .entry(buffer.clone())
            .or_insert_with(|| {
                // Keep language servers aware of the buffer for as long as we track it.
                let open_lsp_handle = self.project.update(cx, |project, cx| {
                    project.register_buffer_with_language_servers(&buffer, cx)
                });

                let text_snapshot = buffer.read(cx).text_snapshot();
                let language = buffer.read(cx).language().cloned();
                let language_registry = buffer.read(cx).language_registry();
                let diff = cx.new(|cx| {
                    let mut diff = BufferDiff::new(&text_snapshot, cx);
                    diff.language_changed(language, language_registry, cx);
                    diff
                });
                let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
                let diff_base;
                let unreviewed_edits;
                if is_created {
                    // Created files diff against an empty base; this single edit
                    // marks every row of the buffer as unreviewed.
                    diff_base = Rope::default();
                    unreviewed_edits = Patch::new(vec![Edit {
                        old: 0..1,
                        new: 0..text_snapshot.max_point().row + 1,
                    }])
                } else {
                    // Read/edited files start with no unreviewed edits: the diff
                    // base is the buffer content as of this call.
                    diff_base = buffer.read(cx).as_rope().clone();
                    unreviewed_edits = Patch::default();
                }
                TrackedBuffer {
                    buffer: buffer.clone(),
                    diff_base,
                    unreviewed_edits,
                    snapshot: text_snapshot,
                    status,
                    version: buffer.read(cx).version(),
                    diff,
                    diff_update: diff_update_tx,
                    _open_lsp_handle: open_lsp_handle,
                    // Background task that consumes diff-update requests for
                    // this buffer until the tracked entry is dropped.
                    _maintain_diff: cx.spawn({
                        let buffer = buffer.clone();
                        async move |this, cx| {
                            Self::maintain_diff(this, buffer, diff_update_rx, cx)
                                .await
                                .ok();
                        }
                    }),
                    _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
                }
            });
        // Refresh the recorded version even when the buffer was already tracked.
        tracked_buffer.version = buffer.read(cx).version();
        tracked_buffer
    }
204
205 fn handle_buffer_event(
206 &mut self,
207 buffer: Entity<Buffer>,
208 event: &BufferEvent,
209 cx: &mut Context<Self>,
210 ) {
211 match event {
212 BufferEvent::Edited { .. } => {
213 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
214 return;
215 };
216 let buffer_version = buffer.read(cx).version();
217 if !buffer_version.changed_since(&tracked_buffer.version) {
218 return;
219 }
220 self.handle_buffer_edited(buffer, cx);
221 }
222 BufferEvent::FileHandleChanged => {
223 self.handle_buffer_file_changed(buffer, cx);
224 }
225 _ => {}
226 };
227 }
228
229 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
230 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
231 return;
232 };
233 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
234 }
235
    /// Reconciles tracking state when a tracked buffer's file handle changes
    /// (e.g. the file was deleted or restored on disk outside the agent).
    fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        match tracked_buffer.status {
            TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
                if buffer
                    .read(cx)
                    .file()
                    .is_some_and(|file| file.disk_state().is_deleted())
                {
                    // If the buffer had been edited by a tool, but it got
                    // deleted externally, we want to stop tracking it.
                    self.tracked_buffers.remove(&buffer);
                }
                cx.notify();
            }
            TrackedBufferStatus::Deleted => {
                if buffer
                    .read(cx)
                    .file()
                    .is_some_and(|file| !file.disk_state().is_deleted())
                {
                    // If the buffer had been deleted by a tool, but it got
                    // resurrected externally, we want to clear the edits we
                    // were tracking and reset the buffer's state.
                    self.tracked_buffers.remove(&buffer);
                    self.track_buffer_internal(buffer, false, cx);
                }
                cx.notify();
            }
        }
    }
270
    /// Long-running task that keeps a tracked buffer's diff up to date.
    ///
    /// Consumes two event sources: `buffer_updates`, which delivers snapshots
    /// whenever the buffer is edited (by user or agent), and change
    /// notifications from the buffer's uncommitted git diff, whose base-text
    /// changes may indicate that unreviewed edits were committed. Exits when
    /// the update channel closes (i.e. the tracked buffer entry was dropped).
    async fn maintain_diff(
        this: WeakEntity<Self>,
        buffer: Entity<Buffer>,
        mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        // Best effort: if the uncommitted diff can't be opened we still track
        // buffer edits, just without commit detection.
        let git_diff = this
            .update(cx, |this, cx| {
                this.project.update(cx, |project, cx| {
                    project.open_uncommitted_diff(buffer.clone(), cx)
                })
            })?
            .await
            .ok();
        let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
        // Subscription must stay alive for the duration of the loop; the tx
        // half is moved into the closure to signal base-text changes.
        let _diff_subscription = if let Some(git_diff) = git_diff.as_ref() {
            cx.update(|cx| {
                Some(cx.subscribe(git_diff, move |_, event, _cx| {
                    if matches!(event, buffer_diff::BufferDiffEvent::BaseTextChanged) {
                        git_diff_updates_tx.send(()).ok();
                    }
                }))
            })
        } else {
            None
        };

        loop {
            // Buffer updates take priority over git base-text changes.
            futures::select_biased! {
                buffer_update = buffer_updates.next() => {
                    if let Some((author, buffer_snapshot)) = buffer_update {
                        Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
                    } else {
                        break;
                    }
                }
                _ = git_diff_updates_rx.changed().fuse() => {
                    if let Some(git_diff) = git_diff.as_ref() {
                        Self::keep_committed_edits(&this, &buffer, git_diff, cx).await?;
                    }
                }
            }
        }

        Ok(())
    }
317
    /// Handles a new buffer snapshot: rebases the tracked diff base and
    /// recomputes the diff.
    ///
    /// For user-authored changes, edits that don't conflict with unreviewed
    /// agent edits are folded into the diff base so they don't show up as
    /// agent changes; agent-authored changes leave the base untouched.
    async fn track_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        author: ChangeAuthor,
        buffer_snapshot: text::BufferSnapshot,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let rebase = this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;

            // The rebase runs on the background executor; clone everything it needs.
            let rebase = cx.background_spawn({
                let mut base_text = tracked_buffer.diff_base.clone();
                let old_snapshot = tracked_buffer.snapshot.clone();
                let new_snapshot = buffer_snapshot.clone();
                let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let edits = diff_snapshots(&old_snapshot, &new_snapshot);
                async move {
                    if let ChangeAuthor::User = author {
                        apply_non_conflicting_edits(
                            &unreviewed_edits,
                            edits,
                            &mut base_text,
                            new_snapshot.as_rope(),
                        );
                    }

                    // Return both string and rope forms: the string feeds the
                    // BufferDiff update, the rope becomes the new diff base.
                    (Arc::from(base_text.to_string().as_str()), base_text)
                }
            });

            anyhow::Ok(rebase)
        })??;
        let (new_base_text, new_diff_base) = rebase.await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
365
    /// Reacts to a change of the git diff's base text (e.g. the user made a
    /// commit): any unreviewed agent edit that appears verbatim in the new git
    /// base is assumed to have been accepted by the user, and is folded into
    /// the agent's diff base so it stops showing as unreviewed.
    async fn keep_committed_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        git_diff: &Entity<BufferDiff>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let buffer_snapshot = this.read_with(cx, |this, _cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok(tracked_buffer.snapshot.clone())
        })??;
        let (new_base_text, new_diff_base) = this
            .read_with(cx, |this, cx| {
                let tracked_buffer = this
                    .tracked_buffers
                    .get(buffer)
                    .context("buffer not tracked")?;
                let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let agent_diff_base = tracked_buffer.diff_base.clone();
                let git_diff_base = git_diff.read(cx).base_text(cx).as_rope().clone();
                let buffer_text = tracked_buffer.snapshot.as_rope().clone();
                anyhow::Ok(cx.background_spawn(async move {
                    let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
                    // Row-level edits that turn the agent's diff base into the
                    // new git base, i.e. what was committed.
                    let committed_edits = language::line_diff(
                        &agent_diff_base.to_string(),
                        &git_diff_base.to_string(),
                    )
                    .into_iter()
                    .map(|(old, new)| Edit { old, new });

                    let mut new_agent_diff_base = agent_diff_base.clone();
                    // Row drift accumulated as replacements are applied to
                    // `new_agent_diff_base`; old-row coordinates must be shifted by it.
                    let mut row_delta = 0i32;
                    for committed in committed_edits {
                        while let Some(unreviewed) = old_unreviewed_edits.peek() {
                            // If the committed edit matches the unreviewed
                            // edit, assume the user wants to keep it.
                            if committed.old == unreviewed.old {
                                let unreviewed_new =
                                    buffer_text.slice_rows(unreviewed.new.clone()).to_string();
                                let committed_new =
                                    git_diff_base.slice_rows(committed.new.clone()).to_string();
                                // Only fold it in if the committed text is
                                // byte-identical to the agent's edit.
                                if unreviewed_new == committed_new {
                                    let old_byte_start =
                                        new_agent_diff_base.point_to_offset(Point::new(
                                            (unreviewed.old.start as i32 + row_delta) as u32,
                                            0,
                                        ));
                                    let old_byte_end =
                                        new_agent_diff_base.point_to_offset(cmp::min(
                                            Point::new(
                                                (unreviewed.old.end as i32 + row_delta) as u32,
                                                0,
                                            ),
                                            new_agent_diff_base.max_point(),
                                        ));
                                    new_agent_diff_base
                                        .replace(old_byte_start..old_byte_end, &unreviewed_new);
                                    row_delta +=
                                        unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
                                }
                            } else if unreviewed.old.start >= committed.old.end {
                                // This unreviewed edit starts after the committed
                                // one ends; move on to the next committed edit.
                                break;
                            }

                            old_unreviewed_edits.next().unwrap();
                        }
                    }

                    (
                        Arc::from(new_agent_diff_base.to_string().as_str()),
                        new_agent_diff_base,
                    )
                }))
            })??
            .await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
454
    /// Recomputes the tracked buffer's [`BufferDiff`] against `new_base_text`,
    /// rebuilds the unreviewed-edits patch from the resulting hunks, and
    /// persists the new diff base, snapshot, and edits on the tracked entry.
    async fn update_diff(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        buffer_snapshot: text::BufferSnapshot,
        new_base_text: Arc<str>,
        new_diff_base: Rope,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let (diff, language) = this.read_with(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok((
                tracked_buffer.diff.clone(),
                buffer.read(cx).language().cloned(),
            ))
        })??;
        let update = diff
            .update(cx, |diff, cx| {
                diff.update_diff(
                    buffer_snapshot.clone(),
                    Some(new_base_text),
                    Some(true),
                    language,
                    cx,
                )
            })
            .await;
        diff.update(cx, |diff, cx| {
            diff.set_snapshot(update.clone(), &buffer_snapshot, cx)
        })
        .await;
        let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx));

        // Translate every hunk of the fresh diff into a row-level edit; this
        // becomes the new set of unreviewed edits.
        let unreviewed_edits = cx
            .background_spawn({
                let buffer_snapshot = buffer_snapshot.clone();
                let new_diff_base = new_diff_base.clone();
                async move {
                    let mut unreviewed_edits = Patch::default();
                    for hunk in diff_snapshot.hunks_intersecting_range(
                        Anchor::min_for_buffer(buffer_snapshot.remote_id())
                            ..Anchor::max_for_buffer(buffer_snapshot.remote_id()),
                        &buffer_snapshot,
                    ) {
                        let old_range = new_diff_base
                            .offset_to_point(hunk.diff_base_byte_range.start)
                            ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
                        let new_range = hunk.range.start..hunk.range.end;
                        unreviewed_edits.push(point_to_row_edit(
                            Edit {
                                old: old_range,
                                new: new_range,
                            },
                            &new_diff_base,
                            buffer_snapshot.as_rope(),
                        ));
                    }
                    unreviewed_edits
                }
            })
            .await;
        this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;
            tracked_buffer.diff_base = new_diff_base;
            tracked_buffer.snapshot = buffer_snapshot;
            tracked_buffer.unreviewed_edits = unreviewed_edits;
            cx.notify();
            anyhow::Ok(())
        })?
    }
530
    /// Track a buffer as read by agent, so we can notify the model about user edits.
    pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.buffer_read_impl(buffer, true, cx);
    }

    /// Shared implementation for `buffer_read`; `record_file_read_time` is
    /// false when the call is forwarded from another log, since only the
    /// originating agent actually read the file.
    fn buffer_read_impl(
        &mut self,
        buffer: Entity<Buffer>,
        record_file_read_time: bool,
        cx: &mut Context<Self>,
    ) {
        if let Some(linked_action_log) = &self.linked_action_log {
            // We don't want to share read times since the other agent hasn't read it necessarily
            linked_action_log.update(cx, |log, cx| {
                log.buffer_read_impl(buffer.clone(), false, cx);
            });
        }
        if record_file_read_time {
            self.update_file_read_time(&buffer, cx);
        }
        self.track_buffer_internal(buffer, false, cx);
    }

    /// Mark a buffer as created by agent, so we can refresh it in the context
    pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.buffer_created_impl(buffer, true, cx);
    }

    /// Shared implementation for `buffer_created`; mirrors the creation into
    /// the linked log without propagating the read time.
    fn buffer_created_impl(
        &mut self,
        buffer: Entity<Buffer>,
        record_file_read_time: bool,
        cx: &mut Context<Self>,
    ) {
        if let Some(linked_action_log) = &self.linked_action_log {
            // We don't want to share read times since the other agent hasn't read it necessarily
            linked_action_log.update(cx, |log, cx| {
                log.buffer_created_impl(buffer.clone(), false, cx);
            });
        }
        if record_file_read_time {
            self.update_file_read_time(&buffer, cx);
        }
        self.track_buffer_internal(buffer, true, cx);
    }

    /// Mark a buffer as edited by agent, so we can refresh it in the context
    pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.buffer_edited_impl(buffer, true, cx);
    }

    /// Shared implementation for `buffer_edited`: records the edit as
    /// agent-authored and schedules a diff refresh.
    fn buffer_edited_impl(
        &mut self,
        buffer: Entity<Buffer>,
        record_file_read_time: bool,
        cx: &mut Context<Self>,
    ) {
        if let Some(linked_action_log) = &self.linked_action_log {
            // We don't want to share read times since the other agent hasn't read it necessarily
            linked_action_log.update(cx, |log, cx| {
                log.buffer_edited_impl(buffer.clone(), false, cx);
            });
        }
        if record_file_read_time {
            self.update_file_read_time(&buffer, cx);
        }
        // Capture the version before tracking so the tracked entry reflects
        // the state that includes this edit.
        let new_version = buffer.read(cx).version();
        let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
        // An agent edit resurrects a buffer the agent previously deleted.
        if let TrackedBufferStatus::Deleted = tracked_buffer.status {
            tracked_buffer.status = TrackedBufferStatus::Modified;
        }

        tracked_buffer.version = new_version;
        tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
    }
606
    /// Records that the agent is about to delete `buffer`.
    ///
    /// Created-by-agent buffers are simply untracked (nothing to review);
    /// modified buffers transition to `Deleted`, and — when there is no linked
    /// log — are emptied immediately so the diff shows the deletion. With a
    /// linked log, the forwarded call performs the emptying and this log only
    /// schedules its own diff refresh.
    pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        // Ok to propagate file read time removal to linked action log
        self.remove_file_read_time(&buffer, cx);
        let has_linked_action_log = self.linked_action_log.is_some();
        let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
        match tracked_buffer.status {
            TrackedBufferStatus::Created { .. } => {
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            TrackedBufferStatus::Modified => {
                tracked_buffer.status = TrackedBufferStatus::Deleted;
                if !has_linked_action_log {
                    buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
                    tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
                }
            }

            TrackedBufferStatus::Deleted => {}
        }

        if let Some(linked_action_log) = &mut self.linked_action_log {
            linked_action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        }

        // The buffer may have been removed from tracking above, hence the re-lookup.
        if has_linked_action_log && let Some(tracked_buffer) = self.tracked_buffers.get(&buffer) {
            tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
        }

        cx.notify();
    }
638
    /// Accepts ("keeps") all unreviewed edits whose rows intersect
    /// `buffer_range`: each accepted edit's new text is folded into the diff
    /// base and the edit is dropped from the unreviewed set. For a deleted
    /// buffer, accepting means untracking it entirely.
    pub fn keep_edits_in_range(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_range: Range<impl language::ToPoint>,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
        match tracked_buffer.status {
            TrackedBufferStatus::Deleted => {
                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            _ => {
                let buffer = buffer.read(cx);
                let buffer_range =
                    buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
                // Row drift in the diff base caused by edits accepted earlier
                // in this pass; subsequent old-row coordinates must be shifted.
                let mut delta = 0i32;
                tracked_buffer.unreviewed_edits.retain_mut(|edit| {
                    edit.old.start = (edit.old.start as i32 + delta) as u32;
                    edit.old.end = (edit.old.end as i32 + delta) as u32;

                    if buffer_range.end.row < edit.new.start
                        || buffer_range.start.row > edit.new.end
                    {
                        // Outside the accepted range: keep it unreviewed.
                        true
                    } else {
                        // Accepted: splice the edit's current buffer text into
                        // the diff base in place of the old rows.
                        let old_range = tracked_buffer
                            .diff_base
                            .point_to_offset(Point::new(edit.old.start, 0))
                            ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                Point::new(edit.old.end, 0),
                                tracked_buffer.diff_base.max_point(),
                            ));
                        let new_range = tracked_buffer
                            .snapshot
                            .point_to_offset(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.point_to_offset(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        tracked_buffer.diff_base.replace(
                            old_range,
                            &tracked_buffer
                                .snapshot
                                .text_for_range(new_range)
                                .collect::<String>(),
                        );
                        delta += edit.new_len() as i32 - edit.old_len() as i32;
                        metrics.add_edit(edit);
                        false
                    }
                });
                // Once every creation edit is reviewed, the buffer behaves
                // like an ordinary modified file.
                if tracked_buffer.unreviewed_edits.is_empty()
                    && let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status
                {
                    tracked_buffer.status = TrackedBufferStatus::Modified;
                }
                tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
            }
        }
        if let Some(telemetry) = telemetry {
            telemetry_report_accepted_edits(&telemetry, metrics);
        }
    }
709
    /// Rejects unreviewed edits intersecting `buffer_ranges`, reverting the
    /// buffer accordingly, and returns a save/delete task plus undo
    /// information (when the rejection is undoable).
    ///
    /// Behavior by status:
    /// - `Created` with captured pre-existing content: restore that content,
    ///   save, record an undo carrying the agent's full text.
    /// - `Created` with no prior content: delete the file, but only if the
    ///   buffer is provably unchanged since the agent wrote it.
    /// - `Deleted`: restore the diff base, save, and re-track as freshly read.
    /// - `Modified`: revert only the edits whose rows intersect the given
    ///   ranges, recording each reverted span for undo, then save.
    pub fn reject_edits_in_ranges(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_ranges: Vec<Range<impl language::ToPoint>>,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) -> (Task<Result<()>>, Option<PerBufferUndo>) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return (Task::ready(Ok(())), None);
        };

        let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
        let mut undo_info: Option<PerBufferUndo> = None;
        let task = match &tracked_buffer.status {
            TrackedBufferStatus::Created {
                existing_file_content,
            } => {
                let task = if let Some(existing_file_content) = existing_file_content {
                    // Capture the agent's content before restoring existing file content
                    let agent_content = buffer.read(cx).text();
                    let buffer_id = buffer.read(cx).remote_id();

                    // Single transaction: clear, then stream the original
                    // content back in chunk by chunk.
                    buffer.update(cx, |buffer, cx| {
                        buffer.start_transaction();
                        buffer.set_text("", cx);
                        for chunk in existing_file_content.chunks() {
                            buffer.append(chunk, cx);
                        }
                        buffer.end_transaction(cx);
                    });

                    // Undo re-applies the agent's text over the whole buffer.
                    undo_info = Some(PerBufferUndo {
                        buffer: buffer.downgrade(),
                        edits_to_restore: vec![(
                            Anchor::min_for_buffer(buffer_id)..Anchor::max_for_buffer(buffer_id),
                            agent_content,
                        )],
                        status: UndoBufferStatus::Created {
                            had_existing_content: true,
                        },
                    });

                    self.project
                        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
                } else {
                    // For a file created by AI with no pre-existing content,
                    // only delete the file if we're certain it contains only AI content
                    // with no edits from the user.

                    let initial_version = tracked_buffer.version.clone();
                    let current_version = buffer.read(cx).version();

                    let current_content = buffer.read(cx).text();
                    let tracked_content = tracked_buffer.snapshot.text();

                    let is_ai_only_content =
                        initial_version == current_version && current_content == tracked_content;

                    if is_ai_only_content {
                        // Delete the file entry; falls back to a no-op task if
                        // the buffer has no project entry.
                        let task = buffer
                            .read(cx)
                            .entry_id(cx)
                            .and_then(|entry_id| {
                                self.project.update(cx, |project, cx| {
                                    project.delete_entry(entry_id, false, cx)
                                })
                            })
                            .unwrap_or_else(|| Task::ready(Ok(None)));

                        cx.background_spawn(async move {
                            task.await?;
                            Ok(())
                        })
                    } else {
                        // Not sure how to disentangle edits made by the user
                        // from edits made by the AI at this point.
                        // For now, preserve both to avoid data loss.
                        //
                        // TODO: Better solution (disable "Reject" after user makes some
                        // edit or find a way to differentiate between AI and user edits)
                        Task::ready(Ok(()))
                    }
                };

                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                cx.notify();
                task
            }
            TrackedBufferStatus::Deleted => {
                // Undo the deletion by restoring the pre-deletion content.
                buffer.update(cx, |buffer, cx| {
                    buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
                });
                let save = self
                    .project
                    .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));

                // Clear all tracked edits for this buffer and start over as if we just read it.
                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                self.buffer_read(buffer.clone(), cx);
                cx.notify();
                save
            }
            TrackedBufferStatus::Modified => {
                let edits_to_restore = buffer.update(cx, |buffer, cx| {
                    let mut buffer_row_ranges = buffer_ranges
                        .into_iter()
                        .map(|range| {
                            range.start.to_point(buffer).row..range.end.to_point(buffer).row
                        })
                        .peekable();

                    let mut edits_to_revert = Vec::new();
                    let mut edits_for_undo = Vec::new();
                    for edit in tracked_buffer.unreviewed_edits.edits() {
                        // Anchor the edit's rows in the current buffer so the
                        // revert survives concurrent coordinate changes.
                        let new_range = tracked_buffer
                            .snapshot
                            .anchor_before(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.anchor_after(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        let new_row_range = new_range.start.to_point(buffer).row
                            ..new_range.end.to_point(buffer).row;

                        // Advance through the requested row ranges until one
                        // overlaps (revert) or lies past this edit (skip).
                        let mut revert = false;
                        while let Some(buffer_row_range) = buffer_row_ranges.peek() {
                            if buffer_row_range.end < new_row_range.start {
                                buffer_row_ranges.next();
                            } else if buffer_row_range.start > new_row_range.end {
                                break;
                            } else {
                                revert = true;
                                break;
                            }
                        }

                        if revert {
                            metrics.add_edit(edit);
                            // The original text for these rows, taken from the diff base.
                            let old_range = tracked_buffer
                                .diff_base
                                .point_to_offset(Point::new(edit.old.start, 0))
                                ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                    Point::new(edit.old.end, 0),
                                    tracked_buffer.diff_base.max_point(),
                                ));
                            let old_text = tracked_buffer
                                .diff_base
                                .chunks_in_range(old_range)
                                .collect::<String>();

                            // Capture the agent's text before we revert it (for undo)
                            let new_range_offset =
                                new_range.start.to_offset(buffer)..new_range.end.to_offset(buffer);
                            let agent_text =
                                buffer.text_for_range(new_range_offset).collect::<String>();
                            edits_for_undo.push((new_range.clone(), agent_text));

                            edits_to_revert.push((new_range, old_text));
                        }
                    }

                    buffer.edit(edits_to_revert, None, cx);
                    edits_for_undo
                });

                if !edits_to_restore.is_empty() {
                    undo_info = Some(PerBufferUndo {
                        buffer: buffer.downgrade(),
                        edits_to_restore,
                        status: UndoBufferStatus::Modified,
                    });
                }

                self.project
                    .update(cx, |project, cx| project.save_buffer(buffer, cx))
            }
        };
        if let Some(telemetry) = telemetry {
            telemetry_report_rejected_edits(&telemetry, metrics);
        }
        (task, undo_info)
    }
894
    /// Accepts every unreviewed edit in every tracked buffer: deleted buffers
    /// are untracked, and all others have their diff base reset to the current
    /// snapshot (so nothing remains to review).
    pub fn keep_all_edits(
        &mut self,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) {
        self.tracked_buffers.retain(|buffer, tracked_buffer| {
            // Report all edits as accepted before clearing them.
            let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
            metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
            if let Some(telemetry) = telemetry.as_ref() {
                telemetry_report_accepted_edits(telemetry, metrics);
            }
            match tracked_buffer.status {
                TrackedBufferStatus::Deleted => false,
                _ => {
                    // A fully-accepted creation behaves like a modified file from now on.
                    if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
                        tracked_buffer.status = TrackedBufferStatus::Modified;
                    }
                    tracked_buffer.unreviewed_edits.clear();
                    tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
                    tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
                    true
                }
            }
        });

        cx.notify();
    }
922
    /// Rejects every unreviewed edit in every changed buffer, collecting
    /// per-buffer undo information so the operation can be reversed via
    /// [`Self::undo_last_reject`]. Returns a task that resolves once all
    /// per-buffer reject tasks have finished (errors are logged, not surfaced).
    pub fn reject_all_edits(
        &mut self,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) -> Task<()> {
        // Clear any previous undo state before starting a new reject operation
        self.last_reject_undo = None;

        let mut undo_buffers = Vec::new();
        let mut futures = Vec::new();

        for buffer in self.changed_buffers(cx).into_keys() {
            // Reject across the buffer's full extent.
            let buffer_ranges = vec![Anchor::min_max_range_for_buffer(
                buffer.read(cx).remote_id(),
            )];
            let (reject_task, undo_info) =
                self.reject_edits_in_ranges(buffer, buffer_ranges, telemetry.clone(), cx);

            if let Some(undo) = undo_info {
                undo_buffers.push(undo);
            }

            futures.push(async move {
                reject_task.await.log_err();
            });
        }

        // Store the undo information if we have any
        if !undo_buffers.is_empty() {
            self.last_reject_undo = Some(LastRejectUndo {
                buffers: undo_buffers,
            });
        }

        let task = futures::future::join_all(futures);
        cx.background_spawn(async move {
            task.await;
        })
    }
962
    /// Whether an undo for the most recent reject operation is available.
    pub fn has_pending_undo(&self) -> bool {
        self.last_reject_undo.is_some()
    }

    /// Replaces the stored undo state for the most recent reject operation.
    pub fn set_last_reject_undo(&mut self, undo: LastRejectUndo) {
        self.last_reject_undo = Some(undo);
    }
970
    /// Undoes the most recent reject operation, restoring the rejected agent changes.
    /// This is a best-effort operation: if buffers have been closed or modified externally,
    /// those buffers will be skipped.
    ///
    /// Consumes the stored undo state, re-applies the captured agent text to
    /// each surviving buffer, re-tracks any buffer that fell out of tracking,
    /// and returns a task that resolves once all saves complete.
    pub fn undo_last_reject(&mut self, cx: &mut Context<Self>) -> Task<()> {
        let Some(undo) = self.last_reject_undo.take() else {
            return Task::ready(());
        };

        let mut save_tasks = Vec::with_capacity(undo.buffers.len());

        for per_buffer_undo in undo.buffers {
            // Skip if the buffer entity has been deallocated
            let Some(buffer) = per_buffer_undo.buffer.upgrade() else {
                continue;
            };

            buffer.update(cx, |buffer, cx| {
                let mut valid_edits = Vec::new();

                // Only re-apply anchors that still belong to this buffer.
                for (anchor_range, text_to_restore) in per_buffer_undo.edits_to_restore {
                    if anchor_range.start.buffer_id == buffer.remote_id()
                        && anchor_range.end.buffer_id == buffer.remote_id()
                    {
                        valid_edits.push((anchor_range, text_to_restore));
                    }
                }

                if !valid_edits.is_empty() {
                    buffer.edit(valid_edits, None, cx);
                }
            });

            // Reject may have untracked the buffer; re-register the restored
            // content as an agent edit so it shows up for review again.
            if !self.tracked_buffers.contains_key(&buffer) {
                self.buffer_edited(buffer.clone(), cx);
            }

            let save = self
                .project
                .update(cx, |project, cx| project.save_buffer(buffer, cx));
            save_tasks.push(save);
        }

        cx.notify();

        cx.background_spawn(async move {
            futures::future::join_all(save_tasks).await;
        })
    }
1019
1020 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
1021 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
1022 self.tracked_buffers
1023 .iter()
1024 .filter(|(_, tracked)| tracked.has_edits(cx))
1025 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
1026 .collect()
1027 }
1028
    /// Returns the total number of lines added and removed across all unreviewed buffers.
    pub fn diff_stats(&self, cx: &App) -> DiffStats {
        DiffStats::all_files(&self.changed_buffers(cx), cx)
    }
1033
    /// Iterate over buffers changed since last read or edited by the model
    /// (skipping buffers whose files were deleted on disk).
    pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
        self.tracked_buffers
            .iter()
            .filter(|(buffer, tracked)| {
                let buffer = buffer.read(cx);

                // Stale = the buffer advanced past the version we recorded,
                // and it still exists on disk.
                tracked.version != buffer.version
                    && buffer
                        .file()
                        .is_some_and(|file| !file.disk_state().is_deleted())
            })
            .map(|(buffer, _)| buffer)
    }
1048}
1049
/// Aggregate line counts for one or more buffer diffs.
#[derive(Default, Debug, Clone, Copy)]
pub struct DiffStats {
    // Total rows added across all counted hunks.
    pub lines_added: u32,
    // Total rows removed across all counted hunks.
    pub lines_removed: u32,
}
1055
impl DiffStats {
    /// Computes line stats for a single buffer's diff: each hunk contributes
    /// its row span in the buffer as added lines and the row span of its base
    /// byte range as removed lines.
    pub fn single_file(buffer: &Buffer, diff: &BufferDiff, cx: &App) -> Self {
        let mut stats = DiffStats::default();
        let diff_snapshot = diff.snapshot(cx);
        let buffer_snapshot = buffer.snapshot();
        let base_text = diff_snapshot.base_text();

        for hunk in diff_snapshot.hunks(&buffer_snapshot) {
            let added_rows = hunk.range.end.row.saturating_sub(hunk.range.start.row);
            stats.lines_added += added_rows;

            // Convert the hunk's base byte range into rows of the base text.
            let base_start = hunk.diff_base_byte_range.start.to_point(base_text).row;
            let base_end = hunk.diff_base_byte_range.end.to_point(base_text).row;
            let removed_rows = base_end.saturating_sub(base_start);
            stats.lines_removed += removed_rows;
        }

        stats
    }

    /// Sums [`Self::single_file`] stats over every changed buffer.
    pub fn all_files(
        changed_buffers: &BTreeMap<Entity<Buffer>, Entity<BufferDiff>>,
        cx: &App,
    ) -> Self {
        let mut total = DiffStats::default();
        for (buffer, diff) in changed_buffers {
            let stats = DiffStats::single_file(buffer.read(cx), diff.read(cx), cx);
            total.lines_added += stats.lines_added;
            total.lines_removed += stats.lines_removed;
        }
        total
    }
}
1089
/// Identifiers attached to agent-edit telemetry events.
#[derive(Clone)]
pub struct ActionLogTelemetry {
    /// Telemetry identifier of the agent that produced the edits.
    pub agent_telemetry_id: SharedString,
    /// Identifier of the session the edits belong to.
    pub session_id: Arc<str>,
}
1095
/// Accumulated line counts for a single buffer's edits, reported via telemetry.
struct ActionLogMetrics {
    // Rows on the old side of the accumulated edits.
    lines_removed: u32,
    // Rows on the new side of the accumulated edits.
    lines_added: u32,
    // Language name of the buffer, if one is assigned.
    language: Option<SharedString>,
}
1101
1102impl ActionLogMetrics {
1103 fn for_buffer(buffer: &Buffer) -> Self {
1104 Self {
1105 language: buffer.language().map(|l| l.name().0),
1106 lines_removed: 0,
1107 lines_added: 0,
1108 }
1109 }
1110
1111 fn add_edits(&mut self, edits: &[Edit<u32>]) {
1112 for edit in edits {
1113 self.add_edit(edit);
1114 }
1115 }
1116
1117 fn add_edit(&mut self, edit: &Edit<u32>) {
1118 self.lines_added += edit.new_len();
1119 self.lines_removed += edit.old_len();
1120 }
1121}
1122
/// Emits an "Agent Edits Accepted" telemetry event with the given line-count metrics.
fn telemetry_report_accepted_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
    telemetry::event!(
        "Agent Edits Accepted",
        agent = telemetry.agent_telemetry_id,
        session = telemetry.session_id,
        language = metrics.language,
        lines_added = metrics.lines_added,
        lines_removed = metrics.lines_removed
    );
}
1133
/// Emits an "Agent Edits Rejected" telemetry event with the given line-count metrics.
fn telemetry_report_rejected_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
    telemetry::event!(
        "Agent Edits Rejected",
        agent = telemetry.agent_telemetry_id,
        session = telemetry.session_id,
        language = metrics.language,
        lines_added = metrics.lines_added,
        lines_removed = metrics.lines_removed
    );
}
1144
/// Applies `edits` (row edits taking `old_text` toward `new_text`) onto `old_text`,
/// skipping any edit that intersects an edit already recorded in `patch`.
/// Returns whether at least one edit was applied.
fn apply_non_conflicting_edits(
    patch: &Patch<u32>,
    edits: Vec<Edit<u32>>,
    old_text: &mut Rope,
    new_text: &Rope,
) -> bool {
    let mut old_edits = patch.edits().iter().cloned().peekable();
    let mut new_edits = edits.into_iter().peekable();
    // Cumulative row delta from new edits already applied to `old_text`.
    let mut applied_delta = 0i32;
    // Cumulative row delta from patch edits we've advanced past.
    let mut rebased_delta = 0i32;
    let mut has_made_changes = false;

    while let Some(mut new_edit) = new_edits.next() {
        let mut conflict = false;

        // Push all the old edits that are before this new edit or that intersect with it.
        while let Some(old_edit) = old_edits.peek() {
            if new_edit.old.end < old_edit.new.start
                || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
            {
                break;
            } else if new_edit.old.start > old_edit.new.end
                || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
            {
                let old_edit = old_edits.next().unwrap();
                rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
            } else {
                // The new edit overlaps an existing patch edit: drop it rather than apply it.
                conflict = true;
                if new_edits
                    .peek()
                    .is_some_and(|next_edit| next_edit.old.overlaps(&old_edit.new))
                {
                    // The following new edit also conflicts with this patch edit; consume it
                    // so the patch edit is compared against it next.
                    new_edit = new_edits.next().unwrap();
                } else {
                    let old_edit = old_edits.next().unwrap();
                    rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
                }
            }
        }

        if !conflict {
            // This edit doesn't intersect with any old edit, so we can apply it to the old text.
            new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
            new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
            // Convert row ranges to byte ranges, clamping to the end of each rope.
            let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
                ..old_text.point_to_offset(cmp::min(
                    Point::new(new_edit.old.end, 0),
                    old_text.max_point(),
                ));
            let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
                ..new_text.point_to_offset(cmp::min(
                    Point::new(new_edit.new.end, 0),
                    new_text.max_point(),
                ));

            old_text.replace(
                old_bytes,
                &new_text.chunks_in_range(new_bytes).collect::<String>(),
            );
            applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
            has_made_changes = true;
        }
    }
    has_made_changes
}
1210
/// Returns the row-based edits that transform `old_snapshot` into `new_snapshot`,
/// coalescing edits whose row ranges touch or overlap into a single edit.
fn diff_snapshots(
    old_snapshot: &text::BufferSnapshot,
    new_snapshot: &text::BufferSnapshot,
) -> Vec<Edit<u32>> {
    let mut edits = new_snapshot
        .edits_since::<Point>(&old_snapshot.version)
        .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
        .peekable();
    let mut row_edits = Vec::new();
    while let Some(mut edit) = edits.next() {
        // Merge subsequent edits into this one while their old row ranges touch it.
        while let Some(next_edit) = edits.peek() {
            if edit.old.end >= next_edit.old.start {
                edit.old.end = next_edit.old.end;
                edit.new.end = next_edit.new.end;
                edits.next();
            } else {
                break;
            }
        }
        row_edits.push(edit);
    }
    row_edits
}
1234
/// Converts a point-based edit into an edit expressed in whole rows.
///
/// Three cases:
/// - An edit starting at the end of a line whose new text begins with '\n' (and not at the
///   very end of the old text) is shifted down one row, treating it as inserting lines below.
/// - An edit whose old range starts/ends at column 0 (and whose new range also ends at
///   column 0) already covers whole rows and maps directly.
/// - Otherwise the edit touches a partial final line, so both end rows are extended by one
///   to cover it.
fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
    if edit.old.start.column == old_text.line_len(edit.old.start.row)
        && new_text
            .chars_at(new_text.point_to_offset(edit.new.start))
            .next()
            == Some('\n')
        && edit.old.start != old_text.max_point()
    {
        Edit {
            old: edit.old.start.row + 1..edit.old.end.row + 1,
            new: edit.new.start.row + 1..edit.new.end.row + 1,
        }
    } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
        Edit {
            old: edit.old.start.row..edit.old.end.row,
            new: edit.new.start.row..edit.new.end.row,
        }
    } else {
        Edit {
            old: edit.old.start.row..edit.old.end.row + 1,
            new: edit.new.start.row..edit.new.end.row + 1,
        }
    }
}
1259
/// Identifies who authored a buffer change when scheduling diff updates.
#[derive(Copy, Clone, Debug)]
enum ChangeAuthor {
    /// The human user editing the buffer.
    User,
    /// The AI agent driving this action log.
    Agent,
}
1265
/// Lifecycle state of a buffer tracked by the action log.
#[derive(Debug)]
enum TrackedBufferStatus {
    /// The agent created this buffer. If it overwrote an existing file, the prior
    /// content is kept so rejecting the edits can restore it.
    Created { existing_file_content: Option<Rope> },
    /// The agent modified an existing buffer.
    Modified,
    /// The buffer's backing file was deleted.
    Deleted,
}
1272
/// Per-buffer state the action log maintains to diff, review, and rebase agent edits.
pub struct TrackedBuffer {
    buffer: Entity<Buffer>,
    /// Text the current buffer contents are diffed against.
    diff_base: Rope,
    /// Row-based edits the user hasn't accepted or rejected yet.
    unreviewed_edits: Patch<u32>,
    /// Whether the agent created, modified, or deleted this buffer.
    status: TrackedBufferStatus,
    /// Buffer version as of the last time the model read or edited it
    /// (compared against the live buffer in `stale_buffers`).
    version: clock::Global,
    /// Diff shown to the user for review.
    diff: Entity<BufferDiff>,
    // NOTE(review): presumably the snapshot from the last processed diff update — confirm.
    snapshot: text::BufferSnapshot,
    /// Channel used by `schedule_diff_update` to request asynchronous diff recomputation.
    diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
    // Keeps the language server session for this buffer alive while it's tracked.
    _open_lsp_handle: OpenLspBufferHandle,
    // Background task that services `diff_update` messages.
    _maintain_diff: Task<()>,
    // Subscription to buffer events, held for its side effects.
    _subscription: Subscription,
}
1286
1287impl TrackedBuffer {
1288 #[cfg(any(test, feature = "test-support"))]
1289 pub fn diff(&self) -> &Entity<BufferDiff> {
1290 &self.diff
1291 }
1292
1293 #[cfg(any(test, feature = "test-support"))]
1294 pub fn diff_base_len(&self) -> usize {
1295 self.diff_base.len()
1296 }
1297
1298 fn has_edits(&self, cx: &App) -> bool {
1299 self.diff
1300 .read(cx)
1301 .snapshot(cx)
1302 .hunks(self.buffer.read(cx))
1303 .next()
1304 .is_some()
1305 }
1306
1307 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
1308 self.diff_update
1309 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
1310 .ok();
1311 }
1312}
1313
/// A buffer with unreviewed changes, exposed together with its review diff.
pub struct ChangedBuffer {
    /// Diff of the buffer's unreviewed edits.
    pub diff: Entity<BufferDiff>,
}
1317
1318#[cfg(test)]
1319mod tests {
1320 use super::*;
1321 use buffer_diff::DiffHunkStatusKind;
1322 use gpui::TestAppContext;
1323 use language::Point;
1324 use project::{FakeFs, Fs, Project, RemoveOptions};
1325 use rand::prelude::*;
1326 use serde_json::json;
1327 use settings::SettingsStore;
1328 use std::env;
1329 use util::{RandomCharIter, path};
1330
    /// Initializes test logging once per process, before any test runs.
    #[ctor::ctor]
    fn init_logger() {
        zlog::init_test();
    }
1335
    /// Installs the test settings store each test needs before using project APIs.
    fn init_test(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
        });
    }
1342
    /// Agent edits produce unreviewed hunks; keeping a range clears only the hunks it covers.
    #[gpui::test(iterations = 10)]
    async fn test_keep_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Make two separate agent edits, producing two distinct hunks.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndEf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(2, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(4, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Keeping a range covering only the second hunk leaves the first hunk unreviewed.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(2, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\n".into(),
                }],
            )]
        );

        // Keeping the remaining range clears all unreviewed hunks.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1420
    /// Deleted lines show up as Deleted hunks; a user undo removes the corresponding hunk,
    /// and keeping a deletion clears it.
    #[gpui::test(iterations = 10)]
    async fn test_deletions(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Delete two lines in separate transactions so each can be undone independently.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
                    .unwrap();
                buffer.finalize_last_transaction();
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
                    .unwrap();
                buffer.finalize_last_transaction();
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nghi\njkl\npqr"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "mno\n".into(),
                    }
                ],
            )]
        );

        // Undoing the most recent deletion restores "mno" and drops its hunk.
        buffer.update(cx, |buffer, cx| buffer.undo(cx));
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nghi\njkl\nmno\npqr"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(1, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "def\n".into(),
                }],
            )]
        );

        // Keeping the remaining deletion clears the last hunk.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1505
    /// User edits made after an agent edit — both outside and inside the agent's hunk —
    /// leave the unreviewed hunk intact, and keeping it afterwards clears everything.
    #[gpui::test(iterations = 10)]
    async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent edit spanning two lines.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndeF\nGHI\njkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // User edits outside the hunk: it stays unreviewed and unchanged.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(
                [
                    (Point::new(0, 2)..Point::new(0, 2), "X"),
                    (Point::new(3, 0)..Point::new(3, 0), "Y"),
                ],
                None,
                cx,
            )
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // User edit inside the hunk: the hunk is preserved with its original old text.
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndZeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // Keeping a range that overlaps the hunk clears it.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1602
    /// A file created by the agent shows a single Added hunk that grows with further edits
    /// and disappears once kept.
    #[gpui::test(iterations = 10)]
    async fn test_creating_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        // Agent creates the file and writes its initial content.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // A further user edit extends the Added hunk instead of creating a new one.
        buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 6),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), 0..5, None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1661
    /// When the agent overwrites an existing file, rejecting the edits restores the
    /// original file content.
    #[gpui::test(iterations = 10)]
    async fn test_overwriting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        // `buffer_created` on an existing file marks it as overwritten by the agent.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 19),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting any overlapping range restores the pre-overwrite content.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx);
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }
1720
    /// Overwriting a file the agent previously edited resets its diff base; rejecting
    /// afterwards restores the original on-disk content, not the intermediate edit.
    #[gpui::test(iterations = 10)]
    async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        // First, a normal agent edit on the existing file.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 37),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "Lorem ipsum dolor".into(),
                }],
            )]
        );

        // Then the agent overwrites the whole file; the hunk becomes Added.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 9),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting restores the original file content from before both agent changes.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx);
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }
1801
    /// Agent-deleted files show Deleted hunks; recreating a file (externally or by a tool)
    /// replaces or clears its hunks, and an external deletion clears tracking entirely.
    #[gpui::test(iterations = 10)]
    async fn test_deleting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file1": "lorem\n", "file2": "ipsum\n"}),
        )
        .await;

        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let file1_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();
        let file2_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
            .unwrap();

        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let buffer1 = project
            .update(cx, |project, cx| {
                project.open_buffer(file1_path.clone(), cx)
            })
            .await
            .unwrap();
        let buffer2 = project
            .update(cx, |project, cx| {
                project.open_buffer(file2_path.clone(), cx)
            })
            .await
            .unwrap();

        // Agent deletes both files; each gets a Deleted hunk with its old content.
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| {
                project.delete_file(file1_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        project
            .update(cx, |project, cx| {
                project.delete_file(file2_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![
                (
                    buffer1.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "lorem\n".into(),
                    }]
                ),
                (
                    buffer2.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "ipsum\n".into(),
                    }],
                )
            ]
        );

        // Simulate file1 being recreated externally.
        fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
            .await;

        // Simulate file2 being recreated by a tool.
        let buffer2 = project
            .update(cx, |project, cx| project.open_buffer(file2_path, cx))
            .await
            .unwrap();
        action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
        buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
        action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();
        // Only the tool-recreated file remains tracked, now as an Added hunk.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer2.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Simulate file2 being deleted externally.
        fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1911
    /// Rejecting ranges reverts only the hunks they overlap; non-overlapping reject
    /// requests are ignored.
    #[gpui::test(iterations = 10)]
    async fn test_reject_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Two agent edits: a multi-line replacement and a single-character change.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // If the rejected range doesn't overlap with any hunk, we ignore it.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Rejecting a range overlapping the first hunk reverts only that hunk.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(1, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(4, 0)..Point::new(4, 3),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "mno".into(),
                }],
            )]
        );

        // Rejecting the remaining hunk restores the original buffer text.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2052
    /// Rejecting several ranges in one call reverts all overlapped hunks, and the buffer
    /// text is already restored synchronously before the returned task completes.
    #[gpui::test(iterations = 10)]
    async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Reject both hunks at once using anchor ranges.
        action_log.update(cx, |log, cx| {
            let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
                ..buffer.read(cx).anchor_before(Point::new(1, 0));
            let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
                ..buffer.read(cx).anchor_before(Point::new(5, 3));

            let (task, _) =
                log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], None, cx);
            task.detach();
            // The buffer is reverted synchronously, before the task is awaited.
            assert_eq!(
                buffer.read_with(cx, |buffer, _| buffer.text()),
                "abc\ndef\nghi\njkl\nmno"
            );
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2129
    /// Rejecting the deletion hunk of an agent-deleted file recreates the file on disk
    /// with its original content.
    #[gpui::test(iterations = 10)]
    async fn test_reject_deleted_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "content"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // Agent deletes the file.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| {
                project.delete_file(file_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "content".into(),
                }]
            )]
        );

        // Rejecting the deletion restores both the buffer text and the file on disk.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
        assert!(fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2188
2189 #[gpui::test(iterations = 10)]
2190 async fn test_reject_created_file(cx: &mut TestAppContext) {
2191 init_test(cx);
2192
2193 let fs = FakeFs::new(cx.executor());
2194 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2195 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2196 let file_path = project
2197 .read_with(cx, |project, cx| {
2198 project.find_project_path("dir/new_file", cx)
2199 })
2200 .unwrap();
2201 let buffer = project
2202 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2203 .await
2204 .unwrap();
2205 cx.update(|cx| {
2206 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2207 buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
2208 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2209 });
2210 project
2211 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2212 .await
2213 .unwrap();
2214 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2215 cx.run_until_parked();
2216 assert_eq!(
2217 unreviewed_hunks(&action_log, cx),
2218 vec![(
2219 buffer.clone(),
2220 vec![HunkStatus {
2221 range: Point::new(0, 0)..Point::new(0, 7),
2222 diff_status: DiffHunkStatusKind::Added,
2223 old_text: "".into(),
2224 }],
2225 )]
2226 );
2227
2228 action_log
2229 .update(cx, |log, cx| {
2230 let (task, _) = log.reject_edits_in_ranges(
2231 buffer.clone(),
2232 vec![Point::new(0, 0)..Point::new(0, 11)],
2233 None,
2234 cx,
2235 );
2236 task
2237 })
2238 .await
2239 .unwrap();
2240 cx.run_until_parked();
2241 assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
2242 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2243 }
2244
    /// Rejecting an agent-created file that the user has since edited must NOT
    /// delete the file: both the agent content and the user's additions are
    /// preserved on disk and in the buffer.
    #[gpui::test]
    async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();

        // User makes additional edits (inserting at offset 10, the end of
        // "ai content"). Note: no `buffer_edited` call, so this edit is
        // attributed to the user, not the agent.
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| {
                buffer.edit([(10..10, "\nuser added this line")], None, cx);
            });
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // Reject all (a range far past the end covers every hunk)
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(100, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();

        // File should still contain all the content
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        let content = buffer.read_with(cx, |buffer, _| buffer.text());
        assert_eq!(content, "ai content\nuser added this line");
    }
2312
2313 #[gpui::test]
2314 async fn test_reject_after_accepting_hunk_on_created_file(cx: &mut TestAppContext) {
2315 init_test(cx);
2316
2317 let fs = FakeFs::new(cx.executor());
2318 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2319 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2320
2321 let file_path = project
2322 .read_with(cx, |project, cx| {
2323 project.find_project_path("dir/new_file", cx)
2324 })
2325 .unwrap();
2326 let buffer = project
2327 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
2328 .await
2329 .unwrap();
2330
2331 // AI creates file with initial content
2332 cx.update(|cx| {
2333 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2334 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
2335 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2336 });
2337 project
2338 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2339 .await
2340 .unwrap();
2341 cx.run_until_parked();
2342 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2343
2344 // User accepts the single hunk
2345 action_log.update(cx, |log, cx| {
2346 let buffer_range = Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id());
2347 log.keep_edits_in_range(buffer.clone(), buffer_range, None, cx)
2348 });
2349 cx.run_until_parked();
2350 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2351 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2352
2353 // AI modifies the file
2354 cx.update(|cx| {
2355 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
2356 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2357 });
2358 project
2359 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2360 .await
2361 .unwrap();
2362 cx.run_until_parked();
2363 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2364
2365 // User rejects the hunk
2366 action_log
2367 .update(cx, |log, cx| {
2368 let (task, _) = log.reject_edits_in_ranges(
2369 buffer.clone(),
2370 vec![Anchor::min_max_range_for_buffer(
2371 buffer.read(cx).remote_id(),
2372 )],
2373 None,
2374 cx,
2375 );
2376 task
2377 })
2378 .await
2379 .unwrap();
2380 cx.run_until_parked();
2381 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await,);
2382 assert_eq!(
2383 buffer.read_with(cx, |buffer, _| buffer.text()),
2384 "ai content v1"
2385 );
2386 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2387 }
2388
    /// Once the user has accepted all edits on a created file, a later
    /// "Reject All" restores the accepted content rather than deleting the
    /// file.
    #[gpui::test]
    async fn test_reject_edits_on_previously_accepted_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();

        // User clicks "Accept All"
        action_log.update(cx, |log, cx| log.keep_all_edits(None, cx));
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared

        // AI modifies file again
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User clicks "Reject All"
        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();
        // The file survives and rolls back to the previously accepted v1.
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "ai content v1"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2449
    /// Fuzz test: randomly interleaves agent edits, user edits, keeps, and
    /// rejects, periodically verifying that the tracked diff state can still
    /// reconstruct the buffer from the diff base.
    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        init_test(cx);

        // Number of random operations per run; overridable via the
        // `OPERATIONS` environment variable.
        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

        for _ in 0..operations {
            match rng.random_range(0..100) {
                // 25%: keep (accept) edits in a random byte range.
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, None, cx)
                    });
                }
                // 25%: reject edits in a random byte range.
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            let (task, _) =
                                log.reject_edits_in_ranges(buffer.clone(), vec![range], None, cx);
                            task
                        })
                        .await
                        .unwrap();
                }
                // 50%: perform a random edit, attributed either to the agent
                // (reported via `buffer_edited`) or to the user (not reported).
                _ => {
                    let is_agent_edit = rng.random_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            // Occasionally settle and verify mid-run.
            if rng.random_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

        // Lets pending work settle, then checks the core invariant:
        // applying the tracked unreviewed edits on top of the diff base must
        // reproduce the current buffer text exactly.
        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
                // Replay each unreviewed edit: `edit.new` is a row range in the
                // current buffer; the replaced span in the base starts at the
                // same row and covers `old_len` rows, clamped to the base's end.
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }
2543
    /// Committing content that matches some of the agent's edits should
    /// auto-keep exactly those edits, leaving the rest unreviewed. Exercised
    /// across several successive simulated commits.
    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        // HEAD starts out identical to the working copy.
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent makes five distinct edits spanning the whole file.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        // All five edits are initially unreviewed.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        // Only the two matching edits were kept; three remain unreviewed.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Make another commit that accepts the NEW line but with different content
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into())],
            "0000002",
        );
        cx.run_until_parked();
        // The GGG edit matched and was kept; the NEW line (content differs:
        // DIFFERENT) and the j -> J edit remain unreviewed.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer,
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2705
    /// Regression test: when head_commit updates before the BufferDiff's base
    /// text does, an intermediate DiffChanged (e.g. from a buffer-edit diff
    /// recalculation) must NOT consume the commit signal. The subscription
    /// should only fire once the base text itself has changed.
    #[gpui::test]
    async fn test_keep_edits_on_commit_with_stale_diff_changed(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "aaa\nbbb\nccc\nddd\neee",
            }),
        )
        .await;
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "aaa\nbbb\nccc\nddd\neee".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent makes an edit: bbb -> BBB
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit([(Point::new(1, 0)..Point::new(1, 3), "BBB")], None, cx);
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();

        // Verify the edit is tracked as a single unreviewed hunk.
        let hunks = unreviewed_hunks(&action_log, cx);
        assert_eq!(hunks.len(), 1);
        let hunk = &hunks[0].1;
        assert_eq!(hunk.len(), 1);
        assert_eq!(hunk[0].old_text, "bbb\n");

        // Simulate the race condition: update only the HEAD SHA first,
        // without changing the committed file contents. This is analogous
        // to compute_snapshot updating head_commit before
        // reload_buffer_diff_bases has loaded the new base text.
        fs.with_git_state(path!("/project/.git").as_ref(), true, |state| {
            state.refs.insert("HEAD".into(), "0000001".into());
        })
        .unwrap();
        cx.run_until_parked();

        // Make a user edit (on a different line) to trigger a buffer diff
        // recalculation. This fires DiffChanged while the BufferDiff base
        // text is still the OLD text. With the old head_commit-based
        // subscription this would "consume" the commit detection.
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| {
                buffer.edit([(Point::new(3, 0)..Point::new(3, 3), "DDD")], None, cx);
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();

        // Now update the committed file contents to match the buffer
        // (the agent edit was committed). Keep the same SHA so head_commit
        // does NOT change again — this is the second half of the race.
        {
            use git::repository::repo_path;
            fs.with_git_state(path!("/project/.git").as_ref(), true, |state| {
                state
                    .head_contents
                    .insert(repo_path("file.txt"), "aaa\nBBB\nccc\nDDD\neee".into());
            })
            .unwrap();
        }
        cx.run_until_parked();

        // The agent's edit (bbb -> BBB) should be accepted because the
        // committed content now matches. Only the user edit (ddd -> DDD)
        // should remain, but since the user edit is tracked as coming from
        // the user (ChangeAuthor::User) it would have been rebased into
        // the diff base already. So no unreviewed hunks should remain.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![],
            "agent edits should have been accepted after the base text update"
        );
    }
2807
    /// Rejecting all agent edits stores undo state; `undo_last_reject`
    /// re-applies the rejected edits and clears the pending-undo flag.
    #[gpui::test]
    async fn test_undo_last_reject(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "abc\ndef\nghi"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Track the buffer and make an agent edit
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit(
                        [(Point::new(1, 0)..Point::new(1, 3), "AGENT_EDIT")],
                        None,
                        cx,
                    )
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();

        // Verify the agent edit is there
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nAGENT_EDIT\nghi"
        );
        assert!(!unreviewed_hunks(&action_log, cx).is_empty());

        // Reject all edits
        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();

        // Verify the buffer is back to original
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi"
        );
        assert!(unreviewed_hunks(&action_log, cx).is_empty());

        // Verify undo state is available
        assert!(action_log.read_with(cx, |log, _| log.has_pending_undo()));

        // Undo the reject
        action_log
            .update(cx, |log, cx| log.undo_last_reject(cx))
            .await;

        cx.run_until_parked();

        // Verify the agent edit is restored
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nAGENT_EDIT\nghi"
        );

        // Verify undo state is cleared (undo is single-shot)
        assert!(!action_log.read_with(cx, |log, _| log.has_pending_undo()));
    }
2886
2887 #[gpui::test]
2888 async fn test_linked_action_log_buffer_read(cx: &mut TestAppContext) {
2889 init_test(cx);
2890
2891 let fs = FakeFs::new(cx.executor());
2892 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
2893 .await;
2894 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2895 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
2896 let child_log =
2897 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
2898
2899 let file_path = project
2900 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
2901 .unwrap();
2902 let buffer = project
2903 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2904 .await
2905 .unwrap();
2906
2907 cx.update(|cx| {
2908 child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
2909 });
2910
2911 // Neither log considers the buffer stale immediately after reading it.
2912 let child_stale = cx.read(|cx| {
2913 child_log
2914 .read(cx)
2915 .stale_buffers(cx)
2916 .cloned()
2917 .collect::<Vec<_>>()
2918 });
2919 let parent_stale = cx.read(|cx| {
2920 parent_log
2921 .read(cx)
2922 .stale_buffers(cx)
2923 .cloned()
2924 .collect::<Vec<_>>()
2925 });
2926 assert!(child_stale.is_empty());
2927 assert!(parent_stale.is_empty());
2928
2929 // Simulate a user edit after the agent read the file.
2930 cx.update(|cx| {
2931 buffer.update(cx, |buffer, cx| {
2932 buffer.edit([(0..5, "goodbye")], None, cx).unwrap();
2933 });
2934 });
2935 cx.run_until_parked();
2936
2937 // Both child and parent should see the buffer as stale because both tracked
2938 // it at the pre-edit version via buffer_read forwarding.
2939 let child_stale = cx.read(|cx| {
2940 child_log
2941 .read(cx)
2942 .stale_buffers(cx)
2943 .cloned()
2944 .collect::<Vec<_>>()
2945 });
2946 let parent_stale = cx.read(|cx| {
2947 parent_log
2948 .read(cx)
2949 .stale_buffers(cx)
2950 .cloned()
2951 .collect::<Vec<_>>()
2952 });
2953 assert_eq!(child_stale, vec![buffer.clone()]);
2954 assert_eq!(parent_stale, vec![buffer]);
2955 }
2956
    /// An agent edit recorded on a child log shows up as an unreviewed hunk on
    /// both the child and its linked parent log.
    #[gpui::test]
    async fn test_linked_action_log_buffer_edited(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent edit reported only to the child log (def -> DEF).
        cx.update(|cx| {
            child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 0)..Point::new(1, 3), "DEF")], None, cx)
                    .unwrap();
            });
            child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();

        let expected_hunks = vec![(
            buffer,
            vec![HunkStatus {
                range: Point::new(1, 0)..Point::new(2, 0),
                diff_status: DiffHunkStatusKind::Modified,
                old_text: "def\n".into(),
            }],
        )];
        assert_eq!(
            unreviewed_hunks(&child_log, cx),
            expected_hunks,
            "child should track the agent edit"
        );
        assert_eq!(
            unreviewed_hunks(&parent_log, cx),
            expected_hunks,
            "parent should also track the agent edit via linked log forwarding"
        );
    }
3007
    /// A file created through a child log shows up as an `Added` hunk on both
    /// the child and its linked parent log.
    #[gpui::test]
    async fn test_linked_action_log_buffer_created(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent creates the file and writes content, reporting to the child only.
        cx.update(|cx| {
            child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("hello", cx));
            child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();

        let expected_hunks = vec![(
            buffer.clone(),
            vec![HunkStatus {
                range: Point::new(0, 0)..Point::new(0, 5),
                diff_status: DiffHunkStatusKind::Added,
                old_text: "".into(),
            }],
        )];
        assert_eq!(
            unreviewed_hunks(&child_log, cx),
            expected_hunks,
            "child should track the created file"
        );
        assert_eq!(
            unreviewed_hunks(&parent_log, cx),
            expected_hunks,
            "parent should also track the created file via linked log forwarding"
        );
    }
3059
    /// A deletion announced through a child log shows up as a `Deleted` hunk on
    /// both the child and its linked parent log.
    #[gpui::test]
    async fn test_linked_action_log_will_delete_buffer(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "hello\n"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // The child is told about the deletion before the file is removed.
        cx.update(|cx| {
            child_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.delete_file(file_path, false, cx))
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();

        let expected_hunks = vec![(
            buffer.clone(),
            vec![HunkStatus {
                range: Point::new(0, 0)..Point::new(0, 0),
                diff_status: DiffHunkStatusKind::Deleted,
                old_text: "hello\n".into(),
            }],
        )];
        assert_eq!(
            unreviewed_hunks(&child_log, cx),
            expected_hunks,
            "child should track the deleted file"
        );
        assert_eq!(
            unreviewed_hunks(&parent_log, cx),
            expected_hunks,
            "parent should also track the deleted file via linked log forwarding"
        );
    }
3109
    /// Simulates the subagent scenario: two child logs linked to the same parent, each
    /// editing a different file. The parent accumulates all edits while each child
    /// only sees its own.
    #[gpui::test]
    async fn test_linked_action_log_independent_tracking(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file_a": "content of a",
                "file_b": "content of b",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log_1 =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
        let child_log_2 =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_a_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/file_a", cx)
            })
            .unwrap();
        let file_b_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/file_b", cx)
            })
            .unwrap();
        let buffer_a = project
            .update(cx, |project, cx| project.open_buffer(file_a_path, cx))
            .await
            .unwrap();
        let buffer_b = project
            .update(cx, |project, cx| project.open_buffer(file_b_path, cx))
            .await
            .unwrap();

        // Each child edits its own file: child 1 edits file_a, child 2 edits
        // file_b. Neither reports the other's buffer.
        cx.update(|cx| {
            child_log_1.update(cx, |log, cx| log.buffer_read(buffer_a.clone(), cx));
            buffer_a.update(cx, |buffer, cx| {
                buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap();
            });
            child_log_1.update(cx, |log, cx| log.buffer_edited(buffer_a.clone(), cx));

            child_log_2.update(cx, |log, cx| log.buffer_read(buffer_b.clone(), cx));
            buffer_b.update(cx, |buffer, cx| {
                buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap();
            });
            child_log_2.update(cx, |log, cx| log.buffer_edited(buffer_b.clone(), cx));
        });
        cx.run_until_parked();

        // Collect the changed-buffer sets of all three logs.
        let child_1_changed: Vec<_> = cx.read(|cx| {
            child_log_1
                .read(cx)
                .changed_buffers(cx)
                .into_keys()
                .collect()
        });
        let child_2_changed: Vec<_> = cx.read(|cx| {
            child_log_2
                .read(cx)
                .changed_buffers(cx)
                .into_keys()
                .collect()
        });
        let parent_changed: Vec<_> = cx.read(|cx| {
            parent_log
                .read(cx)
                .changed_buffers(cx)
                .into_keys()
                .collect()
        });

        assert_eq!(
            child_1_changed,
            vec![buffer_a.clone()],
            "child 1 should only track file_a"
        );
        assert_eq!(
            child_2_changed,
            vec![buffer_b.clone()],
            "child 2 should only track file_b"
        );
        assert_eq!(parent_changed.len(), 2, "parent should track both files");
        assert!(
            parent_changed.contains(&buffer_a) && parent_changed.contains(&buffer_b),
            "parent should contain both buffer_a and buffer_b"
        );
    }
3205
3206 #[gpui::test]
3207 async fn test_file_read_time_recorded_on_buffer_read(cx: &mut TestAppContext) {
3208 init_test(cx);
3209
3210 let fs = FakeFs::new(cx.executor());
3211 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3212 .await;
3213 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3214 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3215
3216 let file_path = project
3217 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3218 .unwrap();
3219 let buffer = project
3220 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3221 .await
3222 .unwrap();
3223
3224 let abs_path = PathBuf::from(path!("/dir/file"));
3225 assert!(
3226 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3227 "file_read_time should be None before buffer_read"
3228 );
3229
3230 cx.update(|cx| {
3231 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
3232 });
3233
3234 assert!(
3235 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3236 "file_read_time should be recorded after buffer_read"
3237 );
3238 }
3239
3240 #[gpui::test]
3241 async fn test_file_read_time_recorded_on_buffer_edited(cx: &mut TestAppContext) {
3242 init_test(cx);
3243
3244 let fs = FakeFs::new(cx.executor());
3245 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3246 .await;
3247 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3248 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3249
3250 let file_path = project
3251 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3252 .unwrap();
3253 let buffer = project
3254 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3255 .await
3256 .unwrap();
3257
3258 let abs_path = PathBuf::from(path!("/dir/file"));
3259 assert!(
3260 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3261 "file_read_time should be None before buffer_edited"
3262 );
3263
3264 cx.update(|cx| {
3265 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
3266 });
3267
3268 assert!(
3269 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3270 "file_read_time should be recorded after buffer_edited"
3271 );
3272 }
3273
3274 #[gpui::test]
3275 async fn test_file_read_time_recorded_on_buffer_created(cx: &mut TestAppContext) {
3276 init_test(cx);
3277
3278 let fs = FakeFs::new(cx.executor());
3279 fs.insert_tree(path!("/dir"), json!({"file": "existing content"}))
3280 .await;
3281 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3282 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3283
3284 let file_path = project
3285 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3286 .unwrap();
3287 let buffer = project
3288 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3289 .await
3290 .unwrap();
3291
3292 let abs_path = PathBuf::from(path!("/dir/file"));
3293 assert!(
3294 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3295 "file_read_time should be None before buffer_created"
3296 );
3297
3298 cx.update(|cx| {
3299 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
3300 });
3301
3302 assert!(
3303 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3304 "file_read_time should be recorded after buffer_created"
3305 );
3306 }
3307
3308 #[gpui::test]
3309 async fn test_file_read_time_removed_on_delete(cx: &mut TestAppContext) {
3310 init_test(cx);
3311
3312 let fs = FakeFs::new(cx.executor());
3313 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3314 .await;
3315 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3316 let action_log = cx.new(|_| ActionLog::new(project.clone()));
3317
3318 let file_path = project
3319 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3320 .unwrap();
3321 let buffer = project
3322 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3323 .await
3324 .unwrap();
3325
3326 let abs_path = PathBuf::from(path!("/dir/file"));
3327
3328 cx.update(|cx| {
3329 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
3330 });
3331 assert!(
3332 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3333 "file_read_time should exist after buffer_read"
3334 );
3335
3336 cx.update(|cx| {
3337 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
3338 });
3339 assert!(
3340 action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3341 "file_read_time should be removed after will_delete_buffer"
3342 );
3343 }
3344
3345 #[gpui::test]
3346 async fn test_file_read_time_not_forwarded_to_linked_action_log(cx: &mut TestAppContext) {
3347 init_test(cx);
3348
3349 let fs = FakeFs::new(cx.executor());
3350 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
3351 .await;
3352 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3353 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
3354 let child_log =
3355 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
3356
3357 let file_path = project
3358 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3359 .unwrap();
3360 let buffer = project
3361 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3362 .await
3363 .unwrap();
3364
3365 let abs_path = PathBuf::from(path!("/dir/file"));
3366
3367 cx.update(|cx| {
3368 child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
3369 });
3370 assert!(
3371 child_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
3372 "child should record file_read_time on buffer_read"
3373 );
3374 assert!(
3375 parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3376 "parent should NOT get file_read_time from child's buffer_read"
3377 );
3378
3379 cx.update(|cx| {
3380 child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
3381 });
3382 assert!(
3383 parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3384 "parent should NOT get file_read_time from child's buffer_edited"
3385 );
3386
3387 cx.update(|cx| {
3388 child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
3389 });
3390 assert!(
3391 parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
3392 "parent should NOT get file_read_time from child's buffer_created"
3393 );
3394 }
3395
    /// Expected state of a single diff hunk; tests compare these against the
    /// hunks produced by an `ActionLog`'s tracked diffs (see `unreviewed_hunks`).
    #[derive(Debug, PartialEq)]
    struct HunkStatus {
        // Position of the hunk in the current buffer, as a point (row/column) range.
        range: Range<Point>,
        // Status kind of the hunk, taken from `hunk.status().kind`.
        diff_status: DiffHunkStatusKind,
        // Text the hunk replaced, extracted from the diff's base text.
        old_text: String,
    }
3402
3403 fn unreviewed_hunks(
3404 action_log: &Entity<ActionLog>,
3405 cx: &TestAppContext,
3406 ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
3407 cx.read(|cx| {
3408 action_log
3409 .read(cx)
3410 .changed_buffers(cx)
3411 .into_iter()
3412 .map(|(buffer, diff)| {
3413 let snapshot = buffer.read(cx).snapshot();
3414 (
3415 buffer,
3416 diff.read(cx)
3417 .snapshot(cx)
3418 .hunks(&snapshot)
3419 .map(|hunk| HunkStatus {
3420 diff_status: hunk.status().kind,
3421 range: hunk.range,
3422 old_text: diff
3423 .read(cx)
3424 .base_text(cx)
3425 .text_for_range(hunk.diff_base_byte_range)
3426 .collect(),
3427 })
3428 .collect(),
3429 )
3430 })
3431 .collect()
3432 })
3433 }
3434}