1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use clock;
4use collections::{BTreeMap, HashMap};
5use fs::MTime;
6use futures::{FutureExt, StreamExt, channel::mpsc};
7use gpui::{
8 App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
9};
10use language::{Anchor, Buffer, BufferEvent, Point, ToOffset, ToPoint};
11use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
12use std::{
13 cmp,
14 ops::Range,
15 path::{Path, PathBuf},
16 sync::Arc,
17};
18use text::{Edit, Patch, Rope};
19use util::{RangeExt, ResultExt as _};
20
/// Stores undo information for a single buffer's rejected edits
#[derive(Clone)]
pub struct PerBufferUndo {
    /// The buffer whose rejected edits can be re-applied. Weak so that undo
    /// state does not keep a closed buffer alive.
    pub buffer: WeakEntity<Buffer>,
    /// Anchored ranges and the agent text to splice back into each range
    /// when the reject is undone.
    pub edits_to_restore: Vec<(Range<Anchor>, String)>,
    /// How the agent changed this buffer (modified vs. created); determines
    /// whether and how undo is supported.
    pub status: UndoBufferStatus,
}
28
/// Tracks the buffer status for undo purposes
#[derive(Clone, Debug)]
pub enum UndoBufferStatus {
    /// Buffer existed before the agent touched it; the agent's edits were
    /// rejected and can be re-applied from `edits_to_restore`.
    Modified,
    /// Buffer was created by the agent.
    /// - `had_existing_content: true` - Agent overwrote an existing file. On reject, the
    /// original content was restored. Undo is supported: we restore the agent's content.
    /// - `had_existing_content: false` - Agent created a new file that didn't exist before.
    /// On reject, the file was deleted. Undo is NOT currently supported (would require
    /// recreating the file). Future TODO.
    Created {
        had_existing_content: bool,
    },
}
43
/// Stores undo information for the most recent reject operation
#[derive(Clone)]
pub struct LastRejectUndo {
    /// Per-buffer undo information, one entry for each buffer that had
    /// edits rejected in the operation.
    pub buffers: Vec<PerBufferUndo>,
}
50
/// Tracks actions performed by tools in a thread
pub struct ActionLog {
    /// Buffers that we want to notify the model about when they change.
    tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
    /// The project this action log is associated with
    project: Entity<Project>,
    /// An action log to forward all public methods to
    /// Useful in cases like subagents, where we want to track individual diffs for this subagent,
    /// but also want to associate the reads/writes with a parent review experience
    linked_action_log: Option<Entity<ActionLog>>,
    /// Stores undo information for the most recent reject operation
    last_reject_undo: Option<LastRejectUndo>,
    /// Tracks the last time files were read by the agent, to detect external modifications.
    /// Keyed by absolute path; values are on-disk mtimes captured at read time.
    file_read_times: HashMap<PathBuf, MTime>,
}
66
67impl ActionLog {
68 /// Creates a new, empty action log associated with the given project.
69 pub fn new(project: Entity<Project>) -> Self {
70 Self {
71 tracked_buffers: BTreeMap::default(),
72 project,
73 linked_action_log: None,
74 last_reject_undo: None,
75 file_read_times: HashMap::default(),
76 }
77 }
78
79 pub fn with_linked_action_log(mut self, linked_action_log: Entity<ActionLog>) -> Self {
80 self.linked_action_log = Some(linked_action_log);
81 self
82 }
83
    /// Returns the project this action log is associated with.
    pub fn project(&self) -> &Entity<Project> {
        &self.project
    }
87
    /// Returns the on-disk mtime recorded the last time the agent read the
    /// file at `path`, if any.
    pub fn file_read_time(&self, path: &Path) -> Option<MTime> {
        self.file_read_times.get(path).copied()
    }
91
92 fn update_file_read_time(&mut self, buffer: &Entity<Buffer>, cx: &App) {
93 let buffer = buffer.read(cx);
94 if let Some(file) = buffer.file() {
95 if let Some(local_file) = file.as_local() {
96 if let Some(mtime) = file.disk_state().mtime() {
97 let abs_path = local_file.abs_path(cx);
98 self.file_read_times.insert(abs_path, mtime);
99 }
100 }
101 }
102 }
103
104 fn remove_file_read_time(&mut self, buffer: &Entity<Buffer>, cx: &App) {
105 let buffer = buffer.read(cx);
106 if let Some(file) = buffer.file() {
107 if let Some(local_file) = file.as_local() {
108 let abs_path = local_file.abs_path(cx);
109 self.file_read_times.remove(&abs_path);
110 }
111 }
112 }
113
    /// Begins (or refreshes) tracking of `buffer` and returns its tracking entry.
    ///
    /// `is_created` marks the buffer as created by the agent. In that case any
    /// existing tracking entry is folded into the new `Created` status so the
    /// pre-existing file content (if any) can still be restored on reject.
    fn track_buffer_internal(
        &mut self,
        buffer: Entity<Buffer>,
        is_created: bool,
        cx: &mut Context<Self>,
    ) -> &mut TrackedBuffer {
        let status = if is_created {
            if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
                match tracked.status {
                    // Already marked created: carry over the previously
                    // captured pre-existing content.
                    TrackedBufferStatus::Created {
                        existing_file_content,
                    } => TrackedBufferStatus::Created {
                        existing_file_content,
                    },
                    // Previously modified/deleted: the old diff base *is* the
                    // pre-existing content to restore on reject.
                    TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
                        TrackedBufferStatus::Created {
                            existing_file_content: Some(tracked.diff_base),
                        }
                    }
                }
            } else if buffer
                .read(cx)
                .file()
                .is_some_and(|file| file.disk_state().exists())
            {
                // The file already exists on disk: snapshot its current
                // content so it can be restored if the creation is rejected.
                TrackedBufferStatus::Created {
                    existing_file_content: Some(buffer.read(cx).as_rope().clone()),
                }
            } else {
                // Brand-new file: nothing to restore.
                TrackedBufferStatus::Created {
                    existing_file_content: None,
                }
            }
        } else {
            TrackedBufferStatus::Modified
        };

        // NOTE: if an entry already exists (and `is_created` is false), the
        // freshly computed `status` is discarded and the existing entry is
        // kept as-is except for its version, updated below.
        let tracked_buffer = self
            .tracked_buffers
            .entry(buffer.clone())
            .or_insert_with(|| {
                // Keep language servers aware of this buffer while we track it.
                let open_lsp_handle = self.project.update(cx, |project, cx| {
                    project.register_buffer_with_language_servers(&buffer, cx)
                });

                let text_snapshot = buffer.read(cx).text_snapshot();
                let language = buffer.read(cx).language().cloned();
                let language_registry = buffer.read(cx).language_registry();
                let diff = cx.new(|cx| {
                    let mut diff = BufferDiff::new(&text_snapshot, cx);
                    diff.language_changed(language, language_registry, cx);
                    diff
                });
                let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
                let diff_base;
                let unreviewed_edits;
                if is_created {
                    // Created buffers diff against an empty base, so the
                    // entire buffer is a single unreviewed row edit.
                    diff_base = Rope::default();
                    unreviewed_edits = Patch::new(vec![Edit {
                        old: 0..1,
                        new: 0..text_snapshot.max_point().row + 1,
                    }])
                } else {
                    // Existing buffers start clean: base == current content.
                    diff_base = buffer.read(cx).as_rope().clone();
                    unreviewed_edits = Patch::default();
                }
                TrackedBuffer {
                    buffer: buffer.clone(),
                    diff_base,
                    unreviewed_edits,
                    snapshot: text_snapshot,
                    status,
                    version: buffer.read(cx).version(),
                    diff,
                    diff_update: diff_update_tx,
                    _open_lsp_handle: open_lsp_handle,
                    // Background task that keeps the review diff up to date
                    // for as long as the entry lives.
                    _maintain_diff: cx.spawn({
                        let buffer = buffer.clone();
                        async move |this, cx| {
                            Self::maintain_diff(this, buffer, diff_update_rx, cx)
                                .await
                                .ok();
                        }
                    }),
                    _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
                }
            });
        tracked_buffer.version = buffer.read(cx).version();
        tracked_buffer
    }
204
205 fn handle_buffer_event(
206 &mut self,
207 buffer: Entity<Buffer>,
208 event: &BufferEvent,
209 cx: &mut Context<Self>,
210 ) {
211 match event {
212 BufferEvent::Edited { .. } => {
213 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
214 return;
215 };
216 let buffer_version = buffer.read(cx).version();
217 if !buffer_version.changed_since(&tracked_buffer.version) {
218 return;
219 }
220 self.handle_buffer_edited(buffer, cx);
221 }
222 BufferEvent::FileHandleChanged => {
223 self.handle_buffer_file_changed(buffer, cx);
224 }
225 _ => {}
226 };
227 }
228
229 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
230 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
231 return;
232 };
233 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
234 }
235
236 fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
237 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
238 return;
239 };
240
241 match tracked_buffer.status {
242 TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
243 if buffer
244 .read(cx)
245 .file()
246 .is_some_and(|file| file.disk_state().is_deleted())
247 {
248 // If the buffer had been edited by a tool, but it got
249 // deleted externally, we want to stop tracking it.
250 self.tracked_buffers.remove(&buffer);
251 }
252 cx.notify();
253 }
254 TrackedBufferStatus::Deleted => {
255 if buffer
256 .read(cx)
257 .file()
258 .is_some_and(|file| !file.disk_state().is_deleted())
259 {
260 // If the buffer had been deleted by a tool, but it got
261 // resurrected externally, we want to clear the edits we
262 // were tracking and reset the buffer's state.
263 self.tracked_buffers.remove(&buffer);
264 self.track_buffer_internal(buffer, false, cx);
265 }
266 cx.notify();
267 }
268 }
269 }
270
    /// Long-running task that keeps a tracked buffer's review diff up to date.
    ///
    /// Reacts to two event sources: buffer snapshots pushed through
    /// `buffer_updates` by `schedule_diff_update`, and git `DiffChanged`
    /// events whose head commit changed (so committed hunks can be absorbed).
    /// Exits when the update channel closes (i.e. the tracked entry dropped).
    async fn maintain_diff(
        this: WeakEntity<Self>,
        buffer: Entity<Buffer>,
        mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
        let git_diff = this
            .update(cx, |this, cx| {
                this.project.update(cx, |project, cx| {
                    project.open_uncommitted_diff(buffer.clone(), cx)
                })
            })?
            .await
            .ok();
        let buffer_repo = git_store.read_with(cx, |git_store, cx| {
            git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        });

        // Only fires when the repository's head commit actually changes, not
        // on every diff recalculation.
        let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
        let _repo_subscription =
            if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
                cx.update(|cx| {
                    let mut old_head = buffer_repo.read(cx).head_commit.clone();
                    Some(cx.subscribe(git_diff, move |_, event, cx| {
                        if let buffer_diff::BufferDiffEvent::DiffChanged { .. } = event {
                            let new_head = buffer_repo.read(cx).head_commit.clone();
                            if new_head != old_head {
                                old_head = new_head;
                                git_diff_updates_tx.send(()).ok();
                            }
                        }
                    }))
                })
            } else {
                None
            };

        loop {
            futures::select_biased! {
                // Buffer edits take priority over git head changes.
                buffer_update = buffer_updates.next() => {
                    if let Some((author, buffer_snapshot)) = buffer_update {
                        Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
                    } else {
                        break;
                    }
                }
                _ = git_diff_updates_rx.changed().fuse() => {
                    if let Some(git_diff) = git_diff.as_ref() {
                        Self::keep_committed_edits(&this, &buffer, git_diff, cx).await?;
                    }
                }
            }
        }

        Ok(())
    }
328
    /// Rebases the agent diff base over a new buffer snapshot and refreshes
    /// the review diff.
    ///
    /// User-authored edits that don't conflict with unreviewed agent edits are
    /// folded into the diff base (so they don't show up as agent changes);
    /// agent-authored edits leave the base untouched.
    async fn track_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        author: ChangeAuthor,
        buffer_snapshot: text::BufferSnapshot,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let rebase = this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;

            // The rebase itself runs off the main thread.
            let rebase = cx.background_spawn({
                let mut base_text = tracked_buffer.diff_base.clone();
                let old_snapshot = tracked_buffer.snapshot.clone();
                let new_snapshot = buffer_snapshot.clone();
                let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let edits = diff_snapshots(&old_snapshot, &new_snapshot);
                async move {
                    if let ChangeAuthor::User = author {
                        apply_non_conflicting_edits(
                            &unreviewed_edits,
                            edits,
                            &mut base_text,
                            new_snapshot.as_rope(),
                        );
                    }

                    (Arc::from(base_text.to_string().as_str()), base_text)
                }
            });

            anyhow::Ok(rebase)
        })??;
        let (new_base_text, new_diff_base) = rebase.await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
376
    /// Absorbs agent edits that the user has committed to git into the agent
    /// diff base, so committed hunks no longer appear as unreviewed.
    async fn keep_committed_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        git_diff: &Entity<BufferDiff>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let buffer_snapshot = this.read_with(cx, |this, _cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok(tracked_buffer.snapshot.clone())
        })??;
        let (new_base_text, new_diff_base) = this
            .read_with(cx, |this, cx| {
                let tracked_buffer = this
                    .tracked_buffers
                    .get(buffer)
                    .context("buffer not tracked")?;
                let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let agent_diff_base = tracked_buffer.diff_base.clone();
                let git_diff_base = git_diff.read(cx).base_text(cx).as_rope().clone();
                let buffer_text = tracked_buffer.snapshot.as_rope().clone();
                anyhow::Ok(cx.background_spawn(async move {
                    let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
                    // Row-level edits between the agent's base and git's base
                    // describe what got committed.
                    let committed_edits = language::line_diff(
                        &agent_diff_base.to_string(),
                        &git_diff_base.to_string(),
                    )
                    .into_iter()
                    .map(|(old, new)| Edit { old, new });

                    let mut new_agent_diff_base = agent_diff_base.clone();
                    // Row drift accumulated in `new_agent_diff_base` from the
                    // replacements applied so far.
                    let mut row_delta = 0i32;
                    for committed in committed_edits {
                        while let Some(unreviewed) = old_unreviewed_edits.peek() {
                            // If the committed edit matches the unreviewed
                            // edit, assume the user wants to keep it.
                            if committed.old == unreviewed.old {
                                let unreviewed_new =
                                    buffer_text.slice_rows(unreviewed.new.clone()).to_string();
                                let committed_new =
                                    git_diff_base.slice_rows(committed.new.clone()).to_string();
                                if unreviewed_new == committed_new {
                                    // Splice the kept text into the base at the
                                    // delta-adjusted row positions.
                                    let old_byte_start =
                                        new_agent_diff_base.point_to_offset(Point::new(
                                            (unreviewed.old.start as i32 + row_delta) as u32,
                                            0,
                                        ));
                                    let old_byte_end =
                                        new_agent_diff_base.point_to_offset(cmp::min(
                                            Point::new(
                                                (unreviewed.old.end as i32 + row_delta) as u32,
                                                0,
                                            ),
                                            new_agent_diff_base.max_point(),
                                        ));
                                    new_agent_diff_base
                                        .replace(old_byte_start..old_byte_end, &unreviewed_new);
                                    row_delta +=
                                        unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
                                }
                            } else if unreviewed.old.start >= committed.old.end {
                                // This unreviewed edit lies past the committed
                                // edit; advance to the next committed edit.
                                break;
                            }

                            old_unreviewed_edits.next().unwrap();
                        }
                    }

                    (
                        Arc::from(new_agent_diff_base.to_string().as_str()),
                        new_agent_diff_base,
                    )
                }))
            })??
            .await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
465
    /// Recomputes the review diff against `new_diff_base`, then stores the new
    /// base, snapshot, and the row-level unreviewed edits derived from the
    /// diff's hunks back into the tracked entry.
    async fn update_diff(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        buffer_snapshot: text::BufferSnapshot,
        new_base_text: Arc<str>,
        new_diff_base: Rope,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let (diff, language) = this.read_with(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok((
                tracked_buffer.diff.clone(),
                buffer.read(cx).language().cloned(),
            ))
        })??;
        let update = diff
            .update(cx, |diff, cx| {
                diff.update_diff(
                    buffer_snapshot.clone(),
                    Some(new_base_text),
                    Some(true),
                    language,
                    cx,
                )
            })
            .await;
        diff.update(cx, |diff, cx| {
            diff.set_snapshot(update.clone(), &buffer_snapshot, cx)
        })
        .await;
        let diff_snapshot = diff.update(cx, |diff, cx| diff.snapshot(cx));

        // Convert the diff's hunks back into a row-based patch off the main
        // thread.
        let unreviewed_edits = cx
            .background_spawn({
                let buffer_snapshot = buffer_snapshot.clone();
                let new_diff_base = new_diff_base.clone();
                async move {
                    let mut unreviewed_edits = Patch::default();
                    for hunk in diff_snapshot.hunks_intersecting_range(
                        Anchor::min_for_buffer(buffer_snapshot.remote_id())
                            ..Anchor::max_for_buffer(buffer_snapshot.remote_id()),
                        &buffer_snapshot,
                    ) {
                        let old_range = new_diff_base
                            .offset_to_point(hunk.diff_base_byte_range.start)
                            ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
                        let new_range = hunk.range.start..hunk.range.end;
                        unreviewed_edits.push(point_to_row_edit(
                            Edit {
                                old: old_range,
                                new: new_range,
                            },
                            &new_diff_base,
                            buffer_snapshot.as_rope(),
                        ));
                    }
                    unreviewed_edits
                }
            })
            .await;
        this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;
            tracked_buffer.diff_base = new_diff_base;
            tracked_buffer.snapshot = buffer_snapshot;
            tracked_buffer.unreviewed_edits = unreviewed_edits;
            cx.notify();
            anyhow::Ok(())
        })?
    }
541
    /// Track a buffer as read by agent, so we can notify the model about user edits.
    /// Also records the file's mtime for external-modification detection.
    pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.buffer_read_impl(buffer, true, cx);
    }
546
547 fn buffer_read_impl(
548 &mut self,
549 buffer: Entity<Buffer>,
550 record_file_read_time: bool,
551 cx: &mut Context<Self>,
552 ) {
553 if let Some(linked_action_log) = &self.linked_action_log {
554 // We don't want to share read times since the other agent hasn't read it necessarily
555 linked_action_log.update(cx, |log, cx| {
556 log.buffer_read_impl(buffer.clone(), false, cx);
557 });
558 }
559 if record_file_read_time {
560 self.update_file_read_time(&buffer, cx);
561 }
562 self.track_buffer_internal(buffer, false, cx);
563 }
564
    /// Mark a buffer as created by agent, so we can refresh it in the context.
    /// Also records the file's mtime for external-modification detection.
    pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.buffer_created_impl(buffer, true, cx);
    }
569
570 fn buffer_created_impl(
571 &mut self,
572 buffer: Entity<Buffer>,
573 record_file_read_time: bool,
574 cx: &mut Context<Self>,
575 ) {
576 if let Some(linked_action_log) = &self.linked_action_log {
577 // We don't want to share read times since the other agent hasn't read it necessarily
578 linked_action_log.update(cx, |log, cx| {
579 log.buffer_created_impl(buffer.clone(), false, cx);
580 });
581 }
582 if record_file_read_time {
583 self.update_file_read_time(&buffer, cx);
584 }
585 self.track_buffer_internal(buffer, true, cx);
586 }
587
    /// Mark a buffer as edited by agent, so we can refresh it in the context.
    /// Also records the file's mtime for external-modification detection.
    pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.buffer_edited_impl(buffer, true, cx);
    }
592
593 fn buffer_edited_impl(
594 &mut self,
595 buffer: Entity<Buffer>,
596 record_file_read_time: bool,
597 cx: &mut Context<Self>,
598 ) {
599 if let Some(linked_action_log) = &self.linked_action_log {
600 // We don't want to share read times since the other agent hasn't read it necessarily
601 linked_action_log.update(cx, |log, cx| {
602 log.buffer_edited_impl(buffer.clone(), false, cx);
603 });
604 }
605 if record_file_read_time {
606 self.update_file_read_time(&buffer, cx);
607 }
608 let new_version = buffer.read(cx).version();
609 let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
610 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
611 tracked_buffer.status = TrackedBufferStatus::Modified;
612 }
613
614 tracked_buffer.version = new_version;
615 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
616 }
617
618 fn prime_tracked_buffer_from_snapshot(
619 &mut self,
620 buffer: Entity<Buffer>,
621 baseline_snapshot: text::BufferSnapshot,
622 status: TrackedBufferStatus,
623 cx: &mut Context<Self>,
624 ) {
625 let version = buffer.read(cx).version();
626 let diff_base = match &status {
627 TrackedBufferStatus::Created { .. } => Rope::default(),
628 TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
629 baseline_snapshot.as_rope().clone()
630 }
631 };
632
633 let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
634 tracked_buffer.diff_base = diff_base;
635 tracked_buffer.snapshot = baseline_snapshot;
636 tracked_buffer.unreviewed_edits.clear();
637 tracked_buffer.status = status;
638 tracked_buffer.version = version;
639 }
640
641 pub fn has_changed_buffer(&self, buffer: &Entity<Buffer>, cx: &App) -> bool {
642 self.tracked_buffers
643 .get(buffer)
644 .is_some_and(|tracked_buffer| tracked_buffer.has_edits(cx))
645 }
646
    /// Marks `buffer` as created by the agent using `baseline_snapshot` as the
    /// pre-edit baseline (used when the creation is inferred after the fact).
    pub fn infer_buffer_created(
        &mut self,
        buffer: Entity<Buffer>,
        baseline_snapshot: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) {
        self.infer_buffer_created_impl(buffer, baseline_snapshot, true, cx);
    }
655
656 fn infer_buffer_created_impl(
657 &mut self,
658 buffer: Entity<Buffer>,
659 baseline_snapshot: text::BufferSnapshot,
660 record_file_read_time: bool,
661 cx: &mut Context<Self>,
662 ) {
663 if let Some(linked_action_log) = &self.linked_action_log {
664 let linked_baseline_snapshot = baseline_snapshot.clone();
665 if !linked_action_log.read(cx).has_changed_buffer(&buffer, cx) {
666 linked_action_log.update(cx, |log, cx| {
667 log.infer_buffer_created_impl(
668 buffer.clone(),
669 linked_baseline_snapshot,
670 false,
671 cx,
672 );
673 });
674 }
675 }
676
677 if record_file_read_time {
678 self.update_file_read_time(&buffer, cx);
679 }
680 self.prime_tracked_buffer_from_snapshot(
681 buffer.clone(),
682 baseline_snapshot,
683 TrackedBufferStatus::Created {
684 existing_file_content: None,
685 },
686 cx,
687 );
688
689 if let Some(tracked_buffer) = self.tracked_buffers.get(&buffer) {
690 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
691 }
692 }
693
    /// Marks `buffer` as edited by the agent using `baseline_snapshot` as the
    /// pre-edit baseline (used when the edit is inferred after the fact).
    pub fn infer_buffer_edited_from_snapshot(
        &mut self,
        buffer: Entity<Buffer>,
        baseline_snapshot: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) {
        self.infer_buffer_edited_from_snapshot_impl(buffer, baseline_snapshot, true, cx);
    }
702
703 fn infer_buffer_edited_from_snapshot_impl(
704 &mut self,
705 buffer: Entity<Buffer>,
706 baseline_snapshot: text::BufferSnapshot,
707 record_file_read_time: bool,
708 cx: &mut Context<Self>,
709 ) {
710 if let Some(linked_action_log) = &self.linked_action_log {
711 let linked_baseline_snapshot = baseline_snapshot.clone();
712 if !linked_action_log.read(cx).has_changed_buffer(&buffer, cx) {
713 linked_action_log.update(cx, |log, cx| {
714 log.infer_buffer_edited_from_snapshot_impl(
715 buffer.clone(),
716 linked_baseline_snapshot,
717 false,
718 cx,
719 );
720 });
721 }
722 }
723
724 if record_file_read_time {
725 self.update_file_read_time(&buffer, cx);
726 }
727 self.prime_tracked_buffer_from_snapshot(
728 buffer.clone(),
729 baseline_snapshot,
730 TrackedBufferStatus::Modified,
731 cx,
732 );
733
734 if let Some(tracked_buffer) = self.tracked_buffers.get(&buffer) {
735 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
736 }
737 }
738
    /// Marks `buffer` as deleted by the agent using `baseline_snapshot` as the
    /// pre-delete baseline (used when the deletion is inferred after the fact).
    pub fn infer_buffer_deleted_from_snapshot(
        &mut self,
        buffer: Entity<Buffer>,
        baseline_snapshot: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) {
        self.infer_buffer_deleted_from_snapshot_impl(buffer, baseline_snapshot, true, cx);
    }
747
748 fn infer_buffer_deleted_from_snapshot_impl(
749 &mut self,
750 buffer: Entity<Buffer>,
751 baseline_snapshot: text::BufferSnapshot,
752 record_file_read_time: bool,
753 cx: &mut Context<Self>,
754 ) {
755 if let Some(linked_action_log) = &self.linked_action_log {
756 let linked_baseline_snapshot = baseline_snapshot.clone();
757 if !linked_action_log.read(cx).has_changed_buffer(&buffer, cx) {
758 linked_action_log.update(cx, |log, cx| {
759 log.infer_buffer_deleted_from_snapshot_impl(
760 buffer.clone(),
761 linked_baseline_snapshot,
762 false,
763 cx,
764 );
765 });
766 }
767 }
768
769 if record_file_read_time {
770 self.remove_file_read_time(&buffer, cx);
771 }
772 let has_linked_action_log = self.linked_action_log.is_some();
773 self.prime_tracked_buffer_from_snapshot(
774 buffer.clone(),
775 baseline_snapshot,
776 TrackedBufferStatus::Deleted,
777 cx,
778 );
779
780 if !has_linked_action_log {
781 buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
782 }
783
784 if let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) {
785 tracked_buffer.version = buffer.read(cx).version();
786 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
787 }
788 }
789
    /// Records that the agent is about to delete `buffer`, updating tracking
    /// state and (for non-linked logs) clearing the buffer's text.
    pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        // Ok to propagate file read time removal to linked action log
        self.remove_file_read_time(&buffer, cx);
        let has_linked_action_log = self.linked_action_log.is_some();
        let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
        match tracked_buffer.status {
            // Agent created the file, then deleted it: nothing left to review.
            TrackedBufferStatus::Created { .. } => {
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            TrackedBufferStatus::Modified => {
                tracked_buffer.status = TrackedBufferStatus::Deleted;
                // Without a linked log, this log mutates the buffer itself;
                // with one, the linked (parent) log does it below.
                if !has_linked_action_log {
                    buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
                    tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
                }
            }

            // Already deleted: nothing to do.
            TrackedBufferStatus::Deleted => {}
        }

        if let Some(linked_action_log) = &mut self.linked_action_log {
            linked_action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        }

        // Re-check: the entry may have been removed in the Created arm above.
        if let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) {
            tracked_buffer.version = buffer.read(cx).version();
            if has_linked_action_log {
                tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
            }
        }

        cx.notify();
    }
824
    /// Accepts (keeps) all unreviewed edits whose rows intersect
    /// `buffer_range`, folding their text into the diff base so they no longer
    /// appear as pending changes. Optionally reports telemetry.
    pub fn keep_edits_in_range(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_range: Range<impl language::ToPoint>,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
        match tracked_buffer.status {
            // Accepting a deletion: the buffer is gone, stop tracking it.
            TrackedBufferStatus::Deleted => {
                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            _ => {
                let buffer = buffer.read(cx);
                let buffer_range =
                    buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
                // Row drift accumulated in the diff base as kept edits are
                // folded in; applied to each remaining edit's old rows.
                let mut delta = 0i32;
                tracked_buffer.unreviewed_edits.retain_mut(|edit| {
                    edit.old.start = (edit.old.start as i32 + delta) as u32;
                    edit.old.end = (edit.old.end as i32 + delta) as u32;

                    if buffer_range.end.row < edit.new.start
                        || buffer_range.start.row > edit.new.end
                    {
                        // Edit doesn't intersect the range: keep it pending.
                        true
                    } else {
                        // Fold the edit's new text into the diff base and drop
                        // it from the unreviewed set.
                        let old_range = tracked_buffer
                            .diff_base
                            .point_to_offset(Point::new(edit.old.start, 0))
                            ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                Point::new(edit.old.end, 0),
                                tracked_buffer.diff_base.max_point(),
                            ));
                        let new_range = tracked_buffer
                            .snapshot
                            .point_to_offset(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.point_to_offset(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        tracked_buffer.diff_base.replace(
                            old_range,
                            &tracked_buffer
                                .snapshot
                                .text_for_range(new_range)
                                .collect::<String>(),
                        );
                        delta += edit.new_len() as i32 - edit.old_len() as i32;
                        metrics.add_edit(edit);
                        false
                    }
                });
                // Once every creation edit has been accepted, the buffer
                // behaves like an ordinary modified buffer.
                if tracked_buffer.unreviewed_edits.is_empty()
                    && let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status
                {
                    tracked_buffer.status = TrackedBufferStatus::Modified;
                }
                tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
            }
        }
        if let Some(telemetry) = telemetry {
            telemetry_report_accepted_edits(&telemetry, metrics);
        }
    }
895
    /// Rejects (reverts) unreviewed edits intersecting any of `buffer_ranges`.
    ///
    /// Returns a task that saves/deletes the buffer as appropriate, plus
    /// per-buffer undo information when the reject is undoable (restoring a
    /// modified buffer's agent edits, or re-applying an overwrite).
    pub fn reject_edits_in_ranges(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_ranges: Vec<Range<impl language::ToPoint>>,
        telemetry: Option<ActionLogTelemetry>,
        cx: &mut Context<Self>,
    ) -> (Task<Result<()>>, Option<PerBufferUndo>) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return (Task::ready(Ok(())), None);
        };

        let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
        let mut undo_info: Option<PerBufferUndo> = None;
        let task = match &tracked_buffer.status {
            TrackedBufferStatus::Created {
                existing_file_content,
            } => {
                let task = if let Some(existing_file_content) = existing_file_content {
                    // Agent overwrote an existing file: restore that file's
                    // original content, and remember the agent's content so
                    // the reject can be undone.
                    // Capture the agent's content before restoring existing file content
                    let agent_content = buffer.read(cx).text();

                    buffer.update(cx, |buffer, cx| {
                        buffer.start_transaction();
                        buffer.set_text("", cx);
                        for chunk in existing_file_content.chunks() {
                            buffer.append(chunk, cx);
                        }
                        buffer.end_transaction(cx);
                    });

                    undo_info = Some(PerBufferUndo {
                        buffer: buffer.downgrade(),
                        edits_to_restore: vec![(Anchor::MIN..Anchor::MAX, agent_content)],
                        status: UndoBufferStatus::Created {
                            had_existing_content: true,
                        },
                    });

                    self.project
                        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
                } else {
                    // For a file created by AI with no pre-existing content,
                    // only delete the file if we're certain it contains only AI content
                    // with no edits from the user.

                    let initial_version = tracked_buffer.version.clone();
                    let current_version = buffer.read(cx).version();

                    let current_content = buffer.read(cx).text();
                    let tracked_content = tracked_buffer.snapshot.text();

                    let is_ai_only_content =
                        initial_version == current_version && current_content == tracked_content;

                    if is_ai_only_content {
                        buffer
                            .read(cx)
                            .entry_id(cx)
                            .and_then(|entry_id| {
                                self.project.update(cx, |project, cx| {
                                    project.delete_entry(entry_id, false, cx)
                                })
                            })
                            .unwrap_or(Task::ready(Ok(())))
                    } else {
                        // Not sure how to disentangle edits made by the user
                        // from edits made by the AI at this point.
                        // For now, preserve both to avoid data loss.
                        //
                        // TODO: Better solution (disable "Reject" after user makes some
                        // edit or find a way to differentiate between AI and user edits)
                        Task::ready(Ok(()))
                    }
                };

                metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                self.tracked_buffers.remove(&buffer);
                cx.notify();
                task
            }
            TrackedBufferStatus::Deleted => {
                let current_version = buffer.read(cx).version();
                if current_version != tracked_buffer.version {
                    // Buffer changed since the agent deleted it; just stop
                    // tracking rather than clobbering newer content.
                    metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                    self.tracked_buffers.remove(&buffer);
                    cx.notify();
                    Task::ready(Ok(()))
                } else {
                    // Restore the pre-deletion content from the diff base.
                    buffer.update(cx, |buffer, cx| {
                        buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
                    });
                    let save = self
                        .project
                        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));

                    // Clear all tracked edits for this buffer and start over as if we just read it.
                    metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
                    self.tracked_buffers.remove(&buffer);
                    self.buffer_read(buffer.clone(), cx);
                    cx.notify();
                    save
                }
            }
            TrackedBufferStatus::Modified => {
                let edits_to_restore = buffer.update(cx, |buffer, cx| {
                    let mut buffer_row_ranges = buffer_ranges
                        .into_iter()
                        .map(|range| {
                            range.start.to_point(buffer).row..range.end.to_point(buffer).row
                        })
                        .peekable();

                    let mut edits_to_revert = Vec::new();
                    let mut edits_for_undo = Vec::new();
                    for edit in tracked_buffer.unreviewed_edits.edits() {
                        let new_range = tracked_buffer
                            .snapshot
                            .anchor_before(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.anchor_after(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        let new_row_range = new_range.start.to_point(buffer).row
                            ..new_range.end.to_point(buffer).row;

                        // Revert this edit only if one of the requested row
                        // ranges overlaps it. Both iterators are in ascending
                        // row order, so this is a single merge-style pass.
                        let mut revert = false;
                        while let Some(buffer_row_range) = buffer_row_ranges.peek() {
                            if buffer_row_range.end < new_row_range.start {
                                buffer_row_ranges.next();
                            } else if buffer_row_range.start > new_row_range.end {
                                break;
                            } else {
                                revert = true;
                                break;
                            }
                        }

                        if revert {
                            metrics.add_edit(edit);
                            let old_range = tracked_buffer
                                .diff_base
                                .point_to_offset(Point::new(edit.old.start, 0))
                                ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                    Point::new(edit.old.end, 0),
                                    tracked_buffer.diff_base.max_point(),
                                ));
                            let old_text = tracked_buffer
                                .diff_base
                                .chunks_in_range(old_range)
                                .collect::<String>();

                            // Capture the agent's text before we revert it (for undo)
                            let new_range_offset =
                                new_range.start.to_offset(buffer)..new_range.end.to_offset(buffer);
                            let agent_text =
                                buffer.text_for_range(new_range_offset).collect::<String>();
                            edits_for_undo.push((new_range.clone(), agent_text));

                            edits_to_revert.push((new_range, old_text));
                        }
                    }

                    buffer.edit(edits_to_revert, None, cx);
                    edits_for_undo
                });

                if !edits_to_restore.is_empty() {
                    undo_info = Some(PerBufferUndo {
                        buffer: buffer.downgrade(),
                        edits_to_restore,
                        status: UndoBufferStatus::Modified,
                    });
                }

                self.project
                    .update(cx, |project, cx| project.save_buffer(buffer, cx))
            }
        };
        if let Some(telemetry) = telemetry {
            telemetry_report_rejected_edits(&telemetry, metrics);
        }
        (task, undo_info)
    }
1079
1080 pub fn keep_all_edits(
1081 &mut self,
1082 telemetry: Option<ActionLogTelemetry>,
1083 cx: &mut Context<Self>,
1084 ) {
1085 self.tracked_buffers.retain(|buffer, tracked_buffer| {
1086 let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
1087 metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
1088 if let Some(telemetry) = telemetry.as_ref() {
1089 telemetry_report_accepted_edits(telemetry, metrics);
1090 }
1091 match tracked_buffer.status {
1092 TrackedBufferStatus::Deleted => false,
1093 _ => {
1094 if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
1095 tracked_buffer.status = TrackedBufferStatus::Modified;
1096 }
1097 tracked_buffer.unreviewed_edits.clear();
1098 tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
1099 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
1100 true
1101 }
1102 }
1103 });
1104
1105 cx.notify();
1106 }
1107
1108 pub fn reject_all_edits(
1109 &mut self,
1110 telemetry: Option<ActionLogTelemetry>,
1111 cx: &mut Context<Self>,
1112 ) -> Task<()> {
1113 // Clear any previous undo state before starting a new reject operation
1114 self.last_reject_undo = None;
1115
1116 let mut undo_buffers = Vec::new();
1117 let mut futures = Vec::new();
1118
1119 for buffer in self.changed_buffers(cx).into_keys() {
1120 let buffer_ranges = vec![Anchor::min_max_range_for_buffer(
1121 buffer.read(cx).remote_id(),
1122 )];
1123 let (reject_task, undo_info) =
1124 self.reject_edits_in_ranges(buffer, buffer_ranges, telemetry.clone(), cx);
1125
1126 if let Some(undo) = undo_info {
1127 undo_buffers.push(undo);
1128 }
1129
1130 futures.push(async move {
1131 reject_task.await.log_err();
1132 });
1133 }
1134
1135 // Store the undo information if we have any
1136 if !undo_buffers.is_empty() {
1137 self.last_reject_undo = Some(LastRejectUndo {
1138 buffers: undo_buffers,
1139 });
1140 }
1141
1142 let task = futures::future::join_all(futures);
1143 cx.background_spawn(async move {
1144 task.await;
1145 })
1146 }
1147
    /// Returns true when a rejected edit set has been captured and can still
    /// be undone via `undo_last_reject`.
    pub fn has_pending_undo(&self) -> bool {
        self.last_reject_undo.is_some()
    }
1151
    /// Replaces the stored undo state for the most recent reject operation,
    /// discarding any previously captured state.
    pub fn set_last_reject_undo(&mut self, undo: LastRejectUndo) {
        self.last_reject_undo = Some(undo);
    }
1155
1156 /// Undoes the most recent reject operation, restoring the rejected agent changes.
1157 /// This is a best-effort operation: if buffers have been closed or modified externally,
1158 /// those buffers will be skipped.
1159 pub fn undo_last_reject(&mut self, cx: &mut Context<Self>) -> Task<()> {
1160 let Some(undo) = self.last_reject_undo.take() else {
1161 return Task::ready(());
1162 };
1163
1164 let mut save_tasks = Vec::with_capacity(undo.buffers.len());
1165
1166 for per_buffer_undo in undo.buffers {
1167 // Skip if the buffer entity has been deallocated
1168 let Some(buffer) = per_buffer_undo.buffer.upgrade() else {
1169 continue;
1170 };
1171
1172 buffer.update(cx, |buffer, cx| {
1173 let mut valid_edits = Vec::new();
1174
1175 for (anchor_range, text_to_restore) in per_buffer_undo.edits_to_restore {
1176 if anchor_range.start.buffer_id == Some(buffer.remote_id())
1177 && anchor_range.end.buffer_id == Some(buffer.remote_id())
1178 {
1179 valid_edits.push((anchor_range, text_to_restore));
1180 }
1181 }
1182
1183 if !valid_edits.is_empty() {
1184 buffer.edit(valid_edits, None, cx);
1185 }
1186 });
1187
1188 if !self.tracked_buffers.contains_key(&buffer) {
1189 self.buffer_edited(buffer.clone(), cx);
1190 }
1191
1192 let save = self
1193 .project
1194 .update(cx, |project, cx| project.save_buffer(buffer, cx));
1195 save_tasks.push(save);
1196 }
1197
1198 cx.notify();
1199
1200 cx.background_spawn(async move {
1201 futures::future::join_all(save_tasks).await;
1202 })
1203 }
1204
1205 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
1206 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
1207 self.tracked_buffers
1208 .iter()
1209 .filter(|(_, tracked)| tracked.has_edits(cx))
1210 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
1211 .collect()
1212 }
1213
    /// Returns the total number of lines added and removed across all
    /// unreviewed buffers, by summing per-file stats over `changed_buffers`.
    pub fn diff_stats(&self, cx: &App) -> DiffStats {
        DiffStats::all_files(&self.changed_buffers(cx), cx)
    }
1218
1219 /// Iterate over buffers changed since last read or edited by the model
1220 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
1221 self.tracked_buffers
1222 .iter()
1223 .filter(|(buffer, tracked)| {
1224 let buffer = buffer.read(cx);
1225
1226 tracked.version != buffer.version
1227 && buffer
1228 .file()
1229 .is_some_and(|file| !file.disk_state().is_deleted())
1230 })
1231 .map(|(buffer, _)| buffer)
1232 }
1233}
1234
/// Aggregate line counts for one or more unreviewed diffs.
#[derive(Default, Debug, Clone, Copy)]
pub struct DiffStats {
    /// Number of lines present in the new text but not the diff base.
    pub lines_added: u32,
    /// Number of lines present in the diff base but not the new text.
    pub lines_removed: u32,
}
1240
1241impl DiffStats {
1242 pub fn single_file(buffer: &Buffer, diff: &BufferDiff, cx: &App) -> Self {
1243 let mut stats = DiffStats::default();
1244 let diff_snapshot = diff.snapshot(cx);
1245 let buffer_snapshot = buffer.snapshot();
1246 let base_text = diff_snapshot.base_text();
1247
1248 for hunk in diff_snapshot.hunks(&buffer_snapshot) {
1249 let added_rows = hunk.range.end.row.saturating_sub(hunk.range.start.row);
1250 stats.lines_added += added_rows;
1251
1252 let base_start = hunk.diff_base_byte_range.start.to_point(base_text).row;
1253 let base_end = hunk.diff_base_byte_range.end.to_point(base_text).row;
1254 let removed_rows = base_end.saturating_sub(base_start);
1255 stats.lines_removed += removed_rows;
1256 }
1257
1258 stats
1259 }
1260
1261 pub fn all_files(
1262 changed_buffers: &BTreeMap<Entity<Buffer>, Entity<BufferDiff>>,
1263 cx: &App,
1264 ) -> Self {
1265 let mut total = DiffStats::default();
1266 for (buffer, diff) in changed_buffers {
1267 let stats = DiffStats::single_file(buffer.read(cx), diff.read(cx), cx);
1268 total.lines_added += stats.lines_added;
1269 total.lines_removed += stats.lines_removed;
1270 }
1271 total
1272 }
1273}
1274
/// Identifiers attached to accepted/rejected-edit telemetry events.
#[derive(Clone)]
pub struct ActionLogTelemetry {
    // Identifies which agent produced the edits.
    pub agent_telemetry_id: SharedString,
    // Identifies the agent session the edits belong to.
    pub session_id: Arc<str>,
}
1280
/// Accumulated line counts (plus buffer language) for telemetry reporting.
struct ActionLogMetrics {
    // Total old-extent rows across accumulated edits.
    lines_removed: u32,
    // Total new-extent rows across accumulated edits.
    lines_added: u32,
    // Language of the buffer these edits belong to, if known.
    language: Option<SharedString>,
}
1286
1287impl ActionLogMetrics {
1288 fn for_buffer(buffer: &Buffer) -> Self {
1289 Self {
1290 language: buffer.language().map(|l| l.name().0),
1291 lines_removed: 0,
1292 lines_added: 0,
1293 }
1294 }
1295
1296 fn add_edits(&mut self, edits: &[Edit<u32>]) {
1297 for edit in edits {
1298 self.add_edit(edit);
1299 }
1300 }
1301
1302 fn add_edit(&mut self, edit: &Edit<u32>) {
1303 self.lines_added += edit.new_len();
1304 self.lines_removed += edit.old_len();
1305 }
1306}
1307
/// Emits an "Agent Edits Accepted" telemetry event with the accumulated
/// line counts and buffer language.
fn telemetry_report_accepted_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
    telemetry::event!(
        "Agent Edits Accepted",
        agent = telemetry.agent_telemetry_id,
        session = telemetry.session_id,
        language = metrics.language,
        lines_added = metrics.lines_added,
        lines_removed = metrics.lines_removed
    );
}
1318
/// Emits an "Agent Edits Rejected" telemetry event with the accumulated
/// line counts and buffer language.
fn telemetry_report_rejected_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
    telemetry::event!(
        "Agent Edits Rejected",
        agent = telemetry.agent_telemetry_id,
        session = telemetry.session_id,
        language = metrics.language,
        lines_added = metrics.lines_added,
        lines_removed = metrics.lines_removed
    );
}
1329
/// Applies to `old_text` the subset of `edits` (row-based edits whose new side
/// refers to `new_text`) that do not intersect any edit already recorded in
/// `patch`, copying the replacement text out of `new_text`. Returns `true`
/// when at least one edit was applied.
///
/// `patch`'s edits and `edits` are walked in tandem, in order; the two delta
/// counters rebase each new edit's coordinates back into `old_text` space.
fn apply_non_conflicting_edits(
    patch: &Patch<u32>,
    edits: Vec<Edit<u32>>,
    old_text: &mut Rope,
    new_text: &Rope,
) -> bool {
    let mut old_edits = patch.edits().iter().cloned().peekable();
    let mut new_edits = edits.into_iter().peekable();
    // Net row delta from the new edits applied so far.
    let mut applied_delta = 0i32;
    // Net row delta from the patch edits we have advanced past.
    let mut rebased_delta = 0i32;
    let mut has_made_changes = false;

    while let Some(mut new_edit) = new_edits.next() {
        let mut conflict = false;

        // Push all the old edits that are before this new edit or that intersect with it.
        while let Some(old_edit) = old_edits.peek() {
            if new_edit.old.end < old_edit.new.start
                || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
            {
                // Old edit is entirely after this new edit; stop scanning.
                break;
            } else if new_edit.old.start > old_edit.new.end
                || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
            {
                // Old edit is entirely before this new edit; skip past it,
                // accumulating its row delta for rebasing.
                let old_edit = old_edits.next().unwrap();
                rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
            } else {
                // The ranges intersect: this new edit conflicts with a
                // tracked edit and must not be applied.
                conflict = true;
                if new_edits
                    .peek()
                    .is_some_and(|next_edit| next_edit.old.overlaps(&old_edit.new))
                {
                    // The following new edit overlaps the same old edit;
                    // advance on the new side so it is also skipped.
                    new_edit = new_edits.next().unwrap();
                } else {
                    // Otherwise advance past the old edit.
                    let old_edit = old_edits.next().unwrap();
                    rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
                }
            }
        }

        if !conflict {
            // This edit doesn't intersect with any old edit, so we can apply it to the old text.
            new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
            new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
            // Convert the row ranges to byte ranges, clamping the (exclusive)
            // end row to each rope's max point.
            let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
                ..old_text.point_to_offset(cmp::min(
                    Point::new(new_edit.old.end, 0),
                    old_text.max_point(),
                ));
            let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
                ..new_text.point_to_offset(cmp::min(
                    Point::new(new_edit.new.end, 0),
                    new_text.max_point(),
                ));

            old_text.replace(
                old_bytes,
                &new_text.chunks_in_range(new_bytes).collect::<String>(),
            );
            applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
            has_made_changes = true;
        }
    }
    has_made_changes
}
1395
/// Computes the row-based edits that transform `old_snapshot` into
/// `new_snapshot`, coalescing edits whose row ranges touch or overlap into a
/// single edit.
fn diff_snapshots(
    old_snapshot: &text::BufferSnapshot,
    new_snapshot: &text::BufferSnapshot,
) -> Vec<Edit<u32>> {
    // Convert each point-based edit into a whole-row edit first.
    let mut edits = new_snapshot
        .edits_since::<Point>(&old_snapshot.version)
        .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
        .peekable();
    let mut row_edits = Vec::new();
    while let Some(mut edit) = edits.next() {
        // Merge any subsequent edits that start at or before this edit's end.
        while let Some(next_edit) = edits.peek() {
            if edit.old.end >= next_edit.old.start {
                edit.old.end = next_edit.old.end;
                edit.new.end = next_edit.new.end;
                edits.next();
            } else {
                break;
            }
        }
        row_edits.push(edit);
    }
    row_edits
}
1419
/// Widens a point-based edit into a row-based edit (half-open row ranges).
fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
    if edit.old.start.column == old_text.line_len(edit.old.start.row)
        && new_text
            .chars_at(new_text.point_to_offset(edit.new.start))
            .next()
            == Some('\n')
        && edit.old.start != old_text.max_point()
    {
        // The edit starts at the end of a line and inserts a leading newline:
        // treat it as affecting the following row(s) instead.
        Edit {
            old: edit.old.start.row + 1..edit.old.end.row + 1,
            new: edit.new.start.row + 1..edit.new.end.row + 1,
        }
    } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
        // The edit already covers whole lines; keep the row range as-is.
        Edit {
            old: edit.old.start.row..edit.old.end.row,
            new: edit.new.start.row..edit.new.end.row,
        }
    } else {
        // The edit touches a partial line; widen to include the full end row.
        Edit {
            old: edit.old.start.row..edit.old.end.row + 1,
            new: edit.new.start.row..edit.new.end.row + 1,
        }
    }
}
1444
/// Who authored a buffer change, used when scheduling diff updates.
#[derive(Copy, Clone, Debug)]
enum ChangeAuthor {
    User,
    Agent,
}
1450
/// Lifecycle state of a buffer tracked by the action log.
#[derive(Debug)]
enum TrackedBufferStatus {
    // Created by the agent; `existing_file_content` holds the file's prior
    // content when the agent overwrote an existing file.
    Created { existing_file_content: Option<Rope> },
    Modified,
    Deleted,
}
1457
/// Per-buffer tracking state: the diff base, unreviewed edits, and the
/// machinery that keeps the diff up to date as the buffer changes.
pub struct TrackedBuffer {
    buffer: Entity<Buffer>,
    // Text the unreviewed diff is computed against.
    diff_base: Rope,
    // Row-based edits made by the agent that the user hasn't reviewed yet.
    unreviewed_edits: Patch<u32>,
    status: TrackedBufferStatus,
    // Buffer version we last synced with the model; used to detect staleness.
    version: clock::Global,
    diff: Entity<BufferDiff>,
    snapshot: text::BufferSnapshot,
    // Channel feeding `_maintain_diff` with snapshots to re-diff.
    diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
    _open_lsp_handle: OpenLspBufferHandle,
    _maintain_diff: Task<()>,
    _subscription: Subscription,
}
1471
impl TrackedBuffer {
    /// Test-only access to the tracked buffer's diff entity.
    #[cfg(any(test, feature = "test-support"))]
    pub fn diff(&self) -> &Entity<BufferDiff> {
        &self.diff
    }

    /// Test-only access to the diff base's length in bytes.
    #[cfg(any(test, feature = "test-support"))]
    pub fn diff_base_len(&self) -> usize {
        self.diff_base.len()
    }

    /// Returns true if the buffer's diff currently contains at least one hunk.
    fn has_edits(&self, cx: &App) -> bool {
        self.diff
            .read(cx)
            .snapshot(cx)
            .hunks(self.buffer.read(cx))
            .next()
            .is_some()
    }

    /// Queues an asynchronous diff recomputation against the buffer's current
    /// text snapshot; send errors (receiver dropped) are ignored.
    fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
        self.diff_update
            .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
            .ok();
    }
}
1498
/// A buffer with unreviewed changes, exposed with its associated diff.
pub struct ChangedBuffer {
    pub diff: Entity<BufferDiff>,
}
1502
1503#[cfg(test)]
1504mod tests {
1505 use super::*;
1506 use buffer_diff::DiffHunkStatusKind;
1507 use gpui::TestAppContext;
1508 use language::Point;
1509 use project::{FakeFs, Fs, Project, RemoveOptions};
1510 use rand::prelude::*;
1511 use serde_json::json;
1512 use settings::SettingsStore;
1513 use std::env;
1514 use util::{RandomCharIter, path};
1515
    // Initializes test logging once, before any test in this binary runs.
    #[ctor::ctor]
    fn init_logger() {
        zlog::init_test();
    }
1520
    // Installs the global test settings store required by Project/Buffer.
    fn init_test(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
        });
    }
1527
    // Verifies that agent edits appear as unreviewed hunks and that
    // `keep_edits_in_range` clears only the hunks inside the given range.
    #[gpui::test(iterations = 10)]
    async fn test_keep_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Two agent edits on different lines produce two unreviewed hunks.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndEf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(2, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(4, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Keeping a range covering only the second hunk leaves the first.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(2, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\n".into(),
                }],
            )]
        );

        // Keeping the full range clears all remaining hunks.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1605
    // Verifies that agent line deletions are tracked as Deleted hunks, that a
    // user undo of one deletion drops its hunk, and that keeping a deletion's
    // (empty) range clears it.
    #[gpui::test(iterations = 10)]
    async fn test_deletions(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // The agent deletes two separate lines, each in its own transaction
        // so they can be undone independently.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
                    .unwrap();
                buffer.finalize_last_transaction();
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
                    .unwrap();
                buffer.finalize_last_transaction();
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nghi\njkl\npqr"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "mno\n".into(),
                    }
                ],
            )]
        );

        // Undoing the most recent deletion restores "mno" and removes its hunk.
        buffer.update(cx, |buffer, cx| buffer.undo(cx));
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nghi\njkl\nmno\npqr"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(1, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "def\n".into(),
                }],
            )]
        );

        // Keeping the remaining deletion's empty range clears it.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1690
    // Verifies that user edits made after an agent edit don't create new
    // unreviewed hunks, and that keeping a range overlapping the agent hunk
    // clears it even though the user has since edited inside it.
    #[gpui::test(iterations = 10)]
    async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent edit spanning two lines produces a single modified hunk.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndeF\nGHI\njkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // User edits outside the agent hunk leave the hunk unchanged.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(
                [
                    (Point::new(0, 2)..Point::new(0, 2), "X"),
                    (Point::new(3, 0)..Point::new(3, 0), "Y"),
                ],
                None,
                cx,
            )
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // A user edit inside the agent hunk also leaves the hunk unchanged.
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndZeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // Keeping a range that overlaps the hunk clears it.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1787
    // Verifies that a file created by the agent shows as a single Added hunk
    // that tracks further edits, and that keeping the hunk's range clears it.
    #[gpui::test(iterations = 10)]
    async fn test_creating_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        // The agent creates the file and writes its initial content.
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // A subsequent edit grows the same Added hunk.
        buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 6),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Keeping the hunk's range clears it.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), 0..5, None, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1846
    // Verifies that when the agent overwrites an existing file, rejecting the
    // edits restores the file's original on-disk content.
    #[gpui::test(iterations = 10)]
    async fn test_overwriting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        // The agent "creates" (overwrites) the already-existing file.
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 19),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting any range of the overwrite restores the original content.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx);
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }
1905
    // Verifies that overwriting a file the agent had previously edited resets
    // tracking to "Created", and that rejecting the overwrite restores the
    // file's original on-disk content (not the intermediate edited state).
    #[gpui::test(iterations = 10)]
    async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        // First, the agent appends to the existing file (a Modified hunk).
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 37),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "Lorem ipsum dolor".into(),
                }],
            )]
        );

        // Then the agent overwrites the whole file, which becomes an Added hunk.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 9),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting the overwrite restores the original, pre-agent content.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx);
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }
1986
    // Verifies deletion tracking: agent deletions show as Deleted hunks,
    // external recreation drops the hunk, agent recreation replaces it with an
    // Added hunk, and external deletion of an agent-created file clears it.
    #[gpui::test(iterations = 10)]
    async fn test_deleting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file1": "lorem\n", "file2": "ipsum\n"}),
        )
        .await;

        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let file1_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();
        let file2_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
            .unwrap();

        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let buffer1 = project
            .update(cx, |project, cx| {
                project.open_buffer(file1_path.clone(), cx)
            })
            .await
            .unwrap();
        let buffer2 = project
            .update(cx, |project, cx| {
                project.open_buffer(file2_path.clone(), cx)
            })
            .await
            .unwrap();

        // The agent deletes both files; each shows a Deleted hunk.
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| {
                project.delete_file(file1_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        project
            .update(cx, |project, cx| {
                project.delete_file(file2_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![
                (
                    buffer1.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "lorem\n".into(),
                    }]
                ),
                (
                    buffer2.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "ipsum\n".into(),
                    }],
                )
            ]
        );

        // Simulate file1 being recreated externally.
        fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
            .await;

        // Simulate file2 being recreated by a tool.
        let buffer2 = project
            .update(cx, |project, cx| project.open_buffer(file2_path, cx))
            .await
            .unwrap();
        action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
        buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
        action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
            .await
            .unwrap();

        // Only the tool-recreated file2 remains tracked, now as Added.
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer2.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Simulate file2 being deleted externally.
        fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2096
    // Verifies the core reject flow: a rejected range that misses every hunk
    // is a no-op, and each overlapping hunk is reverted independently until
    // the buffer returns to its original text with no unreviewed hunks left.
    #[gpui::test(iterations = 10)]
    async fn test_reject_edits(cx: &mut TestAppContext) {
        init_test(cx);

        // One tracked file with five lines of known content.
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Simulate two agent edits: a multi-line modification on line 1 and a
        // single-character change on the last line.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        // Both agent edits surface as unreviewed "modified" hunks.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // If the rejected range doesn't overlap with any hunk, we ignore it.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        // Text and hunks are unchanged after the no-op reject.
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Rejecting a range overlapping only the first hunk reverts just that
        // hunk; the second hunk's range shifts up as the buffer shrinks.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(1, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(4, 0)..Point::new(4, 3),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "mno".into(),
                }],
            )]
        );

        // Rejecting the remaining hunk restores the original file content and
        // leaves no unreviewed hunks.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2237
    // Verifies that a single reject call covering multiple anchor ranges
    // reverts all overlapping hunks, and that the buffer text is restored
    // synchronously — before the returned task is awaited.
    #[gpui::test(iterations = 10)]
    async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
        init_test(cx);

        // One tracked file with five lines of known content.
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Simulate two agent edits in different parts of the file.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        // Both edits surface as unreviewed hunks.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Reject both hunks at once via two anchor ranges.
        action_log.update(cx, |log, cx| {
            let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
                ..buffer.read(cx).anchor_before(Point::new(1, 0));
            let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
                ..buffer.read(cx).anchor_before(Point::new(5, 3));

            let (task, _) =
                log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], None, cx);
            task.detach();
            // The buffer edits are applied synchronously: the text is already
            // restored here, even though the task hasn't completed yet.
            assert_eq!(
                buffer.read_with(cx, |buffer, _| buffer.text()),
                "abc\ndef\nghi\njkl\nmno"
            );
        });
        cx.run_until_parked();
        // After settling, the text remains restored and no hunks are left.
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2314
2315 #[gpui::test(iterations = 10)]
2316 async fn test_reject_deleted_file(cx: &mut TestAppContext) {
2317 init_test(cx);
2318
2319 let fs = FakeFs::new(cx.executor());
2320 fs.insert_tree(path!("/dir"), json!({"file": "content"}))
2321 .await;
2322 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2323 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2324 let file_path = project
2325 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
2326 .unwrap();
2327 let buffer = project
2328 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
2329 .await
2330 .unwrap();
2331
2332 cx.update(|cx| {
2333 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
2334 });
2335 project
2336 .update(cx, |project, cx| {
2337 project.delete_file(file_path.clone(), false, cx)
2338 })
2339 .unwrap()
2340 .await
2341 .unwrap();
2342 cx.run_until_parked();
2343 assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
2344 assert_eq!(
2345 unreviewed_hunks(&action_log, cx),
2346 vec![(
2347 buffer.clone(),
2348 vec![HunkStatus {
2349 range: Point::new(0, 0)..Point::new(0, 0),
2350 diff_status: DiffHunkStatusKind::Deleted,
2351 old_text: "content".into(),
2352 }]
2353 )]
2354 );
2355
2356 action_log
2357 .update(cx, |log, cx| {
2358 let (task, _) = log.reject_edits_in_ranges(
2359 buffer.clone(),
2360 vec![Point::new(0, 0)..Point::new(0, 0)],
2361 None,
2362 cx,
2363 );
2364 task
2365 })
2366 .await
2367 .unwrap();
2368 cx.run_until_parked();
2369 assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
2370 assert!(fs.is_file(path!("/dir/file").as_ref()).await);
2371 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2372 }
2373
2374 #[gpui::test(iterations = 10)]
2375 async fn test_reject_created_file(cx: &mut TestAppContext) {
2376 init_test(cx);
2377
2378 let fs = FakeFs::new(cx.executor());
2379 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2380 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2381 let file_path = project
2382 .read_with(cx, |project, cx| {
2383 project.find_project_path("dir/new_file", cx)
2384 })
2385 .unwrap();
2386 let buffer = project
2387 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2388 .await
2389 .unwrap();
2390 cx.update(|cx| {
2391 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2392 buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
2393 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2394 });
2395 project
2396 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2397 .await
2398 .unwrap();
2399 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2400 cx.run_until_parked();
2401 assert_eq!(
2402 unreviewed_hunks(&action_log, cx),
2403 vec![(
2404 buffer.clone(),
2405 vec![HunkStatus {
2406 range: Point::new(0, 0)..Point::new(0, 7),
2407 diff_status: DiffHunkStatusKind::Added,
2408 old_text: "".into(),
2409 }],
2410 )]
2411 );
2412
2413 action_log
2414 .update(cx, |log, cx| {
2415 let (task, _) = log.reject_edits_in_ranges(
2416 buffer.clone(),
2417 vec![Point::new(0, 0)..Point::new(0, 11)],
2418 None,
2419 cx,
2420 );
2421 task
2422 })
2423 .await
2424 .unwrap();
2425 cx.run_until_parked();
2426 assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
2427 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2428 }
2429
2430 #[gpui::test]
2431 async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
2432 init_test(cx);
2433
2434 let fs = FakeFs::new(cx.executor());
2435 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2436 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2437
2438 let file_path = project
2439 .read_with(cx, |project, cx| {
2440 project.find_project_path("dir/new_file", cx)
2441 })
2442 .unwrap();
2443 let buffer = project
2444 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2445 .await
2446 .unwrap();
2447
2448 // AI creates file with initial content
2449 cx.update(|cx| {
2450 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2451 buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
2452 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2453 });
2454
2455 project
2456 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2457 .await
2458 .unwrap();
2459
2460 cx.run_until_parked();
2461
2462 // User makes additional edits
2463 cx.update(|cx| {
2464 buffer.update(cx, |buffer, cx| {
2465 buffer.edit([(10..10, "\nuser added this line")], None, cx);
2466 });
2467 });
2468
2469 project
2470 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2471 .await
2472 .unwrap();
2473
2474 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2475
2476 // Reject all
2477 action_log
2478 .update(cx, |log, cx| {
2479 let (task, _) = log.reject_edits_in_ranges(
2480 buffer.clone(),
2481 vec![Point::new(0, 0)..Point::new(100, 0)],
2482 None,
2483 cx,
2484 );
2485 task
2486 })
2487 .await
2488 .unwrap();
2489 cx.run_until_parked();
2490
2491 // File should still contain all the content
2492 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2493
2494 let content = buffer.read_with(cx, |buffer, _| buffer.text());
2495 assert_eq!(content, "ai content\nuser added this line");
2496 }
2497
    // After the user accepts the creation of an agent-authored file, a later
    // agent edit that gets rejected must roll the file back to the accepted
    // version — not delete it.
    #[gpui::test]
    async fn test_reject_after_accepting_hunk_on_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // AI creates file with initial content and saves it.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User accepts the single hunk, covering the whole buffer.
        action_log.update(cx, |log, cx| {
            let buffer_range = Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id());
            log.keep_edits_in_range(buffer.clone(), buffer_range, None, cx)
        });
        cx.run_until_parked();
        // Acceptance clears the hunks; the file remains on disk.
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // AI modifies the file again, producing a new unreviewed hunk.
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User rejects the new hunk across the whole buffer.
        action_log
            .update(cx, |log, cx| {
                let (task, _) = log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Anchor::min_max_range_for_buffer(
                        buffer.read(cx).remote_id(),
                    )],
                    None,
                    cx,
                );
                task
            })
            .await
            .unwrap();
        cx.run_until_parked();
        // The reject restores the previously accepted v1 content instead of
        // deleting the file.
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await,);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "ai content v1"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2573
    // "Reject All" after an earlier "Accept All" on a created file must roll
    // back only to the accepted snapshot, keeping the file on disk.
    #[gpui::test]
    async fn test_reject_edits_on_previously_accepted_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // AI creates file with initial content and saves it.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();

        // User clicks "Accept All": v1 becomes the accepted baseline.
        action_log.update(cx, |log, cx| log.keep_all_edits(None, cx));
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared

        // AI modifies file again, creating a fresh unreviewed hunk.
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User clicks "Reject All": the buffer reverts to the accepted v1
        // content and the file stays on disk.
        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "ai content v1"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2634
    // Fuzz test: interleaves random keep/reject operations with random agent
    // and user edits, then checks the invariant that applying the tracked
    // unreviewed edits to the diff base reproduces the current buffer text.
    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        init_test(cx);

        // Number of random operations per run; overridable via env var.
        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        // Start from 50 random characters of file content.
        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

        for _ in 0..operations {
            match rng.random_range(0..100) {
                // 25%: keep (accept) edits in a random byte range.
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, None, cx)
                    });
                }
                // 25%: reject edits in a random byte range.
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            let (task, _) =
                                log.reject_edits_in_ranges(buffer.clone(), vec![range], None, cx);
                            task
                        })
                        .await
                        .unwrap();
                }
                // 50%: random edit, attributed to the agent half of the time.
                _ => {
                    let is_agent_edit = rng.random_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            // Occasionally settle and validate mid-run.
            if rng.random_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        // Always validate at the end of the run.
        quiesce(&action_log, &buffer, cx);

        // Settles pending work, then replays the tracked unreviewed edits on
        // top of the diff base and asserts the result matches the buffer.
        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    // Map the edit's new-row range back onto the old text and
                    // splice in the corresponding rows of the new text.
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }
2728
    // Verifies that git commits implicitly accept agent edits: any hunk whose
    // content matches the new HEAD is cleared, while hunks the commit didn't
    // (exactly) incorporate remain unreviewed.
    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        init_test(cx);

        // A git repository with a ten-line file, committed unmodified.
        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent applies five distinct kinds of edit in one batch.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        // All five edits surface as unreviewed hunks.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        // Only the exactly-matching edits were cleared; the GGG, NEW, and J
        // hunks remain unreviewed.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Another commit: it incorporates GGG (clearing that hunk), but puts
        // DIFFERENT where the agent added NEW, so the NEW hunk stays.
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into())],
            "0000002",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer,
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2890
2891 #[gpui::test]
2892 async fn test_undo_last_reject(cx: &mut TestAppContext) {
2893 init_test(cx);
2894
2895 let fs = FakeFs::new(cx.executor());
2896 fs.insert_tree(
2897 path!("/dir"),
2898 json!({
2899 "file1": "abc\ndef\nghi"
2900 }),
2901 )
2902 .await;
2903 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2904 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2905 let file_path = project
2906 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
2907 .unwrap();
2908
2909 let buffer = project
2910 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2911 .await
2912 .unwrap();
2913
2914 // Track the buffer and make an agent edit
2915 cx.update(|cx| {
2916 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
2917 buffer.update(cx, |buffer, cx| {
2918 buffer
2919 .edit(
2920 [(Point::new(1, 0)..Point::new(1, 3), "AGENT_EDIT")],
2921 None,
2922 cx,
2923 )
2924 .unwrap()
2925 });
2926 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2927 });
2928 cx.run_until_parked();
2929
2930 // Verify the agent edit is there
2931 assert_eq!(
2932 buffer.read_with(cx, |buffer, _| buffer.text()),
2933 "abc\nAGENT_EDIT\nghi"
2934 );
2935 assert!(!unreviewed_hunks(&action_log, cx).is_empty());
2936
2937 // Reject all edits
2938 action_log
2939 .update(cx, |log, cx| log.reject_all_edits(None, cx))
2940 .await;
2941 cx.run_until_parked();
2942
2943 // Verify the buffer is back to original
2944 assert_eq!(
2945 buffer.read_with(cx, |buffer, _| buffer.text()),
2946 "abc\ndef\nghi"
2947 );
2948 assert!(unreviewed_hunks(&action_log, cx).is_empty());
2949
2950 // Verify undo state is available
2951 assert!(action_log.read_with(cx, |log, _| log.has_pending_undo()));
2952
2953 // Undo the reject
2954 action_log
2955 .update(cx, |log, cx| log.undo_last_reject(cx))
2956 .await;
2957
2958 cx.run_until_parked();
2959
2960 // Verify the agent edit is restored
2961 assert_eq!(
2962 buffer.read_with(cx, |buffer, _| buffer.text()),
2963 "abc\nAGENT_EDIT\nghi"
2964 );
2965
2966 // Verify undo state is cleared
2967 assert!(!action_log.read_with(cx, |log, _| log.has_pending_undo()));
2968 }
2969
2970 #[gpui::test]
2971 async fn test_linked_action_log_buffer_read(cx: &mut TestAppContext) {
2972 init_test(cx);
2973
2974 let fs = FakeFs::new(cx.executor());
2975 fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
2976 .await;
2977 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2978 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
2979 let child_log =
2980 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
2981
2982 let file_path = project
2983 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
2984 .unwrap();
2985 let buffer = project
2986 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2987 .await
2988 .unwrap();
2989
2990 cx.update(|cx| {
2991 child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
2992 });
2993
2994 // Neither log considers the buffer stale immediately after reading it.
2995 let child_stale = cx.read(|cx| {
2996 child_log
2997 .read(cx)
2998 .stale_buffers(cx)
2999 .cloned()
3000 .collect::<Vec<_>>()
3001 });
3002 let parent_stale = cx.read(|cx| {
3003 parent_log
3004 .read(cx)
3005 .stale_buffers(cx)
3006 .cloned()
3007 .collect::<Vec<_>>()
3008 });
3009 assert!(child_stale.is_empty());
3010 assert!(parent_stale.is_empty());
3011
3012 // Simulate a user edit after the agent read the file.
3013 cx.update(|cx| {
3014 buffer.update(cx, |buffer, cx| {
3015 buffer.edit([(0..5, "goodbye")], None, cx).unwrap();
3016 });
3017 });
3018 cx.run_until_parked();
3019
3020 // Both child and parent should see the buffer as stale because both tracked
3021 // it at the pre-edit version via buffer_read forwarding.
3022 let child_stale = cx.read(|cx| {
3023 child_log
3024 .read(cx)
3025 .stale_buffers(cx)
3026 .cloned()
3027 .collect::<Vec<_>>()
3028 });
3029 let parent_stale = cx.read(|cx| {
3030 parent_log
3031 .read(cx)
3032 .stale_buffers(cx)
3033 .cloned()
3034 .collect::<Vec<_>>()
3035 });
3036 assert_eq!(child_stale, vec![buffer.clone()]);
3037 assert_eq!(parent_stale, vec![buffer]);
3038 }
3039
3040 #[gpui::test]
3041 async fn test_linked_action_log_buffer_edited(cx: &mut TestAppContext) {
3042 init_test(cx);
3043
3044 let fs = FakeFs::new(cx.executor());
3045 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi"}))
3046 .await;
3047 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3048 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
3049 let child_log =
3050 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
3051
3052 let file_path = project
3053 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3054 .unwrap();
3055 let buffer = project
3056 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3057 .await
3058 .unwrap();
3059
3060 cx.update(|cx| {
3061 child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
3062 buffer.update(cx, |buffer, cx| {
3063 buffer
3064 .edit([(Point::new(1, 0)..Point::new(1, 3), "DEF")], None, cx)
3065 .unwrap();
3066 });
3067 child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
3068 });
3069 cx.run_until_parked();
3070
3071 let expected_hunks = vec![(
3072 buffer,
3073 vec![HunkStatus {
3074 range: Point::new(1, 0)..Point::new(2, 0),
3075 diff_status: DiffHunkStatusKind::Modified,
3076 old_text: "def\n".into(),
3077 }],
3078 )];
3079 assert_eq!(
3080 unreviewed_hunks(&child_log, cx),
3081 expected_hunks,
3082 "child should track the agent edit"
3083 );
3084 assert_eq!(
3085 unreviewed_hunks(&parent_log, cx),
3086 expected_hunks,
3087 "parent should also track the agent edit via linked log forwarding"
3088 );
3089 }
3090
3091 #[gpui::test]
3092 async fn test_linked_action_log_buffer_created(cx: &mut TestAppContext) {
3093 init_test(cx);
3094
3095 let fs = FakeFs::new(cx.executor());
3096 fs.insert_tree(path!("/dir"), json!({})).await;
3097 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3098 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
3099 let child_log =
3100 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
3101
3102 let file_path = project
3103 .read_with(cx, |project, cx| {
3104 project.find_project_path("dir/new_file", cx)
3105 })
3106 .unwrap();
3107 let buffer = project
3108 .update(cx, |project, cx| project.open_buffer(file_path, cx))
3109 .await
3110 .unwrap();
3111
3112 cx.update(|cx| {
3113 child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
3114 buffer.update(cx, |buffer, cx| buffer.set_text("hello", cx));
3115 child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
3116 });
3117 project
3118 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3119 .await
3120 .unwrap();
3121 cx.run_until_parked();
3122
3123 let expected_hunks = vec![(
3124 buffer.clone(),
3125 vec![HunkStatus {
3126 range: Point::new(0, 0)..Point::new(0, 5),
3127 diff_status: DiffHunkStatusKind::Added,
3128 old_text: "".into(),
3129 }],
3130 )];
3131 assert_eq!(
3132 unreviewed_hunks(&child_log, cx),
3133 expected_hunks,
3134 "child should track the created file"
3135 );
3136 assert_eq!(
3137 unreviewed_hunks(&parent_log, cx),
3138 expected_hunks,
3139 "parent should also track the created file via linked log forwarding"
3140 );
3141 }
3142
3143 #[gpui::test]
3144 async fn test_linked_action_log_will_delete_buffer(cx: &mut TestAppContext) {
3145 init_test(cx);
3146
3147 let fs = FakeFs::new(cx.executor());
3148 fs.insert_tree(path!("/dir"), json!({"file": "hello\n"}))
3149 .await;
3150 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3151 let parent_log = cx.new(|_| ActionLog::new(project.clone()));
3152 let child_log =
3153 cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
3154
3155 let file_path = project
3156 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
3157 .unwrap();
3158 let buffer = project
3159 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
3160 .await
3161 .unwrap();
3162
3163 cx.update(|cx| {
3164 child_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
3165 });
3166 project
3167 .update(cx, |project, cx| project.delete_file(file_path, false, cx))
3168 .unwrap()
3169 .await
3170 .unwrap();
3171 cx.run_until_parked();
3172
3173 let expected_hunks = vec![(
3174 buffer.clone(),
3175 vec![HunkStatus {
3176 range: Point::new(0, 0)..Point::new(0, 0),
3177 diff_status: DiffHunkStatusKind::Deleted,
3178 old_text: "hello\n".into(),
3179 }],
3180 )];
3181 assert_eq!(
3182 unreviewed_hunks(&child_log, cx),
3183 expected_hunks,
3184 "child should track the deleted file"
3185 );
3186 assert_eq!(
3187 unreviewed_hunks(&parent_log, cx),
3188 expected_hunks,
3189 "parent should also track the deleted file via linked log forwarding"
3190 );
3191 }
3192
    /// Simulates the subagent scenario: two child logs linked to the same parent, each
    /// editing a different file. The parent accumulates all edits while each child
    /// only sees its own.
    #[gpui::test]
    async fn test_linked_action_log_independent_tracking(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file_a": "content of a",
                "file_b": "content of b",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log_1 =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
        let child_log_2 =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_a_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/file_a", cx)
            })
            .unwrap();
        let file_b_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/file_b", cx)
            })
            .unwrap();
        let buffer_a = project
            .update(cx, |project, cx| project.open_buffer(file_a_path, cx))
            .await
            .unwrap();
        let buffer_b = project
            .update(cx, |project, cx| project.open_buffer(file_b_path, cx))
            .await
            .unwrap();

        // Each child reads and edits only its own file: child 1 touches
        // file_a, child 2 touches file_b.
        cx.update(|cx| {
            child_log_1.update(cx, |log, cx| log.buffer_read(buffer_a.clone(), cx));
            buffer_a.update(cx, |buffer, cx| {
                buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap();
            });
            child_log_1.update(cx, |log, cx| log.buffer_edited(buffer_a.clone(), cx));

            child_log_2.update(cx, |log, cx| log.buffer_read(buffer_b.clone(), cx));
            buffer_b.update(cx, |buffer, cx| {
                buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap();
            });
            child_log_2.update(cx, |log, cx| log.buffer_edited(buffer_b.clone(), cx));
        });
        cx.run_until_parked();

        let child_1_changed: Vec<_> = cx.read(|cx| {
            child_log_1
                .read(cx)
                .changed_buffers(cx)
                .into_keys()
                .collect()
        });
        let child_2_changed: Vec<_> = cx.read(|cx| {
            child_log_2
                .read(cx)
                .changed_buffers(cx)
                .into_keys()
                .collect()
        });
        let parent_changed: Vec<_> = cx.read(|cx| {
            parent_log
                .read(cx)
                .changed_buffers(cx)
                .into_keys()
                .collect()
        });

        assert_eq!(
            child_1_changed,
            vec![buffer_a.clone()],
            "child 1 should only track file_a"
        );
        assert_eq!(
            child_2_changed,
            vec![buffer_b.clone()],
            "child 2 should only track file_b"
        );
        assert_eq!(parent_changed.len(), 2, "parent should track both files");
        // Iteration order of changed_buffers isn't asserted for the parent,
        // only membership.
        assert!(
            parent_changed.contains(&buffer_a) && parent_changed.contains(&buffer_b),
            "parent should contain both buffer_a and buffer_b"
        );
    }
3288
    /// `buffer_read` should record a read timestamp for the buffer's absolute
    /// path, queryable via `file_read_time`.
    #[gpui::test]
    async fn test_file_read_time_recorded_on_buffer_read(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Merely opening the buffer must not count as a read.
        let abs_path = PathBuf::from(path!("/dir/file"));
        assert!(
            action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
            "file_read_time should be None before buffer_read"
        );

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
        });

        assert!(
            action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
            "file_read_time should be recorded after buffer_read"
        );
    }
3322
    /// `buffer_edited` should also record a read timestamp: editing implies
    /// the agent has seen the file's contents.
    #[gpui::test]
    async fn test_file_read_time_recorded_on_buffer_edited(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        let abs_path = PathBuf::from(path!("/dir/file"));
        assert!(
            action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
            "file_read_time should be None before buffer_edited"
        );

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });

        assert!(
            action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
            "file_read_time should be recorded after buffer_edited"
        );
    }
3356
    /// `buffer_created` should record a read timestamp as well — the agent
    /// authored the content, so it has effectively "read" it.
    #[gpui::test]
    async fn test_file_read_time_recorded_on_buffer_created(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "existing content"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        let abs_path = PathBuf::from(path!("/dir/file"));
        assert!(
            action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
            "file_read_time should be None before buffer_created"
        );

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
        });

        assert!(
            action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
            "file_read_time should be recorded after buffer_created"
        );
    }
3390
    /// `will_delete_buffer` should clear any previously recorded read time for
    /// the deleted file's path.
    #[gpui::test]
    async fn test_file_read_time_removed_on_delete(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        let abs_path = PathBuf::from(path!("/dir/file"));

        // First establish a read time...
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
        });
        assert!(
            action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
            "file_read_time should exist after buffer_read"
        );

        // ...then verify the deletion removes it.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        });
        assert!(
            action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
            "file_read_time should be removed after will_delete_buffer"
        );
    }
3427
    /// Read-time bookkeeping is per-log: a child's `buffer_read`,
    /// `buffer_edited`, and `buffer_created` must NOT propagate read
    /// timestamps to the linked parent log (unlike diff tracking, which is
    /// forwarded).
    #[gpui::test]
    async fn test_file_read_time_not_forwarded_to_linked_action_log(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        let abs_path = PathBuf::from(path!("/dir/file"));

        cx.update(|cx| {
            child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
        });
        assert!(
            child_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
            "child should record file_read_time on buffer_read"
        );
        assert!(
            parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
            "parent should NOT get file_read_time from child's buffer_read"
        );

        cx.update(|cx| {
            child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        assert!(
            parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
            "parent should NOT get file_read_time from child's buffer_edited"
        );

        cx.update(|cx| {
            child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
        });
        assert!(
            parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
            "parent should NOT get file_read_time from child's buffer_created"
        );
    }
3478
    /// Same isolation guarantee as above, but for the `infer_*` entry points.
    /// Exercises three phases: an inferred edit, an inferred creation, and an
    /// inferred deletion. In every case the child's read-time bookkeeping must
    /// stay local — the parent's own timestamps are neither added nor removed.
    #[gpui::test]
    async fn test_file_read_time_not_forwarded_to_linked_action_log_for_inferred_edits(
        cx: &mut TestAppContext,
    ) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "edit": "hello world\n",
                "delete": "goodbye world\n",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        // Phase 1: inferred edit. Snapshot the baseline, mutate + save the
        // buffer, then tell the child log to infer the edit from the snapshot.
        let edit_file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/edit", cx))
            .unwrap();
        let edit_buffer = project
            .update(cx, |project, cx| project.open_buffer(edit_file_path, cx))
            .await
            .unwrap();
        let edit_abs_path = PathBuf::from(path!("/dir/edit"));
        let edit_baseline_snapshot = edit_buffer.read_with(cx, |buffer, _| buffer.text_snapshot());

        edit_buffer.update(cx, |buffer, cx| buffer.set_text("hello world!\n", cx));
        project
            .update(cx, |project, cx| {
                project.save_buffer(edit_buffer.clone(), cx)
            })
            .await
            .unwrap();

        cx.update(|cx| {
            child_log.update(cx, |log, cx| {
                log.infer_buffer_edited_from_snapshot(
                    edit_buffer.clone(),
                    edit_baseline_snapshot.clone(),
                    cx,
                );
            });
        });

        assert!(
            child_log.read_with(cx, |log, _| log.file_read_time(&edit_abs_path).is_some()),
            "child should record file_read_time on inferred edit"
        );
        assert!(
            parent_log.read_with(cx, |log, _| log.file_read_time(&edit_abs_path).is_none()),
            "parent should NOT get file_read_time from child's inferred edit"
        );

        // Phase 2: inferred creation of a file that didn't exist before.
        let create_file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let create_buffer = project
            .update(cx, |project, cx| project.open_buffer(create_file_path, cx))
            .await
            .unwrap();
        let create_abs_path = PathBuf::from(path!("/dir/new_file"));
        let create_baseline_snapshot =
            create_buffer.read_with(cx, |buffer, _| buffer.text_snapshot());

        create_buffer.update(cx, |buffer, cx| buffer.set_text("new file\n", cx));
        project
            .update(cx, |project, cx| {
                project.save_buffer(create_buffer.clone(), cx)
            })
            .await
            .unwrap();

        cx.update(|cx| {
            child_log.update(cx, |log, cx| {
                log.infer_buffer_created(
                    create_buffer.clone(),
                    create_baseline_snapshot.clone(),
                    cx,
                );
            });
        });

        assert!(
            child_log.read_with(cx, |log, _| log.file_read_time(&create_abs_path).is_some()),
            "child should record file_read_time on inferred create"
        );
        assert!(
            parent_log.read_with(cx, |log, _| log.file_read_time(&create_abs_path).is_none()),
            "parent should NOT get file_read_time from child's inferred create"
        );

        // Phase 3: inferred deletion. Both logs record their own read first so
        // we can verify the child's inferred delete only clears the child's
        // timestamp, not the parent's.
        let delete_file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/delete", cx)
            })
            .unwrap();
        let delete_buffer = project
            .update(cx, |project, cx| project.open_buffer(delete_file_path, cx))
            .await
            .unwrap();
        let delete_abs_path = PathBuf::from(path!("/dir/delete"));
        let delete_baseline_snapshot =
            delete_buffer.read_with(cx, |buffer, _| buffer.text_snapshot());

        cx.update(|cx| {
            parent_log.update(cx, |log, cx| log.buffer_read(delete_buffer.clone(), cx));
            child_log.update(cx, |log, cx| log.buffer_read(delete_buffer.clone(), cx));
        });

        assert!(
            parent_log.read_with(cx, |log, _| log.file_read_time(&delete_abs_path).is_some()),
            "parent should record its own file_read_time before inferred delete"
        );
        assert!(
            child_log.read_with(cx, |log, _| log.file_read_time(&delete_abs_path).is_some()),
            "child should record its own file_read_time before inferred delete"
        );

        fs.remove_file(path!("/dir/delete").as_ref(), RemoveOptions::default())
            .await
            .unwrap();
        cx.run_until_parked();

        cx.update(|cx| {
            child_log.update(cx, |log, cx| {
                log.infer_buffer_deleted_from_snapshot(
                    delete_buffer.clone(),
                    delete_baseline_snapshot.clone(),
                    cx,
                );
            });
        });

        assert!(
            child_log.read_with(cx, |log, _| log.file_read_time(&delete_abs_path).is_none()),
            "child should remove file_read_time on inferred delete"
        );
        assert!(
            parent_log.read_with(cx, |log, _| log.file_read_time(&delete_abs_path).is_some()),
            "parent should keep its own file_read_time on linked inferred delete"
        );
    }
3627
    /// An edit inferred from a baseline snapshot on a child log should produce
    /// the same unreviewed hunks in the linked parent log.
    #[gpui::test]
    async fn test_linked_action_log_infer_buffer_edited_from_snapshot(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "one\ntwo\n"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Capture the pre-edit state that the log will diff against.
        let baseline_snapshot = buffer.read_with(cx, |buffer, _| buffer.text_snapshot());

        buffer.update(cx, |buffer, cx| buffer.set_text("one\ntwo\nthree\n", cx));
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.update(|cx| {
            child_log.update(cx, |log, cx| {
                log.infer_buffer_edited_from_snapshot(
                    buffer.clone(),
                    baseline_snapshot.clone(),
                    cx,
                );
            });
        });
        cx.run_until_parked();

        let child_hunks = unreviewed_hunks(&child_log, cx);
        assert!(
            !child_hunks.is_empty(),
            "child should track the inferred edit"
        );
        assert_eq!(
            unreviewed_hunks(&parent_log, cx),
            child_hunks,
            "parent should also track the inferred edit via linked log forwarding"
        );
    }
3678
    /// A creation inferred on a child log should be forwarded: the parent log
    /// reports the same unreviewed hunks for the newly created file.
    #[gpui::test]
    async fn test_linked_action_log_infer_buffer_created(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Baseline is the empty, not-yet-saved buffer.
        let baseline_snapshot = buffer.read_with(cx, |buffer, _| buffer.text_snapshot());

        buffer.update(cx, |buffer, cx| buffer.set_text("hello\n", cx));
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.update(|cx| {
            child_log.update(cx, |log, cx| {
                log.infer_buffer_created(buffer.clone(), baseline_snapshot.clone(), cx);
            });
        });
        cx.run_until_parked();

        let child_hunks = unreviewed_hunks(&child_log, cx);
        assert!(
            !child_hunks.is_empty(),
            "child should track the inferred creation"
        );
        assert_eq!(
            unreviewed_hunks(&parent_log, cx),
            child_hunks,
            "parent should also track the inferred creation via linked log forwarding"
        );
    }
3726
    /// A deletion inferred from a baseline snapshot on a child log should be
    /// forwarded to the linked parent log as identical unreviewed hunks.
    #[gpui::test]
    async fn test_linked_action_log_infer_buffer_deleted_from_snapshot(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "hello\n"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let parent_log = cx.new(|_| ActionLog::new(project.clone()));
        let child_log =
            cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));

        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        let baseline_snapshot = buffer.read_with(cx, |buffer, _| buffer.text_snapshot());

        // Remove the file out-of-band, then inform the child log afterwards.
        fs.remove_file(path!("/dir/file").as_ref(), RemoveOptions::default())
            .await
            .unwrap();
        cx.run_until_parked();

        cx.update(|cx| {
            child_log.update(cx, |log, cx| {
                log.infer_buffer_deleted_from_snapshot(
                    buffer.clone(),
                    baseline_snapshot.clone(),
                    cx,
                );
            });
        });
        cx.run_until_parked();

        let child_hunks = unreviewed_hunks(&child_log, cx);
        assert!(
            !child_hunks.is_empty(),
            "child should track the inferred deletion"
        );
        assert_eq!(
            unreviewed_hunks(&parent_log, cx),
            child_hunks,
            "parent should also track the inferred deletion via linked log forwarding"
        );
    }
3776
    /// An inferred edit should produce reviewable hunks, and rejecting all
    /// edits should restore the buffer to the baseline snapshot's text.
    #[gpui::test]
    async fn test_infer_buffer_edited_from_snapshot(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "one\ntwo\n"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        let baseline_snapshot = buffer.read_with(cx, |buffer, _| buffer.text_snapshot());

        buffer.update(cx, |buffer, cx| buffer.set_text("one\ntwo\nthree\n", cx));
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| {
                log.infer_buffer_edited_from_snapshot(
                    buffer.clone(),
                    baseline_snapshot.clone(),
                    cx,
                );
            });
        });
        cx.run_until_parked();

        assert!(
            !unreviewed_hunks(&action_log, cx).is_empty(),
            "inferred edit should produce reviewable hunks"
        );

        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();

        // Rejecting restores the pre-edit content.
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "one\ntwo\n"
        );
    }
3828
    /// An inferred creation should produce reviewable hunks, and rejecting all
    /// edits should delete the created file from disk.
    #[gpui::test]
    async fn test_infer_buffer_created(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        let baseline_snapshot = buffer.read_with(cx, |buffer, _| buffer.text_snapshot());

        buffer.update(cx, |buffer, cx| buffer.set_text("hello\n", cx));
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| {
                log.infer_buffer_created(buffer.clone(), baseline_snapshot.clone(), cx);
            });
        });
        cx.run_until_parked();

        assert!(
            !unreviewed_hunks(&action_log, cx).is_empty(),
            "inferred creation should produce reviewable hunks"
        );

        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();

        // Rejecting a pure creation removes the file entirely.
        assert!(fs.read_file_sync(path!("/dir/new_file")).is_err());
    }
3874
    /// An inferred deletion should produce reviewable hunks, and rejecting all
    /// edits should write the baseline content back to disk.
    #[gpui::test]
    async fn test_infer_buffer_deleted_from_snapshot(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "hello\n"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        let baseline_snapshot = buffer.read_with(cx, |buffer, _| buffer.text_snapshot());

        fs.remove_file(path!("/dir/file").as_ref(), RemoveOptions::default())
            .await
            .unwrap();
        cx.run_until_parked();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| {
                log.infer_buffer_deleted_from_snapshot(
                    buffer.clone(),
                    baseline_snapshot.clone(),
                    cx,
                );
            });
        });
        cx.run_until_parked();

        assert!(
            !unreviewed_hunks(&action_log, cx).is_empty(),
            "inferred deletion should produce reviewable hunks"
        );

        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();

        // Rejecting the deletion restores the file with its original content.
        assert_eq!(
            String::from_utf8(fs.read_file_sync(path!("/dir/file")).unwrap()).unwrap(),
            "hello\n"
        );
    }
3925
    /// If the user re-populates the buffer AFTER the agent's inferred
    /// deletion, rejecting the deletion must keep the user's newer content
    /// rather than resurrecting the original text.
    #[gpui::test]
    async fn test_infer_buffer_deleted_from_snapshot_preserves_later_user_edits_on_reject(
        cx: &mut TestAppContext,
    ) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "hello\n"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        let baseline_snapshot = buffer.read_with(cx, |buffer, _| buffer.text_snapshot());

        fs.remove_file(path!("/dir/file").as_ref(), RemoveOptions::default())
            .await
            .unwrap();
        cx.run_until_parked();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| {
                log.infer_buffer_deleted_from_snapshot(
                    buffer.clone(),
                    baseline_snapshot.clone(),
                    cx,
                );
            });
        });
        cx.run_until_parked();

        // User writes new content after the agent's deletion was recorded.
        buffer.update(cx, |buffer, cx| buffer.append("world\n", cx));
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();

        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();

        // The user's content wins both in the buffer and on disk.
        assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "world\n");
        assert_eq!(
            String::from_utf8(fs.read_file_sync(path!("/dir/file")).unwrap()).unwrap(),
            "world\n"
        );
    }
3981
    /// Same guarantee as the inferred-deletion variant, but for an explicit
    /// `will_delete_buffer` + project deletion: user edits made after the
    /// deletion survive a reject.
    #[gpui::test]
    async fn test_will_delete_buffer_preserves_later_user_edits_on_reject(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "hello\n"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.delete_file(file_path, false, cx))
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();

        // User restores the buffer with different content after the deletion.
        buffer.update(cx, |buffer, cx| buffer.append("world\n", cx));
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();

        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();

        assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "world\n");
        assert_eq!(
            String::from_utf8(fs.read_file_sync(path!("/dir/file")).unwrap()).unwrap(),
            "world\n"
        );
    }
4027
    /// Rejecting an inferred edit must undo only the agent's change: a user
    /// edit made afterwards ("zero\n" prepended) is kept while the agent's
    /// appended line ("three\n") is reverted.
    #[gpui::test]
    async fn test_infer_buffer_edited_from_snapshot_preserves_later_user_edits(
        cx: &mut TestAppContext,
    ) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "one\ntwo\n"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        let baseline_snapshot = buffer.read_with(cx, |buffer, _| buffer.text_snapshot());

        // Agent's edit: append "three\n".
        buffer.update(cx, |buffer, cx| buffer.set_text("one\ntwo\nthree\n", cx));
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| {
                log.infer_buffer_edited_from_snapshot(
                    buffer.clone(),
                    baseline_snapshot.clone(),
                    cx,
                );
            });
        });
        cx.run_until_parked();

        // User's edit: prepend "zero\n" after the agent's edit was tracked.
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(0..0, "zero\n")], None, cx);
        });
        cx.run_until_parked();

        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();

        // Only the agent's hunk is reverted; the user's line remains.
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "zero\none\ntwo\n"
        );
        assert_eq!(
            String::from_utf8(fs.read_file_sync(path!("/dir/file")).unwrap()).unwrap(),
            "zero\none\ntwo\n"
        );
    }
4085
    /// If the user edits a file the agent created, rejecting must NOT delete
    /// the file or drop the user's addition — the buffer keeps both lines, the
    /// file stays on disk, and no unreviewed hunks remain.
    #[gpui::test]
    async fn test_infer_buffer_created_preserves_later_user_edits_on_reject(
        cx: &mut TestAppContext,
    ) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        let baseline_snapshot = buffer.read_with(cx, |buffer, _| buffer.text_snapshot());

        buffer.update(cx, |buffer, cx| buffer.set_text("hello\n", cx));
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| {
                log.infer_buffer_created(buffer.clone(), baseline_snapshot.clone(), cx);
            });
        });
        cx.run_until_parked();

        // User appends to the agent-created file before the reject.
        buffer.update(cx, |buffer, cx| buffer.append("world\n", cx));
        cx.run_until_parked();

        action_log
            .update(cx, |log, cx| log.reject_all_edits(None, cx))
            .await;
        cx.run_until_parked();

        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "hello\nworld\n"
        );
        assert!(fs.read_file_sync(path!("/dir/new_file")).is_ok());
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
4136
    /// Test-only, comparable summary of a single diff hunk, used by
    /// `unreviewed_hunks` so tests can assert on hunk contents with `assert_eq!`.
    #[derive(Debug, PartialEq)]
    struct HunkStatus {
        // Buffer region the hunk covers, in row/column points.
        range: Range<Point>,
        // Whether the hunk is an addition, modification, or deletion.
        diff_status: DiffHunkStatusKind,
        // The text this hunk replaced, taken from the diff's base text.
        old_text: String,
    }
4143
4144 fn unreviewed_hunks(
4145 action_log: &Entity<ActionLog>,
4146 cx: &TestAppContext,
4147 ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
4148 cx.read(|cx| {
4149 action_log
4150 .read(cx)
4151 .changed_buffers(cx)
4152 .into_iter()
4153 .map(|(buffer, diff)| {
4154 let snapshot = buffer.read(cx).snapshot();
4155 (
4156 buffer,
4157 diff.read(cx)
4158 .snapshot(cx)
4159 .hunks(&snapshot)
4160 .map(|hunk| HunkStatus {
4161 diff_status: hunk.status().kind,
4162 range: hunk.range,
4163 old_text: diff
4164 .read(cx)
4165 .base_text(cx)
4166 .text_for_range(hunk.diff_base_byte_range)
4167 .collect(),
4168 })
4169 .collect(),
4170 )
4171 })
4172 .collect()
4173 })
4174 }
4175}