1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use clock;
4use collections::BTreeMap;
5use futures::{FutureExt, StreamExt, channel::mpsc};
6use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
7use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
8use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
9use std::{cmp, ops::Range, sync::Arc};
10use text::{Edit, Patch, Rope};
11use util::{
12 RangeExt, ResultExt as _,
13 paths::{PathStyle, RemotePathBuf},
14};
15
/// Tracks actions performed by tools in a thread
pub struct ActionLog {
    /// Buffers that we want to notify the model about when they change.
    /// Keyed by the buffer entity; values hold diff bases and review state.
    tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
    /// The project this action log is associated with
    project: Entity<Project>,
}
23
24impl ActionLog {
25 /// Creates a new, empty action log associated with the given project.
26 pub fn new(project: Entity<Project>) -> Self {
27 Self {
28 tracked_buffers: BTreeMap::default(),
29 project,
30 }
31 }
32
    /// Returns the project this action log is associated with.
    pub fn project(&self) -> &Entity<Project> {
        &self.project
    }
36
37 pub fn latest_snapshot(&self, buffer: &Entity<Buffer>) -> Option<text::BufferSnapshot> {
38 Some(self.tracked_buffers.get(buffer)?.snapshot.clone())
39 }
40
    /// Return a unified diff patch with user edits made since last read or notification
    ///
    /// Returns `None` when no tracked buffer has pending user edits.
    pub fn unnotified_user_edits(&self, cx: &Context<Self>) -> Option<String> {
        let diffs = self
            .tracked_buffers
            .values()
            .filter_map(|tracked| {
                // Skip buffers that can't possibly have pending user edits.
                if !tracked.may_have_unnotified_user_edits {
                    return None;
                }

                // The flag above is conservative; confirm the bases actually differ.
                let text_with_latest_user_edits = tracked.diff_base.to_string();
                let text_with_last_seen_user_edits = tracked.last_seen_base.to_string();
                if text_with_latest_user_edits == text_with_last_seen_user_edits {
                    return None;
                }
                let patch = language::unified_diff(
                    &text_with_last_seen_user_edits,
                    &text_with_latest_user_edits,
                );

                let buffer = tracked.buffer.clone();
                // Use a POSIX-style path for stable headers across platforms;
                // fall back to a synthetic name for buffers without a file.
                let file_path = buffer
                    .read(cx)
                    .file()
                    .map(|file| RemotePathBuf::new(file.full_path(cx), PathStyle::Posix).to_proto())
                    .unwrap_or_else(|| format!("buffer_{}", buffer.entity_id()));

                // Emit standard unified-diff file headers followed by the hunks.
                let mut result = String::new();
                result.push_str(&format!("--- a/{}\n", file_path));
                result.push_str(&format!("+++ b/{}\n", file_path));
                result.push_str(&patch);

                Some(result)
            })
            .collect::<Vec<_>>();

        if diffs.is_empty() {
            return None;
        }

        let unified_diff = diffs.join("\n\n");
        Some(unified_diff)
    }
84
85 /// Return a unified diff patch with user edits made since last read/notification
86 /// and mark them as notified
87 pub fn flush_unnotified_user_edits(&mut self, cx: &Context<Self>) -> Option<String> {
88 let patch = self.unnotified_user_edits(cx);
89 self.tracked_buffers.values_mut().for_each(|tracked| {
90 tracked.may_have_unnotified_user_edits = false;
91 tracked.last_seen_base = tracked.diff_base.clone();
92 });
93 patch
94 }
95
    /// Begins (or refreshes) tracking of `buffer`, returning its tracking state.
    ///
    /// When `is_created` is true the buffer is treated as newly created by the
    /// agent: its diff base becomes empty, so the entire current content shows
    /// up as one unreviewed insertion.
    fn track_buffer_internal(
        &mut self,
        buffer: Entity<Buffer>,
        is_created: bool,
        cx: &mut Context<Self>,
    ) -> &mut TrackedBuffer {
        let status = if is_created {
            if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
                match tracked.status {
                    // Already marked created: keep the remembered pre-existing content.
                    TrackedBufferStatus::Created {
                        existing_file_content,
                    } => TrackedBufferStatus::Created {
                        existing_file_content,
                    },
                    // Previously modified/deleted: remember the old diff base so a
                    // later "reject" can restore the pre-creation content.
                    TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
                        TrackedBufferStatus::Created {
                            existing_file_content: Some(tracked.diff_base),
                        }
                    }
                }
            } else if buffer
                .read(cx)
                .file()
                .is_some_and(|file| file.disk_state().exists())
            {
                // The "created" file already exists on disk; keep its content so
                // rejecting the creation can restore it.
                TrackedBufferStatus::Created {
                    existing_file_content: Some(buffer.read(cx).as_rope().clone()),
                }
            } else {
                TrackedBufferStatus::Created {
                    existing_file_content: None,
                }
            }
        } else {
            TrackedBufferStatus::Modified
        };

        // Note: `status` is only consumed when inserting a fresh entry below;
        // an already-tracked buffer keeps its existing status.
        let tracked_buffer = self
            .tracked_buffers
            .entry(buffer.clone())
            .or_insert_with(|| {
                // Keep language servers aware of this buffer while it's tracked.
                let open_lsp_handle = self.project.update(cx, |project, cx| {
                    project.register_buffer_with_language_servers(&buffer, cx)
                });

                let text_snapshot = buffer.read(cx).text_snapshot();
                let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
                let diff_base;
                let last_seen_base;
                let unreviewed_edits;
                if is_created {
                    // Created buffers diff against empty content: every current
                    // row becomes part of a single unreviewed insertion.
                    diff_base = Rope::default();
                    last_seen_base = Rope::default();
                    unreviewed_edits = Patch::new(vec![Edit {
                        old: 0..1,
                        new: 0..text_snapshot.max_point().row + 1,
                    }])
                } else {
                    diff_base = buffer.read(cx).as_rope().clone();
                    last_seen_base = diff_base.clone();
                    unreviewed_edits = Patch::default();
                }
                TrackedBuffer {
                    buffer: buffer.clone(),
                    diff_base,
                    last_seen_base,
                    unreviewed_edits,
                    snapshot: text_snapshot,
                    status,
                    version: buffer.read(cx).version(),
                    diff,
                    diff_update: diff_update_tx,
                    may_have_unnotified_user_edits: false,
                    _open_lsp_handle: open_lsp_handle,
                    // Background task that keeps the diff current; see `maintain_diff`.
                    _maintain_diff: cx.spawn({
                        let buffer = buffer.clone();
                        async move |this, cx| {
                            Self::maintain_diff(this, buffer, diff_update_rx, cx)
                                .await
                                .ok();
                        }
                    }),
                    _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
                }
            });
        // Refresh the version so this buffer is no longer considered stale.
        tracked_buffer.version = buffer.read(cx).version();
        tracked_buffer
    }
185
186 fn handle_buffer_event(
187 &mut self,
188 buffer: Entity<Buffer>,
189 event: &BufferEvent,
190 cx: &mut Context<Self>,
191 ) {
192 match event {
193 BufferEvent::Edited => self.handle_buffer_edited(buffer, cx),
194 BufferEvent::FileHandleChanged => {
195 self.handle_buffer_file_changed(buffer, cx);
196 }
197 _ => {}
198 };
199 }
200
201 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
202 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
203 return;
204 };
205 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
206 }
207
    /// Reconciles tracking state when the buffer's backing file changes on disk
    /// (e.g. deleted or restored outside of the agent's control).
    fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        match tracked_buffer.status {
            TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
                if buffer
                    .read(cx)
                    .file()
                    .is_some_and(|file| file.disk_state() == DiskState::Deleted)
                {
                    // If the buffer had been edited by a tool, but it got
                    // deleted externally, we want to stop tracking it.
                    self.tracked_buffers.remove(&buffer);
                }
                cx.notify();
            }
            TrackedBufferStatus::Deleted => {
                if buffer
                    .read(cx)
                    .file()
                    .is_some_and(|file| file.disk_state() != DiskState::Deleted)
                {
                    // If the buffer had been deleted by a tool, but it got
                    // resurrected externally, we want to clear the edits we
                    // were tracking and reset the buffer's state.
                    self.tracked_buffers.remove(&buffer);
                    self.track_buffer_internal(buffer, false, cx);
                }
                cx.notify();
            }
        }
    }
242
    /// Long-running task that keeps the unreviewed diff for `buffer` current.
    ///
    /// Consumes buffer-change notifications from `buffer_updates`, and when the
    /// repository HEAD moves, folds committed edits into the reviewed base.
    /// Exits when the update channel closes (buffer no longer tracked).
    async fn maintain_diff(
        this: WeakEntity<Self>,
        buffer: Entity<Buffer>,
        mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
        let git_diff = this
            .update(cx, |this, cx| {
                this.project.update(cx, |project, cx| {
                    project.open_uncommitted_diff(buffer.clone(), cx)
                })
            })?
            .await
            .ok();
        let buffer_repo = git_store.read_with(cx, |git_store, cx| {
            git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        })?;

        // Watch channel used to signal that the repository's HEAD commit
        // changed, i.e. edits may have been committed.
        let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
        // Keep the subscription alive for the lifetime of this task.
        let _repo_subscription =
            if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
                cx.update(|cx| {
                    let mut old_head = buffer_repo.read(cx).head_commit.clone();
                    Some(cx.subscribe(git_diff, move |_, event, cx| {
                        if let buffer_diff::BufferDiffEvent::DiffChanged { .. } = event {
                            let new_head = buffer_repo.read(cx).head_commit.clone();
                            if new_head != old_head {
                                old_head = new_head;
                                git_diff_updates_tx.send(()).ok();
                            }
                        }
                    }))
                })?
            } else {
                None
            };

        loop {
            futures::select_biased! {
                // Buffer edits take priority over git HEAD updates.
                buffer_update = buffer_updates.next() => {
                    if let Some((author, buffer_snapshot)) = buffer_update {
                        Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
                    } else {
                        // Sender dropped: the buffer is no longer tracked.
                        break;
                    }
                }
                _ = git_diff_updates_rx.changed().fuse() => {
                    if let Some(git_diff) = git_diff.as_ref() {
                        Self::keep_committed_edits(&this, &buffer, git_diff, cx).await?;
                    }
                }
            }
        }

        Ok(())
    }
300
301 async fn track_edits(
302 this: &WeakEntity<ActionLog>,
303 buffer: &Entity<Buffer>,
304 author: ChangeAuthor,
305 buffer_snapshot: text::BufferSnapshot,
306 cx: &mut AsyncApp,
307 ) -> Result<()> {
308 let rebase = this.update(cx, |this, cx| {
309 let tracked_buffer = this
310 .tracked_buffers
311 .get_mut(buffer)
312 .context("buffer not tracked")?;
313
314 let rebase = cx.background_spawn({
315 let mut base_text = tracked_buffer.diff_base.clone();
316 let old_snapshot = tracked_buffer.snapshot.clone();
317 let new_snapshot = buffer_snapshot.clone();
318 let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
319 let edits = diff_snapshots(&old_snapshot, &new_snapshot);
320 let mut has_user_changes = false;
321 async move {
322 if let ChangeAuthor::User = author {
323 has_user_changes = apply_non_conflicting_edits(
324 &unreviewed_edits,
325 edits,
326 &mut base_text,
327 new_snapshot.as_rope(),
328 );
329 }
330
331 (Arc::new(base_text.to_string()), base_text, has_user_changes)
332 }
333 });
334
335 anyhow::Ok(rebase)
336 })??;
337 let (new_base_text, new_diff_base, has_user_changes) = rebase.await;
338
339 this.update(cx, |this, _| {
340 let tracked_buffer = this
341 .tracked_buffers
342 .get_mut(buffer)
343 .context("buffer not tracked")
344 .unwrap();
345 tracked_buffer.may_have_unnotified_user_edits |= has_user_changes;
346 })?;
347
348 Self::update_diff(
349 this,
350 buffer,
351 buffer_snapshot,
352 new_base_text,
353 new_diff_base,
354 cx,
355 )
356 .await
357 }
358
    /// Folds edits that were committed to git into the reviewed base.
    ///
    /// Any unreviewed edit whose committed content exactly matches the buffer
    /// content is treated as implicitly accepted by the user.
    async fn keep_committed_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        git_diff: &Entity<BufferDiff>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let buffer_snapshot = this.read_with(cx, |this, _cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok(tracked_buffer.snapshot.clone())
        })??;
        let (new_base_text, new_diff_base) = this
            .read_with(cx, |this, cx| {
                let tracked_buffer = this
                    .tracked_buffers
                    .get(buffer)
                    .context("buffer not tracked")?;
                let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let agent_diff_base = tracked_buffer.diff_base.clone();
                let git_diff_base = git_diff.read(cx).base_text().as_rope().clone();
                let buffer_text = tracked_buffer.snapshot.as_rope().clone();
                anyhow::Ok(cx.background_spawn(async move {
                    let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
                    // Row-level edits between the agent's base and git's base,
                    // i.e. what the commit changed relative to our base.
                    let committed_edits = language::line_diff(
                        &agent_diff_base.to_string(),
                        &git_diff_base.to_string(),
                    )
                    .into_iter()
                    .map(|(old, new)| Edit { old, new });

                    let mut new_agent_diff_base = agent_diff_base.clone();
                    // How much earlier replacements have shifted rows in
                    // `new_agent_diff_base` relative to the original base.
                    let mut row_delta = 0i32;
                    for committed in committed_edits {
                        while let Some(unreviewed) = old_unreviewed_edits.peek() {
                            // If the committed edit matches the unreviewed
                            // edit, assume the user wants to keep it.
                            if committed.old == unreviewed.old {
                                let unreviewed_new =
                                    buffer_text.slice_rows(unreviewed.new.clone()).to_string();
                                let committed_new =
                                    git_diff_base.slice_rows(committed.new.clone()).to_string();
                                if unreviewed_new == committed_new {
                                    // Splice the accepted content into the base,
                                    // adjusting rows by the accumulated delta.
                                    let old_byte_start =
                                        new_agent_diff_base.point_to_offset(Point::new(
                                            (unreviewed.old.start as i32 + row_delta) as u32,
                                            0,
                                        ));
                                    let old_byte_end =
                                        new_agent_diff_base.point_to_offset(cmp::min(
                                            Point::new(
                                                (unreviewed.old.end as i32 + row_delta) as u32,
                                                0,
                                            ),
                                            new_agent_diff_base.max_point(),
                                        ));
                                    new_agent_diff_base
                                        .replace(old_byte_start..old_byte_end, &unreviewed_new);
                                    row_delta +=
                                        unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
                                }
                            } else if unreviewed.old.start >= committed.old.end {
                                // This unreviewed edit starts past the committed
                                // one; advance to the next committed edit.
                                break;
                            }

                            old_unreviewed_edits.next().unwrap();
                        }
                    }

                    (
                        Arc::new(new_agent_diff_base.to_string()),
                        new_agent_diff_base,
                    )
                }))
            })??
            .await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
447
    /// Recomputes the buffer's `BufferDiff` against `new_base_text` and stores
    /// the new base, snapshot, and row-level unreviewed edits on the tracked
    /// buffer entry.
    async fn update_diff(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        buffer_snapshot: text::BufferSnapshot,
        new_base_text: Arc<String>,
        new_diff_base: Rope,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let (diff, language, language_registry) = this.read_with(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok((
                tracked_buffer.diff.clone(),
                buffer.read(cx).language().cloned(),
                buffer.read(cx).language_registry(),
            ))
        })??;
        let diff_snapshot = BufferDiff::update_diff(
            diff.clone(),
            buffer_snapshot.clone(),
            Some(new_base_text),
            true,
            false,
            language,
            language_registry,
            cx,
        )
        .await;
        let mut unreviewed_edits = Patch::default();
        if let Ok(diff_snapshot) = diff_snapshot {
            // Convert the diff hunks back into row-level edits on a
            // background thread.
            unreviewed_edits = cx
                .background_spawn({
                    let diff_snapshot = diff_snapshot.clone();
                    let buffer_snapshot = buffer_snapshot.clone();
                    let new_diff_base = new_diff_base.clone();
                    async move {
                        let mut unreviewed_edits = Patch::default();
                        for hunk in diff_snapshot
                            .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer_snapshot)
                        {
                            let old_range = new_diff_base
                                .offset_to_point(hunk.diff_base_byte_range.start)
                                ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
                            let new_range = hunk.range.start..hunk.range.end;
                            unreviewed_edits.push(point_to_row_edit(
                                Edit {
                                    old: old_range,
                                    new: new_range,
                                },
                                &new_diff_base,
                                buffer_snapshot.as_rope(),
                            ));
                        }
                        unreviewed_edits
                    }
                })
                .await;

            diff.update(cx, |diff, cx| {
                diff.set_snapshot(diff_snapshot, &buffer_snapshot, cx);
            })?;
        }
        // Commit the new state; errors if the buffer was untracked meanwhile.
        this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;
            tracked_buffer.diff_base = new_diff_base;
            tracked_buffer.snapshot = buffer_snapshot;
            tracked_buffer.unreviewed_edits = unreviewed_edits;
            cx.notify();
            anyhow::Ok(())
        })?
    }
524
    /// Track a buffer as read by agent, so we can notify the model about user edits.
    pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        // `is_created: false` — the buffer is treated as pre-existing.
        self.track_buffer_internal(buffer, false, cx);
    }
529
    /// Mark a buffer as created by agent, so we can refresh it in the context
    pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        // `is_created: true` — the whole content becomes one unreviewed insertion.
        self.track_buffer_internal(buffer, true, cx);
    }
534
535 /// Mark a buffer as edited by agent, so we can refresh it in the context
536 pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
537 let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
538 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
539 tracked_buffer.status = TrackedBufferStatus::Modified;
540 }
541 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
542 }
543
    /// Records that the agent is about to delete `buffer`.
    ///
    /// A buffer the agent itself created is simply untracked; a modified buffer
    /// is emptied and marked `Deleted` so the deletion shows up for review.
    pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
        match tracked_buffer.status {
            TrackedBufferStatus::Created { .. } => {
                // Creating then deleting nets out to nothing to review.
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            TrackedBufferStatus::Modified => {
                buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
                tracked_buffer.status = TrackedBufferStatus::Deleted;
                tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
            }
            TrackedBufferStatus::Deleted => {}
        }
        cx.notify();
    }
560
    /// Accepts (keeps) all unreviewed edits intersecting `buffer_range`,
    /// folding them into the diff base so they no longer show as pending.
    pub fn keep_edits_in_range(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_range: Range<impl language::ToPoint>,
        cx: &mut Context<Self>,
    ) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        match tracked_buffer.status {
            TrackedBufferStatus::Deleted => {
                // Keeping a deletion means accepting it: stop tracking entirely.
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            _ => {
                let buffer = buffer.read(cx);
                let buffer_range =
                    buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
                // Accumulated row shift caused by edits already folded into the base.
                let mut delta = 0i32;

                tracked_buffer.unreviewed_edits.retain_mut(|edit| {
                    // Rebase this edit's old rows by the rows consumed so far.
                    edit.old.start = (edit.old.start as i32 + delta) as u32;
                    edit.old.end = (edit.old.end as i32 + delta) as u32;

                    if buffer_range.end.row < edit.new.start
                        || buffer_range.start.row > edit.new.end
                    {
                        // Outside the kept range: leave it unreviewed.
                        true
                    } else {
                        // Splice the buffer's current content for this edit
                        // into the diff base, clamping rows to valid points.
                        let old_range = tracked_buffer
                            .diff_base
                            .point_to_offset(Point::new(edit.old.start, 0))
                            ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                Point::new(edit.old.end, 0),
                                tracked_buffer.diff_base.max_point(),
                            ));
                        let new_range = tracked_buffer
                            .snapshot
                            .point_to_offset(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.point_to_offset(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        tracked_buffer.diff_base.replace(
                            old_range,
                            &tracked_buffer
                                .snapshot
                                .text_for_range(new_range)
                                .collect::<String>(),
                        );
                        delta += edit.new_len() as i32 - edit.old_len() as i32;
                        false
                    }
                });
                // Once all creation edits are kept, the buffer behaves like a
                // regular modified buffer.
                if tracked_buffer.unreviewed_edits.is_empty()
                    && let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status
                {
                    tracked_buffer.status = TrackedBufferStatus::Modified;
                }
                tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
            }
        }
    }
625
    /// Rejects (reverts) unreviewed edits intersecting `buffer_ranges`,
    /// restoring the diff-base content, and returns a task that saves the
    /// buffer (or deletes the file for a rejected creation).
    pub fn reject_edits_in_ranges(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_ranges: Vec<Range<impl language::ToPoint>>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return Task::ready(Ok(()));
        };

        match &tracked_buffer.status {
            TrackedBufferStatus::Created {
                existing_file_content,
            } => {
                let task = if let Some(existing_file_content) = existing_file_content {
                    // The file existed before the agent "created" it: restore
                    // the remembered content in one transaction and save.
                    buffer.update(cx, |buffer, cx| {
                        buffer.start_transaction();
                        buffer.set_text("", cx);
                        for chunk in existing_file_content.chunks() {
                            buffer.append(chunk, cx);
                        }
                        buffer.end_transaction(cx);
                    });
                    self.project
                        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
                } else {
                    // For a file created by AI with no pre-existing content,
                    // only delete the file if we're certain it contains only AI content
                    // with no edits from the user.

                    let initial_version = tracked_buffer.version.clone();
                    let current_version = buffer.read(cx).version();

                    let current_content = buffer.read(cx).text();
                    let tracked_content = tracked_buffer.snapshot.text();

                    let is_ai_only_content =
                        initial_version == current_version && current_content == tracked_content;

                    if is_ai_only_content {
                        buffer
                            .read(cx)
                            .entry_id(cx)
                            .and_then(|entry_id| {
                                self.project.update(cx, |project, cx| {
                                    project.delete_entry(entry_id, false, cx)
                                })
                            })
                            .unwrap_or(Task::ready(Ok(())))
                    } else {
                        // Not sure how to disentangle edits made by the user
                        // from edits made by the AI at this point.
                        // For now, preserve both to avoid data loss.
                        //
                        // TODO: Better solution (disable "Reject" after user makes some
                        // edit or find a way to differentiate between AI and user edits)
                        Task::ready(Ok(()))
                    }
                };

                self.tracked_buffers.remove(&buffer);
                cx.notify();
                task
            }
            TrackedBufferStatus::Deleted => {
                // Rejecting a deletion restores the pre-deletion content.
                buffer.update(cx, |buffer, cx| {
                    buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
                });
                let save = self
                    .project
                    .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));

                // Clear all tracked edits for this buffer and start over as if we just read it.
                self.tracked_buffers.remove(&buffer);
                self.buffer_read(buffer.clone(), cx);
                cx.notify();
                save
            }
            TrackedBufferStatus::Modified => {
                buffer.update(cx, |buffer, cx| {
                    let mut buffer_row_ranges = buffer_ranges
                        .into_iter()
                        .map(|range| {
                            range.start.to_point(buffer).row..range.end.to_point(buffer).row
                        })
                        .peekable();

                    // Collect (anchor range, base text) pairs for every
                    // unreviewed edit overlapping a requested row range.
                    let mut edits_to_revert = Vec::new();
                    for edit in tracked_buffer.unreviewed_edits.edits() {
                        let new_range = tracked_buffer
                            .snapshot
                            .anchor_before(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.anchor_after(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        let new_row_range = new_range.start.to_point(buffer).row
                            ..new_range.end.to_point(buffer).row;

                        // Advance past row ranges that end before this edit;
                        // mark for revert on any overlap.
                        let mut revert = false;
                        while let Some(buffer_row_range) = buffer_row_ranges.peek() {
                            if buffer_row_range.end < new_row_range.start {
                                buffer_row_ranges.next();
                            } else if buffer_row_range.start > new_row_range.end {
                                break;
                            } else {
                                revert = true;
                                break;
                            }
                        }

                        if revert {
                            let old_range = tracked_buffer
                                .diff_base
                                .point_to_offset(Point::new(edit.old.start, 0))
                                ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                    Point::new(edit.old.end, 0),
                                    tracked_buffer.diff_base.max_point(),
                                ));
                            let old_text = tracked_buffer
                                .diff_base
                                .chunks_in_range(old_range)
                                .collect::<String>();
                            edits_to_revert.push((new_range, old_text));
                        }
                    }

                    buffer.edit(edits_to_revert, None, cx);
                });
                self.project
                    .update(cx, |project, cx| project.save_buffer(buffer, cx))
            }
        }
    }
760
761 pub fn keep_all_edits(&mut self, cx: &mut Context<Self>) {
762 self.tracked_buffers
763 .retain(|_buffer, tracked_buffer| match tracked_buffer.status {
764 TrackedBufferStatus::Deleted => false,
765 _ => {
766 if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
767 tracked_buffer.status = TrackedBufferStatus::Modified;
768 }
769 tracked_buffer.unreviewed_edits.clear();
770 tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
771 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
772 true
773 }
774 });
775 cx.notify();
776 }
777
778 pub fn reject_all_edits(&mut self, cx: &mut Context<Self>) -> Task<()> {
779 let futures = self.changed_buffers(cx).into_keys().map(|buffer| {
780 let reject = self.reject_edits_in_ranges(buffer, vec![Anchor::MIN..Anchor::MAX], cx);
781
782 async move {
783 reject.await.log_err();
784 }
785 });
786
787 let task = futures::future::join_all(futures);
788
789 cx.spawn(async move |_, _| {
790 task.await;
791 })
792 }
793
794 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
795 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
796 self.tracked_buffers
797 .iter()
798 .filter(|(_, tracked)| tracked.has_edits(cx))
799 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
800 .collect()
801 }
802
803 /// Iterate over buffers changed since last read or edited by the model
804 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
805 self.tracked_buffers
806 .iter()
807 .filter(|(buffer, tracked)| {
808 let buffer = buffer.read(cx);
809
810 tracked.version != buffer.version
811 && buffer
812 .file()
813 .is_some_and(|file| file.disk_state() != DiskState::Deleted)
814 })
815 .map(|(buffer, _)| buffer)
816 }
817}
818
/// Applies the subset of `edits` (old→new row edits) that do not conflict with
/// the unreviewed edits in `patch` onto `old_text`, pulling replacement content
/// from `new_text`. Returns true if any edit was applied.
fn apply_non_conflicting_edits(
    patch: &Patch<u32>,
    edits: Vec<Edit<u32>>,
    old_text: &mut Rope,
    new_text: &Rope,
) -> bool {
    let mut old_edits = patch.edits().iter().cloned().peekable();
    let mut new_edits = edits.into_iter().peekable();
    // Row shift already applied to `old_text` by accepted edits.
    let mut applied_delta = 0i32;
    // Row shift contributed by unreviewed (skipped) edits we've passed.
    let mut rebased_delta = 0i32;
    let mut has_made_changes = false;

    while let Some(mut new_edit) = new_edits.next() {
        let mut conflict = false;

        // Push all the old edits that are before this new edit or that intersect with it.
        while let Some(old_edit) = old_edits.peek() {
            if new_edit.old.end < old_edit.new.start
                || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
            {
                // New edit ends strictly before this old edit: no overlap.
                break;
            } else if new_edit.old.start > old_edit.new.end
                || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
            {
                // Old edit is entirely before the new edit: account for its shift.
                let old_edit = old_edits.next().unwrap();
                rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
            } else {
                // Overlap: this new edit conflicts with an unreviewed edit.
                conflict = true;
                if new_edits
                    .peek()
                    .is_some_and(|next_edit| next_edit.old.overlaps(&old_edit.new))
                {
                    // The following new edit also overlaps the same old edit;
                    // move on so it's checked against this old edit too.
                    new_edit = new_edits.next().unwrap();
                } else {
                    let old_edit = old_edits.next().unwrap();
                    rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
                }
            }
        }

        if !conflict {
            // This edit doesn't intersect with any old edit, so we can apply it to the old text.
            new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
            new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
            let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
                ..old_text.point_to_offset(cmp::min(
                    Point::new(new_edit.old.end, 0),
                    old_text.max_point(),
                ));
            let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
                ..new_text.point_to_offset(cmp::min(
                    Point::new(new_edit.new.end, 0),
                    new_text.max_point(),
                ));

            old_text.replace(
                old_bytes,
                &new_text.chunks_in_range(new_bytes).collect::<String>(),
            );
            applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
            has_made_changes = true;
        }
    }
    has_made_changes
}
884
/// Computes row-level edits between two snapshots of the same buffer,
/// coalescing edits whose old row ranges touch or overlap.
fn diff_snapshots(
    old_snapshot: &text::BufferSnapshot,
    new_snapshot: &text::BufferSnapshot,
) -> Vec<Edit<u32>> {
    let mut edits = new_snapshot
        .edits_since::<Point>(&old_snapshot.version)
        .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
        .peekable();
    let mut row_edits = Vec::new();
    while let Some(mut edit) = edits.next() {
        // Merge consecutive edits that touch/overlap into a single row edit.
        while let Some(next_edit) = edits.peek() {
            if edit.old.end >= next_edit.old.start {
                edit.old.end = next_edit.old.end;
                edit.new.end = next_edit.new.end;
                edits.next();
            } else {
                break;
            }
        }
        row_edits.push(edit);
    }
    row_edits
}
908
/// Converts a point-based edit into a row-range edit.
///
/// Three cases: an insertion at end-of-line that begins with a newline is
/// attributed to the following row; an edit spanning whole lines keeps its row
/// range as-is; otherwise the end rows are extended by one to cover the
/// partially-edited line.
fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
    if edit.old.start.column == old_text.line_len(edit.old.start.row)
        && new_text
            .chars_at(new_text.point_to_offset(edit.new.start))
            .next()
            == Some('\n')
        && edit.old.start != old_text.max_point()
    {
        // Newline inserted at end of line: treat it as affecting the next row.
        Edit {
            old: edit.old.start.row + 1..edit.old.end.row + 1,
            new: edit.new.start.row + 1..edit.new.end.row + 1,
        }
    } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
        // Whole-line edit: row ranges map directly.
        Edit {
            old: edit.old.start.row..edit.old.end.row,
            new: edit.new.start.row..edit.new.end.row,
        }
    } else {
        // Partial-line edit: include the line containing the edit's end.
        Edit {
            old: edit.old.start.row..edit.old.end.row + 1,
            new: edit.new.start.row..edit.new.end.row + 1,
        }
    }
}
933
/// Who authored a buffer change; determines whether the change is folded into
/// the reviewed base (user) or left as an unreviewed edit (agent).
#[derive(Copy, Clone, Debug)]
enum ChangeAuthor {
    /// The human user edited the buffer.
    User,
    /// The agent/model edited the buffer.
    Agent,
}
939
/// Lifecycle state of a tracked buffer with respect to agent actions.
enum TrackedBufferStatus {
    /// The agent created this buffer. `existing_file_content` remembers any
    /// prior on-disk content so rejecting the creation can restore it.
    Created { existing_file_content: Option<Rope> },
    /// The agent (or user) modified a pre-existing buffer.
    Modified,
    /// The agent deleted this buffer; the deletion is pending review.
    Deleted,
}
945
/// Per-buffer tracking state: diff bases, review status, and the background
/// machinery that keeps the diff current.
struct TrackedBuffer {
    buffer: Entity<Buffer>,
    // Base text the unreviewed diff is computed against; user-accepted and
    // user-authored edits are folded into it.
    diff_base: Rope,
    // Value of `diff_base` at the last time user edits were flushed/notified.
    last_seen_base: Rope,
    // Row-level agent edits not yet kept or rejected by the user.
    unreviewed_edits: Patch<u32>,
    status: TrackedBufferStatus,
    // Buffer version last seen by the agent; used to detect staleness.
    version: clock::Global,
    diff: Entity<BufferDiff>,
    // Text snapshot corresponding to the current diff state.
    snapshot: text::BufferSnapshot,
    // Channel feeding the `maintain_diff` background task.
    diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
    // Conservative flag: true when user edits may not yet be notified.
    may_have_unnotified_user_edits: bool,
    // Keeps language servers registered for this buffer while tracked.
    _open_lsp_handle: OpenLspBufferHandle,
    _maintain_diff: Task<()>,
    _subscription: Subscription,
}
961
impl TrackedBuffer {
    /// Whether the tracked diff currently contains any hunks.
    fn has_edits(&self, cx: &App) -> bool {
        self.diff
            .read(cx)
            .hunks(self.buffer.read(cx), cx)
            .next()
            .is_some()
    }

    /// Sends the buffer's current snapshot to the diff-maintenance task.
    /// Errors (task gone) are ignored: the buffer is simply untracked.
    fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
        self.diff_update
            .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
            .ok();
    }
}
977
/// Public view of a changed buffer: the diff pending user review.
pub struct ChangedBuffer {
    pub diff: Entity<BufferDiff>,
}
981
982#[cfg(test)]
983mod tests {
984 use super::*;
985 use buffer_diff::DiffHunkStatusKind;
986 use gpui::TestAppContext;
987 use indoc::indoc;
988 use language::Point;
989 use project::{FakeFs, Fs, Project, RemoveOptions};
990 use rand::prelude::*;
991 use serde_json::json;
992 use settings::SettingsStore;
993 use std::env;
994 use util::{RandomCharIter, path};
995
    // Runs once at test-binary startup to enable logging in all tests.
    #[ctor::ctor]
    fn init_logger() {
        zlog::init_test();
    }
1000
    /// Installs the global settings, language, and project state tests need.
    fn init_test(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            SettingsStore::load_registered_settings(cx);

            language::init(cx);
            Project::init_settings(cx);
        });
    }
1011
    // Verifies that agent edits produce unreviewed hunks and that keeping a
    // range removes only the hunks intersecting that range.
    #[gpui::test(iterations = 10)]
    async fn test_keep_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Make two agent-attributed edits on separate lines.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndEf\nghi\njkl\nmnO"
        );
        // Both edits should appear as unreviewed modified hunks.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(2, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(4, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Keeping the range covering only the second edit leaves the first.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), cx)
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(2, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\n".into(),
                }],
            )]
        );

        // Keeping the full range clears all remaining hunks.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1089
1090 #[gpui::test(iterations = 10)]
1091 async fn test_deletions(cx: &mut TestAppContext) {
1092 init_test(cx);
1093
1094 let fs = FakeFs::new(cx.executor());
1095 fs.insert_tree(
1096 path!("/dir"),
1097 json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
1098 )
1099 .await;
1100 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1101 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1102 let file_path = project
1103 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1104 .unwrap();
1105 let buffer = project
1106 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1107 .await
1108 .unwrap();
1109
1110 cx.update(|cx| {
1111 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1112 buffer.update(cx, |buffer, cx| {
1113 buffer
1114 .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
1115 .unwrap();
1116 buffer.finalize_last_transaction();
1117 });
1118 buffer.update(cx, |buffer, cx| {
1119 buffer
1120 .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
1121 .unwrap();
1122 buffer.finalize_last_transaction();
1123 });
1124 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1125 });
1126 cx.run_until_parked();
1127 assert_eq!(
1128 buffer.read_with(cx, |buffer, _| buffer.text()),
1129 "abc\nghi\njkl\npqr"
1130 );
1131 assert_eq!(
1132 unreviewed_hunks(&action_log, cx),
1133 vec![(
1134 buffer.clone(),
1135 vec![
1136 HunkStatus {
1137 range: Point::new(1, 0)..Point::new(1, 0),
1138 diff_status: DiffHunkStatusKind::Deleted,
1139 old_text: "def\n".into(),
1140 },
1141 HunkStatus {
1142 range: Point::new(3, 0)..Point::new(3, 0),
1143 diff_status: DiffHunkStatusKind::Deleted,
1144 old_text: "mno\n".into(),
1145 }
1146 ],
1147 )]
1148 );
1149
1150 buffer.update(cx, |buffer, cx| buffer.undo(cx));
1151 cx.run_until_parked();
1152 assert_eq!(
1153 buffer.read_with(cx, |buffer, _| buffer.text()),
1154 "abc\nghi\njkl\nmno\npqr"
1155 );
1156 assert_eq!(
1157 unreviewed_hunks(&action_log, cx),
1158 vec![(
1159 buffer.clone(),
1160 vec![HunkStatus {
1161 range: Point::new(1, 0)..Point::new(1, 0),
1162 diff_status: DiffHunkStatusKind::Deleted,
1163 old_text: "def\n".into(),
1164 }],
1165 )]
1166 );
1167
1168 action_log.update(cx, |log, cx| {
1169 log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), cx)
1170 });
1171 cx.run_until_parked();
1172 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1173 }
1174
    // Verifies that user edits made around and inside an agent hunk don't create
    // new unreviewed hunks: the tracked hunk's reported range and old_text stay
    // stable throughout.
    #[gpui::test(iterations = 10)]
    async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent replaces the middle of lines 2-3, producing one modified hunk.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndeF\nGHI\njkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // User edits on the lines surrounding the hunk leave it untouched.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(
                [
                    (Point::new(0, 2)..Point::new(0, 2), "X"),
                    (Point::new(3, 0)..Point::new(3, 0), "Y"),
                ],
                None,
                cx,
            )
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // A user edit *inside* the hunk is absorbed: same range, same old_text.
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndZeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // Keeping a range that touches the hunk's start accepts it, clearing
        // the unreviewed list entirely.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1271
    // Verifies that user edits made after an agent edit are reported separately
    // through `unnotified_user_edits` as a unified diff, while the agent's own
    // hunk remains in the unreviewed list.
    #[gpui::test(iterations = 10)]
    async fn test_user_edits_notifications(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file": indoc! {"
                abc
                def
                ghi
                jkl
                mno"}}),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent edits
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            indoc! {"
                abc
                deF
                GHI
                jkl
                mno"}
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // User edits
        buffer.update(cx, |buffer, cx| {
            buffer.edit(
                [
                    (Point::new(0, 2)..Point::new(0, 2), "X"),
                    (Point::new(3, 0)..Point::new(3, 0), "Y"),
                ],
                None,
                cx,
            )
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            indoc! {"
                abXc
                deF
                GHI
                Yjkl
                mno"}
        );

        // User edits should be stored separately from agent's
        // The reported patch contains only the user's changes (abc→abXc,
        // jkl→Yjkl); the agent's deF/GHI edit appears as unchanged context.
        let user_edits = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));
        assert_eq!(
            user_edits.expect("should have some user edits"),
            indoc! {"
                --- a/dir/file
                +++ b/dir/file
                @@ -1,5 +1,5 @@
                -abc
                +abXc
                 def
                 ghi
                -jkl
                +Yjkl
                 mno
            "}
        );

        // Accepting the agent hunk clears the unreviewed list.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1375
1376 #[gpui::test(iterations = 10)]
1377 async fn test_creating_files(cx: &mut TestAppContext) {
1378 init_test(cx);
1379
1380 let fs = FakeFs::new(cx.executor());
1381 fs.insert_tree(path!("/dir"), json!({})).await;
1382 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1383 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1384 let file_path = project
1385 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1386 .unwrap();
1387
1388 let buffer = project
1389 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1390 .await
1391 .unwrap();
1392 cx.update(|cx| {
1393 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1394 buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
1395 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1396 });
1397 project
1398 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1399 .await
1400 .unwrap();
1401 cx.run_until_parked();
1402 assert_eq!(
1403 unreviewed_hunks(&action_log, cx),
1404 vec![(
1405 buffer.clone(),
1406 vec![HunkStatus {
1407 range: Point::new(0, 0)..Point::new(0, 5),
1408 diff_status: DiffHunkStatusKind::Added,
1409 old_text: "".into(),
1410 }],
1411 )]
1412 );
1413
1414 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
1415 cx.run_until_parked();
1416 assert_eq!(
1417 unreviewed_hunks(&action_log, cx),
1418 vec![(
1419 buffer.clone(),
1420 vec![HunkStatus {
1421 range: Point::new(0, 0)..Point::new(0, 6),
1422 diff_status: DiffHunkStatusKind::Added,
1423 old_text: "".into(),
1424 }],
1425 )]
1426 );
1427
1428 action_log.update(cx, |log, cx| {
1429 log.keep_edits_in_range(buffer.clone(), 0..5, cx)
1430 });
1431 cx.run_until_parked();
1432 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1433 }
1434
1435 #[gpui::test(iterations = 10)]
1436 async fn test_overwriting_files(cx: &mut TestAppContext) {
1437 init_test(cx);
1438
1439 let fs = FakeFs::new(cx.executor());
1440 fs.insert_tree(
1441 path!("/dir"),
1442 json!({
1443 "file1": "Lorem ipsum dolor"
1444 }),
1445 )
1446 .await;
1447 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1448 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1449 let file_path = project
1450 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1451 .unwrap();
1452
1453 let buffer = project
1454 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1455 .await
1456 .unwrap();
1457 cx.update(|cx| {
1458 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1459 buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
1460 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1461 });
1462 project
1463 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1464 .await
1465 .unwrap();
1466 cx.run_until_parked();
1467 assert_eq!(
1468 unreviewed_hunks(&action_log, cx),
1469 vec![(
1470 buffer.clone(),
1471 vec![HunkStatus {
1472 range: Point::new(0, 0)..Point::new(0, 19),
1473 diff_status: DiffHunkStatusKind::Added,
1474 old_text: "".into(),
1475 }],
1476 )]
1477 );
1478
1479 action_log
1480 .update(cx, |log, cx| {
1481 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
1482 })
1483 .await
1484 .unwrap();
1485 cx.run_until_parked();
1486 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1487 assert_eq!(
1488 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1489 "Lorem ipsum dolor"
1490 );
1491 }
1492
    // An agent first appends to a previously-read file (tracked as a
    // modification), then overwrites it via `buffer_created` (tracked as an
    // addition). Rejecting the addition restores the file's original on-disk
    // text, not the intermediate appended state.
    #[gpui::test(iterations = 10)]
    async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        // Read, then append: reported as a Modified hunk covering the line.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 37),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "Lorem ipsum dolor".into(),
                }],
            )]
        );

        // Overwrite the same buffer as a tool-driven recreation; the hunk
        // switches to Added with empty old text.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 9),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting a range overlapping the hunk brings back the original text.
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }
1572
    // Covers deletion tracking for two files: pending deletions produce hunks,
    // an external recreation drops the stale deletion hunk, a tool recreation
    // is tracked as an addition, and an external deletion of the tool-created
    // file clears its hunk.
    #[gpui::test(iterations = 10)]
    async fn test_deleting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file1": "lorem\n", "file2": "ipsum\n"}),
        )
        .await;

        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let file1_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();
        let file2_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
            .unwrap();

        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let buffer1 = project
            .update(cx, |project, cx| {
                project.open_buffer(file1_path.clone(), cx)
            })
            .await
            .unwrap();
        let buffer2 = project
            .update(cx, |project, cx| {
                project.open_buffer(file2_path.clone(), cx)
            })
            .await
            .unwrap();

        // Announce both deletions to the log, then actually delete the files.
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| {
                project.delete_file(file1_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        project
            .update(cx, |project, cx| {
                project.delete_file(file2_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        // Each deleted file yields one zero-width Deleted hunk holding its old text.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![
                (
                    buffer1.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "lorem\n".into(),
                    }]
                ),
                (
                    buffer2.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "ipsum\n".into(),
                    }],
                )
            ]
        );

        // Simulate file1 being recreated externally.
        fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
            .await;

        // Simulate file2 being recreated by a tool.
        let buffer2 = project
            .update(cx, |project, cx| project.open_buffer(file2_path, cx))
            .await
            .unwrap();
        action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
        buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
        action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();
        // file1's stale deletion hunk is gone; only file2's addition remains.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer2.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Simulate file2 being deleted externally.
        fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1682
    // Exercises `reject_edits_in_ranges`: ranges overlapping no hunk are
    // ignored, and rejecting a range reverts only the hunks it touches.
    #[gpui::test(iterations = 10)]
    async fn test_reject_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Two agent edits: a multi-line replacement (adds a line) and a
        // single-character change on the last line.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // If the rejected range doesn't overlap with any hunk, we ignore it.
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        // Buffer and hunks are unchanged after the no-op rejection.
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Rejecting a range touching the first hunk reverts only that hunk,
        // restoring "def" and shifting the second hunk up to row 4.
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(1, 0)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(4, 0)..Point::new(4, 3),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "mno".into(),
                }],
            )]
        );

        // The same zero-width range at (4, 0) now overlaps the remaining hunk,
        // so rejecting it restores the original buffer text.
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1817
    // Rejecting several anchor ranges in one call reverts all matching hunks;
    // the buffer text is restored synchronously, before the returned task
    // (detached here) resolves.
    #[gpui::test(iterations = 10)]
    async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Two agent edits producing two distinct hunks.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Reject both hunks via anchor ranges in a single call.
        action_log.update(cx, |log, cx| {
            let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
                ..buffer.read(cx).anchor_before(Point::new(1, 0));
            let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
                ..buffer.read(cx).anchor_before(Point::new(5, 3));

            log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], cx)
                .detach();
            // The reversion is applied synchronously, within this same update.
            assert_eq!(
                buffer.read_with(cx, |buffer, _| buffer.text()),
                "abc\ndef\nghi\njkl\nmno"
            );
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1893
1894 #[gpui::test(iterations = 10)]
1895 async fn test_reject_deleted_file(cx: &mut TestAppContext) {
1896 init_test(cx);
1897
1898 let fs = FakeFs::new(cx.executor());
1899 fs.insert_tree(path!("/dir"), json!({"file": "content"}))
1900 .await;
1901 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1902 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1903 let file_path = project
1904 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1905 .unwrap();
1906 let buffer = project
1907 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
1908 .await
1909 .unwrap();
1910
1911 cx.update(|cx| {
1912 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
1913 });
1914 project
1915 .update(cx, |project, cx| {
1916 project.delete_file(file_path.clone(), false, cx)
1917 })
1918 .unwrap()
1919 .await
1920 .unwrap();
1921 cx.run_until_parked();
1922 assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
1923 assert_eq!(
1924 unreviewed_hunks(&action_log, cx),
1925 vec![(
1926 buffer.clone(),
1927 vec![HunkStatus {
1928 range: Point::new(0, 0)..Point::new(0, 0),
1929 diff_status: DiffHunkStatusKind::Deleted,
1930 old_text: "content".into(),
1931 }]
1932 )]
1933 );
1934
1935 action_log
1936 .update(cx, |log, cx| {
1937 log.reject_edits_in_ranges(
1938 buffer.clone(),
1939 vec![Point::new(0, 0)..Point::new(0, 0)],
1940 cx,
1941 )
1942 })
1943 .await
1944 .unwrap();
1945 cx.run_until_parked();
1946 assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
1947 assert!(fs.is_file(path!("/dir/file").as_ref()).await);
1948 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1949 }
1950
1951 #[gpui::test(iterations = 10)]
1952 async fn test_reject_created_file(cx: &mut TestAppContext) {
1953 init_test(cx);
1954
1955 let fs = FakeFs::new(cx.executor());
1956 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1957 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1958 let file_path = project
1959 .read_with(cx, |project, cx| {
1960 project.find_project_path("dir/new_file", cx)
1961 })
1962 .unwrap();
1963 let buffer = project
1964 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1965 .await
1966 .unwrap();
1967 cx.update(|cx| {
1968 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1969 buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
1970 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1971 });
1972 project
1973 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1974 .await
1975 .unwrap();
1976 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
1977 cx.run_until_parked();
1978 assert_eq!(
1979 unreviewed_hunks(&action_log, cx),
1980 vec![(
1981 buffer.clone(),
1982 vec![HunkStatus {
1983 range: Point::new(0, 0)..Point::new(0, 7),
1984 diff_status: DiffHunkStatusKind::Added,
1985 old_text: "".into(),
1986 }],
1987 )]
1988 );
1989
1990 action_log
1991 .update(cx, |log, cx| {
1992 log.reject_edits_in_ranges(
1993 buffer.clone(),
1994 vec![Point::new(0, 0)..Point::new(0, 11)],
1995 cx,
1996 )
1997 })
1998 .await
1999 .unwrap();
2000 cx.run_until_parked();
2001 assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
2002 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2003 }
2004
2005 #[gpui::test]
2006 async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
2007 init_test(cx);
2008
2009 let fs = FakeFs::new(cx.executor());
2010 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2011 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2012
2013 let file_path = project
2014 .read_with(cx, |project, cx| {
2015 project.find_project_path("dir/new_file", cx)
2016 })
2017 .unwrap();
2018 let buffer = project
2019 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2020 .await
2021 .unwrap();
2022
2023 // AI creates file with initial content
2024 cx.update(|cx| {
2025 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2026 buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
2027 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2028 });
2029
2030 project
2031 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2032 .await
2033 .unwrap();
2034
2035 cx.run_until_parked();
2036
2037 // User makes additional edits
2038 cx.update(|cx| {
2039 buffer.update(cx, |buffer, cx| {
2040 buffer.edit([(10..10, "\nuser added this line")], None, cx);
2041 });
2042 });
2043
2044 project
2045 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2046 .await
2047 .unwrap();
2048
2049 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2050
2051 // Reject all
2052 action_log
2053 .update(cx, |log, cx| {
2054 log.reject_edits_in_ranges(
2055 buffer.clone(),
2056 vec![Point::new(0, 0)..Point::new(100, 0)],
2057 cx,
2058 )
2059 })
2060 .await
2061 .unwrap();
2062 cx.run_until_parked();
2063
2064 // File should still contain all the content
2065 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2066
2067 let content = buffer.read_with(cx, |buffer, _| buffer.text());
2068 assert_eq!(content, "ai content\nuser added this line");
2069 }
2070
    // After the user accepts the hunk for a newly-created file, a later agent
    // edit that gets rejected restores the previously accepted contents ("v1")
    // rather than deleting the file.
    #[gpui::test]
    async fn test_reject_after_accepting_hunk_on_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User accepts the single hunk
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Anchor::MIN..Anchor::MAX, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // AI modifies the file
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User rejects the hunk
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(buffer.clone(), vec![Anchor::MIN..Anchor::MAX], cx)
            })
            .await
            .unwrap();
        cx.run_until_parked();
        // The file still exists on disk with the accepted v1 contents.
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await,);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "ai content v1"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2137
    // Same shape as the previous test, but using the bulk `keep_all_edits` and
    // `reject_all_edits` entry points: "Reject All" after "Accept All" must
    // restore the accepted contents ("v1"), keeping the file on disk.
    #[gpui::test]
    async fn test_reject_edits_on_previously_accepted_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();

        // User clicks "Accept All"
        action_log.update(cx, |log, cx| log.keep_all_edits(cx));
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared

        // AI modifies file again
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User clicks "Reject All"
        action_log
            .update(cx, |log, cx| log.reject_all_edits(cx))
            .await;
        cx.run_until_parked();
        // File remains on disk with the accepted v1 contents, not v2.
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "ai content v1"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2198
    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        init_test(cx);

        // Number of random operations per test iteration; overridable via the
        // `OPERATIONS` environment variable.
        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        // Seed the file with random text and start tracking the buffer.
        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

        // Randomly interleave keeps, rejects, and agent/user edits.
        for _ in 0..operations {
            match rng.random_range(0..100) {
                // ~25%: keep (accept) edits intersecting a random byte range.
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, cx)
                    });
                }
                // ~25%: reject edits intersecting a random byte range.
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            log.reject_edits_in_ranges(buffer.clone(), vec![range], cx)
                        })
                        .await
                        .unwrap();
                }
                // ~50%: perform a random edit. Agent edits are reported via
                // `buffer_edited`; user edits are not, so the log must detect
                // them on its own.
                _ => {
                    let is_agent_edit = rng.random_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            // Occasionally settle and validate invariants mid-run.
            if rng.random_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

        // Waits for background work to finish, then checks the core invariant:
        // replaying the tracked unreviewed edits onto the diff base must
        // reproduce the buffer's current text exactly.
        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
                // Edits are expressed in new-buffer row coordinates, so
                // translate each row range back into offsets within
                // `old_text` before splicing in the new rows.
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }
2290
    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        // HEAD starts out identical to the working copy.
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt".into(), "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent makes five edits spanning the start, middle, and end of the
        // file, covering all three hunk kinds (modify, delete, add).
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        // All five edits are initially unreviewed.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt".into(), "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        // Committed-as-is edits are auto-kept; the other three remain.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Make another commit that accepts the GGG edit, but commits the NEW
        // line with different content (DIFFERENT), so the NEW hunk must stay
        // unreviewed.
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[(
                "file.txt".into(),
                "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into(),
            )],
            "0000002",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer,
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt".into(), "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2455
    /// Plain-data snapshot of a single diff hunk, used by tests to assert the
    /// action log's unreviewed-edit state with `assert_eq!`.
    #[derive(Debug, Clone, PartialEq, Eq)]
    struct HunkStatus {
        /// Row range the hunk occupies in the current buffer.
        range: Range<Point>,
        /// Whether the hunk added, deleted, or modified text.
        diff_status: DiffHunkStatusKind,
        /// The replaced text, taken from the diff's base text.
        old_text: String,
    }
2462
2463 fn unreviewed_hunks(
2464 action_log: &Entity<ActionLog>,
2465 cx: &TestAppContext,
2466 ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
2467 cx.read(|cx| {
2468 action_log
2469 .read(cx)
2470 .changed_buffers(cx)
2471 .into_iter()
2472 .map(|(buffer, diff)| {
2473 let snapshot = buffer.read(cx).snapshot();
2474 (
2475 buffer,
2476 diff.read(cx)
2477 .hunks(&snapshot, cx)
2478 .map(|hunk| HunkStatus {
2479 diff_status: hunk.status().kind,
2480 range: hunk.range,
2481 old_text: diff
2482 .read(cx)
2483 .base_text()
2484 .text_for_range(hunk.diff_base_byte_range)
2485 .collect(),
2486 })
2487 .collect(),
2488 )
2489 })
2490 .collect()
2491 })
2492 }
2493
    #[gpui::test]
    async fn test_format_patch(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"test.txt": "line 1\nline 2\nline 3\n"}),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/test.txt", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            // Track the buffer and mark it as read first
            action_log.update(cx, |log, cx| {
                log.buffer_read(buffer.clone(), cx);
            });

            // Edit the buffer WITHOUT calling `buffer_edited`, so the change
            // counts as a user edit the model has not been told about.
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 0)..Point::new(1, 6), "CHANGED")], None, cx)
                    .unwrap(); // Replace "line 2" with "CHANGED"
            });
        });

        cx.run_until_parked();

        // Get the patch of unnotified user edits
        let patch = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));

        // Verify the patch format contains expected unified diff elements
        assert_eq!(
            patch.unwrap(),
            indoc! {"
                --- a/dir/test.txt
                +++ b/dir/test.txt
                @@ -1,3 +1,3 @@
                 line 1
                -line 2
                +CHANGED
                 line 3
            "}
        );
    }
2550}