use anyhow::{Context as _, Result};
use buffer_diff::BufferDiff;
use clock;
use collections::BTreeMap;
use futures::{FutureExt, StreamExt, channel::mpsc};
use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
use std::{cmp, ops::Range, sync::Arc};
use text::{Edit, Patch, Rope};
use util::{RangeExt, ResultExt as _};
12
/// Tracks actions performed by tools in an agent thread.
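///
/// Each tracked buffer carries a diff of unreviewed agent edits that the UI
/// can surface for review, plus enough state to notify the model about edits
/// the user makes on top of them.
///
/// A minimal usage sketch (hypothetical setup; assumes a `Project` entity and
/// a gpui test context, as in the tests at the bottom of this file):
///
/// ```ignore
/// let action_log = cx.new(|_| ActionLog::new(project.clone()));
/// // The agent reads a buffer, then edits it through some tool...
/// action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
/// action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
/// // ...and the buffers with unreviewed edits can be listed for review:
/// let changed = action_log.read_with(cx, |log, cx| log.changed_buffers(cx));
/// ```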
pub struct ActionLog {
    /// Buffers that we want to notify the model about when they change.
    tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
    /// The project this action log is associated with.
    project: Entity<Project>,
}
20
21impl ActionLog {
22 /// Creates a new, empty action log associated with the given project.
23 pub fn new(project: Entity<Project>) -> Self {
24 Self {
25 tracked_buffers: BTreeMap::default(),
26 project,
27 }
28 }
29
30 pub fn project(&self) -> &Entity<Project> {
31 &self.project
32 }
33
34 pub fn latest_snapshot(&self, buffer: &Entity<Buffer>) -> Option<text::BufferSnapshot> {
35 Some(self.tracked_buffers.get(buffer)?.snapshot.clone())
36 }
37
    /// Returns a unified diff patch containing user edits made since the last read or notification.
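    ///
    /// Returns `None` when no tracked buffer has unnotified user edits.
    /// Otherwise, each buffer contributes a `--- a/<path>` / `+++ b/<path>`
    /// header pair followed by its hunks, and the per-buffer patches are
    /// joined by blank lines.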
39 pub fn unnotified_user_edits(&self, cx: &Context<Self>) -> Option<String> {
40 let diffs = self
41 .tracked_buffers
42 .values()
43 .filter_map(|tracked| {
44 if !tracked.may_have_unnotified_user_edits {
45 return None;
46 }
47
48 let text_with_latest_user_edits = tracked.diff_base.to_string();
49 let text_with_last_seen_user_edits = tracked.last_seen_base.to_string();
50 if text_with_latest_user_edits == text_with_last_seen_user_edits {
51 return None;
52 }
53 let patch = language::unified_diff(
54 &text_with_last_seen_user_edits,
55 &text_with_latest_user_edits,
56 );
57
58 let buffer = tracked.buffer.clone();
59 let file_path = buffer
60 .read(cx)
61 .file()
62 .map(|file| {
63 let mut path = file.full_path(cx).to_string_lossy().into_owned();
64 if file.path_style(cx).is_windows() {
65 path = path.replace('\\', "/");
66 }
67 path
68 })
69 .unwrap_or_else(|| format!("buffer_{}", buffer.entity_id()));
70
71 let mut result = String::new();
72 result.push_str(&format!("--- a/{}\n", file_path));
73 result.push_str(&format!("+++ b/{}\n", file_path));
74 result.push_str(&patch);
75
76 Some(result)
77 })
78 .collect::<Vec<_>>();
79
80 if diffs.is_empty() {
81 return None;
82 }
83
84 let unified_diff = diffs.join("\n\n");
85 Some(unified_diff)
86 }
87
    /// Returns a unified diff patch with user edits made since the last read or
    /// notification, and marks those edits as notified.
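    ///
    /// Unlike [`Self::unnotified_user_edits`], this also resets the last-seen
    /// base for every tracked buffer, so a subsequent call only reports user
    /// edits made after this point.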
90 pub fn flush_unnotified_user_edits(&mut self, cx: &Context<Self>) -> Option<String> {
91 let patch = self.unnotified_user_edits(cx);
92 self.tracked_buffers.values_mut().for_each(|tracked| {
93 tracked.may_have_unnotified_user_edits = false;
94 tracked.last_seen_base = tracked.diff_base.clone();
95 });
96 patch
97 }
98
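    /// Starts tracking `buffer` (or refreshes an existing entry), registering it
    /// with the language servers and spawning a background task that keeps its
    /// [`BufferDiff`] up to date. When `is_created` is true, the previous file
    /// content (if any) is retained so a later rejection can restore it.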
99 fn track_buffer_internal(
100 &mut self,
101 buffer: Entity<Buffer>,
102 is_created: bool,
103 cx: &mut Context<Self>,
104 ) -> &mut TrackedBuffer {
105 let status = if is_created {
106 if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
107 match tracked.status {
108 TrackedBufferStatus::Created {
109 existing_file_content,
110 } => TrackedBufferStatus::Created {
111 existing_file_content,
112 },
113 TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
114 TrackedBufferStatus::Created {
115 existing_file_content: Some(tracked.diff_base),
116 }
117 }
118 }
119 } else if buffer
120 .read(cx)
121 .file()
122 .is_some_and(|file| file.disk_state().exists())
123 {
124 TrackedBufferStatus::Created {
125 existing_file_content: Some(buffer.read(cx).as_rope().clone()),
126 }
127 } else {
128 TrackedBufferStatus::Created {
129 existing_file_content: None,
130 }
131 }
132 } else {
133 TrackedBufferStatus::Modified
134 };
135
136 let tracked_buffer = self
137 .tracked_buffers
138 .entry(buffer.clone())
139 .or_insert_with(|| {
140 let open_lsp_handle = self.project.update(cx, |project, cx| {
141 project.register_buffer_with_language_servers(&buffer, cx)
142 });
143
144 let text_snapshot = buffer.read(cx).text_snapshot();
145 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
146 let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
147 let diff_base;
148 let last_seen_base;
149 let unreviewed_edits;
150 if is_created {
151 diff_base = Rope::default();
152 last_seen_base = Rope::default();
153 unreviewed_edits = Patch::new(vec![Edit {
154 old: 0..1,
155 new: 0..text_snapshot.max_point().row + 1,
156 }])
157 } else {
158 diff_base = buffer.read(cx).as_rope().clone();
159 last_seen_base = diff_base.clone();
160 unreviewed_edits = Patch::default();
161 }
162 TrackedBuffer {
163 buffer: buffer.clone(),
164 diff_base,
165 last_seen_base,
166 unreviewed_edits,
167 snapshot: text_snapshot,
168 status,
169 version: buffer.read(cx).version(),
170 diff,
171 diff_update: diff_update_tx,
172 may_have_unnotified_user_edits: false,
173 _open_lsp_handle: open_lsp_handle,
174 _maintain_diff: cx.spawn({
175 let buffer = buffer.clone();
176 async move |this, cx| {
177 Self::maintain_diff(this, buffer, diff_update_rx, cx)
178 .await
179 .ok();
180 }
181 }),
182 _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
183 }
184 });
185 tracked_buffer.version = buffer.read(cx).version();
186 tracked_buffer
187 }
188
189 fn handle_buffer_event(
190 &mut self,
191 buffer: Entity<Buffer>,
192 event: &BufferEvent,
193 cx: &mut Context<Self>,
194 ) {
195 match event {
196 BufferEvent::Edited => self.handle_buffer_edited(buffer, cx),
197 BufferEvent::FileHandleChanged => {
198 self.handle_buffer_file_changed(buffer, cx);
199 }
200 _ => {}
201 };
202 }
203
204 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
205 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
206 return;
207 };
208 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
209 }
210
211 fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
212 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
213 return;
214 };
215
216 match tracked_buffer.status {
217 TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
218 if buffer
219 .read(cx)
220 .file()
221 .is_some_and(|file| file.disk_state() == DiskState::Deleted)
222 {
223 // If the buffer had been edited by a tool, but it got
224 // deleted externally, we want to stop tracking it.
225 self.tracked_buffers.remove(&buffer);
226 }
227 cx.notify();
228 }
229 TrackedBufferStatus::Deleted => {
230 if buffer
231 .read(cx)
232 .file()
233 .is_some_and(|file| file.disk_state() != DiskState::Deleted)
234 {
235 // If the buffer had been deleted by a tool, but it got
236 // resurrected externally, we want to clear the edits we
237 // were tracking and reset the buffer's state.
238 self.tracked_buffers.remove(&buffer);
239 self.track_buffer_internal(buffer, false, cx);
240 }
241 cx.notify();
242 }
243 }
244 }
245
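    /// Long-running task owned by each [`TrackedBuffer`]: applies buffer
    /// snapshots received on `buffer_updates` to rebase and re-diff the tracked
    /// buffer, and watches the repository's head commit so that unreviewed
    /// agent edits the user commits unchanged are folded into the diff base.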
246 async fn maintain_diff(
247 this: WeakEntity<Self>,
248 buffer: Entity<Buffer>,
249 mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
250 cx: &mut AsyncApp,
251 ) -> Result<()> {
252 let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
253 let git_diff = this
254 .update(cx, |this, cx| {
255 this.project.update(cx, |project, cx| {
256 project.open_uncommitted_diff(buffer.clone(), cx)
257 })
258 })?
259 .await
260 .ok();
261 let buffer_repo = git_store.read_with(cx, |git_store, cx| {
262 git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
263 })?;
264
265 let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
266 let _repo_subscription =
267 if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
268 cx.update(|cx| {
269 let mut old_head = buffer_repo.read(cx).head_commit.clone();
270 Some(cx.subscribe(git_diff, move |_, event, cx| {
271 if let buffer_diff::BufferDiffEvent::DiffChanged { .. } = event {
272 let new_head = buffer_repo.read(cx).head_commit.clone();
273 if new_head != old_head {
274 old_head = new_head;
275 git_diff_updates_tx.send(()).ok();
276 }
277 }
278 }))
279 })?
280 } else {
281 None
282 };
283
284 loop {
285 futures::select_biased! {
286 buffer_update = buffer_updates.next() => {
287 if let Some((author, buffer_snapshot)) = buffer_update {
288 Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
289 } else {
290 break;
291 }
292 }
293 _ = git_diff_updates_rx.changed().fuse() => {
294 if let Some(git_diff) = git_diff.as_ref() {
295 Self::keep_committed_edits(&this, &buffer, git_diff, cx).await?;
296 }
297 }
298 }
299 }
300
301 Ok(())
302 }
303
304 async fn track_edits(
305 this: &WeakEntity<ActionLog>,
306 buffer: &Entity<Buffer>,
307 author: ChangeAuthor,
308 buffer_snapshot: text::BufferSnapshot,
309 cx: &mut AsyncApp,
310 ) -> Result<()> {
311 let rebase = this.update(cx, |this, cx| {
312 let tracked_buffer = this
313 .tracked_buffers
314 .get_mut(buffer)
315 .context("buffer not tracked")?;
316
317 let rebase = cx.background_spawn({
318 let mut base_text = tracked_buffer.diff_base.clone();
319 let old_snapshot = tracked_buffer.snapshot.clone();
320 let new_snapshot = buffer_snapshot.clone();
321 let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
322 let edits = diff_snapshots(&old_snapshot, &new_snapshot);
323 let mut has_user_changes = false;
324 async move {
325 if let ChangeAuthor::User = author {
326 has_user_changes = apply_non_conflicting_edits(
327 &unreviewed_edits,
328 edits,
329 &mut base_text,
330 new_snapshot.as_rope(),
331 );
332 }
333
334 (Arc::new(base_text.to_string()), base_text, has_user_changes)
335 }
336 });
337
338 anyhow::Ok(rebase)
339 })??;
340 let (new_base_text, new_diff_base, has_user_changes) = rebase.await;
341
342 this.update(cx, |this, _| {
343 let tracked_buffer = this
344 .tracked_buffers
345 .get_mut(buffer)
346 .context("buffer not tracked")
347 .unwrap();
348 tracked_buffer.may_have_unnotified_user_edits |= has_user_changes;
349 })?;
350
351 Self::update_diff(
352 this,
353 buffer,
354 buffer_snapshot,
355 new_base_text,
356 new_diff_base,
357 cx,
358 )
359 .await
360 }
361
362 async fn keep_committed_edits(
363 this: &WeakEntity<ActionLog>,
364 buffer: &Entity<Buffer>,
365 git_diff: &Entity<BufferDiff>,
366 cx: &mut AsyncApp,
367 ) -> Result<()> {
368 let buffer_snapshot = this.read_with(cx, |this, _cx| {
369 let tracked_buffer = this
370 .tracked_buffers
371 .get(buffer)
372 .context("buffer not tracked")?;
373 anyhow::Ok(tracked_buffer.snapshot.clone())
374 })??;
375 let (new_base_text, new_diff_base) = this
376 .read_with(cx, |this, cx| {
377 let tracked_buffer = this
378 .tracked_buffers
379 .get(buffer)
380 .context("buffer not tracked")?;
381 let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
382 let agent_diff_base = tracked_buffer.diff_base.clone();
383 let git_diff_base = git_diff.read(cx).base_text().as_rope().clone();
384 let buffer_text = tracked_buffer.snapshot.as_rope().clone();
385 anyhow::Ok(cx.background_spawn(async move {
386 let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
387 let committed_edits = language::line_diff(
388 &agent_diff_base.to_string(),
389 &git_diff_base.to_string(),
390 )
391 .into_iter()
392 .map(|(old, new)| Edit { old, new });
393
394 let mut new_agent_diff_base = agent_diff_base.clone();
395 let mut row_delta = 0i32;
396 for committed in committed_edits {
397 while let Some(unreviewed) = old_unreviewed_edits.peek() {
398 // If the committed edit matches the unreviewed
399 // edit, assume the user wants to keep it.
400 if committed.old == unreviewed.old {
401 let unreviewed_new =
402 buffer_text.slice_rows(unreviewed.new.clone()).to_string();
403 let committed_new =
404 git_diff_base.slice_rows(committed.new.clone()).to_string();
405 if unreviewed_new == committed_new {
406 let old_byte_start =
407 new_agent_diff_base.point_to_offset(Point::new(
408 (unreviewed.old.start as i32 + row_delta) as u32,
409 0,
410 ));
411 let old_byte_end =
412 new_agent_diff_base.point_to_offset(cmp::min(
413 Point::new(
414 (unreviewed.old.end as i32 + row_delta) as u32,
415 0,
416 ),
417 new_agent_diff_base.max_point(),
418 ));
419 new_agent_diff_base
420 .replace(old_byte_start..old_byte_end, &unreviewed_new);
421 row_delta +=
422 unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
423 }
424 } else if unreviewed.old.start >= committed.old.end {
425 break;
426 }
427
428 old_unreviewed_edits.next().unwrap();
429 }
430 }
431
432 (
433 Arc::new(new_agent_diff_base.to_string()),
434 new_agent_diff_base,
435 )
436 }))
437 })??
438 .await;
439
440 Self::update_diff(
441 this,
442 buffer,
443 buffer_snapshot,
444 new_base_text,
445 new_diff_base,
446 cx,
447 )
448 .await
449 }
450
451 async fn update_diff(
452 this: &WeakEntity<ActionLog>,
453 buffer: &Entity<Buffer>,
454 buffer_snapshot: text::BufferSnapshot,
455 new_base_text: Arc<String>,
456 new_diff_base: Rope,
457 cx: &mut AsyncApp,
458 ) -> Result<()> {
459 let (diff, language, language_registry) = this.read_with(cx, |this, cx| {
460 let tracked_buffer = this
461 .tracked_buffers
462 .get(buffer)
463 .context("buffer not tracked")?;
464 anyhow::Ok((
465 tracked_buffer.diff.clone(),
466 buffer.read(cx).language().cloned(),
467 buffer.read(cx).language_registry(),
468 ))
469 })??;
470 let diff_snapshot = BufferDiff::update_diff(
471 diff.clone(),
472 buffer_snapshot.clone(),
473 Some(new_base_text),
474 true,
475 false,
476 language,
477 language_registry,
478 cx,
479 )
480 .await;
481 let mut unreviewed_edits = Patch::default();
482 if let Ok(diff_snapshot) = diff_snapshot {
483 unreviewed_edits = cx
484 .background_spawn({
485 let diff_snapshot = diff_snapshot.clone();
486 let buffer_snapshot = buffer_snapshot.clone();
487 let new_diff_base = new_diff_base.clone();
488 async move {
489 let mut unreviewed_edits = Patch::default();
490 for hunk in diff_snapshot
491 .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer_snapshot)
492 {
493 let old_range = new_diff_base
494 .offset_to_point(hunk.diff_base_byte_range.start)
495 ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
496 let new_range = hunk.range.start..hunk.range.end;
497 unreviewed_edits.push(point_to_row_edit(
498 Edit {
499 old: old_range,
500 new: new_range,
501 },
502 &new_diff_base,
503 buffer_snapshot.as_rope(),
504 ));
505 }
506 unreviewed_edits
507 }
508 })
509 .await;
510
511 diff.update(cx, |diff, cx| {
512 diff.set_snapshot(diff_snapshot, &buffer_snapshot, cx);
513 })?;
514 }
515 this.update(cx, |this, cx| {
516 let tracked_buffer = this
517 .tracked_buffers
518 .get_mut(buffer)
519 .context("buffer not tracked")?;
520 tracked_buffer.diff_base = new_diff_base;
521 tracked_buffer.snapshot = buffer_snapshot;
522 tracked_buffer.unreviewed_edits = unreviewed_edits;
523 cx.notify();
524 anyhow::Ok(())
525 })?
526 }
527
    /// Tracks a buffer as read by the agent, so we can notify the model about user edits.
529 pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
530 self.track_buffer_internal(buffer, false, cx);
531 }
532
    /// Marks a buffer as created by the agent, so we can refresh it in the context.
534 pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
535 self.track_buffer_internal(buffer, true, cx);
536 }
537
    /// Marks a buffer as edited by the agent, so we can refresh it in the context.
539 pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
540 let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
541 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
542 tracked_buffer.status = TrackedBufferStatus::Modified;
543 }
544 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
545 }
546
547 pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
548 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
549 match tracked_buffer.status {
550 TrackedBufferStatus::Created { .. } => {
551 self.tracked_buffers.remove(&buffer);
552 cx.notify();
553 }
554 TrackedBufferStatus::Modified => {
555 buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
556 tracked_buffer.status = TrackedBufferStatus::Deleted;
557 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
558 }
559 TrackedBufferStatus::Deleted => {}
560 }
561 cx.notify();
562 }
563
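    /// Marks the agent edits that intersect `buffer_range` as reviewed, folding
    /// them into the diff base so they no longer show up as unreviewed hunks.
    ///
    /// A rough sketch, mirroring the tests below (assumes a gpui test context):
    ///
    /// ```ignore
    /// action_log.update(cx, |log, cx| {
    ///     log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), cx)
    /// });
    /// ```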
564 pub fn keep_edits_in_range(
565 &mut self,
566 buffer: Entity<Buffer>,
567 buffer_range: Range<impl language::ToPoint>,
568 cx: &mut Context<Self>,
569 ) {
570 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
571 return;
572 };
573
574 match tracked_buffer.status {
575 TrackedBufferStatus::Deleted => {
576 self.tracked_buffers.remove(&buffer);
577 cx.notify();
578 }
579 _ => {
580 let buffer = buffer.read(cx);
581 let buffer_range =
582 buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
583 let mut delta = 0i32;
584
585 tracked_buffer.unreviewed_edits.retain_mut(|edit| {
586 edit.old.start = (edit.old.start as i32 + delta) as u32;
587 edit.old.end = (edit.old.end as i32 + delta) as u32;
588
589 if buffer_range.end.row < edit.new.start
590 || buffer_range.start.row > edit.new.end
591 {
592 true
593 } else {
594 let old_range = tracked_buffer
595 .diff_base
596 .point_to_offset(Point::new(edit.old.start, 0))
597 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
598 Point::new(edit.old.end, 0),
599 tracked_buffer.diff_base.max_point(),
600 ));
601 let new_range = tracked_buffer
602 .snapshot
603 .point_to_offset(Point::new(edit.new.start, 0))
604 ..tracked_buffer.snapshot.point_to_offset(cmp::min(
605 Point::new(edit.new.end, 0),
606 tracked_buffer.snapshot.max_point(),
607 ));
608 tracked_buffer.diff_base.replace(
609 old_range,
610 &tracked_buffer
611 .snapshot
612 .text_for_range(new_range)
613 .collect::<String>(),
614 );
615 delta += edit.new_len() as i32 - edit.old_len() as i32;
616 false
617 }
618 });
619 if tracked_buffer.unreviewed_edits.is_empty()
620 && let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status
621 {
622 tracked_buffer.status = TrackedBufferStatus::Modified;
623 }
624 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
625 }
626 }
627 }
628
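    /// Reverts the agent edits that intersect `buffer_ranges` and saves the
    /// buffer: files the agent created are deleted (if untouched by the user)
    /// or restored to their pre-existing content, files it deleted are
    /// restored, and modified files have the intersecting hunks reverted to
    /// the diff base.
    ///
    /// A rough sketch, mirroring the tests below (assumes a gpui test context):
    ///
    /// ```ignore
    /// action_log
    ///     .update(cx, |log, cx| {
    ///         log.reject_edits_in_ranges(
    ///             buffer.clone(),
    ///             vec![Point::new(0, 0)..Point::new(1, 0)],
    ///             cx,
    ///         )
    ///     })
    ///     .await
    ///     .unwrap();
    /// ```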
629 pub fn reject_edits_in_ranges(
630 &mut self,
631 buffer: Entity<Buffer>,
632 buffer_ranges: Vec<Range<impl language::ToPoint>>,
633 cx: &mut Context<Self>,
634 ) -> Task<Result<()>> {
635 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
636 return Task::ready(Ok(()));
637 };
638
639 match &tracked_buffer.status {
640 TrackedBufferStatus::Created {
641 existing_file_content,
642 } => {
643 let task = if let Some(existing_file_content) = existing_file_content {
644 buffer.update(cx, |buffer, cx| {
645 buffer.start_transaction();
646 buffer.set_text("", cx);
647 for chunk in existing_file_content.chunks() {
648 buffer.append(chunk, cx);
649 }
650 buffer.end_transaction(cx);
651 });
652 self.project
653 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
654 } else {
655 // For a file created by AI with no pre-existing content,
656 // only delete the file if we're certain it contains only AI content
657 // with no edits from the user.
658
659 let initial_version = tracked_buffer.version.clone();
660 let current_version = buffer.read(cx).version();
661
662 let current_content = buffer.read(cx).text();
663 let tracked_content = tracked_buffer.snapshot.text();
664
665 let is_ai_only_content =
666 initial_version == current_version && current_content == tracked_content;
667
668 if is_ai_only_content {
669 buffer
670 .read(cx)
671 .entry_id(cx)
672 .and_then(|entry_id| {
673 self.project.update(cx, |project, cx| {
674 project.delete_entry(entry_id, false, cx)
675 })
676 })
677 .unwrap_or(Task::ready(Ok(())))
678 } else {
679 // Not sure how to disentangle edits made by the user
680 // from edits made by the AI at this point.
681 // For now, preserve both to avoid data loss.
682 //
683 // TODO: Better solution (disable "Reject" after user makes some
684 // edit or find a way to differentiate between AI and user edits)
685 Task::ready(Ok(()))
686 }
687 };
688
689 self.tracked_buffers.remove(&buffer);
690 cx.notify();
691 task
692 }
693 TrackedBufferStatus::Deleted => {
694 buffer.update(cx, |buffer, cx| {
695 buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
696 });
697 let save = self
698 .project
699 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
700
701 // Clear all tracked edits for this buffer and start over as if we just read it.
702 self.tracked_buffers.remove(&buffer);
703 self.buffer_read(buffer.clone(), cx);
704 cx.notify();
705 save
706 }
707 TrackedBufferStatus::Modified => {
708 buffer.update(cx, |buffer, cx| {
709 let mut buffer_row_ranges = buffer_ranges
710 .into_iter()
711 .map(|range| {
712 range.start.to_point(buffer).row..range.end.to_point(buffer).row
713 })
714 .peekable();
715
716 let mut edits_to_revert = Vec::new();
717 for edit in tracked_buffer.unreviewed_edits.edits() {
718 let new_range = tracked_buffer
719 .snapshot
720 .anchor_before(Point::new(edit.new.start, 0))
721 ..tracked_buffer.snapshot.anchor_after(cmp::min(
722 Point::new(edit.new.end, 0),
723 tracked_buffer.snapshot.max_point(),
724 ));
725 let new_row_range = new_range.start.to_point(buffer).row
726 ..new_range.end.to_point(buffer).row;
727
728 let mut revert = false;
729 while let Some(buffer_row_range) = buffer_row_ranges.peek() {
730 if buffer_row_range.end < new_row_range.start {
731 buffer_row_ranges.next();
732 } else if buffer_row_range.start > new_row_range.end {
733 break;
734 } else {
735 revert = true;
736 break;
737 }
738 }
739
740 if revert {
741 let old_range = tracked_buffer
742 .diff_base
743 .point_to_offset(Point::new(edit.old.start, 0))
744 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
745 Point::new(edit.old.end, 0),
746 tracked_buffer.diff_base.max_point(),
747 ));
748 let old_text = tracked_buffer
749 .diff_base
750 .chunks_in_range(old_range)
751 .collect::<String>();
752 edits_to_revert.push((new_range, old_text));
753 }
754 }
755
756 buffer.edit(edits_to_revert, None, cx);
757 });
758 self.project
759 .update(cx, |project, cx| project.save_buffer(buffer, cx))
760 }
761 }
762 }
763
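    /// Marks every tracked edit as reviewed. Buffers the agent deleted are
    /// dropped from tracking; everything else has its diff base reset to the
    /// current buffer contents.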
764 pub fn keep_all_edits(&mut self, cx: &mut Context<Self>) {
765 self.tracked_buffers
766 .retain(|_buffer, tracked_buffer| match tracked_buffer.status {
767 TrackedBufferStatus::Deleted => false,
768 _ => {
769 if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
770 tracked_buffer.status = TrackedBufferStatus::Modified;
771 }
772 tracked_buffer.unreviewed_edits.clear();
773 tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
774 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
775 true
776 }
777 });
778 cx.notify();
779 }
780
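    /// Rejects every unreviewed edit by calling [`Self::reject_edits_in_ranges`]
    /// over the full range of each changed buffer. Failures for individual
    /// buffers are logged and ignored.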
781 pub fn reject_all_edits(&mut self, cx: &mut Context<Self>) -> Task<()> {
782 let futures = self.changed_buffers(cx).into_keys().map(|buffer| {
783 let reject = self.reject_edits_in_ranges(buffer, vec![Anchor::MIN..Anchor::MAX], cx);
784
785 async move {
786 reject.await.log_err();
787 }
788 });
789
790 let task = futures::future::join_all(futures);
791
792 cx.spawn(async move |_, _| {
793 task.await;
794 })
795 }
796
797 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
798 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
799 self.tracked_buffers
800 .iter()
801 .filter(|(_, tracked)| tracked.has_edits(cx))
802 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
803 .collect()
804 }
805
    /// Iterates over buffers that have changed since the model last read or edited them.
807 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
808 self.tracked_buffers
809 .iter()
810 .filter(|(buffer, tracked)| {
811 let buffer = buffer.read(cx);
812
813 tracked.version != buffer.version
814 && buffer
815 .file()
816 .is_some_and(|file| file.disk_state() != DiskState::Deleted)
817 })
818 .map(|(buffer, _)| buffer)
819 }
820}
821
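/// Applies the subset of `edits` (user edits, expressed against the current
/// buffer) that do not conflict with the unreviewed agent edits in `patch`,
/// rewriting `old_text` (the agent diff base) in place. Returns whether any
/// edit was applied.
///
/// For example, if `patch` contains an agent edit covering rows 2..3, a user
/// edit at row 10 is copied into `old_text`, while a user edit overlapping
/// rows 2..3 is skipped and remains part of the unreviewed diff.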
822fn apply_non_conflicting_edits(
823 patch: &Patch<u32>,
824 edits: Vec<Edit<u32>>,
825 old_text: &mut Rope,
826 new_text: &Rope,
827) -> bool {
828 let mut old_edits = patch.edits().iter().cloned().peekable();
829 let mut new_edits = edits.into_iter().peekable();
830 let mut applied_delta = 0i32;
831 let mut rebased_delta = 0i32;
832 let mut has_made_changes = false;
833
834 while let Some(mut new_edit) = new_edits.next() {
835 let mut conflict = false;
836
        // Consume all old edits that end before this new edit, and flag a conflict with any that intersect it.
838 while let Some(old_edit) = old_edits.peek() {
839 if new_edit.old.end < old_edit.new.start
840 || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
841 {
842 break;
843 } else if new_edit.old.start > old_edit.new.end
844 || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
845 {
846 let old_edit = old_edits.next().unwrap();
847 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
848 } else {
849 conflict = true;
850 if new_edits
851 .peek()
852 .is_some_and(|next_edit| next_edit.old.overlaps(&old_edit.new))
853 {
854 new_edit = new_edits.next().unwrap();
855 } else {
856 let old_edit = old_edits.next().unwrap();
857 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
858 }
859 }
860 }
861
862 if !conflict {
863 // This edit doesn't intersect with any old edit, so we can apply it to the old text.
864 new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
865 new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
866 let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
867 ..old_text.point_to_offset(cmp::min(
868 Point::new(new_edit.old.end, 0),
869 old_text.max_point(),
870 ));
871 let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
872 ..new_text.point_to_offset(cmp::min(
873 Point::new(new_edit.new.end, 0),
874 new_text.max_point(),
875 ));
876
877 old_text.replace(
878 old_bytes,
879 &new_text.chunks_in_range(new_bytes).collect::<String>(),
880 );
881 applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
882 has_made_changes = true;
883 }
884 }
885 has_made_changes
886}
887
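/// Computes the row-level edits between two snapshots of the same buffer,
/// merging adjacent or overlapping row ranges into a single edit.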
888fn diff_snapshots(
889 old_snapshot: &text::BufferSnapshot,
890 new_snapshot: &text::BufferSnapshot,
891) -> Vec<Edit<u32>> {
892 let mut edits = new_snapshot
893 .edits_since::<Point>(&old_snapshot.version)
894 .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
895 .peekable();
896 let mut row_edits = Vec::new();
897 while let Some(mut edit) = edits.next() {
898 while let Some(next_edit) = edits.peek() {
899 if edit.old.end >= next_edit.old.start {
900 edit.old.end = next_edit.old.end;
901 edit.new.end = next_edit.new.end;
902 edits.next();
903 } else {
904 break;
905 }
906 }
907 row_edits.push(edit);
908 }
909 row_edits
910}
911
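/// Converts a point-based edit into whole-row ranges. Insertions at the end of
/// a line whose new text begins with a newline are attributed to the following
/// row, edits aligned to row boundaries keep their exact row range, and
/// everything else is widened to include the final row.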
912fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
913 if edit.old.start.column == old_text.line_len(edit.old.start.row)
914 && new_text
915 .chars_at(new_text.point_to_offset(edit.new.start))
916 .next()
917 == Some('\n')
918 && edit.old.start != old_text.max_point()
919 {
920 Edit {
921 old: edit.old.start.row + 1..edit.old.end.row + 1,
922 new: edit.new.start.row + 1..edit.new.end.row + 1,
923 }
924 } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
925 Edit {
926 old: edit.old.start.row..edit.old.end.row,
927 new: edit.new.start.row..edit.new.end.row,
928 }
929 } else {
930 Edit {
931 old: edit.old.start.row..edit.old.end.row + 1,
932 new: edit.new.start.row..edit.new.end.row + 1,
933 }
934 }
935}
936
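/// Who authored a buffer change. User changes are rebased into the diff base;
/// agent changes become unreviewed hunks.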
937#[derive(Copy, Clone, Debug)]
938enum ChangeAuthor {
939 User,
940 Agent,
941}
942
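/// How the agent has affected a tracked buffer's file. For `Created`,
/// `existing_file_content` holds the previous file contents (if the agent
/// overwrote an existing file) so a rejection can restore them.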
943enum TrackedBufferStatus {
944 Created { existing_file_content: Option<Rope> },
945 Modified,
946 Deleted,
947}
948
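/// Per-buffer tracking state. `diff_base` is the buffer text with all reviewed
/// (and user-authored) changes applied, `last_seen_base` is the text as of the
/// last user-edit notification, and `unreviewed_edits` are the agent's
/// row-level edits relative to `diff_base`.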
949struct TrackedBuffer {
950 buffer: Entity<Buffer>,
951 diff_base: Rope,
952 last_seen_base: Rope,
953 unreviewed_edits: Patch<u32>,
954 status: TrackedBufferStatus,
955 version: clock::Global,
956 diff: Entity<BufferDiff>,
957 snapshot: text::BufferSnapshot,
958 diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
959 may_have_unnotified_user_edits: bool,
960 _open_lsp_handle: OpenLspBufferHandle,
961 _maintain_diff: Task<()>,
962 _subscription: Subscription,
963}
964
965impl TrackedBuffer {
966 fn has_edits(&self, cx: &App) -> bool {
967 self.diff
968 .read(cx)
969 .hunks(self.buffer.read(cx), cx)
970 .next()
971 .is_some()
972 }
973
974 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
975 self.diff_update
976 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
977 .ok();
978 }
979}
980
981pub struct ChangedBuffer {
982 pub diff: Entity<BufferDiff>,
983}
984
985#[cfg(test)]
986mod tests {
987 use super::*;
988 use buffer_diff::DiffHunkStatusKind;
989 use gpui::TestAppContext;
990 use indoc::indoc;
991 use language::Point;
992 use project::{FakeFs, Fs, Project, RemoveOptions};
993 use rand::prelude::*;
994 use serde_json::json;
995 use settings::SettingsStore;
996 use std::env;
997 use util::{RandomCharIter, path};
998
999 #[ctor::ctor]
1000 fn init_logger() {
1001 zlog::init_test();
1002 }
1003
1004 fn init_test(cx: &mut TestAppContext) {
1005 cx.update(|cx| {
1006 let settings_store = SettingsStore::test(cx);
1007 cx.set_global(settings_store);
1008 language::init(cx);
1009 Project::init_settings(cx);
1010 });
1011 }
1012
1013 #[gpui::test(iterations = 10)]
1014 async fn test_keep_edits(cx: &mut TestAppContext) {
1015 init_test(cx);
1016
1017 let fs = FakeFs::new(cx.executor());
1018 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1019 .await;
1020 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1021 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1022 let file_path = project
1023 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1024 .unwrap();
1025 let buffer = project
1026 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1027 .await
1028 .unwrap();
1029
1030 cx.update(|cx| {
1031 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1032 buffer.update(cx, |buffer, cx| {
1033 buffer
1034 .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
1035 .unwrap()
1036 });
1037 buffer.update(cx, |buffer, cx| {
1038 buffer
1039 .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
1040 .unwrap()
1041 });
1042 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1043 });
1044 cx.run_until_parked();
1045 assert_eq!(
1046 buffer.read_with(cx, |buffer, _| buffer.text()),
1047 "abc\ndEf\nghi\njkl\nmnO"
1048 );
1049 assert_eq!(
1050 unreviewed_hunks(&action_log, cx),
1051 vec![(
1052 buffer.clone(),
1053 vec![
1054 HunkStatus {
1055 range: Point::new(1, 0)..Point::new(2, 0),
1056 diff_status: DiffHunkStatusKind::Modified,
1057 old_text: "def\n".into(),
1058 },
1059 HunkStatus {
1060 range: Point::new(4, 0)..Point::new(4, 3),
1061 diff_status: DiffHunkStatusKind::Modified,
1062 old_text: "mno".into(),
1063 }
1064 ],
1065 )]
1066 );
1067
1068 action_log.update(cx, |log, cx| {
1069 log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), cx)
1070 });
1071 cx.run_until_parked();
1072 assert_eq!(
1073 unreviewed_hunks(&action_log, cx),
1074 vec![(
1075 buffer.clone(),
1076 vec![HunkStatus {
1077 range: Point::new(1, 0)..Point::new(2, 0),
1078 diff_status: DiffHunkStatusKind::Modified,
1079 old_text: "def\n".into(),
1080 }],
1081 )]
1082 );
1083
1084 action_log.update(cx, |log, cx| {
1085 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), cx)
1086 });
1087 cx.run_until_parked();
1088 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1089 }
1090
1091 #[gpui::test(iterations = 10)]
1092 async fn test_deletions(cx: &mut TestAppContext) {
1093 init_test(cx);
1094
1095 let fs = FakeFs::new(cx.executor());
1096 fs.insert_tree(
1097 path!("/dir"),
1098 json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
1099 )
1100 .await;
1101 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1102 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1103 let file_path = project
1104 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1105 .unwrap();
1106 let buffer = project
1107 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1108 .await
1109 .unwrap();
1110
1111 cx.update(|cx| {
1112 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1113 buffer.update(cx, |buffer, cx| {
1114 buffer
1115 .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
1116 .unwrap();
1117 buffer.finalize_last_transaction();
1118 });
1119 buffer.update(cx, |buffer, cx| {
1120 buffer
1121 .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
1122 .unwrap();
1123 buffer.finalize_last_transaction();
1124 });
1125 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1126 });
1127 cx.run_until_parked();
1128 assert_eq!(
1129 buffer.read_with(cx, |buffer, _| buffer.text()),
1130 "abc\nghi\njkl\npqr"
1131 );
1132 assert_eq!(
1133 unreviewed_hunks(&action_log, cx),
1134 vec![(
1135 buffer.clone(),
1136 vec![
1137 HunkStatus {
1138 range: Point::new(1, 0)..Point::new(1, 0),
1139 diff_status: DiffHunkStatusKind::Deleted,
1140 old_text: "def\n".into(),
1141 },
1142 HunkStatus {
1143 range: Point::new(3, 0)..Point::new(3, 0),
1144 diff_status: DiffHunkStatusKind::Deleted,
1145 old_text: "mno\n".into(),
1146 }
1147 ],
1148 )]
1149 );
1150
1151 buffer.update(cx, |buffer, cx| buffer.undo(cx));
1152 cx.run_until_parked();
1153 assert_eq!(
1154 buffer.read_with(cx, |buffer, _| buffer.text()),
1155 "abc\nghi\njkl\nmno\npqr"
1156 );
1157 assert_eq!(
1158 unreviewed_hunks(&action_log, cx),
1159 vec![(
1160 buffer.clone(),
1161 vec![HunkStatus {
1162 range: Point::new(1, 0)..Point::new(1, 0),
1163 diff_status: DiffHunkStatusKind::Deleted,
1164 old_text: "def\n".into(),
1165 }],
1166 )]
1167 );
1168
1169 action_log.update(cx, |log, cx| {
1170 log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), cx)
1171 });
1172 cx.run_until_parked();
1173 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1174 }
1175
1176 #[gpui::test(iterations = 10)]
1177 async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
1178 init_test(cx);
1179
1180 let fs = FakeFs::new(cx.executor());
1181 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1182 .await;
1183 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1184 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1185 let file_path = project
1186 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1187 .unwrap();
1188 let buffer = project
1189 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1190 .await
1191 .unwrap();
1192
1193 cx.update(|cx| {
1194 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1195 buffer.update(cx, |buffer, cx| {
1196 buffer
1197 .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
1198 .unwrap()
1199 });
1200 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1201 });
1202 cx.run_until_parked();
1203 assert_eq!(
1204 buffer.read_with(cx, |buffer, _| buffer.text()),
1205 "abc\ndeF\nGHI\njkl\nmno"
1206 );
1207 assert_eq!(
1208 unreviewed_hunks(&action_log, cx),
1209 vec![(
1210 buffer.clone(),
1211 vec![HunkStatus {
1212 range: Point::new(1, 0)..Point::new(3, 0),
1213 diff_status: DiffHunkStatusKind::Modified,
1214 old_text: "def\nghi\n".into(),
1215 }],
1216 )]
1217 );
1218
1219 buffer.update(cx, |buffer, cx| {
1220 buffer.edit(
1221 [
1222 (Point::new(0, 2)..Point::new(0, 2), "X"),
1223 (Point::new(3, 0)..Point::new(3, 0), "Y"),
1224 ],
1225 None,
1226 cx,
1227 )
1228 });
1229 cx.run_until_parked();
1230 assert_eq!(
1231 buffer.read_with(cx, |buffer, _| buffer.text()),
1232 "abXc\ndeF\nGHI\nYjkl\nmno"
1233 );
1234 assert_eq!(
1235 unreviewed_hunks(&action_log, cx),
1236 vec![(
1237 buffer.clone(),
1238 vec![HunkStatus {
1239 range: Point::new(1, 0)..Point::new(3, 0),
1240 diff_status: DiffHunkStatusKind::Modified,
1241 old_text: "def\nghi\n".into(),
1242 }],
1243 )]
1244 );
1245
1246 buffer.update(cx, |buffer, cx| {
1247 buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
1248 });
1249 cx.run_until_parked();
1250 assert_eq!(
1251 buffer.read_with(cx, |buffer, _| buffer.text()),
1252 "abXc\ndZeF\nGHI\nYjkl\nmno"
1253 );
1254 assert_eq!(
1255 unreviewed_hunks(&action_log, cx),
1256 vec![(
1257 buffer.clone(),
1258 vec![HunkStatus {
1259 range: Point::new(1, 0)..Point::new(3, 0),
1260 diff_status: DiffHunkStatusKind::Modified,
1261 old_text: "def\nghi\n".into(),
1262 }],
1263 )]
1264 );
1265
1266 action_log.update(cx, |log, cx| {
1267 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
1268 });
1269 cx.run_until_parked();
1270 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1271 }
1272
1273 #[gpui::test(iterations = 10)]
1274 async fn test_user_edits_notifications(cx: &mut TestAppContext) {
1275 init_test(cx);
1276
1277 let fs = FakeFs::new(cx.executor());
1278 fs.insert_tree(
1279 path!("/dir"),
1280 json!({"file": indoc! {"
1281 abc
1282 def
1283 ghi
1284 jkl
1285 mno"}}),
1286 )
1287 .await;
1288 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1289 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1290 let file_path = project
1291 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1292 .unwrap();
1293 let buffer = project
1294 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1295 .await
1296 .unwrap();
1297
1298 // Agent edits
1299 cx.update(|cx| {
1300 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1301 buffer.update(cx, |buffer, cx| {
1302 buffer
1303 .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
1304 .unwrap()
1305 });
1306 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1307 });
1308 cx.run_until_parked();
1309 assert_eq!(
1310 buffer.read_with(cx, |buffer, _| buffer.text()),
1311 indoc! {"
1312 abc
1313 deF
1314 GHI
1315 jkl
1316 mno"}
1317 );
1318 assert_eq!(
1319 unreviewed_hunks(&action_log, cx),
1320 vec![(
1321 buffer.clone(),
1322 vec![HunkStatus {
1323 range: Point::new(1, 0)..Point::new(3, 0),
1324 diff_status: DiffHunkStatusKind::Modified,
1325 old_text: "def\nghi\n".into(),
1326 }],
1327 )]
1328 );
1329
1330 // User edits
1331 buffer.update(cx, |buffer, cx| {
1332 buffer.edit(
1333 [
1334 (Point::new(0, 2)..Point::new(0, 2), "X"),
1335 (Point::new(3, 0)..Point::new(3, 0), "Y"),
1336 ],
1337 None,
1338 cx,
1339 )
1340 });
1341 cx.run_until_parked();
1342 assert_eq!(
1343 buffer.read_with(cx, |buffer, _| buffer.text()),
1344 indoc! {"
1345 abXc
1346 deF
1347 GHI
1348 Yjkl
1349 mno"}
1350 );
1351
1352 // User edits should be stored separately from agent's
1353 let user_edits = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));
1354 assert_eq!(
1355 user_edits.expect("should have some user edits"),
1356 indoc! {"
1357 --- a/dir/file
1358 +++ b/dir/file
1359 @@ -1,5 +1,5 @@
1360 -abc
1361 +abXc
1362 def
1363 ghi
1364 -jkl
1365 +Yjkl
1366 mno
1367 "}
1368 );
1369
1370 action_log.update(cx, |log, cx| {
1371 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
1372 });
1373 cx.run_until_parked();
1374 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1375 }
1376
1377 #[gpui::test(iterations = 10)]
1378 async fn test_creating_files(cx: &mut TestAppContext) {
1379 init_test(cx);
1380
1381 let fs = FakeFs::new(cx.executor());
1382 fs.insert_tree(path!("/dir"), json!({})).await;
1383 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1384 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1385 let file_path = project
1386 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1387 .unwrap();
1388
1389 let buffer = project
1390 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1391 .await
1392 .unwrap();
1393 cx.update(|cx| {
1394 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1395 buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
1396 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1397 });
1398 project
1399 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1400 .await
1401 .unwrap();
1402 cx.run_until_parked();
1403 assert_eq!(
1404 unreviewed_hunks(&action_log, cx),
1405 vec![(
1406 buffer.clone(),
1407 vec![HunkStatus {
1408 range: Point::new(0, 0)..Point::new(0, 5),
1409 diff_status: DiffHunkStatusKind::Added,
1410 old_text: "".into(),
1411 }],
1412 )]
1413 );
1414
1415 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
1416 cx.run_until_parked();
1417 assert_eq!(
1418 unreviewed_hunks(&action_log, cx),
1419 vec![(
1420 buffer.clone(),
1421 vec![HunkStatus {
1422 range: Point::new(0, 0)..Point::new(0, 6),
1423 diff_status: DiffHunkStatusKind::Added,
1424 old_text: "".into(),
1425 }],
1426 )]
1427 );
1428
1429 action_log.update(cx, |log, cx| {
1430 log.keep_edits_in_range(buffer.clone(), 0..5, cx)
1431 });
1432 cx.run_until_parked();
1433 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1434 }
1435
1436 #[gpui::test(iterations = 10)]
1437 async fn test_overwriting_files(cx: &mut TestAppContext) {
1438 init_test(cx);
1439
1440 let fs = FakeFs::new(cx.executor());
1441 fs.insert_tree(
1442 path!("/dir"),
1443 json!({
1444 "file1": "Lorem ipsum dolor"
1445 }),
1446 )
1447 .await;
1448 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1449 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1450 let file_path = project
1451 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1452 .unwrap();
1453
1454 let buffer = project
1455 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1456 .await
1457 .unwrap();
1458 cx.update(|cx| {
1459 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1460 buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
1461 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1462 });
1463 project
1464 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1465 .await
1466 .unwrap();
1467 cx.run_until_parked();
1468 assert_eq!(
1469 unreviewed_hunks(&action_log, cx),
1470 vec![(
1471 buffer.clone(),
1472 vec![HunkStatus {
1473 range: Point::new(0, 0)..Point::new(0, 19),
1474 diff_status: DiffHunkStatusKind::Added,
1475 old_text: "".into(),
1476 }],
1477 )]
1478 );
1479
1480 action_log
1481 .update(cx, |log, cx| {
1482 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
1483 })
1484 .await
1485 .unwrap();
1486 cx.run_until_parked();
1487 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1488 assert_eq!(
1489 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1490 "Lorem ipsum dolor"
1491 );
1492 }
1493
1494 #[gpui::test(iterations = 10)]
1495 async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
1496 init_test(cx);
1497
1498 let fs = FakeFs::new(cx.executor());
1499 fs.insert_tree(
1500 path!("/dir"),
1501 json!({
1502 "file1": "Lorem ipsum dolor"
1503 }),
1504 )
1505 .await;
1506 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1507 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1508 let file_path = project
1509 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1510 .unwrap();
1511
1512 let buffer = project
1513 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1514 .await
1515 .unwrap();
1516 cx.update(|cx| {
1517 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1518 buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
1519 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1520 });
1521 project
1522 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1523 .await
1524 .unwrap();
1525 cx.run_until_parked();
1526 assert_eq!(
1527 unreviewed_hunks(&action_log, cx),
1528 vec![(
1529 buffer.clone(),
1530 vec![HunkStatus {
1531 range: Point::new(0, 0)..Point::new(0, 37),
1532 diff_status: DiffHunkStatusKind::Modified,
1533 old_text: "Lorem ipsum dolor".into(),
1534 }],
1535 )]
1536 );
1537
1538 cx.update(|cx| {
1539 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1540 buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
1541 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1542 });
1543 project
1544 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1545 .await
1546 .unwrap();
1547 cx.run_until_parked();
1548 assert_eq!(
1549 unreviewed_hunks(&action_log, cx),
1550 vec![(
1551 buffer.clone(),
1552 vec![HunkStatus {
1553 range: Point::new(0, 0)..Point::new(0, 9),
1554 diff_status: DiffHunkStatusKind::Added,
1555 old_text: "".into(),
1556 }],
1557 )]
1558 );
1559
1560 action_log
1561 .update(cx, |log, cx| {
1562 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
1563 })
1564 .await
1565 .unwrap();
1566 cx.run_until_parked();
1567 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1568 assert_eq!(
1569 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1570 "Lorem ipsum dolor"
1571 );
1572 }
1573
1574 #[gpui::test(iterations = 10)]
1575 async fn test_deleting_files(cx: &mut TestAppContext) {
1576 init_test(cx);
1577
1578 let fs = FakeFs::new(cx.executor());
1579 fs.insert_tree(
1580 path!("/dir"),
1581 json!({"file1": "lorem\n", "file2": "ipsum\n"}),
1582 )
1583 .await;
1584
1585 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1586 let file1_path = project
1587 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1588 .unwrap();
1589 let file2_path = project
1590 .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
1591 .unwrap();
1592
1593 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1594 let buffer1 = project
1595 .update(cx, |project, cx| {
1596 project.open_buffer(file1_path.clone(), cx)
1597 })
1598 .await
1599 .unwrap();
1600 let buffer2 = project
1601 .update(cx, |project, cx| {
1602 project.open_buffer(file2_path.clone(), cx)
1603 })
1604 .await
1605 .unwrap();
1606
1607 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
1608 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
1609 project
1610 .update(cx, |project, cx| {
1611 project.delete_file(file1_path.clone(), false, cx)
1612 })
1613 .unwrap()
1614 .await
1615 .unwrap();
1616 project
1617 .update(cx, |project, cx| {
1618 project.delete_file(file2_path.clone(), false, cx)
1619 })
1620 .unwrap()
1621 .await
1622 .unwrap();
1623 cx.run_until_parked();
1624 assert_eq!(
1625 unreviewed_hunks(&action_log, cx),
1626 vec![
1627 (
1628 buffer1.clone(),
1629 vec![HunkStatus {
1630 range: Point::new(0, 0)..Point::new(0, 0),
1631 diff_status: DiffHunkStatusKind::Deleted,
1632 old_text: "lorem\n".into(),
1633 }]
1634 ),
1635 (
1636 buffer2.clone(),
1637 vec![HunkStatus {
1638 range: Point::new(0, 0)..Point::new(0, 0),
1639 diff_status: DiffHunkStatusKind::Deleted,
1640 old_text: "ipsum\n".into(),
1641 }],
1642 )
1643 ]
1644 );
1645
1646 // Simulate file1 being recreated externally.
1647 fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
1648 .await;
1649
1650 // Simulate file2 being recreated by a tool.
1651 let buffer2 = project
1652 .update(cx, |project, cx| project.open_buffer(file2_path, cx))
1653 .await
1654 .unwrap();
1655 action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
1656 buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
1657 action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
1658 project
1659 .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
1660 .await
1661 .unwrap();
1662
1663 cx.run_until_parked();
1664 assert_eq!(
1665 unreviewed_hunks(&action_log, cx),
1666 vec![(
1667 buffer2.clone(),
1668 vec![HunkStatus {
1669 range: Point::new(0, 0)..Point::new(0, 5),
1670 diff_status: DiffHunkStatusKind::Added,
1671 old_text: "".into(),
1672 }],
1673 )]
1674 );
1675
1676 // Simulate file2 being deleted externally.
1677 fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
1678 .await
1679 .unwrap();
1680 cx.run_until_parked();
1681 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1682 }
1683
1684 #[gpui::test(iterations = 10)]
1685 async fn test_reject_edits(cx: &mut TestAppContext) {
1686 init_test(cx);
1687
1688 let fs = FakeFs::new(cx.executor());
1689 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1690 .await;
1691 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1692 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1693 let file_path = project
1694 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1695 .unwrap();
1696 let buffer = project
1697 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1698 .await
1699 .unwrap();
1700
1701 cx.update(|cx| {
1702 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1703 buffer.update(cx, |buffer, cx| {
1704 buffer
1705 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1706 .unwrap()
1707 });
1708 buffer.update(cx, |buffer, cx| {
1709 buffer
1710 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1711 .unwrap()
1712 });
1713 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1714 });
1715 cx.run_until_parked();
1716 assert_eq!(
1717 buffer.read_with(cx, |buffer, _| buffer.text()),
1718 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1719 );
1720 assert_eq!(
1721 unreviewed_hunks(&action_log, cx),
1722 vec![(
1723 buffer.clone(),
1724 vec![
1725 HunkStatus {
1726 range: Point::new(1, 0)..Point::new(3, 0),
1727 diff_status: DiffHunkStatusKind::Modified,
1728 old_text: "def\n".into(),
1729 },
1730 HunkStatus {
1731 range: Point::new(5, 0)..Point::new(5, 3),
1732 diff_status: DiffHunkStatusKind::Modified,
1733 old_text: "mno".into(),
1734 }
1735 ],
1736 )]
1737 );
1738
1739 // If the rejected range doesn't overlap with any hunk, we ignore it.
1740 action_log
1741 .update(cx, |log, cx| {
1742 log.reject_edits_in_ranges(
1743 buffer.clone(),
1744 vec![Point::new(4, 0)..Point::new(4, 0)],
1745 cx,
1746 )
1747 })
1748 .await
1749 .unwrap();
1750 cx.run_until_parked();
1751 assert_eq!(
1752 buffer.read_with(cx, |buffer, _| buffer.text()),
1753 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1754 );
1755 assert_eq!(
1756 unreviewed_hunks(&action_log, cx),
1757 vec![(
1758 buffer.clone(),
1759 vec![
1760 HunkStatus {
1761 range: Point::new(1, 0)..Point::new(3, 0),
1762 diff_status: DiffHunkStatusKind::Modified,
1763 old_text: "def\n".into(),
1764 },
1765 HunkStatus {
1766 range: Point::new(5, 0)..Point::new(5, 3),
1767 diff_status: DiffHunkStatusKind::Modified,
1768 old_text: "mno".into(),
1769 }
1770 ],
1771 )]
1772 );
1773
1774 action_log
1775 .update(cx, |log, cx| {
1776 log.reject_edits_in_ranges(
1777 buffer.clone(),
1778 vec![Point::new(0, 0)..Point::new(1, 0)],
1779 cx,
1780 )
1781 })
1782 .await
1783 .unwrap();
1784 cx.run_until_parked();
1785 assert_eq!(
1786 buffer.read_with(cx, |buffer, _| buffer.text()),
1787 "abc\ndef\nghi\njkl\nmnO"
1788 );
1789 assert_eq!(
1790 unreviewed_hunks(&action_log, cx),
1791 vec![(
1792 buffer.clone(),
1793 vec![HunkStatus {
1794 range: Point::new(4, 0)..Point::new(4, 3),
1795 diff_status: DiffHunkStatusKind::Modified,
1796 old_text: "mno".into(),
1797 }],
1798 )]
1799 );
1800
1801 action_log
1802 .update(cx, |log, cx| {
1803 log.reject_edits_in_ranges(
1804 buffer.clone(),
1805 vec![Point::new(4, 0)..Point::new(4, 0)],
1806 cx,
1807 )
1808 })
1809 .await
1810 .unwrap();
1811 cx.run_until_parked();
1812 assert_eq!(
1813 buffer.read_with(cx, |buffer, _| buffer.text()),
1814 "abc\ndef\nghi\njkl\nmno"
1815 );
1816 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1817 }
1818
1819 #[gpui::test(iterations = 10)]
1820 async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
1821 init_test(cx);
1822
1823 let fs = FakeFs::new(cx.executor());
1824 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1825 .await;
1826 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1827 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1828 let file_path = project
1829 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1830 .unwrap();
1831 let buffer = project
1832 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1833 .await
1834 .unwrap();
1835
1836 cx.update(|cx| {
1837 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1838 buffer.update(cx, |buffer, cx| {
1839 buffer
1840 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1841 .unwrap()
1842 });
1843 buffer.update(cx, |buffer, cx| {
1844 buffer
1845 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1846 .unwrap()
1847 });
1848 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1849 });
1850 cx.run_until_parked();
1851 assert_eq!(
1852 buffer.read_with(cx, |buffer, _| buffer.text()),
1853 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1854 );
1855 assert_eq!(
1856 unreviewed_hunks(&action_log, cx),
1857 vec![(
1858 buffer.clone(),
1859 vec![
1860 HunkStatus {
1861 range: Point::new(1, 0)..Point::new(3, 0),
1862 diff_status: DiffHunkStatusKind::Modified,
1863 old_text: "def\n".into(),
1864 },
1865 HunkStatus {
1866 range: Point::new(5, 0)..Point::new(5, 3),
1867 diff_status: DiffHunkStatusKind::Modified,
1868 old_text: "mno".into(),
1869 }
1870 ],
1871 )]
1872 );
1873
1874 action_log.update(cx, |log, cx| {
1875 let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
1876 ..buffer.read(cx).anchor_before(Point::new(1, 0));
1877 let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
1878 ..buffer.read(cx).anchor_before(Point::new(5, 3));
1879
1880 log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], cx)
1881 .detach();
1882 assert_eq!(
1883 buffer.read_with(cx, |buffer, _| buffer.text()),
1884 "abc\ndef\nghi\njkl\nmno"
1885 );
1886 });
1887 cx.run_until_parked();
1888 assert_eq!(
1889 buffer.read_with(cx, |buffer, _| buffer.text()),
1890 "abc\ndef\nghi\njkl\nmno"
1891 );
1892 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1893 }
1894
1895 #[gpui::test(iterations = 10)]
1896 async fn test_reject_deleted_file(cx: &mut TestAppContext) {
1897 init_test(cx);
1898
1899 let fs = FakeFs::new(cx.executor());
1900 fs.insert_tree(path!("/dir"), json!({"file": "content"}))
1901 .await;
1902 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1903 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1904 let file_path = project
1905 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1906 .unwrap();
1907 let buffer = project
1908 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
1909 .await
1910 .unwrap();
1911
1912 cx.update(|cx| {
1913 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
1914 });
1915 project
1916 .update(cx, |project, cx| {
1917 project.delete_file(file_path.clone(), false, cx)
1918 })
1919 .unwrap()
1920 .await
1921 .unwrap();
1922 cx.run_until_parked();
1923 assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
1924 assert_eq!(
1925 unreviewed_hunks(&action_log, cx),
1926 vec![(
1927 buffer.clone(),
1928 vec![HunkStatus {
1929 range: Point::new(0, 0)..Point::new(0, 0),
1930 diff_status: DiffHunkStatusKind::Deleted,
1931 old_text: "content".into(),
1932 }]
1933 )]
1934 );
1935
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 0)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
        assert!(fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

    #[gpui::test(iterations = 10)]
    async fn test_reject_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 7),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

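        // Rejecting the only hunk of a file the agent created removes the file from disk.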
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 7)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

    #[gpui::test]
    async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();

        // User makes additional edits
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| {
                buffer.edit([(10..10, "\nuser added this line")], None, cx);
            });
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // Reject all
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(100, 0)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();

        // File should still contain all the content
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        let content = buffer.read_with(cx, |buffer, _| buffer.text());
        assert_eq!(content, "ai content\nuser added this line");
    }

    #[gpui::test]
    async fn test_reject_after_accepting_hunk_on_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User accepts the single hunk
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Anchor::MIN..Anchor::MAX, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // AI modifies the file
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User rejects the hunk
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(buffer.clone(), vec![Anchor::MIN..Anchor::MAX], cx)
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "ai content v1"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

    #[gpui::test]
    async fn test_reject_edits_on_previously_accepted_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();

        // User clicks "Accept All"
        action_log.update(cx, |log, cx| log.keep_all_edits(cx));
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared

        // AI modifies file again
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);

        // User clicks "Reject All"
        action_log
            .update(cx, |log, cx| log.reject_all_edits(cx))
            .await;
        cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "ai content v1"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

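    // Fuzz the log with a random mix of keeps, rejects, agent edits, and user edits,
    // checking after each quiescence that the tracked diff still reproduces the buffer.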
    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        init_test(cx);

        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

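        // Each iteration keeps a random range (25%), rejects a random range (25%),
        // or edits the buffer (50%), attributing the edit to the agent half the time.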
        for _ in 0..operations {
            match rng.random_range(0..100) {
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, cx)
                    });
                }
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            log.reject_edits_in_ranges(buffer.clone(), vec![range], cx)
                        })
                        .await
                        .unwrap();
                }
                _ => {
                    let is_agent_edit = rng.random_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            if rng.random_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

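        // Replays the buffer's unreviewed edits on top of the diff base and asserts
        // that the result matches the current buffer contents.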
        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }

    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Make another commit that accepts the NEW line but with different content
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into())],
            "0000002",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer,
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

    #[derive(Debug, Clone, PartialEq, Eq)]
    struct HunkStatus {
        range: Range<Point>,
        diff_status: DiffHunkStatusKind,
        old_text: String,
    }

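    /// Collects every changed buffer's unreviewed hunks as (range, status kind, old text)
    /// so tests can assert against them.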
    fn unreviewed_hunks(
        action_log: &Entity<ActionLog>,
        cx: &TestAppContext,
    ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
        cx.read(|cx| {
            action_log
                .read(cx)
                .changed_buffers(cx)
                .into_iter()
                .map(|(buffer, diff)| {
                    let snapshot = buffer.read(cx).snapshot();
                    (
                        buffer,
                        diff.read(cx)
                            .hunks(&snapshot, cx)
                            .map(|hunk| HunkStatus {
                                diff_status: hunk.status().kind,
                                range: hunk.range,
                                old_text: diff
                                    .read(cx)
                                    .base_text()
                                    .text_for_range(hunk.diff_base_byte_range)
                                    .collect(),
                            })
                            .collect(),
                    )
                })
                .collect()
        })
    }

    #[gpui::test]
    async fn test_format_patch(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"test.txt": "line 1\nline 2\nline 3\n"}),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/test.txt", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            // Track the buffer and mark it as read first
            action_log.update(cx, |log, cx| {
                log.buffer_read(buffer.clone(), cx);
            });

            // Make some edits to create a patch
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 0)..Point::new(1, 6), "CHANGED")], None, cx)
                    .unwrap(); // Replace "line 2" with "CHANGED"
            });
        });

        cx.run_until_parked();

        // Get the patch
        let patch = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));

        // Verify the patch is a unified diff with the expected headers and hunk
        assert_eq!(
            patch.unwrap(),
            indoc! {"
                --- a/dir/test.txt
                +++ b/dir/test.txt
                @@ -1,3 +1,3 @@
                 line 1
                -line 2
                +CHANGED
                 line 3
            "}
        );
    }
}