1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use collections::BTreeMap;
4use futures::{FutureExt, StreamExt, channel::mpsc};
5use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
6use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
7use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
8use std::{cmp, ops::Range, sync::Arc};
9use text::{Edit, Patch, Rope};
10use util::RangeExt;
11
/// Tracks actions performed by tools in a thread.
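///
/// The log remembers, per buffer, a diff base and the set of row edits the
/// agent has made but the user hasn't reviewed yet. A minimal usage sketch
/// (hedged; based on the tests below and assuming `project` and `buffer`
/// entities plus a gpui context `cx` are already available):
///
/// ```ignore
/// let action_log = cx.new(|_| ActionLog::new(project.clone()));
/// // Record that the agent read the buffer, then that it edited it.
/// action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
/// action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
/// // Unreviewed changes are exposed as per-buffer diffs.
/// let changed = action_log.read(cx).changed_buffers(cx);
/// ```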
13pub struct ActionLog {
14 /// Buffers that we want to notify the model about when they change.
15 tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
16 /// Has the model edited a file since it last checked diagnostics?
17 edited_since_project_diagnostics_check: bool,
18 /// The project this action log is associated with
19 project: Entity<Project>,
20}
21
22impl ActionLog {
23 /// Creates a new, empty action log associated with the given project.
24 pub fn new(project: Entity<Project>) -> Self {
25 Self {
26 tracked_buffers: BTreeMap::default(),
27 edited_since_project_diagnostics_check: false,
28 project,
29 }
30 }
31
32 pub fn project(&self) -> &Entity<Project> {
33 &self.project
34 }
35
    /// Notifies the log that project diagnostics have been checked.
37 pub fn checked_project_diagnostics(&mut self) {
38 self.edited_since_project_diagnostics_check = false;
39 }
40
41 /// Returns true if any files have been edited since the last project diagnostics check
42 pub fn has_edited_files_since_project_diagnostics_check(&self) -> bool {
43 self.edited_since_project_diagnostics_check
44 }
45
46 fn track_buffer_internal(
47 &mut self,
48 buffer: Entity<Buffer>,
49 is_created: bool,
50 cx: &mut Context<Self>,
51 ) -> &mut TrackedBuffer {
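        // Decide how this buffer should be tracked. When a buffer that is
        // already tracked gets "created" again (e.g. the agent overwrites it),
        // hold on to whatever file content existed before the agent's first
        // edit so that rejecting the change can restore it.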
52 let status = if is_created {
53 if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
54 match tracked.status {
55 TrackedBufferStatus::Created {
56 existing_file_content,
57 } => TrackedBufferStatus::Created {
58 existing_file_content,
59 },
60 TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
61 TrackedBufferStatus::Created {
62 existing_file_content: Some(tracked.diff_base),
63 }
64 }
65 }
66 } else if buffer
67 .read(cx)
68 .file()
69 .map_or(false, |file| file.disk_state().exists())
70 {
71 TrackedBufferStatus::Created {
72 existing_file_content: Some(buffer.read(cx).as_rope().clone()),
73 }
74 } else {
75 TrackedBufferStatus::Created {
76 existing_file_content: None,
77 }
78 }
79 } else {
80 TrackedBufferStatus::Modified
81 };
82
83 let tracked_buffer = self
84 .tracked_buffers
85 .entry(buffer.clone())
86 .or_insert_with(|| {
87 let open_lsp_handle = self.project.update(cx, |project, cx| {
88 project.register_buffer_with_language_servers(&buffer, cx)
89 });
90
91 let text_snapshot = buffer.read(cx).text_snapshot();
92 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
93 let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
94 let diff_base;
95 let unreviewed_edits;
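                // A buffer the agent just created is diffed against an empty
                // base, with a single unreviewed edit covering every row of
                // the new file.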
96 if is_created {
97 diff_base = Rope::default();
98 unreviewed_edits = Patch::new(vec![Edit {
99 old: 0..1,
100 new: 0..text_snapshot.max_point().row + 1,
101 }])
102 } else {
103 diff_base = buffer.read(cx).as_rope().clone();
104 unreviewed_edits = Patch::default();
105 }
106 TrackedBuffer {
107 buffer: buffer.clone(),
108 diff_base,
                    unreviewed_edits,
110 snapshot: text_snapshot.clone(),
111 status,
112 version: buffer.read(cx).version(),
113 diff,
114 diff_update: diff_update_tx,
115 _open_lsp_handle: open_lsp_handle,
116 _maintain_diff: cx.spawn({
117 let buffer = buffer.clone();
118 async move |this, cx| {
119 Self::maintain_diff(this, buffer, diff_update_rx, cx)
120 .await
121 .ok();
122 }
123 }),
124 _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
125 }
126 });
127 tracked_buffer.version = buffer.read(cx).version();
128 tracked_buffer
129 }
130
131 fn handle_buffer_event(
132 &mut self,
133 buffer: Entity<Buffer>,
134 event: &BufferEvent,
135 cx: &mut Context<Self>,
136 ) {
137 match event {
138 BufferEvent::Edited { .. } => self.handle_buffer_edited(buffer, cx),
139 BufferEvent::FileHandleChanged => {
140 self.handle_buffer_file_changed(buffer, cx);
141 }
142 _ => {}
143 };
144 }
145
146 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
147 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
148 return;
149 };
150 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
151 }
152
153 fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
154 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
155 return;
156 };
157
158 match tracked_buffer.status {
159 TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
160 if buffer
161 .read(cx)
162 .file()
163 .map_or(false, |file| file.disk_state() == DiskState::Deleted)
164 {
165 // If the buffer had been edited by a tool, but it got
166 // deleted externally, we want to stop tracking it.
167 self.tracked_buffers.remove(&buffer);
168 }
169 cx.notify();
170 }
171 TrackedBufferStatus::Deleted => {
172 if buffer
173 .read(cx)
174 .file()
175 .map_or(false, |file| file.disk_state() != DiskState::Deleted)
176 {
177 // If the buffer had been deleted by a tool, but it got
178 // resurrected externally, we want to clear the edits we
179 // were tracking and reset the buffer's state.
180 self.tracked_buffers.remove(&buffer);
181 self.track_buffer_internal(buffer, false, cx);
182 }
183 cx.notify();
184 }
185 }
186 }
187
188 async fn maintain_diff(
189 this: WeakEntity<Self>,
190 buffer: Entity<Buffer>,
191 mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
192 cx: &mut AsyncApp,
193 ) -> Result<()> {
194 let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
195 let git_diff = this
196 .update(cx, |this, cx| {
197 this.project.update(cx, |project, cx| {
198 project.open_uncommitted_diff(buffer.clone(), cx)
199 })
200 })?
201 .await
202 .ok();
203 let buffer_repo = git_store.read_with(cx, |git_store, cx| {
204 git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
205 })?;
206
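        // Watch the repository's HEAD so that unreviewed edits which later
        // land in a commit can be folded into the diff base automatically.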
207 let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
208 let _repo_subscription =
209 if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
210 cx.update(|cx| {
211 let mut old_head = buffer_repo.read(cx).head_commit.clone();
212 Some(cx.subscribe(git_diff, move |_, event, cx| match event {
213 buffer_diff::BufferDiffEvent::DiffChanged { .. } => {
214 let new_head = buffer_repo.read(cx).head_commit.clone();
215 if new_head != old_head {
216 old_head = new_head;
217 git_diff_updates_tx.send(()).ok();
218 }
219 }
220 _ => {}
221 }))
222 })?
223 } else {
224 None
225 };
226
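        // React to two kinds of events: new buffer snapshots (rebase the
        // unreviewed edits against them) and HEAD changes (keep edits that
        // were committed). Exits when the buffer stops sending updates.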
227 loop {
228 futures::select_biased! {
229 buffer_update = buffer_updates.next() => {
230 if let Some((author, buffer_snapshot)) = buffer_update {
231 Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
232 } else {
233 break;
234 }
235 }
236 _ = git_diff_updates_rx.changed().fuse() => {
237 if let Some(git_diff) = git_diff.as_ref() {
238 Self::keep_committed_edits(&this, &buffer, &git_diff, cx).await?;
239 }
240 }
241 }
242 }
243
244 Ok(())
245 }
246
247 async fn track_edits(
248 this: &WeakEntity<ActionLog>,
249 buffer: &Entity<Buffer>,
250 author: ChangeAuthor,
251 buffer_snapshot: text::BufferSnapshot,
252 cx: &mut AsyncApp,
253 ) -> Result<()> {
254 let rebase = this.read_with(cx, |this, cx| {
255 let tracked_buffer = this
256 .tracked_buffers
257 .get(buffer)
258 .context("buffer not tracked")?;
259
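            // Rebase on a background thread: user-authored edits are applied
            // to the diff base so they stop showing up as unreviewed, while
            // agent-authored edits are left for the diff computed below.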
260 let rebase = cx.background_spawn({
261 let mut base_text = tracked_buffer.diff_base.clone();
262 let old_snapshot = tracked_buffer.snapshot.clone();
263 let new_snapshot = buffer_snapshot.clone();
264 let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
265 async move {
266 let edits = diff_snapshots(&old_snapshot, &new_snapshot);
267 if let ChangeAuthor::User = author {
268 apply_non_conflicting_edits(
269 &unreviewed_edits,
270 edits,
271 &mut base_text,
272 new_snapshot.as_rope(),
273 );
274 }
275 (Arc::new(base_text.to_string()), base_text)
276 }
277 });
278
279 anyhow::Ok(rebase)
280 })??;
281 let (new_base_text, new_diff_base) = rebase.await;
282 Self::update_diff(
283 this,
284 buffer,
285 buffer_snapshot,
286 new_base_text,
287 new_diff_base,
288 cx,
289 )
290 .await
291 }
292
293 async fn keep_committed_edits(
294 this: &WeakEntity<ActionLog>,
295 buffer: &Entity<Buffer>,
296 git_diff: &Entity<BufferDiff>,
297 cx: &mut AsyncApp,
298 ) -> Result<()> {
299 let buffer_snapshot = this.read_with(cx, |this, _cx| {
300 let tracked_buffer = this
301 .tracked_buffers
302 .get(buffer)
303 .context("buffer not tracked")?;
304 anyhow::Ok(tracked_buffer.snapshot.clone())
305 })??;
306 let (new_base_text, new_diff_base) = this
307 .read_with(cx, |this, cx| {
308 let tracked_buffer = this
309 .tracked_buffers
310 .get(buffer)
311 .context("buffer not tracked")?;
312 let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
313 let agent_diff_base = tracked_buffer.diff_base.clone();
314 let git_diff_base = git_diff.read(cx).base_text().as_rope().clone();
315 let buffer_text = tracked_buffer.snapshot.as_rope().clone();
316 anyhow::Ok(cx.background_spawn(async move {
317 let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
318 let committed_edits = language::line_diff(
319 &agent_diff_base.to_string(),
320 &git_diff_base.to_string(),
321 )
322 .into_iter()
323 .map(|(old, new)| Edit { old, new });
324
325 let mut new_agent_diff_base = agent_diff_base.clone();
326 let mut row_delta = 0i32;
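                    // Tracks how many rows the rebuilt diff base has gained or
                    // lost so far, so that later hunks' row offsets stay valid.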
327 for committed in committed_edits {
328 while let Some(unreviewed) = old_unreviewed_edits.peek() {
329 // If the committed edit matches the unreviewed
330 // edit, assume the user wants to keep it.
331 if committed.old == unreviewed.old {
332 let unreviewed_new =
333 buffer_text.slice_rows(unreviewed.new.clone()).to_string();
334 let committed_new =
335 git_diff_base.slice_rows(committed.new.clone()).to_string();
336 if unreviewed_new == committed_new {
337 let old_byte_start =
338 new_agent_diff_base.point_to_offset(Point::new(
339 (unreviewed.old.start as i32 + row_delta) as u32,
340 0,
341 ));
342 let old_byte_end =
343 new_agent_diff_base.point_to_offset(cmp::min(
344 Point::new(
345 (unreviewed.old.end as i32 + row_delta) as u32,
346 0,
347 ),
348 new_agent_diff_base.max_point(),
349 ));
350 new_agent_diff_base
351 .replace(old_byte_start..old_byte_end, &unreviewed_new);
352 row_delta +=
353 unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
354 }
355 } else if unreviewed.old.start >= committed.old.end {
356 break;
357 }
358
359 old_unreviewed_edits.next().unwrap();
360 }
361 }
362
363 (
364 Arc::new(new_agent_diff_base.to_string()),
365 new_agent_diff_base,
366 )
367 }))
368 })??
369 .await;
370
371 Self::update_diff(
372 this,
373 buffer,
374 buffer_snapshot,
375 new_base_text,
376 new_diff_base,
377 cx,
378 )
379 .await
380 }
381
382 async fn update_diff(
383 this: &WeakEntity<ActionLog>,
384 buffer: &Entity<Buffer>,
385 buffer_snapshot: text::BufferSnapshot,
386 new_base_text: Arc<String>,
387 new_diff_base: Rope,
388 cx: &mut AsyncApp,
389 ) -> Result<()> {
390 let (diff, language, language_registry) = this.read_with(cx, |this, cx| {
391 let tracked_buffer = this
392 .tracked_buffers
393 .get(buffer)
394 .context("buffer not tracked")?;
395 anyhow::Ok((
396 tracked_buffer.diff.clone(),
397 buffer.read(cx).language().cloned(),
398 buffer.read(cx).language_registry().clone(),
399 ))
400 })??;
401 let diff_snapshot = BufferDiff::update_diff(
402 diff.clone(),
403 buffer_snapshot.clone(),
404 Some(new_base_text),
405 true,
406 false,
407 language,
408 language_registry,
409 cx,
410 )
411 .await;
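        // Recompute the unreviewed row edits from the fresh diff snapshot so
        // they stay consistent with the new diff base.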
412 let mut unreviewed_edits = Patch::default();
413 if let Ok(diff_snapshot) = diff_snapshot {
414 unreviewed_edits = cx
415 .background_spawn({
416 let diff_snapshot = diff_snapshot.clone();
417 let buffer_snapshot = buffer_snapshot.clone();
418 let new_diff_base = new_diff_base.clone();
419 async move {
420 let mut unreviewed_edits = Patch::default();
421 for hunk in diff_snapshot
422 .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer_snapshot)
423 {
424 let old_range = new_diff_base
425 .offset_to_point(hunk.diff_base_byte_range.start)
426 ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
427 let new_range = hunk.range.start..hunk.range.end;
428 unreviewed_edits.push(point_to_row_edit(
429 Edit {
430 old: old_range,
431 new: new_range,
432 },
433 &new_diff_base,
434 &buffer_snapshot.as_rope(),
435 ));
436 }
437 unreviewed_edits
438 }
439 })
440 .await;
441
442 diff.update(cx, |diff, cx| {
443 diff.set_snapshot(diff_snapshot, &buffer_snapshot, cx);
444 })?;
445 }
446 this.update(cx, |this, cx| {
447 let tracked_buffer = this
448 .tracked_buffers
449 .get_mut(buffer)
450 .context("buffer not tracked")?;
451 tracked_buffer.diff_base = new_diff_base;
452 tracked_buffer.snapshot = buffer_snapshot;
453 tracked_buffer.unreviewed_edits = unreviewed_edits;
454 cx.notify();
455 anyhow::Ok(())
456 })?
457 }
458
    /// Tracks a buffer as read by the agent, so we can notify the model about user edits.
460 pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
461 self.track_buffer_internal(buffer, false, cx);
462 }
463
    /// Marks a buffer as created by the agent, so we can refresh it in the context.
465 pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
466 self.edited_since_project_diagnostics_check = true;
467 self.track_buffer_internal(buffer.clone(), true, cx);
468 }
469
    /// Marks a buffer as edited by the agent, so we can refresh it in the context.
471 pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
472 self.edited_since_project_diagnostics_check = true;
473
474 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
475 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
476 tracked_buffer.status = TrackedBufferStatus::Modified;
477 }
478 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
479 }
480
481 pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
482 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
483 match tracked_buffer.status {
484 TrackedBufferStatus::Created { .. } => {
485 self.tracked_buffers.remove(&buffer);
486 cx.notify();
487 }
488 TrackedBufferStatus::Modified => {
489 buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
490 tracked_buffer.status = TrackedBufferStatus::Deleted;
491 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
492 }
493 TrackedBufferStatus::Deleted => {}
494 }
495 cx.notify();
496 }
497
498 pub fn keep_buffer_edits(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
499 self.keep_edits_in_range(buffer, Anchor::MIN..Anchor::MAX, cx);
500 }
501
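    /// Marks every unreviewed edit intersecting `buffer_range` as reviewed,
    /// folding the buffer's current text for those rows into the diff base.
    ///
    /// A hedged sketch mirroring the tests below (assumes `action_log` and
    /// `buffer` entities are in scope):
    ///
    /// ```ignore
    /// action_log.update(cx, |log, cx| {
    ///     log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), cx)
    /// });
    /// ```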
502 pub fn keep_edits_in_range(
503 &mut self,
504 buffer: Entity<Buffer>,
505 buffer_range: Range<impl language::ToPoint>,
506 cx: &mut Context<Self>,
507 ) {
508 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
509 return;
510 };
511
512 match tracked_buffer.status {
513 TrackedBufferStatus::Deleted => {
514 self.tracked_buffers.remove(&buffer);
515 cx.notify();
516 }
517 _ => {
518 let buffer = buffer.read(cx);
519 let buffer_range =
520 buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
521 let mut delta = 0i32;
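                // Accumulates the row shift introduced by edits accepted
                // earlier in this pass, so each edit's old range is translated
                // before we decide whether to keep it or fold it into the
                // diff base.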
522
523 tracked_buffer.unreviewed_edits.retain_mut(|edit| {
524 edit.old.start = (edit.old.start as i32 + delta) as u32;
525 edit.old.end = (edit.old.end as i32 + delta) as u32;
526
527 if buffer_range.end.row < edit.new.start
528 || buffer_range.start.row > edit.new.end
529 {
530 true
531 } else {
532 let old_range = tracked_buffer
533 .diff_base
534 .point_to_offset(Point::new(edit.old.start, 0))
535 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
536 Point::new(edit.old.end, 0),
537 tracked_buffer.diff_base.max_point(),
538 ));
539 let new_range = tracked_buffer
540 .snapshot
541 .point_to_offset(Point::new(edit.new.start, 0))
542 ..tracked_buffer.snapshot.point_to_offset(cmp::min(
543 Point::new(edit.new.end, 0),
544 tracked_buffer.snapshot.max_point(),
545 ));
546 tracked_buffer.diff_base.replace(
547 old_range,
548 &tracked_buffer
549 .snapshot
550 .text_for_range(new_range)
551 .collect::<String>(),
552 );
553 delta += edit.new_len() as i32 - edit.old_len() as i32;
554 false
555 }
556 });
557 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
558 }
559 }
560 }
561
562 pub fn reject_all_edits(&mut self, cx: &mut Context<Self>) {
563 let changed_buffers = self.changed_buffers(cx);
564 for (buffer, _) in changed_buffers {
565 self.reject_edits_in_ranges(buffer, vec![Anchor::MIN..Anchor::MAX], cx)
566 .detach();
567 }
568 }
569
570 pub fn reject_buffer_edits(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
571 self.reject_edits_in_ranges(buffer, vec![Anchor::MIN..Anchor::MAX], cx)
572 .detach()
573 }
574
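    /// Reverts unreviewed agent edits that intersect the given ranges and
    /// saves the buffer: files the agent created are deleted (or restored to
    /// their pre-existing content), files it deleted are restored from the
    /// diff base, and intersecting hunks in modified files are replaced with
    /// their original text.
    ///
    /// A hedged sketch mirroring the tests below:
    ///
    /// ```ignore
    /// action_log
    ///     .update(cx, |log, cx| {
    ///         log.reject_edits_in_ranges(
    ///             buffer.clone(),
    ///             vec![Point::new(0, 0)..Point::new(1, 0)],
    ///             cx,
    ///         )
    ///     })
    ///     .await
    ///     .unwrap();
    /// ```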
575 pub fn reject_edits_in_ranges(
576 &mut self,
577 buffer: Entity<Buffer>,
578 buffer_ranges: Vec<Range<impl language::ToPoint>>,
579 cx: &mut Context<Self>,
580 ) -> Task<Result<()>> {
581 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
582 return Task::ready(Ok(()));
583 };
584
585 match &tracked_buffer.status {
586 TrackedBufferStatus::Created {
587 existing_file_content,
588 } => {
589 let task = if let Some(existing_file_content) = existing_file_content {
590 buffer.update(cx, |buffer, cx| {
591 buffer.start_transaction();
592 buffer.set_text("", cx);
593 for chunk in existing_file_content.chunks() {
594 buffer.append(chunk, cx);
595 }
596 buffer.end_transaction(cx);
597 });
598 self.project
599 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
600 } else {
601 // For a file created by AI with no pre-existing content,
602 // only delete the file if we're certain it contains only AI content
603 // with no edits from the user.
604
605 let initial_version = tracked_buffer.version.clone();
606 let current_version = buffer.read(cx).version();
607
608 let current_content = buffer.read(cx).text();
609 let tracked_content = tracked_buffer.snapshot.text();
610
611 let is_ai_only_content =
612 initial_version == current_version && current_content == tracked_content;
613
614 if is_ai_only_content {
615 buffer
616 .read(cx)
617 .entry_id(cx)
618 .and_then(|entry_id| {
619 self.project.update(cx, |project, cx| {
620 project.delete_entry(entry_id, false, cx)
621 })
622 })
623 .unwrap_or(Task::ready(Ok(())))
624 } else {
625 // Not sure how to disentangle edits made by the user
626 // from edits made by the AI at this point.
627 // For now, preserve both to avoid data loss.
628 //
                        // TODO: Find a better solution, e.g. disable "Reject" after the user
                        // makes an edit, or find a way to differentiate AI edits from user edits.
631 Task::ready(Ok(()))
632 }
633 };
634
635 self.tracked_buffers.remove(&buffer);
636 cx.notify();
637 task
638 }
639 TrackedBufferStatus::Deleted => {
640 buffer.update(cx, |buffer, cx| {
641 buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
642 });
643 let save = self
644 .project
645 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
646
647 // Clear all tracked edits for this buffer and start over as if we just read it.
648 self.tracked_buffers.remove(&buffer);
649 self.buffer_read(buffer.clone(), cx);
650 cx.notify();
651 save
652 }
653 TrackedBufferStatus::Modified => {
654 buffer.update(cx, |buffer, cx| {
655 let mut buffer_row_ranges = buffer_ranges
656 .into_iter()
657 .map(|range| {
658 range.start.to_point(buffer).row..range.end.to_point(buffer).row
659 })
660 .peekable();
661
662 let mut edits_to_revert = Vec::new();
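                    // For every unreviewed hunk that intersects one of the
                    // requested row ranges, queue an edit replacing the hunk's
                    // current text with the corresponding slice of the diff base.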
663 for edit in tracked_buffer.unreviewed_edits.edits() {
664 let new_range = tracked_buffer
665 .snapshot
666 .anchor_before(Point::new(edit.new.start, 0))
667 ..tracked_buffer.snapshot.anchor_after(cmp::min(
668 Point::new(edit.new.end, 0),
669 tracked_buffer.snapshot.max_point(),
670 ));
671 let new_row_range = new_range.start.to_point(buffer).row
672 ..new_range.end.to_point(buffer).row;
673
674 let mut revert = false;
675 while let Some(buffer_row_range) = buffer_row_ranges.peek() {
676 if buffer_row_range.end < new_row_range.start {
677 buffer_row_ranges.next();
678 } else if buffer_row_range.start > new_row_range.end {
679 break;
680 } else {
681 revert = true;
682 break;
683 }
684 }
685
686 if revert {
687 let old_range = tracked_buffer
688 .diff_base
689 .point_to_offset(Point::new(edit.old.start, 0))
690 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
691 Point::new(edit.old.end, 0),
692 tracked_buffer.diff_base.max_point(),
693 ));
694 let old_text = tracked_buffer
695 .diff_base
696 .chunks_in_range(old_range)
697 .collect::<String>();
698 edits_to_revert.push((new_range, old_text));
699 }
700 }
701
702 buffer.edit(edits_to_revert, None, cx);
703 });
704 self.project
705 .update(cx, |project, cx| project.save_buffer(buffer, cx))
706 }
707 }
708 }
709
710 pub fn keep_all_edits(&mut self, cx: &mut Context<Self>) {
711 self.tracked_buffers
712 .retain(|_buffer, tracked_buffer| match tracked_buffer.status {
713 TrackedBufferStatus::Deleted => false,
714 _ => {
715 tracked_buffer.unreviewed_edits.clear();
716 tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
717 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
718 true
719 }
720 });
721 cx.notify();
722 }
723
724 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
725 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
726 self.tracked_buffers
727 .iter()
728 .filter(|(_, tracked)| tracked.has_edits(cx))
729 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
730 .collect()
731 }
732
    /// Iterates over buffers that have changed since the model last read or edited them.
734 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
735 self.tracked_buffers
736 .iter()
737 .filter(|(buffer, tracked)| {
738 let buffer = buffer.read(cx);
739
740 tracked.version != buffer.version
741 && buffer
742 .file()
743 .map_or(false, |file| file.disk_state() != DiskState::Deleted)
744 })
745 .map(|(buffer, _)| buffer)
746 }
747}
748
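/// Applies `edits` (typically user-authored row edits) to `old_text`, the
/// agent's diff base, skipping any edit that conflicts with an unreviewed
/// agent edit in `patch`. Applied edits therefore disappear from the diff,
/// while conflicting regions remain attributed to the agent.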
749fn apply_non_conflicting_edits(
750 patch: &Patch<u32>,
751 edits: Vec<Edit<u32>>,
752 old_text: &mut Rope,
753 new_text: &Rope,
754) {
755 let mut old_edits = patch.edits().iter().cloned().peekable();
756 let mut new_edits = edits.into_iter().peekable();
757 let mut applied_delta = 0i32;
758 let mut rebased_delta = 0i32;
759
760 while let Some(mut new_edit) = new_edits.next() {
761 let mut conflict = false;
762
763 // Push all the old edits that are before this new edit or that intersect with it.
764 while let Some(old_edit) = old_edits.peek() {
765 if new_edit.old.end < old_edit.new.start
766 || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
767 {
768 break;
769 } else if new_edit.old.start > old_edit.new.end
770 || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
771 {
772 let old_edit = old_edits.next().unwrap();
773 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
774 } else {
775 conflict = true;
776 if new_edits
777 .peek()
778 .map_or(false, |next_edit| next_edit.old.overlaps(&old_edit.new))
779 {
780 new_edit = new_edits.next().unwrap();
781 } else {
782 let old_edit = old_edits.next().unwrap();
783 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
784 }
785 }
786 }
787
788 if !conflict {
789 // This edit doesn't intersect with any old edit, so we can apply it to the old text.
790 new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
791 new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
792 let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
793 ..old_text.point_to_offset(cmp::min(
794 Point::new(new_edit.old.end, 0),
795 old_text.max_point(),
796 ));
797 let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
798 ..new_text.point_to_offset(cmp::min(
799 Point::new(new_edit.new.end, 0),
800 new_text.max_point(),
801 ));
802
803 old_text.replace(
804 old_bytes,
805 &new_text.chunks_in_range(new_bytes).collect::<String>(),
806 );
807 applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
808 }
809 }
810}
811
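/// Computes row-granularity edits between two snapshots of the same buffer,
/// coalescing edits whose row ranges touch or overlap into a single edit.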
812fn diff_snapshots(
813 old_snapshot: &text::BufferSnapshot,
814 new_snapshot: &text::BufferSnapshot,
815) -> Vec<Edit<u32>> {
816 let mut edits = new_snapshot
817 .edits_since::<Point>(&old_snapshot.version)
818 .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
819 .peekable();
820 let mut row_edits = Vec::new();
821 while let Some(mut edit) = edits.next() {
822 while let Some(next_edit) = edits.peek() {
823 if edit.old.end >= next_edit.old.start {
824 edit.old.end = next_edit.old.end;
825 edit.new.end = next_edit.new.end;
826 edits.next();
827 } else {
828 break;
829 }
830 }
831 row_edits.push(edit);
832 }
833 row_edits
834}
835
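/// Converts a point-based edit into whole-row granularity. An insertion that
/// starts with a newline at the end of a line is attributed to the rows below
/// it; edits that already start and end at column zero map directly to rows;
/// anything else is widened to include the full rows it touches.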
836fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
837 if edit.old.start.column == old_text.line_len(edit.old.start.row)
838 && new_text
839 .chars_at(new_text.point_to_offset(edit.new.start))
840 .next()
841 == Some('\n')
842 && edit.old.start != old_text.max_point()
843 {
844 Edit {
845 old: edit.old.start.row + 1..edit.old.end.row + 1,
846 new: edit.new.start.row + 1..edit.new.end.row + 1,
847 }
848 } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
849 Edit {
850 old: edit.old.start.row..edit.old.end.row,
851 new: edit.new.start.row..edit.new.end.row,
852 }
853 } else {
854 Edit {
855 old: edit.old.start.row..edit.old.end.row + 1,
856 new: edit.new.start.row..edit.new.end.row + 1,
857 }
858 }
859}
860
861#[derive(Copy, Clone, Debug)]
862enum ChangeAuthor {
863 User,
864 Agent,
865}
866
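/// Lifecycle of a buffer the agent has touched. `Created` remembers the file
/// content that existed before the agent overwrote it (if any), so a rejection
/// can restore it; `Modified` and `Deleted` track in-place edits and deletions.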
867enum TrackedBufferStatus {
868 Created { existing_file_content: Option<Rope> },
869 Modified,
870 Deleted,
871}
872
873struct TrackedBuffer {
874 buffer: Entity<Buffer>,
875 diff_base: Rope,
876 unreviewed_edits: Patch<u32>,
877 status: TrackedBufferStatus,
878 version: clock::Global,
879 diff: Entity<BufferDiff>,
880 snapshot: text::BufferSnapshot,
881 diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
882 _open_lsp_handle: OpenLspBufferHandle,
883 _maintain_diff: Task<()>,
884 _subscription: Subscription,
885}
886
887impl TrackedBuffer {
888 fn has_edits(&self, cx: &App) -> bool {
889 self.diff
890 .read(cx)
891 .hunks(&self.buffer.read(cx), cx)
892 .next()
893 .is_some()
894 }
895
896 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
897 self.diff_update
898 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
899 .ok();
900 }
901}
902
903pub struct ChangedBuffer {
904 pub diff: Entity<BufferDiff>,
905}
906
907#[cfg(test)]
908mod tests {
909 use super::*;
910 use buffer_diff::DiffHunkStatusKind;
911 use gpui::TestAppContext;
912 use language::Point;
913 use project::{FakeFs, Fs, Project, RemoveOptions};
914 use rand::prelude::*;
915 use serde_json::json;
916 use settings::SettingsStore;
917 use std::env;
918 use util::{RandomCharIter, path};
919
920 #[ctor::ctor]
921 fn init_logger() {
922 zlog::init_test();
923 }
924
925 fn init_test(cx: &mut TestAppContext) {
926 cx.update(|cx| {
927 let settings_store = SettingsStore::test(cx);
928 cx.set_global(settings_store);
929 language::init(cx);
930 Project::init_settings(cx);
931 });
932 }
933
934 #[gpui::test(iterations = 10)]
935 async fn test_keep_edits(cx: &mut TestAppContext) {
936 init_test(cx);
937
938 let fs = FakeFs::new(cx.executor());
939 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
940 .await;
941 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
942 let action_log = cx.new(|_| ActionLog::new(project.clone()));
943 let file_path = project
944 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
945 .unwrap();
946 let buffer = project
947 .update(cx, |project, cx| project.open_buffer(file_path, cx))
948 .await
949 .unwrap();
950
951 cx.update(|cx| {
952 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
953 buffer.update(cx, |buffer, cx| {
954 buffer
955 .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
956 .unwrap()
957 });
958 buffer.update(cx, |buffer, cx| {
959 buffer
960 .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
961 .unwrap()
962 });
963 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
964 });
965 cx.run_until_parked();
966 assert_eq!(
967 buffer.read_with(cx, |buffer, _| buffer.text()),
968 "abc\ndEf\nghi\njkl\nmnO"
969 );
970 assert_eq!(
971 unreviewed_hunks(&action_log, cx),
972 vec![(
973 buffer.clone(),
974 vec![
975 HunkStatus {
976 range: Point::new(1, 0)..Point::new(2, 0),
977 diff_status: DiffHunkStatusKind::Modified,
978 old_text: "def\n".into(),
979 },
980 HunkStatus {
981 range: Point::new(4, 0)..Point::new(4, 3),
982 diff_status: DiffHunkStatusKind::Modified,
983 old_text: "mno".into(),
984 }
985 ],
986 )]
987 );
988
989 action_log.update(cx, |log, cx| {
990 log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), cx)
991 });
992 cx.run_until_parked();
993 assert_eq!(
994 unreviewed_hunks(&action_log, cx),
995 vec![(
996 buffer.clone(),
997 vec![HunkStatus {
998 range: Point::new(1, 0)..Point::new(2, 0),
999 diff_status: DiffHunkStatusKind::Modified,
1000 old_text: "def\n".into(),
1001 }],
1002 )]
1003 );
1004
1005 action_log.update(cx, |log, cx| {
1006 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), cx)
1007 });
1008 cx.run_until_parked();
1009 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1010 }
1011
1012 #[gpui::test(iterations = 10)]
1013 async fn test_deletions(cx: &mut TestAppContext) {
1014 init_test(cx);
1015
1016 let fs = FakeFs::new(cx.executor());
1017 fs.insert_tree(
1018 path!("/dir"),
1019 json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
1020 )
1021 .await;
1022 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1023 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1024 let file_path = project
1025 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1026 .unwrap();
1027 let buffer = project
1028 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1029 .await
1030 .unwrap();
1031
1032 cx.update(|cx| {
1033 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1034 buffer.update(cx, |buffer, cx| {
1035 buffer
1036 .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
1037 .unwrap();
1038 buffer.finalize_last_transaction();
1039 });
1040 buffer.update(cx, |buffer, cx| {
1041 buffer
1042 .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
1043 .unwrap();
1044 buffer.finalize_last_transaction();
1045 });
1046 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1047 });
1048 cx.run_until_parked();
1049 assert_eq!(
1050 buffer.read_with(cx, |buffer, _| buffer.text()),
1051 "abc\nghi\njkl\npqr"
1052 );
1053 assert_eq!(
1054 unreviewed_hunks(&action_log, cx),
1055 vec![(
1056 buffer.clone(),
1057 vec![
1058 HunkStatus {
1059 range: Point::new(1, 0)..Point::new(1, 0),
1060 diff_status: DiffHunkStatusKind::Deleted,
1061 old_text: "def\n".into(),
1062 },
1063 HunkStatus {
1064 range: Point::new(3, 0)..Point::new(3, 0),
1065 diff_status: DiffHunkStatusKind::Deleted,
1066 old_text: "mno\n".into(),
1067 }
1068 ],
1069 )]
1070 );
1071
1072 buffer.update(cx, |buffer, cx| buffer.undo(cx));
1073 cx.run_until_parked();
1074 assert_eq!(
1075 buffer.read_with(cx, |buffer, _| buffer.text()),
1076 "abc\nghi\njkl\nmno\npqr"
1077 );
1078 assert_eq!(
1079 unreviewed_hunks(&action_log, cx),
1080 vec![(
1081 buffer.clone(),
1082 vec![HunkStatus {
1083 range: Point::new(1, 0)..Point::new(1, 0),
1084 diff_status: DiffHunkStatusKind::Deleted,
1085 old_text: "def\n".into(),
1086 }],
1087 )]
1088 );
1089
1090 action_log.update(cx, |log, cx| {
1091 log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), cx)
1092 });
1093 cx.run_until_parked();
1094 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1095 }
1096
1097 #[gpui::test(iterations = 10)]
1098 async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
1099 init_test(cx);
1100
1101 let fs = FakeFs::new(cx.executor());
1102 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1103 .await;
1104 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1105 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1106 let file_path = project
1107 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1108 .unwrap();
1109 let buffer = project
1110 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1111 .await
1112 .unwrap();
1113
1114 cx.update(|cx| {
1115 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1116 buffer.update(cx, |buffer, cx| {
1117 buffer
1118 .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
1119 .unwrap()
1120 });
1121 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1122 });
1123 cx.run_until_parked();
1124 assert_eq!(
1125 buffer.read_with(cx, |buffer, _| buffer.text()),
1126 "abc\ndeF\nGHI\njkl\nmno"
1127 );
1128 assert_eq!(
1129 unreviewed_hunks(&action_log, cx),
1130 vec![(
1131 buffer.clone(),
1132 vec![HunkStatus {
1133 range: Point::new(1, 0)..Point::new(3, 0),
1134 diff_status: DiffHunkStatusKind::Modified,
1135 old_text: "def\nghi\n".into(),
1136 }],
1137 )]
1138 );
1139
1140 buffer.update(cx, |buffer, cx| {
1141 buffer.edit(
1142 [
1143 (Point::new(0, 2)..Point::new(0, 2), "X"),
1144 (Point::new(3, 0)..Point::new(3, 0), "Y"),
1145 ],
1146 None,
1147 cx,
1148 )
1149 });
1150 cx.run_until_parked();
1151 assert_eq!(
1152 buffer.read_with(cx, |buffer, _| buffer.text()),
1153 "abXc\ndeF\nGHI\nYjkl\nmno"
1154 );
1155 assert_eq!(
1156 unreviewed_hunks(&action_log, cx),
1157 vec![(
1158 buffer.clone(),
1159 vec![HunkStatus {
1160 range: Point::new(1, 0)..Point::new(3, 0),
1161 diff_status: DiffHunkStatusKind::Modified,
1162 old_text: "def\nghi\n".into(),
1163 }],
1164 )]
1165 );
1166
1167 buffer.update(cx, |buffer, cx| {
1168 buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
1169 });
1170 cx.run_until_parked();
1171 assert_eq!(
1172 buffer.read_with(cx, |buffer, _| buffer.text()),
1173 "abXc\ndZeF\nGHI\nYjkl\nmno"
1174 );
1175 assert_eq!(
1176 unreviewed_hunks(&action_log, cx),
1177 vec![(
1178 buffer.clone(),
1179 vec![HunkStatus {
1180 range: Point::new(1, 0)..Point::new(3, 0),
1181 diff_status: DiffHunkStatusKind::Modified,
1182 old_text: "def\nghi\n".into(),
1183 }],
1184 )]
1185 );
1186
1187 action_log.update(cx, |log, cx| {
1188 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
1189 });
1190 cx.run_until_parked();
1191 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1192 }
1193
1194 #[gpui::test(iterations = 10)]
1195 async fn test_creating_files(cx: &mut TestAppContext) {
1196 init_test(cx);
1197
1198 let fs = FakeFs::new(cx.executor());
1199 fs.insert_tree(path!("/dir"), json!({})).await;
1200 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1201 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1202 let file_path = project
1203 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1204 .unwrap();
1205
1206 let buffer = project
1207 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1208 .await
1209 .unwrap();
1210 cx.update(|cx| {
1211 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1212 buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
1213 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1214 });
1215 project
1216 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1217 .await
1218 .unwrap();
1219 cx.run_until_parked();
1220 assert_eq!(
1221 unreviewed_hunks(&action_log, cx),
1222 vec![(
1223 buffer.clone(),
1224 vec![HunkStatus {
1225 range: Point::new(0, 0)..Point::new(0, 5),
1226 diff_status: DiffHunkStatusKind::Added,
1227 old_text: "".into(),
1228 }],
1229 )]
1230 );
1231
1232 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
1233 cx.run_until_parked();
1234 assert_eq!(
1235 unreviewed_hunks(&action_log, cx),
1236 vec![(
1237 buffer.clone(),
1238 vec![HunkStatus {
1239 range: Point::new(0, 0)..Point::new(0, 6),
1240 diff_status: DiffHunkStatusKind::Added,
1241 old_text: "".into(),
1242 }],
1243 )]
1244 );
1245
1246 action_log.update(cx, |log, cx| {
1247 log.keep_edits_in_range(buffer.clone(), 0..5, cx)
1248 });
1249 cx.run_until_parked();
1250 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1251 }
1252
1253 #[gpui::test(iterations = 10)]
1254 async fn test_overwriting_files(cx: &mut TestAppContext) {
1255 init_test(cx);
1256
1257 let fs = FakeFs::new(cx.executor());
1258 fs.insert_tree(
1259 path!("/dir"),
1260 json!({
1261 "file1": "Lorem ipsum dolor"
1262 }),
1263 )
1264 .await;
1265 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1266 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1267 let file_path = project
1268 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1269 .unwrap();
1270
1271 let buffer = project
1272 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1273 .await
1274 .unwrap();
1275 cx.update(|cx| {
1276 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1277 buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
1278 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1279 });
1280 project
1281 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1282 .await
1283 .unwrap();
1284 cx.run_until_parked();
1285 assert_eq!(
1286 unreviewed_hunks(&action_log, cx),
1287 vec![(
1288 buffer.clone(),
1289 vec![HunkStatus {
1290 range: Point::new(0, 0)..Point::new(0, 19),
1291 diff_status: DiffHunkStatusKind::Added,
1292 old_text: "".into(),
1293 }],
1294 )]
1295 );
1296
1297 action_log
1298 .update(cx, |log, cx| {
1299 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
1300 })
1301 .await
1302 .unwrap();
1303 cx.run_until_parked();
1304 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1305 assert_eq!(
1306 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1307 "Lorem ipsum dolor"
1308 );
1309 }
1310
1311 #[gpui::test(iterations = 10)]
1312 async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
1313 init_test(cx);
1314
1315 let fs = FakeFs::new(cx.executor());
1316 fs.insert_tree(
1317 path!("/dir"),
1318 json!({
1319 "file1": "Lorem ipsum dolor"
1320 }),
1321 )
1322 .await;
1323 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1324 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1325 let file_path = project
1326 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1327 .unwrap();
1328
1329 let buffer = project
1330 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1331 .await
1332 .unwrap();
1333 cx.update(|cx| {
1334 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1335 buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
1336 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1337 });
1338 project
1339 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1340 .await
1341 .unwrap();
1342 cx.run_until_parked();
1343 assert_eq!(
1344 unreviewed_hunks(&action_log, cx),
1345 vec![(
1346 buffer.clone(),
1347 vec![HunkStatus {
1348 range: Point::new(0, 0)..Point::new(0, 37),
1349 diff_status: DiffHunkStatusKind::Modified,
1350 old_text: "Lorem ipsum dolor".into(),
1351 }],
1352 )]
1353 );
1354
1355 cx.update(|cx| {
1356 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1357 buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
1358 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1359 });
1360 project
1361 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1362 .await
1363 .unwrap();
1364 cx.run_until_parked();
1365 assert_eq!(
1366 unreviewed_hunks(&action_log, cx),
1367 vec![(
1368 buffer.clone(),
1369 vec![HunkStatus {
1370 range: Point::new(0, 0)..Point::new(0, 9),
1371 diff_status: DiffHunkStatusKind::Added,
1372 old_text: "".into(),
1373 }],
1374 )]
1375 );
1376
1377 action_log
1378 .update(cx, |log, cx| {
1379 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
1380 })
1381 .await
1382 .unwrap();
1383 cx.run_until_parked();
1384 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1385 assert_eq!(
1386 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1387 "Lorem ipsum dolor"
1388 );
1389 }
1390
1391 #[gpui::test(iterations = 10)]
1392 async fn test_deleting_files(cx: &mut TestAppContext) {
1393 init_test(cx);
1394
1395 let fs = FakeFs::new(cx.executor());
1396 fs.insert_tree(
1397 path!("/dir"),
1398 json!({"file1": "lorem\n", "file2": "ipsum\n"}),
1399 )
1400 .await;
1401
1402 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1403 let file1_path = project
1404 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1405 .unwrap();
1406 let file2_path = project
1407 .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
1408 .unwrap();
1409
1410 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1411 let buffer1 = project
1412 .update(cx, |project, cx| {
1413 project.open_buffer(file1_path.clone(), cx)
1414 })
1415 .await
1416 .unwrap();
1417 let buffer2 = project
1418 .update(cx, |project, cx| {
1419 project.open_buffer(file2_path.clone(), cx)
1420 })
1421 .await
1422 .unwrap();
1423
1424 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
1425 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
1426 project
1427 .update(cx, |project, cx| {
1428 project.delete_file(file1_path.clone(), false, cx)
1429 })
1430 .unwrap()
1431 .await
1432 .unwrap();
1433 project
1434 .update(cx, |project, cx| {
1435 project.delete_file(file2_path.clone(), false, cx)
1436 })
1437 .unwrap()
1438 .await
1439 .unwrap();
1440 cx.run_until_parked();
1441 assert_eq!(
1442 unreviewed_hunks(&action_log, cx),
1443 vec![
1444 (
1445 buffer1.clone(),
1446 vec![HunkStatus {
1447 range: Point::new(0, 0)..Point::new(0, 0),
1448 diff_status: DiffHunkStatusKind::Deleted,
1449 old_text: "lorem\n".into(),
1450 }]
1451 ),
1452 (
1453 buffer2.clone(),
1454 vec![HunkStatus {
1455 range: Point::new(0, 0)..Point::new(0, 0),
1456 diff_status: DiffHunkStatusKind::Deleted,
1457 old_text: "ipsum\n".into(),
1458 }],
1459 )
1460 ]
1461 );
1462
1463 // Simulate file1 being recreated externally.
1464 fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
1465 .await;
1466
1467 // Simulate file2 being recreated by a tool.
1468 let buffer2 = project
1469 .update(cx, |project, cx| project.open_buffer(file2_path, cx))
1470 .await
1471 .unwrap();
1472 action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
1473 buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
1474 action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
1475 project
1476 .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
1477 .await
1478 .unwrap();
1479
1480 cx.run_until_parked();
1481 assert_eq!(
1482 unreviewed_hunks(&action_log, cx),
1483 vec![(
1484 buffer2.clone(),
1485 vec![HunkStatus {
1486 range: Point::new(0, 0)..Point::new(0, 5),
1487 diff_status: DiffHunkStatusKind::Added,
1488 old_text: "".into(),
1489 }],
1490 )]
1491 );
1492
1493 // Simulate file2 being deleted externally.
1494 fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
1495 .await
1496 .unwrap();
1497 cx.run_until_parked();
1498 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1499 }
1500
1501 #[gpui::test(iterations = 10)]
1502 async fn test_reject_edits(cx: &mut TestAppContext) {
1503 init_test(cx);
1504
1505 let fs = FakeFs::new(cx.executor());
1506 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1507 .await;
1508 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1509 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1510 let file_path = project
1511 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1512 .unwrap();
1513 let buffer = project
1514 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1515 .await
1516 .unwrap();
1517
1518 cx.update(|cx| {
1519 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1520 buffer.update(cx, |buffer, cx| {
1521 buffer
1522 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1523 .unwrap()
1524 });
1525 buffer.update(cx, |buffer, cx| {
1526 buffer
1527 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1528 .unwrap()
1529 });
1530 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1531 });
1532 cx.run_until_parked();
1533 assert_eq!(
1534 buffer.read_with(cx, |buffer, _| buffer.text()),
1535 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1536 );
1537 assert_eq!(
1538 unreviewed_hunks(&action_log, cx),
1539 vec![(
1540 buffer.clone(),
1541 vec![
1542 HunkStatus {
1543 range: Point::new(1, 0)..Point::new(3, 0),
1544 diff_status: DiffHunkStatusKind::Modified,
1545 old_text: "def\n".into(),
1546 },
1547 HunkStatus {
1548 range: Point::new(5, 0)..Point::new(5, 3),
1549 diff_status: DiffHunkStatusKind::Modified,
1550 old_text: "mno".into(),
1551 }
1552 ],
1553 )]
1554 );
1555
1556 // If the rejected range doesn't overlap with any hunk, we ignore it.
1557 action_log
1558 .update(cx, |log, cx| {
1559 log.reject_edits_in_ranges(
1560 buffer.clone(),
1561 vec![Point::new(4, 0)..Point::new(4, 0)],
1562 cx,
1563 )
1564 })
1565 .await
1566 .unwrap();
1567 cx.run_until_parked();
1568 assert_eq!(
1569 buffer.read_with(cx, |buffer, _| buffer.text()),
1570 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1571 );
1572 assert_eq!(
1573 unreviewed_hunks(&action_log, cx),
1574 vec![(
1575 buffer.clone(),
1576 vec![
1577 HunkStatus {
1578 range: Point::new(1, 0)..Point::new(3, 0),
1579 diff_status: DiffHunkStatusKind::Modified,
1580 old_text: "def\n".into(),
1581 },
1582 HunkStatus {
1583 range: Point::new(5, 0)..Point::new(5, 3),
1584 diff_status: DiffHunkStatusKind::Modified,
1585 old_text: "mno".into(),
1586 }
1587 ],
1588 )]
1589 );
1590
1591 action_log
1592 .update(cx, |log, cx| {
1593 log.reject_edits_in_ranges(
1594 buffer.clone(),
1595 vec![Point::new(0, 0)..Point::new(1, 0)],
1596 cx,
1597 )
1598 })
1599 .await
1600 .unwrap();
1601 cx.run_until_parked();
1602 assert_eq!(
1603 buffer.read_with(cx, |buffer, _| buffer.text()),
1604 "abc\ndef\nghi\njkl\nmnO"
1605 );
1606 assert_eq!(
1607 unreviewed_hunks(&action_log, cx),
1608 vec![(
1609 buffer.clone(),
1610 vec![HunkStatus {
1611 range: Point::new(4, 0)..Point::new(4, 3),
1612 diff_status: DiffHunkStatusKind::Modified,
1613 old_text: "mno".into(),
1614 }],
1615 )]
1616 );
1617
1618 action_log
1619 .update(cx, |log, cx| {
1620 log.reject_edits_in_ranges(
1621 buffer.clone(),
1622 vec![Point::new(4, 0)..Point::new(4, 0)],
1623 cx,
1624 )
1625 })
1626 .await
1627 .unwrap();
1628 cx.run_until_parked();
1629 assert_eq!(
1630 buffer.read_with(cx, |buffer, _| buffer.text()),
1631 "abc\ndef\nghi\njkl\nmno"
1632 );
1633 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1634 }
1635
1636 #[gpui::test(iterations = 10)]
1637 async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
1638 init_test(cx);
1639
1640 let fs = FakeFs::new(cx.executor());
1641 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1642 .await;
1643 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1644 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1645 let file_path = project
1646 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1647 .unwrap();
1648 let buffer = project
1649 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1650 .await
1651 .unwrap();
1652
1653 cx.update(|cx| {
1654 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1655 buffer.update(cx, |buffer, cx| {
1656 buffer
1657 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1658 .unwrap()
1659 });
1660 buffer.update(cx, |buffer, cx| {
1661 buffer
1662 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1663 .unwrap()
1664 });
1665 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1666 });
1667 cx.run_until_parked();
1668 assert_eq!(
1669 buffer.read_with(cx, |buffer, _| buffer.text()),
1670 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1671 );
1672 assert_eq!(
1673 unreviewed_hunks(&action_log, cx),
1674 vec![(
1675 buffer.clone(),
1676 vec![
1677 HunkStatus {
1678 range: Point::new(1, 0)..Point::new(3, 0),
1679 diff_status: DiffHunkStatusKind::Modified,
1680 old_text: "def\n".into(),
1681 },
1682 HunkStatus {
1683 range: Point::new(5, 0)..Point::new(5, 3),
1684 diff_status: DiffHunkStatusKind::Modified,
1685 old_text: "mno".into(),
1686 }
1687 ],
1688 )]
1689 );
1690
1691 action_log.update(cx, |log, cx| {
1692 let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
1693 ..buffer.read(cx).anchor_before(Point::new(1, 0));
1694 let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
1695 ..buffer.read(cx).anchor_before(Point::new(5, 3));
1696
1697 log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], cx)
1698 .detach();
1699 assert_eq!(
1700 buffer.read_with(cx, |buffer, _| buffer.text()),
1701 "abc\ndef\nghi\njkl\nmno"
1702 );
1703 });
1704 cx.run_until_parked();
1705 assert_eq!(
1706 buffer.read_with(cx, |buffer, _| buffer.text()),
1707 "abc\ndef\nghi\njkl\nmno"
1708 );
1709 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1710 }
1711
1712 #[gpui::test(iterations = 10)]
1713 async fn test_reject_deleted_file(cx: &mut TestAppContext) {
1714 init_test(cx);
1715
1716 let fs = FakeFs::new(cx.executor());
1717 fs.insert_tree(path!("/dir"), json!({"file": "content"}))
1718 .await;
1719 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1720 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1721 let file_path = project
1722 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1723 .unwrap();
1724 let buffer = project
1725 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
1726 .await
1727 .unwrap();
1728
1729 cx.update(|cx| {
1730 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
1731 });
1732 project
1733 .update(cx, |project, cx| {
1734 project.delete_file(file_path.clone(), false, cx)
1735 })
1736 .unwrap()
1737 .await
1738 .unwrap();
1739 cx.run_until_parked();
1740 assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
1741 assert_eq!(
1742 unreviewed_hunks(&action_log, cx),
1743 vec![(
1744 buffer.clone(),
1745 vec![HunkStatus {
1746 range: Point::new(0, 0)..Point::new(0, 0),
1747 diff_status: DiffHunkStatusKind::Deleted,
1748 old_text: "content".into(),
1749 }]
1750 )]
1751 );
1752
1753 action_log
1754 .update(cx, |log, cx| {
1755 log.reject_edits_in_ranges(
1756 buffer.clone(),
1757 vec![Point::new(0, 0)..Point::new(0, 0)],
1758 cx,
1759 )
1760 })
1761 .await
1762 .unwrap();
1763 cx.run_until_parked();
1764 assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
1765 assert!(fs.is_file(path!("/dir/file").as_ref()).await);
1766 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1767 }
1768
1769 #[gpui::test(iterations = 10)]
1770 async fn test_reject_created_file(cx: &mut TestAppContext) {
1771 init_test(cx);
1772
1773 let fs = FakeFs::new(cx.executor());
1774 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1775 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1776 let file_path = project
1777 .read_with(cx, |project, cx| {
1778 project.find_project_path("dir/new_file", cx)
1779 })
1780 .unwrap();
1781 let buffer = project
1782 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1783 .await
1784 .unwrap();
1785 cx.update(|cx| {
1786 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1787 buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
1788 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1789 });
1790 project
1791 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1792 .await
1793 .unwrap();
1794 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
1795 cx.run_until_parked();
1796 assert_eq!(
1797 unreviewed_hunks(&action_log, cx),
1798 vec![(
1799 buffer.clone(),
1800 vec![HunkStatus {
1801 range: Point::new(0, 0)..Point::new(0, 7),
1802 diff_status: DiffHunkStatusKind::Added,
1803 old_text: "".into(),
1804 }],
1805 )]
1806 );
1807
1808 action_log
1809 .update(cx, |log, cx| {
1810 log.reject_edits_in_ranges(
1811 buffer.clone(),
1812 vec![Point::new(0, 0)..Point::new(0, 11)],
1813 cx,
1814 )
1815 })
1816 .await
1817 .unwrap();
1818 cx.run_until_parked();
1819 assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
1820 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1821 }
1822
1823 #[gpui::test]
1824 async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
1825 init_test(cx);
1826
1827 let fs = FakeFs::new(cx.executor());
1828 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1829 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1830
1831 let file_path = project
1832 .read_with(cx, |project, cx| {
1833 project.find_project_path("dir/new_file", cx)
1834 })
1835 .unwrap();
1836 let buffer = project
1837 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1838 .await
1839 .unwrap();
1840
1841 // AI creates file with initial content
1842 cx.update(|cx| {
1843 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1844 buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
1845 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1846 });
1847
1848 project
1849 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1850 .await
1851 .unwrap();
1852
1853 cx.run_until_parked();
1854
1855 // User makes additional edits
1856 cx.update(|cx| {
1857 buffer.update(cx, |buffer, cx| {
1858 buffer.edit([(10..10, "\nuser added this line")], None, cx);
1859 });
1860 });
1861
1862 project
1863 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1864 .await
1865 .unwrap();
1866
1867 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
1868
1869 // Reject all
1870 action_log
1871 .update(cx, |log, cx| {
1872 log.reject_edits_in_ranges(
1873 buffer.clone(),
1874 vec![Point::new(0, 0)..Point::new(100, 0)],
1875 cx,
1876 )
1877 })
1878 .await
1879 .unwrap();
1880 cx.run_until_parked();
1881
1882 // File should still contain all the content
1883 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
1884
1885 let content = buffer.read_with(cx, |buffer, _| buffer.text());
1886 assert_eq!(content, "ai content\nuser added this line");
1887 }
1888
1889 #[gpui::test(iterations = 100)]
1890 async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
1891 init_test(cx);
1892
1893 let operations = env::var("OPERATIONS")
1894 .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
1895 .unwrap_or(20);
1896
1897 let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
1898 let fs = FakeFs::new(cx.executor());
1899 fs.insert_tree(path!("/dir"), json!({"file": text})).await;
1900 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1901 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1902 let file_path = project
1903 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1904 .unwrap();
1905 let buffer = project
1906 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1907 .await
1908 .unwrap();
1909
1910 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1911
        for _ in 0..operations {
            match rng.gen_range(0..100) {
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, cx)
                    });
                }
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            log.reject_edits_in_ranges(buffer.clone(), vec![range], cx)
                        })
                        .await
                        .unwrap();
                }
                _ => {
                    let is_agent_edit = rng.gen_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            if rng.gen_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

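        // Replays the unreviewed edits on top of the diff base and asserts that the
        // result matches the current buffer text, verifying that the tracked patch
        // never drifts out of sync with the buffer.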
        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
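                // Edits are replayed in order, so once the earlier edits have been applied,
                // `edit.new` row coordinates line up with the partially rebuilt `old_text`.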
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }

    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt".into(), "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

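        // Mark the buffer as read, then apply a single batch of agent edits touching
        // the start, middle, and end of the file.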
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
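        // All five agent edits should be reported as unreviewed hunks.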
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt".into(), "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Make another commit: it now matches the GGG edit, but puts different content
        // on the line where NEW was inserted
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[(
                "file.txt".into(),
                "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into(),
            )],
            "0000002",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt".into(), "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

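    /// A simplified view of a single diff hunk, used for assertions in these tests.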
    #[derive(Debug, Clone, PartialEq, Eq)]
    struct HunkStatus {
        range: Range<Point>,
        diff_status: DiffHunkStatusKind,
        old_text: String,
    }

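    /// Collects, for every buffer the action log reports as changed, the unreviewed
    /// hunks with their ranges, statuses, and corresponding text from the diff base.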
    fn unreviewed_hunks(
        action_log: &Entity<ActionLog>,
        cx: &TestAppContext,
    ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
        cx.read(|cx| {
            action_log
                .read(cx)
                .changed_buffers(cx)
                .into_iter()
                .map(|(buffer, diff)| {
                    let snapshot = buffer.read(cx).snapshot();
                    (
                        buffer,
                        diff.read(cx)
                            .hunks(&snapshot, cx)
                            .map(|hunk| HunkStatus {
                                diff_status: hunk.status().kind,
                                range: hunk.range,
                                old_text: diff
                                    .read(cx)
                                    .base_text()
                                    .text_for_range(hunk.diff_base_byte_range)
                                    .collect(),
                            })
                            .collect(),
                    )
                })
                .collect()
        })
    }
}