use anyhow::{Context as _, Result};
use buffer_diff::BufferDiff;
use collections::BTreeMap;
use futures::{FutureExt, StreamExt, channel::mpsc};
use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
use std::{cmp, ops::Range, sync::Arc};
use text::{Edit, Patch, Rope};
use util::RangeExt;
11
/// Tracks actions performed by tools in a thread.
pub struct ActionLog {
    /// Buffers that we want to notify the model about when they change.
    tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
    /// Has the model edited a file since it last checked diagnostics?
    edited_since_project_diagnostics_check: bool,
    /// The project this action log is associated with.
    project: Entity<Project>,
}
21
22impl ActionLog {
    /// Creates a new, empty action log associated with the given project.
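    ///
    /// A minimal construction sketch, mirroring the tests at the bottom of this
    /// file (`project` and `cx` are assumed to come from the caller):
    ///
    /// ```ignore
    /// let action_log = cx.new(|_| ActionLog::new(project.clone()));
    /// ```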
24 pub fn new(project: Entity<Project>) -> Self {
25 Self {
26 tracked_buffers: BTreeMap::default(),
27 edited_since_project_diagnostics_check: false,
28 project,
29 }
30 }
31
32 pub fn project(&self) -> &Entity<Project> {
33 &self.project
34 }
35
    /// Records that the project's diagnostics have been checked, clearing the
    /// edited-since-check flag.
    pub fn checked_project_diagnostics(&mut self) {
        self.edited_since_project_diagnostics_check = false;
    }

    /// Returns true if any files have been edited since the last project diagnostics check.
    pub fn has_edited_files_since_project_diagnostics_check(&self) -> bool {
        self.edited_since_project_diagnostics_check
    }
45
46 fn track_buffer_internal(
47 &mut self,
48 buffer: Entity<Buffer>,
49 is_created: bool,
50 cx: &mut Context<Self>,
51 ) -> &mut TrackedBuffer {
52 let status = if is_created {
53 if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
54 match tracked.status {
55 TrackedBufferStatus::Created {
56 existing_file_content,
57 } => TrackedBufferStatus::Created {
58 existing_file_content,
59 },
60 TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
61 TrackedBufferStatus::Created {
62 existing_file_content: Some(tracked.diff_base),
63 }
64 }
65 }
66 } else if buffer
67 .read(cx)
68 .file()
69 .map_or(false, |file| file.disk_state().exists())
70 {
71 TrackedBufferStatus::Created {
72 existing_file_content: Some(buffer.read(cx).as_rope().clone()),
73 }
74 } else {
75 TrackedBufferStatus::Created {
76 existing_file_content: None,
77 }
78 }
79 } else {
80 TrackedBufferStatus::Modified
81 };
82
83 let tracked_buffer = self
84 .tracked_buffers
85 .entry(buffer.clone())
86 .or_insert_with(|| {
87 let open_lsp_handle = self.project.update(cx, |project, cx| {
88 project.register_buffer_with_language_servers(&buffer, cx)
89 });
90
91 let text_snapshot = buffer.read(cx).text_snapshot();
92 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
93 let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
94 let diff_base;
95 let unreviewed_edits;
96 if is_created {
97 diff_base = Rope::default();
98 unreviewed_edits = Patch::new(vec![Edit {
99 old: 0..1,
100 new: 0..text_snapshot.max_point().row + 1,
101 }])
102 } else {
103 diff_base = buffer.read(cx).as_rope().clone();
104 unreviewed_edits = Patch::default();
105 }
106 TrackedBuffer {
107 buffer: buffer.clone(),
108 diff_base,
                    unreviewed_edits,
110 snapshot: text_snapshot.clone(),
111 status,
112 version: buffer.read(cx).version(),
113 diff,
114 diff_update: diff_update_tx,
115 _open_lsp_handle: open_lsp_handle,
116 _maintain_diff: cx.spawn({
117 let buffer = buffer.clone();
118 async move |this, cx| {
119 Self::maintain_diff(this, buffer, diff_update_rx, cx)
120 .await
121 .ok();
122 }
123 }),
124 _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
125 }
126 });
127 tracked_buffer.version = buffer.read(cx).version();
128 tracked_buffer
129 }
130
131 fn handle_buffer_event(
132 &mut self,
133 buffer: Entity<Buffer>,
134 event: &BufferEvent,
135 cx: &mut Context<Self>,
136 ) {
137 match event {
138 BufferEvent::Edited { .. } => self.handle_buffer_edited(buffer, cx),
139 BufferEvent::FileHandleChanged => {
140 self.handle_buffer_file_changed(buffer, cx);
141 }
142 _ => {}
143 };
144 }
145
146 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
147 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
148 return;
149 };
150 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
151 }
152
153 fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
154 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
155 return;
156 };
157
158 match tracked_buffer.status {
159 TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
160 if buffer
161 .read(cx)
162 .file()
163 .map_or(false, |file| file.disk_state() == DiskState::Deleted)
164 {
165 // If the buffer had been edited by a tool, but it got
166 // deleted externally, we want to stop tracking it.
167 self.tracked_buffers.remove(&buffer);
168 }
169 cx.notify();
170 }
171 TrackedBufferStatus::Deleted => {
172 if buffer
173 .read(cx)
174 .file()
175 .map_or(false, |file| file.disk_state() != DiskState::Deleted)
176 {
177 // If the buffer had been deleted by a tool, but it got
178 // resurrected externally, we want to clear the edits we
179 // were tracking and reset the buffer's state.
180 self.tracked_buffers.remove(&buffer);
181 self.track_buffer_internal(buffer, false, cx);
182 }
183 cx.notify();
184 }
185 }
186 }
187
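    /// Long-running task that keeps a tracked buffer's review diff up to date,
    /// reacting both to buffer snapshot updates (sent through `diff_update`) and to
    /// git HEAD changes, so that committed edits count as reviewed.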
188 async fn maintain_diff(
189 this: WeakEntity<Self>,
190 buffer: Entity<Buffer>,
191 mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
192 cx: &mut AsyncApp,
193 ) -> Result<()> {
194 let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
195 let git_diff = this
196 .update(cx, |this, cx| {
197 this.project.update(cx, |project, cx| {
198 project.open_uncommitted_diff(buffer.clone(), cx)
199 })
200 })?
201 .await
202 .ok();
203 let buffer_repo = git_store.read_with(cx, |git_store, cx| {
204 git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
205 })?;
206
207 let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
208 let _repo_subscription =
209 if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
210 cx.update(|cx| {
211 let mut old_head = buffer_repo.read(cx).head_commit.clone();
212 Some(cx.subscribe(git_diff, move |_, event, cx| match event {
213 buffer_diff::BufferDiffEvent::DiffChanged { .. } => {
214 let new_head = buffer_repo.read(cx).head_commit.clone();
215 if new_head != old_head {
216 old_head = new_head;
217 git_diff_updates_tx.send(()).ok();
218 }
219 }
220 _ => {}
221 }))
222 })?
223 } else {
224 None
225 };
226
227 loop {
228 futures::select_biased! {
229 buffer_update = buffer_updates.next() => {
230 if let Some((author, buffer_snapshot)) = buffer_update {
231 Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
232 } else {
233 break;
234 }
235 }
236 _ = git_diff_updates_rx.changed().fuse() => {
237 if let Some(git_diff) = git_diff.as_ref() {
238 Self::keep_committed_edits(&this, &buffer, &git_diff, cx).await?;
239 }
240 }
241 }
242 }
243
244 Ok(())
245 }
246
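    /// Handles a buffer snapshot update: computes the row edits since the last
    /// tracked snapshot and, for user-authored changes, rebases the non-conflicting
    /// ones into the diff base before refreshing the diff.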
247 async fn track_edits(
248 this: &WeakEntity<ActionLog>,
249 buffer: &Entity<Buffer>,
250 author: ChangeAuthor,
251 buffer_snapshot: text::BufferSnapshot,
252 cx: &mut AsyncApp,
253 ) -> Result<()> {
254 let rebase = this.read_with(cx, |this, cx| {
255 let tracked_buffer = this
256 .tracked_buffers
257 .get(buffer)
258 .context("buffer not tracked")?;
259
260 let rebase = cx.background_spawn({
261 let mut base_text = tracked_buffer.diff_base.clone();
262 let old_snapshot = tracked_buffer.snapshot.clone();
263 let new_snapshot = buffer_snapshot.clone();
264 let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
265 async move {
266 let edits = diff_snapshots(&old_snapshot, &new_snapshot);
267 if let ChangeAuthor::User = author {
268 apply_non_conflicting_edits(
269 &unreviewed_edits,
270 edits,
271 &mut base_text,
272 new_snapshot.as_rope(),
273 );
274 }
275 (Arc::new(base_text.to_string()), base_text)
276 }
277 });
278
279 anyhow::Ok(rebase)
280 })??;
281 let (new_base_text, new_diff_base) = rebase.await;
282 Self::update_diff(
283 this,
284 buffer,
285 buffer_snapshot,
286 new_base_text,
287 new_diff_base,
288 cx,
289 )
290 .await
291 }
292
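    /// When the git HEAD changes, accepts any unreviewed edit whose content matches
    /// what was committed, treating a commit as an implicit review.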
293 async fn keep_committed_edits(
294 this: &WeakEntity<ActionLog>,
295 buffer: &Entity<Buffer>,
296 git_diff: &Entity<BufferDiff>,
297 cx: &mut AsyncApp,
298 ) -> Result<()> {
299 let buffer_snapshot = this.read_with(cx, |this, _cx| {
300 let tracked_buffer = this
301 .tracked_buffers
302 .get(buffer)
303 .context("buffer not tracked")?;
304 anyhow::Ok(tracked_buffer.snapshot.clone())
305 })??;
306 let (new_base_text, new_diff_base) = this
307 .read_with(cx, |this, cx| {
308 let tracked_buffer = this
309 .tracked_buffers
310 .get(buffer)
311 .context("buffer not tracked")?;
312 let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
313 let agent_diff_base = tracked_buffer.diff_base.clone();
314 let git_diff_base = git_diff.read(cx).base_text().as_rope().clone();
315 let buffer_text = tracked_buffer.snapshot.as_rope().clone();
316 anyhow::Ok(cx.background_spawn(async move {
317 let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
318 let committed_edits = language::line_diff(
319 &agent_diff_base.to_string(),
320 &git_diff_base.to_string(),
321 )
322 .into_iter()
323 .map(|(old, new)| Edit { old, new });
324
325 let mut new_agent_diff_base = agent_diff_base.clone();
326 let mut row_delta = 0i32;
327 for committed in committed_edits {
328 while let Some(unreviewed) = old_unreviewed_edits.peek() {
329 // If the committed edit matches the unreviewed
330 // edit, assume the user wants to keep it.
331 if committed.old == unreviewed.old {
332 let unreviewed_new =
333 buffer_text.slice_rows(unreviewed.new.clone()).to_string();
334 let committed_new =
335 git_diff_base.slice_rows(committed.new.clone()).to_string();
336 if unreviewed_new == committed_new {
337 let old_byte_start =
338 new_agent_diff_base.point_to_offset(Point::new(
339 (unreviewed.old.start as i32 + row_delta) as u32,
340 0,
341 ));
342 let old_byte_end =
343 new_agent_diff_base.point_to_offset(cmp::min(
344 Point::new(
345 (unreviewed.old.end as i32 + row_delta) as u32,
346 0,
347 ),
348 new_agent_diff_base.max_point(),
349 ));
350 new_agent_diff_base
351 .replace(old_byte_start..old_byte_end, &unreviewed_new);
352 row_delta +=
353 unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
354 }
355 } else if unreviewed.old.start >= committed.old.end {
356 break;
357 }
358
359 old_unreviewed_edits.next().unwrap();
360 }
361 }
362
363 (
364 Arc::new(new_agent_diff_base.to_string()),
365 new_agent_diff_base,
366 )
367 }))
368 })??
369 .await;
370
371 Self::update_diff(
372 this,
373 buffer,
374 buffer_snapshot,
375 new_base_text,
376 new_diff_base,
377 cx,
378 )
379 .await
380 }
381
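    /// Recomputes the review diff against `new_diff_base` and stores the resulting
    /// unreviewed row edits back onto the tracked buffer.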
382 async fn update_diff(
383 this: &WeakEntity<ActionLog>,
384 buffer: &Entity<Buffer>,
385 buffer_snapshot: text::BufferSnapshot,
386 new_base_text: Arc<String>,
387 new_diff_base: Rope,
388 cx: &mut AsyncApp,
389 ) -> Result<()> {
390 let (diff, language, language_registry) = this.read_with(cx, |this, cx| {
391 let tracked_buffer = this
392 .tracked_buffers
393 .get(buffer)
394 .context("buffer not tracked")?;
395 anyhow::Ok((
396 tracked_buffer.diff.clone(),
397 buffer.read(cx).language().cloned(),
398 buffer.read(cx).language_registry().clone(),
399 ))
400 })??;
401 let diff_snapshot = BufferDiff::update_diff(
402 diff.clone(),
403 buffer_snapshot.clone(),
404 Some(new_base_text),
405 true,
406 false,
407 language,
408 language_registry,
409 cx,
410 )
411 .await;
412 let mut unreviewed_edits = Patch::default();
413 if let Ok(diff_snapshot) = diff_snapshot {
414 unreviewed_edits = cx
415 .background_spawn({
416 let diff_snapshot = diff_snapshot.clone();
417 let buffer_snapshot = buffer_snapshot.clone();
418 let new_diff_base = new_diff_base.clone();
419 async move {
420 let mut unreviewed_edits = Patch::default();
421 for hunk in diff_snapshot
422 .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer_snapshot)
423 {
424 let old_range = new_diff_base
425 .offset_to_point(hunk.diff_base_byte_range.start)
426 ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
427 let new_range = hunk.range.start..hunk.range.end;
428 unreviewed_edits.push(point_to_row_edit(
429 Edit {
430 old: old_range,
431 new: new_range,
432 },
433 &new_diff_base,
434 &buffer_snapshot.as_rope(),
435 ));
436 }
437 unreviewed_edits
438 }
439 })
440 .await;
441
442 diff.update(cx, |diff, cx| {
443 diff.set_snapshot(diff_snapshot, &buffer_snapshot, cx);
444 })?;
445 }
446 this.update(cx, |this, cx| {
447 let tracked_buffer = this
448 .tracked_buffers
449 .get_mut(buffer)
450 .context("buffer not tracked")?;
451 tracked_buffer.diff_base = new_diff_base;
452 tracked_buffer.snapshot = buffer_snapshot;
453 tracked_buffer.unreviewed_edits = unreviewed_edits;
454 cx.notify();
455 anyhow::Ok(())
456 })?
457 }
458
    /// Track a buffer as read by the agent, so we can notify the model about user edits.
460 pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
461 self.track_buffer_internal(buffer, false, cx);
462 }
463
    /// Mark a buffer as created by the agent, so we can refresh it in the context.
465 pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
466 self.edited_since_project_diagnostics_check = true;
467 self.track_buffer_internal(buffer.clone(), true, cx);
468 }
469
    /// Mark a buffer as edited by the agent, so we can refresh it in the context.
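    ///
    /// A typical tool flow, as exercised by the tests below (hypothetical
    /// `action_log`, `buffer`, and `cx` values):
    ///
    /// ```ignore
    /// action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
    /// buffer.update(cx, |buffer, cx| buffer.set_text("new contents", cx));
    /// action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
    /// ```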
471 pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
472 self.edited_since_project_diagnostics_check = true;
473
474 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
475 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
476 tracked_buffer.status = TrackedBufferStatus::Modified;
477 }
478 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
479 }
480
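    /// Marks a buffer as about to be deleted by the agent. The deletion is tracked
    /// like any other edit, so it can still be reviewed or rejected.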
481 pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
482 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
483 match tracked_buffer.status {
484 TrackedBufferStatus::Created { .. } => {
485 self.tracked_buffers.remove(&buffer);
486 cx.notify();
487 }
488 TrackedBufferStatus::Modified => {
489 buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
490 tracked_buffer.status = TrackedBufferStatus::Deleted;
491 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
492 }
493 TrackedBufferStatus::Deleted => {}
494 }
495 cx.notify();
496 }
497
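    /// Accepts ("keeps") all unreviewed edits whose rows intersect the given range,
    /// folding them into the diff base so they no longer appear as hunks.
    ///
    /// A usage sketch, mirroring the tests below (hypothetical row range):
    ///
    /// ```ignore
    /// action_log.update(cx, |log, cx| {
    ///     log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), cx)
    /// });
    /// ```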
498 pub fn keep_edits_in_range(
499 &mut self,
500 buffer: Entity<Buffer>,
501 buffer_range: Range<impl language::ToPoint>,
502 cx: &mut Context<Self>,
503 ) {
504 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
505 return;
506 };
507
508 match tracked_buffer.status {
509 TrackedBufferStatus::Deleted => {
510 self.tracked_buffers.remove(&buffer);
511 cx.notify();
512 }
513 _ => {
514 let buffer = buffer.read(cx);
515 let buffer_range =
516 buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
517 let mut delta = 0i32;
518
519 tracked_buffer.unreviewed_edits.retain_mut(|edit| {
520 edit.old.start = (edit.old.start as i32 + delta) as u32;
521 edit.old.end = (edit.old.end as i32 + delta) as u32;
522
523 if buffer_range.end.row < edit.new.start
524 || buffer_range.start.row > edit.new.end
525 {
526 true
527 } else {
528 let old_range = tracked_buffer
529 .diff_base
530 .point_to_offset(Point::new(edit.old.start, 0))
531 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
532 Point::new(edit.old.end, 0),
533 tracked_buffer.diff_base.max_point(),
534 ));
535 let new_range = tracked_buffer
536 .snapshot
537 .point_to_offset(Point::new(edit.new.start, 0))
538 ..tracked_buffer.snapshot.point_to_offset(cmp::min(
539 Point::new(edit.new.end, 0),
540 tracked_buffer.snapshot.max_point(),
541 ));
542 tracked_buffer.diff_base.replace(
543 old_range,
544 &tracked_buffer
545 .snapshot
546 .text_for_range(new_range)
547 .collect::<String>(),
548 );
549 delta += edit.new_len() as i32 - edit.old_len() as i32;
550 false
551 }
552 });
553 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
554 }
555 }
556 }
557
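    /// Reverts the agent's unreviewed edits that intersect any of the given ranges,
    /// restoring the previous content (or deleting/recreating the file for buffers
    /// the agent created or deleted) and saving the buffer.
    ///
    /// A usage sketch, mirroring the tests below (hypothetical range; the returned
    /// task resolves once the buffer has been saved or the file removed):
    ///
    /// ```ignore
    /// action_log
    ///     .update(cx, |log, cx| {
    ///         log.reject_edits_in_ranges(
    ///             buffer.clone(),
    ///             vec![Point::new(0, 0)..Point::new(1, 0)],
    ///             cx,
    ///         )
    ///     })
    ///     .await?;
    /// ```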
558 pub fn reject_edits_in_ranges(
559 &mut self,
560 buffer: Entity<Buffer>,
561 buffer_ranges: Vec<Range<impl language::ToPoint>>,
562 cx: &mut Context<Self>,
563 ) -> Task<Result<()>> {
564 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
565 return Task::ready(Ok(()));
566 };
567
568 match &tracked_buffer.status {
569 TrackedBufferStatus::Created {
570 existing_file_content,
571 } => {
572 let task = if let Some(existing_file_content) = existing_file_content {
573 buffer.update(cx, |buffer, cx| {
574 buffer.start_transaction();
575 buffer.set_text("", cx);
576 for chunk in existing_file_content.chunks() {
577 buffer.append(chunk, cx);
578 }
579 buffer.end_transaction(cx);
580 });
581 self.project
582 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
583 } else {
584 // For a file created by AI with no pre-existing content,
585 // only delete the file if we're certain it contains only AI content
586 // with no edits from the user.
587
588 let initial_version = tracked_buffer.version.clone();
589 let current_version = buffer.read(cx).version();
590
591 let current_content = buffer.read(cx).text();
592 let tracked_content = tracked_buffer.snapshot.text();
593
594 let is_ai_only_content =
595 initial_version == current_version && current_content == tracked_content;
596
597 if is_ai_only_content {
598 buffer
599 .read(cx)
600 .entry_id(cx)
601 .and_then(|entry_id| {
602 self.project.update(cx, |project, cx| {
603 project.delete_entry(entry_id, false, cx)
604 })
605 })
606 .unwrap_or(Task::ready(Ok(())))
607 } else {
608 // Not sure how to disentangle edits made by the user
609 // from edits made by the AI at this point.
610 // For now, preserve both to avoid data loss.
611 //
612 // TODO: Better solution (disable "Reject" after user makes some
613 // edit or find a way to differentiate between AI and user edits)
614 Task::ready(Ok(()))
615 }
616 };
617
618 self.tracked_buffers.remove(&buffer);
619 cx.notify();
620 task
621 }
622 TrackedBufferStatus::Deleted => {
623 buffer.update(cx, |buffer, cx| {
624 buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
625 });
626 let save = self
627 .project
628 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
629
630 // Clear all tracked edits for this buffer and start over as if we just read it.
631 self.tracked_buffers.remove(&buffer);
632 self.buffer_read(buffer.clone(), cx);
633 cx.notify();
634 save
635 }
636 TrackedBufferStatus::Modified => {
637 buffer.update(cx, |buffer, cx| {
638 let mut buffer_row_ranges = buffer_ranges
639 .into_iter()
640 .map(|range| {
641 range.start.to_point(buffer).row..range.end.to_point(buffer).row
642 })
643 .peekable();
644
645 let mut edits_to_revert = Vec::new();
646 for edit in tracked_buffer.unreviewed_edits.edits() {
647 let new_range = tracked_buffer
648 .snapshot
649 .anchor_before(Point::new(edit.new.start, 0))
650 ..tracked_buffer.snapshot.anchor_after(cmp::min(
651 Point::new(edit.new.end, 0),
652 tracked_buffer.snapshot.max_point(),
653 ));
654 let new_row_range = new_range.start.to_point(buffer).row
655 ..new_range.end.to_point(buffer).row;
656
657 let mut revert = false;
658 while let Some(buffer_row_range) = buffer_row_ranges.peek() {
659 if buffer_row_range.end < new_row_range.start {
660 buffer_row_ranges.next();
661 } else if buffer_row_range.start > new_row_range.end {
662 break;
663 } else {
664 revert = true;
665 break;
666 }
667 }
668
669 if revert {
670 let old_range = tracked_buffer
671 .diff_base
672 .point_to_offset(Point::new(edit.old.start, 0))
673 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
674 Point::new(edit.old.end, 0),
675 tracked_buffer.diff_base.max_point(),
676 ));
677 let old_text = tracked_buffer
678 .diff_base
679 .chunks_in_range(old_range)
680 .collect::<String>();
681 edits_to_revert.push((new_range, old_text));
682 }
683 }
684
685 buffer.edit(edits_to_revert, None, cx);
686 });
687 self.project
688 .update(cx, |project, cx| project.save_buffer(buffer, cx))
689 }
690 }
691 }
692
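    /// Accepts every unreviewed edit in every tracked buffer, and stops tracking
    /// buffers whose pending change was a deletion.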
693 pub fn keep_all_edits(&mut self, cx: &mut Context<Self>) {
694 self.tracked_buffers
695 .retain(|_buffer, tracked_buffer| match tracked_buffer.status {
696 TrackedBufferStatus::Deleted => false,
697 _ => {
698 tracked_buffer.unreviewed_edits.clear();
699 tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
700 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
701 true
702 }
703 });
704 cx.notify();
705 }
706
    /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
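    ///
    /// Each entry pairs a buffer with the [`BufferDiff`] describing its unreviewed
    /// hunks (see `unreviewed_hunks` in the tests for one way to consume it).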
708 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
709 self.tracked_buffers
710 .iter()
711 .filter(|(_, tracked)| tracked.has_edits(cx))
712 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
713 .collect()
714 }
715
    /// Iterates over buffers that have changed since the model last read or edited them.
717 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
718 self.tracked_buffers
719 .iter()
720 .filter(|(buffer, tracked)| {
721 let buffer = buffer.read(cx);
722
723 tracked.version != buffer.version
724 && buffer
725 .file()
726 .map_or(false, |file| file.disk_state() != DiskState::Deleted)
727 })
728 .map(|(buffer, _)| buffer)
729 }
730}
731
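/// Rebases `edits` (user edits expressed against the current buffer) onto `old_text`,
/// the agent's diff base: edits that don't conflict with an unreviewed agent edit in
/// `patch` are applied directly to the diff base, so they won't be reported as agent
/// changes; conflicting edits are left for review.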
732fn apply_non_conflicting_edits(
733 patch: &Patch<u32>,
734 edits: Vec<Edit<u32>>,
735 old_text: &mut Rope,
736 new_text: &Rope,
737) {
738 let mut old_edits = patch.edits().iter().cloned().peekable();
739 let mut new_edits = edits.into_iter().peekable();
740 let mut applied_delta = 0i32;
741 let mut rebased_delta = 0i32;
742
743 while let Some(mut new_edit) = new_edits.next() {
744 let mut conflict = false;
745
        // Consume all the old edits that are before this new edit or that intersect
        // with it, flagging any intersection as a conflict.
747 while let Some(old_edit) = old_edits.peek() {
748 if new_edit.old.end < old_edit.new.start
749 || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
750 {
751 break;
752 } else if new_edit.old.start > old_edit.new.end
753 || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
754 {
755 let old_edit = old_edits.next().unwrap();
756 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
757 } else {
758 conflict = true;
759 if new_edits
760 .peek()
761 .map_or(false, |next_edit| next_edit.old.overlaps(&old_edit.new))
762 {
763 new_edit = new_edits.next().unwrap();
764 } else {
765 let old_edit = old_edits.next().unwrap();
766 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
767 }
768 }
769 }
770
771 if !conflict {
772 // This edit doesn't intersect with any old edit, so we can apply it to the old text.
773 new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
774 new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
775 let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
776 ..old_text.point_to_offset(cmp::min(
777 Point::new(new_edit.old.end, 0),
778 old_text.max_point(),
779 ));
780 let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
781 ..new_text.point_to_offset(cmp::min(
782 Point::new(new_edit.new.end, 0),
783 new_text.max_point(),
784 ));
785
786 old_text.replace(
787 old_bytes,
788 &new_text.chunks_in_range(new_bytes).collect::<String>(),
789 );
790 applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
791 }
792 }
793}
794
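/// Computes row-based edits between two snapshots of the same buffer, merging
/// adjacent or overlapping row ranges into single edits.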
795fn diff_snapshots(
796 old_snapshot: &text::BufferSnapshot,
797 new_snapshot: &text::BufferSnapshot,
798) -> Vec<Edit<u32>> {
799 let mut edits = new_snapshot
800 .edits_since::<Point>(&old_snapshot.version)
801 .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
802 .peekable();
803 let mut row_edits = Vec::new();
804 while let Some(mut edit) = edits.next() {
805 while let Some(next_edit) = edits.peek() {
806 if edit.old.end >= next_edit.old.start {
807 edit.old.end = next_edit.old.end;
808 edit.new.end = next_edit.new.end;
809 edits.next();
810 } else {
811 break;
812 }
813 }
814 row_edits.push(edit);
815 }
816 row_edits
817}
818
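/// Converts a point-based edit into a whole-row edit: insertions that begin at the
/// end of a line and start with a newline are attributed to the following row, pure
/// whole-line edits are kept as-is, and anything else is expanded to cover its rows.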
819fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
820 if edit.old.start.column == old_text.line_len(edit.old.start.row)
821 && new_text
822 .chars_at(new_text.point_to_offset(edit.new.start))
823 .next()
824 == Some('\n')
825 && edit.old.start != old_text.max_point()
826 {
827 Edit {
828 old: edit.old.start.row + 1..edit.old.end.row + 1,
829 new: edit.new.start.row + 1..edit.new.end.row + 1,
830 }
831 } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
832 Edit {
833 old: edit.old.start.row..edit.old.end.row,
834 new: edit.new.start.row..edit.new.end.row,
835 }
836 } else {
837 Edit {
838 old: edit.old.start.row..edit.old.end.row + 1,
839 new: edit.new.start.row..edit.new.end.row + 1,
840 }
841 }
842}
843
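/// Who authored a change to a tracked buffer. User edits that don't conflict with
/// unreviewed agent edits are rebased into the diff base rather than shown for review.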
844#[derive(Copy, Clone, Debug)]
845enum ChangeAuthor {
846 User,
847 Agent,
848}
849
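/// How the agent has affected a tracked buffer. For `Created`, the file's
/// pre-existing content (if any) is kept so a rejected creation can restore it.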
850enum TrackedBufferStatus {
851 Created { existing_file_content: Option<Rope> },
852 Modified,
853 Deleted,
854}
855
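/// Per-buffer review state: the agent's diff base, the unreviewed row edits on top
/// of it, and the machinery (diff entity, update channel, background task) that
/// keeps the displayed diff in sync with buffer and git changes.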
856struct TrackedBuffer {
857 buffer: Entity<Buffer>,
858 diff_base: Rope,
859 unreviewed_edits: Patch<u32>,
860 status: TrackedBufferStatus,
861 version: clock::Global,
862 diff: Entity<BufferDiff>,
863 snapshot: text::BufferSnapshot,
864 diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
865 _open_lsp_handle: OpenLspBufferHandle,
866 _maintain_diff: Task<()>,
867 _subscription: Subscription,
868}
869
870impl TrackedBuffer {
871 fn has_edits(&self, cx: &App) -> bool {
872 self.diff
873 .read(cx)
874 .hunks(&self.buffer.read(cx), cx)
875 .next()
876 .is_some()
877 }
878
879 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
880 self.diff_update
881 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
882 .ok();
883 }
884}
885
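/// A buffer's unreviewed changes, represented as a review diff.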
886pub struct ChangedBuffer {
887 pub diff: Entity<BufferDiff>,
888}
889
890#[cfg(test)]
891mod tests {
892 use super::*;
893 use buffer_diff::DiffHunkStatusKind;
894 use gpui::TestAppContext;
895 use language::Point;
896 use project::{FakeFs, Fs, Project, RemoveOptions};
897 use rand::prelude::*;
898 use serde_json::json;
899 use settings::SettingsStore;
900 use std::env;
901 use util::{RandomCharIter, path};
902
903 #[ctor::ctor]
904 fn init_logger() {
905 zlog::init_test();
906 }
907
908 fn init_test(cx: &mut TestAppContext) {
909 cx.update(|cx| {
910 let settings_store = SettingsStore::test(cx);
911 cx.set_global(settings_store);
912 language::init(cx);
913 Project::init_settings(cx);
914 });
915 }
916
917 #[gpui::test(iterations = 10)]
918 async fn test_keep_edits(cx: &mut TestAppContext) {
919 init_test(cx);
920
921 let fs = FakeFs::new(cx.executor());
922 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
923 .await;
924 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
925 let action_log = cx.new(|_| ActionLog::new(project.clone()));
926 let file_path = project
927 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
928 .unwrap();
929 let buffer = project
930 .update(cx, |project, cx| project.open_buffer(file_path, cx))
931 .await
932 .unwrap();
933
934 cx.update(|cx| {
935 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
936 buffer.update(cx, |buffer, cx| {
937 buffer
938 .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
939 .unwrap()
940 });
941 buffer.update(cx, |buffer, cx| {
942 buffer
943 .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
944 .unwrap()
945 });
946 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
947 });
948 cx.run_until_parked();
949 assert_eq!(
950 buffer.read_with(cx, |buffer, _| buffer.text()),
951 "abc\ndEf\nghi\njkl\nmnO"
952 );
953 assert_eq!(
954 unreviewed_hunks(&action_log, cx),
955 vec![(
956 buffer.clone(),
957 vec![
958 HunkStatus {
959 range: Point::new(1, 0)..Point::new(2, 0),
960 diff_status: DiffHunkStatusKind::Modified,
961 old_text: "def\n".into(),
962 },
963 HunkStatus {
964 range: Point::new(4, 0)..Point::new(4, 3),
965 diff_status: DiffHunkStatusKind::Modified,
966 old_text: "mno".into(),
967 }
968 ],
969 )]
970 );
971
972 action_log.update(cx, |log, cx| {
973 log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), cx)
974 });
975 cx.run_until_parked();
976 assert_eq!(
977 unreviewed_hunks(&action_log, cx),
978 vec![(
979 buffer.clone(),
980 vec![HunkStatus {
981 range: Point::new(1, 0)..Point::new(2, 0),
982 diff_status: DiffHunkStatusKind::Modified,
983 old_text: "def\n".into(),
984 }],
985 )]
986 );
987
988 action_log.update(cx, |log, cx| {
989 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), cx)
990 });
991 cx.run_until_parked();
992 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
993 }
994
995 #[gpui::test(iterations = 10)]
996 async fn test_deletions(cx: &mut TestAppContext) {
997 init_test(cx);
998
999 let fs = FakeFs::new(cx.executor());
1000 fs.insert_tree(
1001 path!("/dir"),
1002 json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
1003 )
1004 .await;
1005 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1006 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1007 let file_path = project
1008 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1009 .unwrap();
1010 let buffer = project
1011 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1012 .await
1013 .unwrap();
1014
1015 cx.update(|cx| {
1016 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1017 buffer.update(cx, |buffer, cx| {
1018 buffer
1019 .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
1020 .unwrap();
1021 buffer.finalize_last_transaction();
1022 });
1023 buffer.update(cx, |buffer, cx| {
1024 buffer
1025 .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
1026 .unwrap();
1027 buffer.finalize_last_transaction();
1028 });
1029 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1030 });
1031 cx.run_until_parked();
1032 assert_eq!(
1033 buffer.read_with(cx, |buffer, _| buffer.text()),
1034 "abc\nghi\njkl\npqr"
1035 );
1036 assert_eq!(
1037 unreviewed_hunks(&action_log, cx),
1038 vec![(
1039 buffer.clone(),
1040 vec![
1041 HunkStatus {
1042 range: Point::new(1, 0)..Point::new(1, 0),
1043 diff_status: DiffHunkStatusKind::Deleted,
1044 old_text: "def\n".into(),
1045 },
1046 HunkStatus {
1047 range: Point::new(3, 0)..Point::new(3, 0),
1048 diff_status: DiffHunkStatusKind::Deleted,
1049 old_text: "mno\n".into(),
1050 }
1051 ],
1052 )]
1053 );
1054
1055 buffer.update(cx, |buffer, cx| buffer.undo(cx));
1056 cx.run_until_parked();
1057 assert_eq!(
1058 buffer.read_with(cx, |buffer, _| buffer.text()),
1059 "abc\nghi\njkl\nmno\npqr"
1060 );
1061 assert_eq!(
1062 unreviewed_hunks(&action_log, cx),
1063 vec![(
1064 buffer.clone(),
1065 vec![HunkStatus {
1066 range: Point::new(1, 0)..Point::new(1, 0),
1067 diff_status: DiffHunkStatusKind::Deleted,
1068 old_text: "def\n".into(),
1069 }],
1070 )]
1071 );
1072
1073 action_log.update(cx, |log, cx| {
1074 log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), cx)
1075 });
1076 cx.run_until_parked();
1077 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1078 }
1079
1080 #[gpui::test(iterations = 10)]
1081 async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
1082 init_test(cx);
1083
1084 let fs = FakeFs::new(cx.executor());
1085 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1086 .await;
1087 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1088 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1089 let file_path = project
1090 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1091 .unwrap();
1092 let buffer = project
1093 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1094 .await
1095 .unwrap();
1096
1097 cx.update(|cx| {
1098 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1099 buffer.update(cx, |buffer, cx| {
1100 buffer
1101 .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
1102 .unwrap()
1103 });
1104 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1105 });
1106 cx.run_until_parked();
1107 assert_eq!(
1108 buffer.read_with(cx, |buffer, _| buffer.text()),
1109 "abc\ndeF\nGHI\njkl\nmno"
1110 );
1111 assert_eq!(
1112 unreviewed_hunks(&action_log, cx),
1113 vec![(
1114 buffer.clone(),
1115 vec![HunkStatus {
1116 range: Point::new(1, 0)..Point::new(3, 0),
1117 diff_status: DiffHunkStatusKind::Modified,
1118 old_text: "def\nghi\n".into(),
1119 }],
1120 )]
1121 );
1122
1123 buffer.update(cx, |buffer, cx| {
1124 buffer.edit(
1125 [
1126 (Point::new(0, 2)..Point::new(0, 2), "X"),
1127 (Point::new(3, 0)..Point::new(3, 0), "Y"),
1128 ],
1129 None,
1130 cx,
1131 )
1132 });
1133 cx.run_until_parked();
1134 assert_eq!(
1135 buffer.read_with(cx, |buffer, _| buffer.text()),
1136 "abXc\ndeF\nGHI\nYjkl\nmno"
1137 );
1138 assert_eq!(
1139 unreviewed_hunks(&action_log, cx),
1140 vec![(
1141 buffer.clone(),
1142 vec![HunkStatus {
1143 range: Point::new(1, 0)..Point::new(3, 0),
1144 diff_status: DiffHunkStatusKind::Modified,
1145 old_text: "def\nghi\n".into(),
1146 }],
1147 )]
1148 );
1149
1150 buffer.update(cx, |buffer, cx| {
1151 buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
1152 });
1153 cx.run_until_parked();
1154 assert_eq!(
1155 buffer.read_with(cx, |buffer, _| buffer.text()),
1156 "abXc\ndZeF\nGHI\nYjkl\nmno"
1157 );
1158 assert_eq!(
1159 unreviewed_hunks(&action_log, cx),
1160 vec![(
1161 buffer.clone(),
1162 vec![HunkStatus {
1163 range: Point::new(1, 0)..Point::new(3, 0),
1164 diff_status: DiffHunkStatusKind::Modified,
1165 old_text: "def\nghi\n".into(),
1166 }],
1167 )]
1168 );
1169
1170 action_log.update(cx, |log, cx| {
1171 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
1172 });
1173 cx.run_until_parked();
1174 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1175 }
1176
1177 #[gpui::test(iterations = 10)]
1178 async fn test_creating_files(cx: &mut TestAppContext) {
1179 init_test(cx);
1180
1181 let fs = FakeFs::new(cx.executor());
1182 fs.insert_tree(path!("/dir"), json!({})).await;
1183 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1184 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1185 let file_path = project
1186 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1187 .unwrap();
1188
1189 let buffer = project
1190 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1191 .await
1192 .unwrap();
1193 cx.update(|cx| {
1194 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1195 buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
1196 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1197 });
1198 project
1199 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1200 .await
1201 .unwrap();
1202 cx.run_until_parked();
1203 assert_eq!(
1204 unreviewed_hunks(&action_log, cx),
1205 vec![(
1206 buffer.clone(),
1207 vec![HunkStatus {
1208 range: Point::new(0, 0)..Point::new(0, 5),
1209 diff_status: DiffHunkStatusKind::Added,
1210 old_text: "".into(),
1211 }],
1212 )]
1213 );
1214
1215 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
1216 cx.run_until_parked();
1217 assert_eq!(
1218 unreviewed_hunks(&action_log, cx),
1219 vec![(
1220 buffer.clone(),
1221 vec![HunkStatus {
1222 range: Point::new(0, 0)..Point::new(0, 6),
1223 diff_status: DiffHunkStatusKind::Added,
1224 old_text: "".into(),
1225 }],
1226 )]
1227 );
1228
1229 action_log.update(cx, |log, cx| {
1230 log.keep_edits_in_range(buffer.clone(), 0..5, cx)
1231 });
1232 cx.run_until_parked();
1233 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1234 }
1235
1236 #[gpui::test(iterations = 10)]
1237 async fn test_overwriting_files(cx: &mut TestAppContext) {
1238 init_test(cx);
1239
1240 let fs = FakeFs::new(cx.executor());
1241 fs.insert_tree(
1242 path!("/dir"),
1243 json!({
1244 "file1": "Lorem ipsum dolor"
1245 }),
1246 )
1247 .await;
1248 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1249 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1250 let file_path = project
1251 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1252 .unwrap();
1253
1254 let buffer = project
1255 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1256 .await
1257 .unwrap();
1258 cx.update(|cx| {
1259 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1260 buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
1261 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1262 });
1263 project
1264 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1265 .await
1266 .unwrap();
1267 cx.run_until_parked();
1268 assert_eq!(
1269 unreviewed_hunks(&action_log, cx),
1270 vec![(
1271 buffer.clone(),
1272 vec![HunkStatus {
1273 range: Point::new(0, 0)..Point::new(0, 19),
1274 diff_status: DiffHunkStatusKind::Added,
1275 old_text: "".into(),
1276 }],
1277 )]
1278 );
1279
1280 action_log
1281 .update(cx, |log, cx| {
1282 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
1283 })
1284 .await
1285 .unwrap();
1286 cx.run_until_parked();
1287 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1288 assert_eq!(
1289 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1290 "Lorem ipsum dolor"
1291 );
1292 }
1293
1294 #[gpui::test(iterations = 10)]
1295 async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
1296 init_test(cx);
1297
1298 let fs = FakeFs::new(cx.executor());
1299 fs.insert_tree(
1300 path!("/dir"),
1301 json!({
1302 "file1": "Lorem ipsum dolor"
1303 }),
1304 )
1305 .await;
1306 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1307 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1308 let file_path = project
1309 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1310 .unwrap();
1311
1312 let buffer = project
1313 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1314 .await
1315 .unwrap();
1316 cx.update(|cx| {
1317 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1318 buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
1319 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1320 });
1321 project
1322 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1323 .await
1324 .unwrap();
1325 cx.run_until_parked();
1326 assert_eq!(
1327 unreviewed_hunks(&action_log, cx),
1328 vec![(
1329 buffer.clone(),
1330 vec![HunkStatus {
1331 range: Point::new(0, 0)..Point::new(0, 37),
1332 diff_status: DiffHunkStatusKind::Modified,
1333 old_text: "Lorem ipsum dolor".into(),
1334 }],
1335 )]
1336 );
1337
1338 cx.update(|cx| {
1339 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1340 buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
1341 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1342 });
1343 project
1344 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1345 .await
1346 .unwrap();
1347 cx.run_until_parked();
1348 assert_eq!(
1349 unreviewed_hunks(&action_log, cx),
1350 vec![(
1351 buffer.clone(),
1352 vec![HunkStatus {
1353 range: Point::new(0, 0)..Point::new(0, 9),
1354 diff_status: DiffHunkStatusKind::Added,
1355 old_text: "".into(),
1356 }],
1357 )]
1358 );
1359
1360 action_log
1361 .update(cx, |log, cx| {
1362 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
1363 })
1364 .await
1365 .unwrap();
1366 cx.run_until_parked();
1367 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1368 assert_eq!(
1369 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1370 "Lorem ipsum dolor"
1371 );
1372 }
1373
1374 #[gpui::test(iterations = 10)]
1375 async fn test_deleting_files(cx: &mut TestAppContext) {
1376 init_test(cx);
1377
1378 let fs = FakeFs::new(cx.executor());
1379 fs.insert_tree(
1380 path!("/dir"),
1381 json!({"file1": "lorem\n", "file2": "ipsum\n"}),
1382 )
1383 .await;
1384
1385 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1386 let file1_path = project
1387 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1388 .unwrap();
1389 let file2_path = project
1390 .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
1391 .unwrap();
1392
1393 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1394 let buffer1 = project
1395 .update(cx, |project, cx| {
1396 project.open_buffer(file1_path.clone(), cx)
1397 })
1398 .await
1399 .unwrap();
1400 let buffer2 = project
1401 .update(cx, |project, cx| {
1402 project.open_buffer(file2_path.clone(), cx)
1403 })
1404 .await
1405 .unwrap();
1406
1407 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
1408 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
1409 project
1410 .update(cx, |project, cx| {
1411 project.delete_file(file1_path.clone(), false, cx)
1412 })
1413 .unwrap()
1414 .await
1415 .unwrap();
1416 project
1417 .update(cx, |project, cx| {
1418 project.delete_file(file2_path.clone(), false, cx)
1419 })
1420 .unwrap()
1421 .await
1422 .unwrap();
1423 cx.run_until_parked();
1424 assert_eq!(
1425 unreviewed_hunks(&action_log, cx),
1426 vec![
1427 (
1428 buffer1.clone(),
1429 vec![HunkStatus {
1430 range: Point::new(0, 0)..Point::new(0, 0),
1431 diff_status: DiffHunkStatusKind::Deleted,
1432 old_text: "lorem\n".into(),
1433 }]
1434 ),
1435 (
1436 buffer2.clone(),
1437 vec![HunkStatus {
1438 range: Point::new(0, 0)..Point::new(0, 0),
1439 diff_status: DiffHunkStatusKind::Deleted,
1440 old_text: "ipsum\n".into(),
1441 }],
1442 )
1443 ]
1444 );
1445
1446 // Simulate file1 being recreated externally.
1447 fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
1448 .await;
1449
1450 // Simulate file2 being recreated by a tool.
1451 let buffer2 = project
1452 .update(cx, |project, cx| project.open_buffer(file2_path, cx))
1453 .await
1454 .unwrap();
1455 action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
1456 buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
1457 action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
1458 project
1459 .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
1460 .await
1461 .unwrap();
1462
1463 cx.run_until_parked();
1464 assert_eq!(
1465 unreviewed_hunks(&action_log, cx),
1466 vec![(
1467 buffer2.clone(),
1468 vec![HunkStatus {
1469 range: Point::new(0, 0)..Point::new(0, 5),
1470 diff_status: DiffHunkStatusKind::Added,
1471 old_text: "".into(),
1472 }],
1473 )]
1474 );
1475
1476 // Simulate file2 being deleted externally.
1477 fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
1478 .await
1479 .unwrap();
1480 cx.run_until_parked();
1481 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1482 }
1483
1484 #[gpui::test(iterations = 10)]
1485 async fn test_reject_edits(cx: &mut TestAppContext) {
1486 init_test(cx);
1487
1488 let fs = FakeFs::new(cx.executor());
1489 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1490 .await;
1491 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1492 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1493 let file_path = project
1494 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1495 .unwrap();
1496 let buffer = project
1497 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1498 .await
1499 .unwrap();
1500
1501 cx.update(|cx| {
1502 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1503 buffer.update(cx, |buffer, cx| {
1504 buffer
1505 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1506 .unwrap()
1507 });
1508 buffer.update(cx, |buffer, cx| {
1509 buffer
1510 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1511 .unwrap()
1512 });
1513 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1514 });
1515 cx.run_until_parked();
1516 assert_eq!(
1517 buffer.read_with(cx, |buffer, _| buffer.text()),
1518 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1519 );
1520 assert_eq!(
1521 unreviewed_hunks(&action_log, cx),
1522 vec![(
1523 buffer.clone(),
1524 vec![
1525 HunkStatus {
1526 range: Point::new(1, 0)..Point::new(3, 0),
1527 diff_status: DiffHunkStatusKind::Modified,
1528 old_text: "def\n".into(),
1529 },
1530 HunkStatus {
1531 range: Point::new(5, 0)..Point::new(5, 3),
1532 diff_status: DiffHunkStatusKind::Modified,
1533 old_text: "mno".into(),
1534 }
1535 ],
1536 )]
1537 );
1538
1539 // If the rejected range doesn't overlap with any hunk, we ignore it.
1540 action_log
1541 .update(cx, |log, cx| {
1542 log.reject_edits_in_ranges(
1543 buffer.clone(),
1544 vec![Point::new(4, 0)..Point::new(4, 0)],
1545 cx,
1546 )
1547 })
1548 .await
1549 .unwrap();
1550 cx.run_until_parked();
1551 assert_eq!(
1552 buffer.read_with(cx, |buffer, _| buffer.text()),
1553 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1554 );
1555 assert_eq!(
1556 unreviewed_hunks(&action_log, cx),
1557 vec![(
1558 buffer.clone(),
1559 vec![
1560 HunkStatus {
1561 range: Point::new(1, 0)..Point::new(3, 0),
1562 diff_status: DiffHunkStatusKind::Modified,
1563 old_text: "def\n".into(),
1564 },
1565 HunkStatus {
1566 range: Point::new(5, 0)..Point::new(5, 3),
1567 diff_status: DiffHunkStatusKind::Modified,
1568 old_text: "mno".into(),
1569 }
1570 ],
1571 )]
1572 );
1573
1574 action_log
1575 .update(cx, |log, cx| {
1576 log.reject_edits_in_ranges(
1577 buffer.clone(),
1578 vec![Point::new(0, 0)..Point::new(1, 0)],
1579 cx,
1580 )
1581 })
1582 .await
1583 .unwrap();
1584 cx.run_until_parked();
1585 assert_eq!(
1586 buffer.read_with(cx, |buffer, _| buffer.text()),
1587 "abc\ndef\nghi\njkl\nmnO"
1588 );
1589 assert_eq!(
1590 unreviewed_hunks(&action_log, cx),
1591 vec![(
1592 buffer.clone(),
1593 vec![HunkStatus {
1594 range: Point::new(4, 0)..Point::new(4, 3),
1595 diff_status: DiffHunkStatusKind::Modified,
1596 old_text: "mno".into(),
1597 }],
1598 )]
1599 );
1600
1601 action_log
1602 .update(cx, |log, cx| {
1603 log.reject_edits_in_ranges(
1604 buffer.clone(),
1605 vec![Point::new(4, 0)..Point::new(4, 0)],
1606 cx,
1607 )
1608 })
1609 .await
1610 .unwrap();
1611 cx.run_until_parked();
1612 assert_eq!(
1613 buffer.read_with(cx, |buffer, _| buffer.text()),
1614 "abc\ndef\nghi\njkl\nmno"
1615 );
1616 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1617 }
1618
1619 #[gpui::test(iterations = 10)]
1620 async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
1621 init_test(cx);
1622
1623 let fs = FakeFs::new(cx.executor());
1624 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1625 .await;
1626 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1627 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1628 let file_path = project
1629 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1630 .unwrap();
1631 let buffer = project
1632 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1633 .await
1634 .unwrap();
1635
1636 cx.update(|cx| {
1637 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1638 buffer.update(cx, |buffer, cx| {
1639 buffer
1640 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1641 .unwrap()
1642 });
1643 buffer.update(cx, |buffer, cx| {
1644 buffer
1645 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1646 .unwrap()
1647 });
1648 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1649 });
1650 cx.run_until_parked();
1651 assert_eq!(
1652 buffer.read_with(cx, |buffer, _| buffer.text()),
1653 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1654 );
1655 assert_eq!(
1656 unreviewed_hunks(&action_log, cx),
1657 vec![(
1658 buffer.clone(),
1659 vec![
1660 HunkStatus {
1661 range: Point::new(1, 0)..Point::new(3, 0),
1662 diff_status: DiffHunkStatusKind::Modified,
1663 old_text: "def\n".into(),
1664 },
1665 HunkStatus {
1666 range: Point::new(5, 0)..Point::new(5, 3),
1667 diff_status: DiffHunkStatusKind::Modified,
1668 old_text: "mno".into(),
1669 }
1670 ],
1671 )]
1672 );
1673
1674 action_log.update(cx, |log, cx| {
1675 let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
1676 ..buffer.read(cx).anchor_before(Point::new(1, 0));
1677 let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
1678 ..buffer.read(cx).anchor_before(Point::new(5, 3));
1679
1680 log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], cx)
1681 .detach();
1682 assert_eq!(
1683 buffer.read_with(cx, |buffer, _| buffer.text()),
1684 "abc\ndef\nghi\njkl\nmno"
1685 );
1686 });
1687 cx.run_until_parked();
1688 assert_eq!(
1689 buffer.read_with(cx, |buffer, _| buffer.text()),
1690 "abc\ndef\nghi\njkl\nmno"
1691 );
1692 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1693 }
1694
1695 #[gpui::test(iterations = 10)]
1696 async fn test_reject_deleted_file(cx: &mut TestAppContext) {
1697 init_test(cx);
1698
1699 let fs = FakeFs::new(cx.executor());
1700 fs.insert_tree(path!("/dir"), json!({"file": "content"}))
1701 .await;
1702 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1703 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1704 let file_path = project
1705 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1706 .unwrap();
1707 let buffer = project
1708 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
1709 .await
1710 .unwrap();
1711
1712 cx.update(|cx| {
1713 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
1714 });
1715 project
1716 .update(cx, |project, cx| {
1717 project.delete_file(file_path.clone(), false, cx)
1718 })
1719 .unwrap()
1720 .await
1721 .unwrap();
1722 cx.run_until_parked();
1723 assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
1724 assert_eq!(
1725 unreviewed_hunks(&action_log, cx),
1726 vec![(
1727 buffer.clone(),
1728 vec![HunkStatus {
1729 range: Point::new(0, 0)..Point::new(0, 0),
1730 diff_status: DiffHunkStatusKind::Deleted,
1731 old_text: "content".into(),
1732 }]
1733 )]
1734 );
1735
1736 action_log
1737 .update(cx, |log, cx| {
1738 log.reject_edits_in_ranges(
1739 buffer.clone(),
1740 vec![Point::new(0, 0)..Point::new(0, 0)],
1741 cx,
1742 )
1743 })
1744 .await
1745 .unwrap();
1746 cx.run_until_parked();
1747 assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
1748 assert!(fs.is_file(path!("/dir/file").as_ref()).await);
1749 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1750 }
1751
1752 #[gpui::test(iterations = 10)]
1753 async fn test_reject_created_file(cx: &mut TestAppContext) {
1754 init_test(cx);
1755
1756 let fs = FakeFs::new(cx.executor());
1757 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1758 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1759 let file_path = project
1760 .read_with(cx, |project, cx| {
1761 project.find_project_path("dir/new_file", cx)
1762 })
1763 .unwrap();
1764 let buffer = project
1765 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1766 .await
1767 .unwrap();
1768 cx.update(|cx| {
1769 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1770 buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
1771 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1772 });
1773 project
1774 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1775 .await
1776 .unwrap();
1777 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
1778 cx.run_until_parked();
1779 assert_eq!(
1780 unreviewed_hunks(&action_log, cx),
1781 vec![(
1782 buffer.clone(),
1783 vec![HunkStatus {
1784 range: Point::new(0, 0)..Point::new(0, 7),
1785 diff_status: DiffHunkStatusKind::Added,
1786 old_text: "".into(),
1787 }],
1788 )]
1789 );
1790
1791 action_log
1792 .update(cx, |log, cx| {
1793 log.reject_edits_in_ranges(
1794 buffer.clone(),
1795 vec![Point::new(0, 0)..Point::new(0, 11)],
1796 cx,
1797 )
1798 })
1799 .await
1800 .unwrap();
1801 cx.run_until_parked();
1802 assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
1803 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1804 }
1805
1806 #[gpui::test]
1807 async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
1808 init_test(cx);
1809
1810 let fs = FakeFs::new(cx.executor());
1811 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1812 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1813
1814 let file_path = project
1815 .read_with(cx, |project, cx| {
1816 project.find_project_path("dir/new_file", cx)
1817 })
1818 .unwrap();
1819 let buffer = project
1820 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1821 .await
1822 .unwrap();
1823
1824 // AI creates file with initial content
1825 cx.update(|cx| {
1826 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1827 buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
1828 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1829 });
1830
1831 project
1832 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1833 .await
1834 .unwrap();
1835
1836 cx.run_until_parked();
1837
1838 // User makes additional edits
1839 cx.update(|cx| {
1840 buffer.update(cx, |buffer, cx| {
1841 buffer.edit([(10..10, "\nuser added this line")], None, cx);
1842 });
1843 });
1844
1845 project
1846 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1847 .await
1848 .unwrap();
1849
1850 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
1851
1852 // Reject all
1853 action_log
1854 .update(cx, |log, cx| {
1855 log.reject_edits_in_ranges(
1856 buffer.clone(),
1857 vec![Point::new(0, 0)..Point::new(100, 0)],
1858 cx,
1859 )
1860 })
1861 .await
1862 .unwrap();
1863 cx.run_until_parked();
1864
1865 // File should still contain all the content
1866 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
1867
1868 let content = buffer.read_with(cx, |buffer, _| buffer.text());
1869 assert_eq!(content, "ai content\nuser added this line");
1870 }
1871
1872 #[gpui::test(iterations = 100)]
1873 async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
1874 init_test(cx);
1875
1876 let operations = env::var("OPERATIONS")
1877 .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
1878 .unwrap_or(20);
1879
1880 let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
1881 let fs = FakeFs::new(cx.executor());
1882 fs.insert_tree(path!("/dir"), json!({"file": text})).await;
1883 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1884 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1885 let file_path = project
1886 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1887 .unwrap();
1888 let buffer = project
1889 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1890 .await
1891 .unwrap();
1892
1893 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1894
1895 for _ in 0..operations {
1896 match rng.gen_range(0..100) {
1897 0..25 => {
1898 action_log.update(cx, |log, cx| {
1899 let range = buffer.read(cx).random_byte_range(0, &mut rng);
1900 log::info!("keeping edits in range {:?}", range);
1901 log.keep_edits_in_range(buffer.clone(), range, cx)
1902 });
1903 }
1904 25..50 => {
1905 action_log
1906 .update(cx, |log, cx| {
1907 let range = buffer.read(cx).random_byte_range(0, &mut rng);
1908 log::info!("rejecting edits in range {:?}", range);
1909 log.reject_edits_in_ranges(buffer.clone(), vec![range], cx)
1910 })
1911 .await
1912 .unwrap();
1913 }
1914 _ => {
1915 let is_agent_edit = rng.gen_bool(0.5);
1916 if is_agent_edit {
1917 log::info!("agent edit");
1918 } else {
1919 log::info!("user edit");
1920 }
1921 cx.update(|cx| {
1922 buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
1923 if is_agent_edit {
1924 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1925 }
1926 });
1927 }
1928 }
1929
1930 if rng.gen_bool(0.2) {
1931 quiesce(&action_log, &buffer, cx);
1932 }
1933 }
1934
1935 quiesce(&action_log, &buffer, cx);
1936
1937 fn quiesce(
1938 action_log: &Entity<ActionLog>,
1939 buffer: &Entity<Buffer>,
1940 cx: &mut TestAppContext,
1941 ) {
1942 log::info!("quiescing...");
1943 cx.run_until_parked();
1944 action_log.update(cx, |log, cx| {
1945 let tracked_buffer = log.tracked_buffers.get(&buffer).unwrap();
1946 let mut old_text = tracked_buffer.diff_base.clone();
1947 let new_text = buffer.read(cx).as_rope();
1948 for edit in tracked_buffer.unreviewed_edits.edits() {
1949 let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
1950 let old_end = old_text.point_to_offset(cmp::min(
1951 Point::new(edit.new.start + edit.old_len(), 0),
1952 old_text.max_point(),
1953 ));
1954 old_text.replace(
1955 old_start..old_end,
1956 &new_text.slice_rows(edit.new.clone()).to_string(),
1957 );
1958 }
1959 pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
1960 })
1961 }
1962 }
1963
1964 #[gpui::test]
1965 async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
1966 init_test(cx);
1967
1968 let fs = FakeFs::new(cx.background_executor.clone());
1969 fs.insert_tree(
1970 path!("/project"),
1971 json!({
1972 ".git": {},
1973 "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
1974 }),
1975 )
1976 .await;
1977 fs.set_head_for_repo(
1978 path!("/project/.git").as_ref(),
1979 &[("file.txt".into(), "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
1980 "0000000",
1981 );
1982 cx.run_until_parked();
1983
1984 let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
1985 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1986
1987 let file_path = project
1988 .read_with(cx, |project, cx| {
1989 project.find_project_path(path!("/project/file.txt"), cx)
1990 })
1991 .unwrap();
1992 let buffer = project
1993 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1994 .await
1995 .unwrap();
1996
1997 cx.update(|cx| {
1998 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1999 buffer.update(cx, |buffer, cx| {
2000 buffer.edit(
2001 [
2002 // Edit at the very start: a -> A
2003 (Point::new(0, 0)..Point::new(0, 1), "A"),
2004 // Deletion in the middle: remove lines d and e
2005 (Point::new(3, 0)..Point::new(5, 0), ""),
2006 // Modification: g -> GGG
2007 (Point::new(6, 0)..Point::new(6, 1), "GGG"),
2008 // Addition: insert new line after h
2009 (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
2010 // Edit the very last character: j -> J
2011 (Point::new(9, 0)..Point::new(9, 1), "J"),
2012 ],
2013 None,
2014 cx,
2015 );
2016 });
2017 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2018 });
2019 cx.run_until_parked();
2020 assert_eq!(
2021 unreviewed_hunks(&action_log, cx),
2022 vec![(
2023 buffer.clone(),
2024 vec![
2025 HunkStatus {
2026 range: Point::new(0, 0)..Point::new(1, 0),
2027 diff_status: DiffHunkStatusKind::Modified,
2028 old_text: "a\n".into()
2029 },
2030 HunkStatus {
2031 range: Point::new(3, 0)..Point::new(3, 0),
2032 diff_status: DiffHunkStatusKind::Deleted,
2033 old_text: "d\ne\n".into()
2034 },
2035 HunkStatus {
2036 range: Point::new(4, 0)..Point::new(5, 0),
2037 diff_status: DiffHunkStatusKind::Modified,
2038 old_text: "g\n".into()
2039 },
2040 HunkStatus {
2041 range: Point::new(6, 0)..Point::new(7, 0),
2042 diff_status: DiffHunkStatusKind::Added,
2043 old_text: "".into()
2044 },
2045 HunkStatus {
2046 range: Point::new(8, 0)..Point::new(8, 1),
2047 diff_status: DiffHunkStatusKind::Modified,
2048 old_text: "j".into()
2049 }
2050 ]
2051 )]
2052 );
2053
2054 // Simulate a git commit that matches some edits but not others:
2055 // - Accepts the first edit (a -> A)
2056 // - Accepts the deletion (remove d and e)
2057 // - Makes a different change to g (g -> G instead of GGG)
2058 // - Ignores the NEW line addition
2059 // - Ignores the last line edit (j stays as j)
2060 fs.set_head_for_repo(
2061 path!("/project/.git").as_ref(),
2062 &[("file.txt".into(), "A\nb\nc\nf\nG\nh\ni\nj".into())],
2063 "0000001",
2064 );
2065 cx.run_until_parked();
2066 assert_eq!(
2067 unreviewed_hunks(&action_log, cx),
2068 vec![(
2069 buffer.clone(),
2070 vec![
2071 HunkStatus {
2072 range: Point::new(4, 0)..Point::new(5, 0),
2073 diff_status: DiffHunkStatusKind::Modified,
2074 old_text: "g\n".into()
2075 },
2076 HunkStatus {
2077 range: Point::new(6, 0)..Point::new(7, 0),
2078 diff_status: DiffHunkStatusKind::Added,
2079 old_text: "".into()
2080 },
2081 HunkStatus {
2082 range: Point::new(8, 0)..Point::new(8, 1),
2083 diff_status: DiffHunkStatusKind::Modified,
2084 old_text: "j".into()
2085 }
2086 ]
2087 )]
2088 );
2089
2090 // Make another commit that accepts the NEW line but with different content
2091 fs.set_head_for_repo(
2092 path!("/project/.git").as_ref(),
2093 &[(
2094 "file.txt".into(),
2095 "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into(),
2096 )],
2097 "0000002",
2098 );
2099 cx.run_until_parked();
2100 assert_eq!(
2101 unreviewed_hunks(&action_log, cx),
2102 vec![(
2103 buffer.clone(),
2104 vec![
2105 HunkStatus {
2106 range: Point::new(6, 0)..Point::new(7, 0),
2107 diff_status: DiffHunkStatusKind::Added,
2108 old_text: "".into()
2109 },
2110 HunkStatus {
2111 range: Point::new(8, 0)..Point::new(8, 1),
2112 diff_status: DiffHunkStatusKind::Modified,
2113 old_text: "j".into()
2114 }
2115 ]
2116 )]
2117 );
2118
2119 // Final commit that accepts all remaining edits
2120 fs.set_head_for_repo(
2121 path!("/project/.git").as_ref(),
2122 &[("file.txt".into(), "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
2123 "0000003",
2124 );
2125 cx.run_until_parked();
2126 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2127 }
2128
2129 #[derive(Debug, Clone, PartialEq, Eq)]
2130 struct HunkStatus {
2131 range: Range<Point>,
2132 diff_status: DiffHunkStatusKind,
2133 old_text: String,
2134 }
2135
2136 fn unreviewed_hunks(
2137 action_log: &Entity<ActionLog>,
2138 cx: &TestAppContext,
2139 ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
2140 cx.read(|cx| {
2141 action_log
2142 .read(cx)
2143 .changed_buffers(cx)
2144 .into_iter()
2145 .map(|(buffer, diff)| {
2146 let snapshot = buffer.read(cx).snapshot();
2147 (
2148 buffer,
2149 diff.read(cx)
2150 .hunks(&snapshot, cx)
2151 .map(|hunk| HunkStatus {
2152 diff_status: hunk.status().kind,
2153 range: hunk.range,
2154 old_text: diff
2155 .read(cx)
2156 .base_text()
2157 .text_for_range(hunk.diff_base_byte_range)
2158 .collect(),
2159 })
2160 .collect(),
2161 )
2162 })
2163 .collect()
2164 })
2165 }
2166}