1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use clock;
4use collections::BTreeMap;
5use futures::{FutureExt, StreamExt, channel::mpsc};
6use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
7use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
8use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
9use std::{cmp, ops::Range, sync::Arc};
10use text::{Edit, Patch, Rope};
11use util::RangeExt;
12
13/// Tracks actions performed by tools in a thread
14pub struct ActionLog {
15 /// Buffers that we want to notify the model about when they change.
16 tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
17 /// Has the model edited a file since it last checked diagnostics?
18 edited_since_project_diagnostics_check: bool,
19 /// The project this action log is associated with
20 project: Entity<Project>,
21 /// Tracks which buffer versions have already been notified as changed externally
22 notified_versions: BTreeMap<Entity<Buffer>, clock::Global>,
23}
24
25impl ActionLog {
26 /// Creates a new, empty action log associated with the given project.
27 pub fn new(project: Entity<Project>) -> Self {
28 Self {
29 tracked_buffers: BTreeMap::default(),
30 edited_since_project_diagnostics_check: false,
31 project,
32 notified_versions: BTreeMap::default(),
33 }
34 }
35
    /// Returns the project this action log is associated with.
    pub fn project(&self) -> &Entity<Project> {
        &self.project
    }
39
    /// Notifies a diagnostics check.
    ///
    /// Clears the "edited since diagnostics check" flag; it is set again the
    /// next time the agent creates or edits a buffer.
    pub fn checked_project_diagnostics(&mut self) {
        self.edited_since_project_diagnostics_check = false;
    }
44
    /// Returns true if any files have been edited (or created) by the agent
    /// since the last project diagnostics check.
    pub fn has_edited_files_since_project_diagnostics_check(&self) -> bool {
        self.edited_since_project_diagnostics_check
    }
49
    /// Starts (or refreshes) tracking for `buffer` and returns its tracking
    /// state.
    ///
    /// When `is_created` is true the buffer is treated as newly created by the
    /// agent: any previous tracking state is discarded, preserving the content
    /// a rejection should restore, and the entire buffer becomes one
    /// unreviewed insertion.
    fn track_buffer_internal(
        &mut self,
        buffer: Entity<Buffer>,
        is_created: bool,
        cx: &mut Context<Self>,
    ) -> &mut TrackedBuffer {
        let status = if is_created {
            if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
                self.notified_versions.remove(&buffer);
                match tracked.status {
                    // Already marked created: keep the previously captured
                    // file content so a rejection restores the right text.
                    TrackedBufferStatus::Created {
                        existing_file_content,
                    } => TrackedBufferStatus::Created {
                        existing_file_content,
                    },
                    // Re-created after a modification/deletion: the old diff
                    // base is what a rejection should restore.
                    TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
                        TrackedBufferStatus::Created {
                            existing_file_content: Some(tracked.diff_base),
                        }
                    }
                }
            } else if buffer
                .read(cx)
                .file()
                .map_or(false, |file| file.disk_state().exists())
            {
                // The file already exists on disk; capture its current content
                // in case the creation is later rejected.
                TrackedBufferStatus::Created {
                    existing_file_content: Some(buffer.read(cx).as_rope().clone()),
                }
            } else {
                TrackedBufferStatus::Created {
                    existing_file_content: None,
                }
            }
        } else {
            TrackedBufferStatus::Modified
        };

        let tracked_buffer = self
            .tracked_buffers
            .entry(buffer.clone())
            .or_insert_with(|| {
                // Keep language servers aware of this buffer while it is
                // tracked; dropping the handle releases the registration.
                let open_lsp_handle = self.project.update(cx, |project, cx| {
                    project.register_buffer_with_language_servers(&buffer, cx)
                });

                let text_snapshot = buffer.read(cx).text_snapshot();
                let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
                let diff_base;
                let unreviewed_edits;
                if is_created {
                    // A created buffer diffs against nothing: a single edit
                    // inserting every row of the current text.
                    diff_base = Rope::default();
                    unreviewed_edits = Patch::new(vec![Edit {
                        old: 0..1,
                        new: 0..text_snapshot.max_point().row + 1,
                    }])
                } else {
                    diff_base = buffer.read(cx).as_rope().clone();
                    unreviewed_edits = Patch::default();
                }
                TrackedBuffer {
                    buffer: buffer.clone(),
                    diff_base,
                    unreviewed_edits,
                    snapshot: text_snapshot.clone(),
                    status,
                    version: buffer.read(cx).version(),
                    diff,
                    diff_update: diff_update_tx,
                    _open_lsp_handle: open_lsp_handle,
                    _maintain_diff: cx.spawn({
                        let buffer = buffer.clone();
                        async move |this, cx| {
                            Self::maintain_diff(this, buffer, diff_update_rx, cx)
                                .await
                                .ok();
                        }
                    }),
                    _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
                }
            });
        // Refresh the recorded version even when the buffer was already
        // tracked, so it is no longer considered stale.
        tracked_buffer.version = buffer.read(cx).version();
        tracked_buffer
    }
135
136 fn handle_buffer_event(
137 &mut self,
138 buffer: Entity<Buffer>,
139 event: &BufferEvent,
140 cx: &mut Context<Self>,
141 ) {
142 match event {
143 BufferEvent::Edited { .. } => self.handle_buffer_edited(buffer, cx),
144 BufferEvent::FileHandleChanged => {
145 self.handle_buffer_file_changed(buffer, cx);
146 }
147 _ => {}
148 };
149 }
150
151 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
152 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
153 return;
154 };
155 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
156 }
157
    /// Reacts to a tracked buffer's file changing on disk (deletion or
    /// resurrection that happened outside the agent).
    fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        match tracked_buffer.status {
            TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
                if buffer
                    .read(cx)
                    .file()
                    .map_or(false, |file| file.disk_state() == DiskState::Deleted)
                {
                    // If the buffer had been edited by a tool, but it got
                    // deleted externally, we want to stop tracking it.
                    self.tracked_buffers.remove(&buffer);
                    self.notified_versions.remove(&buffer);
                }
                cx.notify();
            }
            TrackedBufferStatus::Deleted => {
                if buffer
                    .read(cx)
                    .file()
                    .map_or(false, |file| file.disk_state() != DiskState::Deleted)
                {
                    // If the buffer had been deleted by a tool, but it got
                    // resurrected externally, we want to clear the edits we
                    // were tracking and reset the buffer's state.
                    self.tracked_buffers.remove(&buffer);
                    self.notified_versions.remove(&buffer);
                    self.track_buffer_internal(buffer, false, cx);
                }
                cx.notify();
            }
        }
    }
194
    /// Long-running task that keeps a tracked buffer's diff up to date.
    ///
    /// Waits on two sources: buffer snapshots queued via
    /// `TrackedBuffer::schedule_diff_update`, and HEAD-commit changes surfaced
    /// through the buffer's uncommitted git diff. Exits when the update
    /// channel closes (the `TrackedBuffer` was dropped).
    async fn maintain_diff(
        this: WeakEntity<Self>,
        buffer: Entity<Buffer>,
        mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
        // Best-effort: if the uncommitted diff can't be opened, we simply
        // don't react to commits.
        let git_diff = this
            .update(cx, |this, cx| {
                this.project.update(cx, |project, cx| {
                    project.open_uncommitted_diff(buffer.clone(), cx)
                })
            })?
            .await
            .ok();
        let buffer_repo = git_store.read_with(cx, |git_store, cx| {
            git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        })?;

        let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
        let _repo_subscription =
            if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
                cx.update(|cx| {
                    let mut old_head = buffer_repo.read(cx).head_commit.clone();
                    Some(cx.subscribe(git_diff, move |_, event, cx| match event {
                        buffer_diff::BufferDiffEvent::DiffChanged { .. } => {
                            // Only forward diff changes caused by a new HEAD
                            // commit (e.g. the user committed their edits).
                            let new_head = buffer_repo.read(cx).head_commit.clone();
                            if new_head != old_head {
                                old_head = new_head;
                                git_diff_updates_tx.send(()).ok();
                            }
                        }
                        _ => {}
                    }))
                })?
            } else {
                None
            };

        loop {
            futures::select_biased! {
                buffer_update = buffer_updates.next() => {
                    if let Some((author, buffer_snapshot)) = buffer_update {
                        Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
                    } else {
                        // Sender dropped: the buffer is no longer tracked.
                        break;
                    }
                }
                _ = git_diff_updates_rx.changed().fuse() => {
                    if let Some(git_diff) = git_diff.as_ref() {
                        Self::keep_committed_edits(&this, &buffer, &git_diff, cx).await?;
                    }
                }
            }
        }

        Ok(())
    }
253
    /// Rebases the tracked diff base over a new buffer snapshot.
    ///
    /// For user-authored changes, edits that don't conflict with unreviewed
    /// agent edits are folded into the diff base (so they won't show up for
    /// review); agent-authored changes leave the diff base untouched and the
    /// diff is simply recomputed.
    async fn track_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        author: ChangeAuthor,
        buffer_snapshot: text::BufferSnapshot,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let rebase = this.read_with(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;

            // Diffing and rebasing are done off the main thread.
            let rebase = cx.background_spawn({
                let mut base_text = tracked_buffer.diff_base.clone();
                let old_snapshot = tracked_buffer.snapshot.clone();
                let new_snapshot = buffer_snapshot.clone();
                let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                async move {
                    let edits = diff_snapshots(&old_snapshot, &new_snapshot);
                    if let ChangeAuthor::User = author {
                        apply_non_conflicting_edits(
                            &unreviewed_edits,
                            edits,
                            &mut base_text,
                            new_snapshot.as_rope(),
                        );
                    }
                    (Arc::new(base_text.to_string()), base_text)
                }
            });

            anyhow::Ok(rebase)
        })??;
        let (new_base_text, new_diff_base) = rebase.await;
        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
299
    /// Folds unreviewed edits into the diff base when they match what was
    /// committed to git.
    ///
    /// Compares the agent's diff base with the committed base text; an
    /// unreviewed edit whose old range and new content both line up with a
    /// committed edit is treated as implicitly approved by the commit.
    async fn keep_committed_edits(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        git_diff: &Entity<BufferDiff>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let buffer_snapshot = this.read_with(cx, |this, _cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok(tracked_buffer.snapshot.clone())
        })??;
        let (new_base_text, new_diff_base) = this
            .read_with(cx, |this, cx| {
                let tracked_buffer = this
                    .tracked_buffers
                    .get(buffer)
                    .context("buffer not tracked")?;
                let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
                let agent_diff_base = tracked_buffer.diff_base.clone();
                let git_diff_base = git_diff.read(cx).base_text().as_rope().clone();
                let buffer_text = tracked_buffer.snapshot.as_rope().clone();
                anyhow::Ok(cx.background_spawn(async move {
                    let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
                    let committed_edits = language::line_diff(
                        &agent_diff_base.to_string(),
                        &git_diff_base.to_string(),
                    )
                    .into_iter()
                    .map(|(old, new)| Edit { old, new });

                    let mut new_agent_diff_base = agent_diff_base.clone();
                    // Row shift accumulated as earlier edits are spliced into
                    // `new_agent_diff_base`.
                    let mut row_delta = 0i32;
                    for committed in committed_edits {
                        while let Some(unreviewed) = old_unreviewed_edits.peek() {
                            // If the committed edit matches the unreviewed
                            // edit, assume the user wants to keep it.
                            if committed.old == unreviewed.old {
                                let unreviewed_new =
                                    buffer_text.slice_rows(unreviewed.new.clone()).to_string();
                                let committed_new =
                                    git_diff_base.slice_rows(committed.new.clone()).to_string();
                                if unreviewed_new == committed_new {
                                    let old_byte_start =
                                        new_agent_diff_base.point_to_offset(Point::new(
                                            (unreviewed.old.start as i32 + row_delta) as u32,
                                            0,
                                        ));
                                    let old_byte_end =
                                        new_agent_diff_base.point_to_offset(cmp::min(
                                            Point::new(
                                                (unreviewed.old.end as i32 + row_delta) as u32,
                                                0,
                                            ),
                                            new_agent_diff_base.max_point(),
                                        ));
                                    new_agent_diff_base
                                        .replace(old_byte_start..old_byte_end, &unreviewed_new);
                                    row_delta +=
                                        unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
                                }
                            } else if unreviewed.old.start >= committed.old.end {
                                // This unreviewed edit starts past the current
                                // committed edit; advance to the next one.
                                break;
                            }

                            old_unreviewed_edits.next().unwrap();
                        }
                    }

                    (
                        Arc::new(new_agent_diff_base.to_string()),
                        new_agent_diff_base,
                    )
                }))
            })??
            .await;

        Self::update_diff(
            this,
            buffer,
            buffer_snapshot,
            new_base_text,
            new_diff_base,
            cx,
        )
        .await
    }
388
    /// Recomputes the buffer's diff against `new_base_text` and stores the
    /// resulting diff base, snapshot, and unreviewed row edits on the tracked
    /// buffer.
    async fn update_diff(
        this: &WeakEntity<ActionLog>,
        buffer: &Entity<Buffer>,
        buffer_snapshot: text::BufferSnapshot,
        new_base_text: Arc<String>,
        new_diff_base: Rope,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        let (diff, language, language_registry) = this.read_with(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get(buffer)
                .context("buffer not tracked")?;
            anyhow::Ok((
                tracked_buffer.diff.clone(),
                buffer.read(cx).language().cloned(),
                buffer.read(cx).language_registry().clone(),
            ))
        })??;
        let diff_snapshot = BufferDiff::update_diff(
            diff.clone(),
            buffer_snapshot.clone(),
            Some(new_base_text),
            true,
            false,
            language,
            language_registry,
            cx,
        )
        .await;
        let mut unreviewed_edits = Patch::default();
        if let Ok(diff_snapshot) = diff_snapshot {
            // Convert the diff hunks back into row-level edits on a background
            // thread.
            unreviewed_edits = cx
                .background_spawn({
                    let diff_snapshot = diff_snapshot.clone();
                    let buffer_snapshot = buffer_snapshot.clone();
                    let new_diff_base = new_diff_base.clone();
                    async move {
                        let mut unreviewed_edits = Patch::default();
                        for hunk in diff_snapshot
                            .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer_snapshot)
                        {
                            let old_range = new_diff_base
                                .offset_to_point(hunk.diff_base_byte_range.start)
                                ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
                            let new_range = hunk.range.start..hunk.range.end;
                            unreviewed_edits.push(point_to_row_edit(
                                Edit {
                                    old: old_range,
                                    new: new_range,
                                },
                                &new_diff_base,
                                &buffer_snapshot.as_rope(),
                            ));
                        }
                        unreviewed_edits
                    }
                })
                .await;

            diff.update(cx, |diff, cx| {
                diff.set_snapshot(diff_snapshot, &buffer_snapshot, cx);
            })?;
        }
        this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
                .get_mut(buffer)
                .context("buffer not tracked")?;
            tracked_buffer.diff_base = new_diff_base;
            tracked_buffer.snapshot = buffer_snapshot;
            tracked_buffer.unreviewed_edits = unreviewed_edits;
            cx.notify();
            anyhow::Ok(())
        })?
    }
465
    /// Track a buffer as read by agent, so we can notify the model about user edits.
    pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.track_buffer_internal(buffer, false, cx);
    }
470
471 /// Mark a buffer as created by agent, so we can refresh it in the context
472 pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
473 self.edited_since_project_diagnostics_check = true;
474 self.track_buffer_internal(buffer.clone(), true, cx);
475 }
476
477 /// Mark a buffer as edited by agent, so we can refresh it in the context
478 pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
479 self.edited_since_project_diagnostics_check = true;
480
481 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
482 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
483 tracked_buffer.status = TrackedBufferStatus::Modified;
484 }
485 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
486 }
487
    /// Marks a buffer as about to be deleted by the agent.
    ///
    /// A buffer the agent itself created is simply untracked (creation and
    /// deletion cancel out); a modified buffer is emptied and flagged
    /// `Deleted` so the deletion shows up for review.
    pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
        match tracked_buffer.status {
            TrackedBufferStatus::Created { .. } => {
                self.tracked_buffers.remove(&buffer);
                self.notified_versions.remove(&buffer);
                cx.notify();
            }
            TrackedBufferStatus::Modified => {
                buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
                tracked_buffer.status = TrackedBufferStatus::Deleted;
                tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
            }
            TrackedBufferStatus::Deleted => {}
        }
        cx.notify();
    }
505
    /// Accepts ("keeps") the unreviewed edits intersecting `buffer_range`,
    /// splicing their new text into the diff base so they no longer appear as
    /// pending review.
    pub fn keep_edits_in_range(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_range: Range<impl language::ToPoint>,
        cx: &mut Context<Self>,
    ) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        match tracked_buffer.status {
            TrackedBufferStatus::Deleted => {
                // Keeping a deletion means accepting it: stop tracking the
                // buffer entirely.
                self.tracked_buffers.remove(&buffer);
                self.notified_versions.remove(&buffer);
                cx.notify();
            }
            _ => {
                let buffer = buffer.read(cx);
                let buffer_range =
                    buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
                // Row shift accumulated as earlier kept edits are applied to
                // the diff base; later edits' old rows must account for it.
                let mut delta = 0i32;

                tracked_buffer.unreviewed_edits.retain_mut(|edit| {
                    edit.old.start = (edit.old.start as i32 + delta) as u32;
                    edit.old.end = (edit.old.end as i32 + delta) as u32;

                    if buffer_range.end.row < edit.new.start
                        || buffer_range.start.row > edit.new.end
                    {
                        // Outside the requested range: leave it unreviewed.
                        true
                    } else {
                        // Splice the buffer's new text for this edit into the
                        // diff base, marking the edit as reviewed.
                        let old_range = tracked_buffer
                            .diff_base
                            .point_to_offset(Point::new(edit.old.start, 0))
                            ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                Point::new(edit.old.end, 0),
                                tracked_buffer.diff_base.max_point(),
                            ));
                        let new_range = tracked_buffer
                            .snapshot
                            .point_to_offset(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.point_to_offset(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        tracked_buffer.diff_base.replace(
                            old_range,
                            &tracked_buffer
                                .snapshot
                                .text_for_range(new_range)
                                .collect::<String>(),
                        );
                        delta += edit.new_len() as i32 - edit.old_len() as i32;
                        false
                    }
                });
                tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
            }
        }
    }
566
    /// Rejects the unreviewed edits intersecting `buffer_ranges`, restoring
    /// the pre-edit content. Returns a task that resolves once the buffer has
    /// been saved (or deleted, for rejected creations).
    pub fn reject_edits_in_ranges(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_ranges: Vec<Range<impl language::ToPoint>>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return Task::ready(Ok(()));
        };

        match &tracked_buffer.status {
            TrackedBufferStatus::Created {
                existing_file_content,
            } => {
                let task = if let Some(existing_file_content) = existing_file_content {
                    // The agent overwrote an existing file: restore its
                    // original content and save.
                    buffer.update(cx, |buffer, cx| {
                        buffer.start_transaction();
                        buffer.set_text("", cx);
                        for chunk in existing_file_content.chunks() {
                            buffer.append(chunk, cx);
                        }
                        buffer.end_transaction(cx);
                    });
                    self.project
                        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
                } else {
                    // For a file created by AI with no pre-existing content,
                    // only delete the file if we're certain it contains only AI content
                    // with no edits from the user.

                    let initial_version = tracked_buffer.version.clone();
                    let current_version = buffer.read(cx).version();

                    let current_content = buffer.read(cx).text();
                    let tracked_content = tracked_buffer.snapshot.text();

                    let is_ai_only_content =
                        initial_version == current_version && current_content == tracked_content;

                    if is_ai_only_content {
                        buffer
                            .read(cx)
                            .entry_id(cx)
                            .and_then(|entry_id| {
                                self.project.update(cx, |project, cx| {
                                    project.delete_entry(entry_id, false, cx)
                                })
                            })
                            .unwrap_or(Task::ready(Ok(())))
                    } else {
                        // Not sure how to disentangle edits made by the user
                        // from edits made by the AI at this point.
                        // For now, preserve both to avoid data loss.
                        //
                        // TODO: Better solution (disable "Reject" after user makes some
                        // edit or find a way to differentiate between AI and user edits)
                        Task::ready(Ok(()))
                    }
                };

                self.tracked_buffers.remove(&buffer);
                self.notified_versions.remove(&buffer);
                cx.notify();
                task
            }
            TrackedBufferStatus::Deleted => {
                // Rejecting a deletion restores the diff base as the buffer's
                // content, then saves.
                buffer.update(cx, |buffer, cx| {
                    buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
                });
                let save = self
                    .project
                    .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));

                // Clear all tracked edits for this buffer and start over as if we just read it.
                self.tracked_buffers.remove(&buffer);
                self.notified_versions.remove(&buffer);
                self.buffer_read(buffer.clone(), cx);
                cx.notify();
                save
            }
            TrackedBufferStatus::Modified => {
                buffer.update(cx, |buffer, cx| {
                    let mut buffer_row_ranges = buffer_ranges
                        .into_iter()
                        .map(|range| {
                            range.start.to_point(buffer).row..range.end.to_point(buffer).row
                        })
                        .peekable();

                    // Collect an anchored range plus original (diff-base) text
                    // for every unreviewed edit intersecting a requested row
                    // range, then apply them all in one edit below.
                    let mut edits_to_revert = Vec::new();
                    for edit in tracked_buffer.unreviewed_edits.edits() {
                        let new_range = tracked_buffer
                            .snapshot
                            .anchor_before(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.anchor_after(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        let new_row_range = new_range.start.to_point(buffer).row
                            ..new_range.end.to_point(buffer).row;

                        let mut revert = false;
                        while let Some(buffer_row_range) = buffer_row_ranges.peek() {
                            if buffer_row_range.end < new_row_range.start {
                                buffer_row_ranges.next();
                            } else if buffer_row_range.start > new_row_range.end {
                                break;
                            } else {
                                revert = true;
                                break;
                            }
                        }

                        if revert {
                            let old_range = tracked_buffer
                                .diff_base
                                .point_to_offset(Point::new(edit.old.start, 0))
                                ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                    Point::new(edit.old.end, 0),
                                    tracked_buffer.diff_base.max_point(),
                                ));
                            let old_text = tracked_buffer
                                .diff_base
                                .chunks_in_range(old_range)
                                .collect::<String>();
                            edits_to_revert.push((new_range, old_text));
                        }
                    }

                    buffer.edit(edits_to_revert, None, cx);
                });
                self.project
                    .update(cx, |project, cx| project.save_buffer(buffer, cx))
            }
        }
    }
703
704 pub fn keep_all_edits(&mut self, cx: &mut Context<Self>) {
705 self.tracked_buffers
706 .retain(|_buffer, tracked_buffer| match tracked_buffer.status {
707 TrackedBufferStatus::Deleted => false,
708 _ => {
709 tracked_buffer.unreviewed_edits.clear();
710 tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
711 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
712 true
713 }
714 });
715 cx.notify();
716 }
717
718 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
719 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
720 self.tracked_buffers
721 .iter()
722 .filter(|(_, tracked)| tracked.has_edits(cx))
723 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
724 .collect()
725 }
726
727 /// Returns stale buffers that haven't been notified yet
728 pub fn unnotified_stale_buffers<'a>(
729 &'a self,
730 cx: &'a App,
731 ) -> impl Iterator<Item = &'a Entity<Buffer>> {
732 self.stale_buffers(cx).filter(|buffer| {
733 let buffer_entity = buffer.read(cx);
734 self.notified_versions
735 .get(buffer)
736 .map_or(true, |notified_version| {
737 *notified_version != buffer_entity.version
738 })
739 })
740 }
741
742 /// Marks the given buffers as notified at their current versions
743 pub fn mark_buffers_as_notified(
744 &mut self,
745 buffers: impl IntoIterator<Item = Entity<Buffer>>,
746 cx: &App,
747 ) {
748 for buffer in buffers {
749 let version = buffer.read(cx).version.clone();
750 self.notified_versions.insert(buffer, version);
751 }
752 }
753
754 /// Iterate over buffers changed since last read or edited by the model
755 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
756 self.tracked_buffers
757 .iter()
758 .filter(|(buffer, tracked)| {
759 let buffer = buffer.read(cx);
760
761 tracked.version != buffer.version
762 && buffer
763 .file()
764 .map_or(false, |file| file.disk_state() != DiskState::Deleted)
765 })
766 .map(|(buffer, _)| buffer)
767 }
768}
769
/// Applies `edits` (expressed against `new_text`) to `old_text`, skipping any
/// edit that conflicts with an edit already present in `patch`.
///
/// `patch` holds the unreviewed (agent) edits; `edits` are new changes. Edits
/// that don't overlap any unreviewed edit are folded into `old_text`;
/// conflicting ones are left out so they remain visible for review.
fn apply_non_conflicting_edits(
    patch: &Patch<u32>,
    edits: Vec<Edit<u32>>,
    old_text: &mut Rope,
    new_text: &Rope,
) {
    let mut old_edits = patch.edits().iter().cloned().peekable();
    let mut new_edits = edits.into_iter().peekable();
    // Row shift from new edits already applied to `old_text`.
    let mut applied_delta = 0i32;
    // Row shift contributed by old (unreviewed) edits we've scanned past.
    let mut rebased_delta = 0i32;

    while let Some(mut new_edit) = new_edits.next() {
        let mut conflict = false;

        // Push all the old edits that are before this new edit or that intersect with it.
        while let Some(old_edit) = old_edits.peek() {
            if new_edit.old.end < old_edit.new.start
                || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
            {
                // Old edit lies entirely after this new edit.
                break;
            } else if new_edit.old.start > old_edit.new.end
                || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
            {
                // Old edit lies entirely before this new edit; account for its
                // row shift and advance.
                let old_edit = old_edits.next().unwrap();
                rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
            } else {
                // Overlap: the new edit conflicts with an unreviewed edit.
                // Advance whichever side lets the scan continue.
                conflict = true;
                if new_edits
                    .peek()
                    .map_or(false, |next_edit| next_edit.old.overlaps(&old_edit.new))
                {
                    new_edit = new_edits.next().unwrap();
                } else {
                    let old_edit = old_edits.next().unwrap();
                    rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
                }
            }
        }

        if !conflict {
            // This edit doesn't intersect with any old edit, so we can apply it to the old text.
            new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
            new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
            let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
                ..old_text.point_to_offset(cmp::min(
                    Point::new(new_edit.old.end, 0),
                    old_text.max_point(),
                ));
            let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
                ..new_text.point_to_offset(cmp::min(
                    Point::new(new_edit.new.end, 0),
                    new_text.max_point(),
                ));

            old_text.replace(
                old_bytes,
                &new_text.chunks_in_range(new_bytes).collect::<String>(),
            );
            applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
        }
    }
}
832
/// Computes row-level edits between two buffer snapshots, merging adjacent or
/// overlapping row edits into single entries.
fn diff_snapshots(
    old_snapshot: &text::BufferSnapshot,
    new_snapshot: &text::BufferSnapshot,
) -> Vec<Edit<u32>> {
    let mut edits = new_snapshot
        .edits_since::<Point>(&old_snapshot.version)
        .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
        .peekable();
    let mut row_edits = Vec::new();
    while let Some(mut edit) = edits.next() {
        // Coalesce edits whose old row ranges touch or overlap.
        while let Some(next_edit) = edits.peek() {
            if edit.old.end >= next_edit.old.start {
                edit.old.end = next_edit.old.end;
                edit.new.end = next_edit.new.end;
                edits.next();
            } else {
                break;
            }
        }
        row_edits.push(edit);
    }
    row_edits
}
856
/// Converts a point-based edit into a whole-row edit.
///
/// An insertion starting at the end of a line whose new text begins with a
/// newline is attributed to the following row; an edit already aligned to row
/// boundaries maps to rows directly; anything else rounds outward to include
/// the partially touched final row.
fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
    if edit.old.start.column == old_text.line_len(edit.old.start.row)
        && new_text
            .chars_at(new_text.point_to_offset(edit.new.start))
            .next()
            == Some('\n')
        && edit.old.start != old_text.max_point()
    {
        // End-of-line insertion beginning with '\n': affects the next row.
        Edit {
            old: edit.old.start.row + 1..edit.old.end.row + 1,
            new: edit.new.start.row + 1..edit.new.end.row + 1,
        }
    } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
        // Already aligned to row boundaries.
        Edit {
            old: edit.old.start.row..edit.old.end.row,
            new: edit.new.start.row..edit.new.end.row,
        }
    } else {
        // Include partially covered rows in full.
        Edit {
            old: edit.old.start.row..edit.old.end.row + 1,
            new: edit.new.start.row..edit.new.end.row + 1,
        }
    }
}
881
/// Who authored a buffer change. User changes may be folded into the diff
/// base (see `track_edits`); agent changes always remain visible for review.
#[derive(Copy, Clone, Debug)]
enum ChangeAuthor {
    User,
    Agent,
}
887
/// How the agent has affected a tracked buffer.
enum TrackedBufferStatus {
    /// The agent created the file. `existing_file_content` holds the previous
    /// on-disk content (if any) so a rejected creation can restore it.
    Created { existing_file_content: Option<Rope> },
    /// The agent modified an existing file.
    Modified,
    /// The agent deleted the file.
    Deleted,
}
893
/// Per-buffer tracking state: diff base, unreviewed edits, and the task that
/// keeps the diff current.
struct TrackedBuffer {
    buffer: Entity<Buffer>,
    /// The text the buffer is diffed against (the user-approved content).
    diff_base: Rope,
    /// Row edits relative to `diff_base` not yet reviewed by the user.
    unreviewed_edits: Patch<u32>,
    status: TrackedBufferStatus,
    /// Buffer version last recorded by the log; used for staleness checks.
    version: clock::Global,
    diff: Entity<BufferDiff>,
    /// Snapshot that `unreviewed_edits` row ranges refer to.
    snapshot: text::BufferSnapshot,
    /// Sends snapshots to the `maintain_diff` task for recomputation.
    diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
    // Keeps language servers registered for this buffer while tracked.
    _open_lsp_handle: OpenLspBufferHandle,
    // Dropping this cancels the diff-maintenance loop.
    _maintain_diff: Task<()>,
    _subscription: Subscription,
}
907
908impl TrackedBuffer {
909 fn has_edits(&self, cx: &App) -> bool {
910 self.diff
911 .read(cx)
912 .hunks(&self.buffer.read(cx), cx)
913 .next()
914 .is_some()
915 }
916
917 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
918 self.diff_update
919 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
920 .ok();
921 }
922}
923
/// A buffer with unreviewed changes, exposed alongside its diff.
pub struct ChangedBuffer {
    pub diff: Entity<BufferDiff>,
}
927
928#[cfg(test)]
929mod tests {
930 use super::*;
931 use buffer_diff::DiffHunkStatusKind;
932 use gpui::TestAppContext;
933 use language::Point;
934 use project::{FakeFs, Fs, Project, RemoveOptions};
935 use rand::prelude::*;
936 use serde_json::json;
937 use settings::SettingsStore;
938 use std::env;
939 use util::{RandomCharIter, path};
940
    // Initializes test logging once per test binary.
    #[ctor::ctor]
    fn init_logger() {
        zlog::init_test();
    }
945
    // Installs the settings store, language support, and project settings that
    // every test in this module needs.
    fn init_test(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            language::init(cx);
            Project::init_settings(cx);
        });
    }
954
    // Verifies that agent edits surface as unreviewed hunks and that keeping
    // edits in a range clears only the hunks intersecting that range.
    #[gpui::test(iterations = 10)]
    async fn test_keep_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndEf\nghi\njkl\nmnO"
        );
        // Both agent edits should be pending review.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(2, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(4, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Keeping rows 3..=4 accepts only the second hunk.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), cx)
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(2, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\n".into(),
                }],
            )]
        );

        // Keeping the full range clears all remaining hunks.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1032
    // Verifies that agent deletions appear as deleted hunks, that undoing one
    // removes the corresponding hunk, and that keeping a deletion clears it.
    #[gpui::test(iterations = 10)]
    async fn test_deletions(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
                    .unwrap();
                buffer.finalize_last_transaction();
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
                    .unwrap();
                buffer.finalize_last_transaction();
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nghi\njkl\npqr"
        );
        // Both deleted lines should show as deleted hunks.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "mno\n".into(),
                    }
                ],
            )]
        );

        // Undoing the most recent deletion should leave only the first hunk.
        buffer.update(cx, |buffer, cx| buffer.undo(cx));
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\nghi\njkl\nmno\npqr"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(1, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "def\n".into(),
                }],
            )]
        );

        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1117
    /// Agent makes one multi-line edit, producing a single modified hunk; user
    /// edits both outside and inside that hunk afterwards. The tracked hunk
    /// must stay stable throughout, and keeping a range that touches the hunk
    /// must clear the log.
    #[gpui::test(iterations = 10)]
    async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Agent edit: replace "f\ngh" with "F\nGHI", spanning rows 1-2.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndeF\nGHI\njkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // User edits outside the hunk (rows 0 and 3, no `buffer_edited` call):
        // the tracked hunk must be unaffected.
        buffer.update(cx, |buffer, cx| {
            buffer.edit(
                [
                    (Point::new(0, 2)..Point::new(0, 2), "X"),
                    (Point::new(3, 0)..Point::new(3, 0), "Y"),
                ],
                None,
                cx,
            )
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // User edit inside the hunk (row 1): the hunk still stands with the
        // same range and original text.
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abXc\ndZeF\nGHI\nYjkl\nmno"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(1, 0)..Point::new(3, 0),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "def\nghi\n".into(),
                }],
            )]
        );

        // Keeping a range that reaches the hunk's start clears the log.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1214
    /// Agent creates a brand-new file: the entire content shows as one Added
    /// hunk with empty old text. A later user edit grows the hunk, and keeping
    /// a range inside it clears the log.
    #[gpui::test(iterations = 10)]
    async fn test_creating_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        // Mark the buffer as created by the agent, then write its content.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        // The whole file ("lorem", 5 columns) is a single Added hunk.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // A user edit inside the added region extends the hunk to 6 columns.
        buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 6),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Keeping a byte range overlapping the hunk clears the log.
        action_log.update(cx, |log, cx| {
            log.keep_edits_in_range(buffer.clone(), 0..5, cx)
        });
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1273
    /// Agent overwrites an existing file via `buffer_created` + `set_text`:
    /// the result is tracked as a single Added hunk, and rejecting it restores
    /// the file's original on-disk content.
    #[gpui::test(iterations = 10)]
    async fn test_overwriting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        // `buffer_created` on a pre-existing file models a wholesale overwrite.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        // The new content ("sit amet consecteur", 19 columns) is one Added hunk.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 19),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting any sub-range of the hunk restores the original content.
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }
1331
    /// Agent first appends to a tracked file (Modified hunk), then overwrites
    /// it entirely (`buffer_created` + `set_text`, becoming an Added hunk).
    /// Rejecting after the overwrite restores the file's original content, not
    /// the intermediate appended version.
    #[gpui::test(iterations = 10)]
    async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        // First agent edit: append to the existing content.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 37),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "Lorem ipsum dolor".into(),
                }],
            )]
        );

        // Second agent action: overwrite the whole file. The hunk becomes
        // Added, since `buffer_created` treats the file as freshly written.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 9),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting restores the very first on-disk content.
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }
1411
    /// Agent deletes two files (tracked via `will_delete_buffer`), then one is
    /// recreated externally and the other is recreated by a tool. External
    /// recreation/deletion drops the tracked state; tool recreation re-tracks
    /// the file as an Added hunk.
    #[gpui::test(iterations = 10)]
    async fn test_deleting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file1": "lorem\n", "file2": "ipsum\n"}),
        )
        .await;

        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let file1_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();
        let file2_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
            .unwrap();

        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let buffer1 = project
            .update(cx, |project, cx| {
                project.open_buffer(file1_path.clone(), cx)
            })
            .await
            .unwrap();
        let buffer2 = project
            .update(cx, |project, cx| {
                project.open_buffer(file2_path.clone(), cx)
            })
            .await
            .unwrap();

        // Register the pending deletions with the log before removing the files.
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| {
                project.delete_file(file1_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        project
            .update(cx, |project, cx| {
                project.delete_file(file2_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        // Each deleted file is reported as a zero-width Deleted hunk holding
        // its former content.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![
                (
                    buffer1.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "lorem\n".into(),
                    }]
                ),
                (
                    buffer2.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "ipsum\n".into(),
                    }],
                )
            ]
        );

        // Simulate file1 being recreated externally.
        fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
            .await;

        // Simulate file2 being recreated by a tool.
        let buffer2 = project
            .update(cx, |project, cx| project.open_buffer(file2_path, cx))
            .await
            .unwrap();
        action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
        buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
        action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();
        // Only the tool-recreated file2 remains tracked, now as an Added hunk;
        // the externally-recreated file1 is no longer reported.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer2.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Simulate file2 being deleted externally.
        fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1521
    /// Agent makes two edits producing two hunks. Rejecting a range that
    /// misses both hunks is a no-op; rejecting a range that overlaps one hunk
    /// reverts only that hunk; rejecting the rest restores the original text.
    #[gpui::test(iterations = 10)]
    async fn test_reject_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Two agent edits: expand "def" into "dE\nXYZf", and change "mno" to "mnO".
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // If the rejected range doesn't overlap with any hunk, we ignore it.
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Rejecting a range overlapping only the first hunk reverts just it;
        // the second hunk shifts up (rows collapse from 5 back to 4).
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(1, 0)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(4, 0)..Point::new(4, 3),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "mno".into(),
                }],
            )]
        );

        // Rejecting at the remaining hunk's start reverts it too.
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1656
    /// Rejecting several anchor ranges in one `reject_edits_in_ranges` call
    /// reverts all covered hunks; the buffer text is already restored
    /// synchronously, before the returned task completes.
    #[gpui::test(iterations = 10)]
    async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Same two agent edits as in `test_reject_edits`.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Reject both hunks via two anchor ranges in a single call.
        action_log.update(cx, |log, cx| {
            let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
                ..buffer.read(cx).anchor_before(Point::new(1, 0));
            let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
                ..buffer.read(cx).anchor_before(Point::new(5, 3));

            log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], cx)
                .detach();
            // The text is reverted synchronously, even though the task was
            // detached rather than awaited.
            assert_eq!(
                buffer.read_with(cx, |buffer, _| buffer.text()),
                "abc\ndef\nghi\njkl\nmno"
            );
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1732
    /// Agent deletes a file; rejecting the deletion recreates the file on disk
    /// with its original content and clears the log.
    #[gpui::test(iterations = 10)]
    async fn test_reject_deleted_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "content"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
            .await
            .unwrap();

        // Track the deletion, then actually delete the file.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| {
                project.delete_file(file_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 0),
                    diff_status: DiffHunkStatusKind::Deleted,
                    old_text: "content".into(),
                }]
            )]
        );

        // Rejecting the deletion hunk restores the buffer text and the file
        // on disk.
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 0)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
        assert!(fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1789
    /// Agent creates a new file; rejecting the creation removes the file from
    /// disk and clears the log.
    #[gpui::test(iterations = 10)]
    async fn test_reject_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        // Agent creates the file and writes its content.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 7),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting the whole Added hunk (range extends past the 7-column
        // content; it overlaps the hunk, which is what matters) deletes the
        // file from disk.
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 11)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1843
    /// Agent creates a file, then the user adds their own edits and saves.
    /// Rejecting everything must NOT delete the file or the user's content —
    /// the file keeps both the AI content and the user's addition.
    #[gpui::test]
    async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // AI creates file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();

        // User makes additional edits (no `buffer_edited` call, so the log
        // treats them as the user's, not the agent's)
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| {
                buffer.edit([(10..10, "\nuser added this line")], None, cx);
            });
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // Reject all
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(100, 0)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();

        // File should still contain all the content
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        let content = buffer.read_with(cx, |buffer, _| buffer.text());
        assert_eq!(content, "ai content\nuser added this line");
    }
1909
    /// Randomized fuzz test: interleaves keep/reject operations with random
    /// agent and user edits, and periodically checks (via `quiesce`) that
    /// replaying the log's unreviewed edits over its diff base reproduces the
    /// buffer's current text exactly.
    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        init_test(cx);

        // Number of random operations per iteration; overridable via the
        // `OPERATIONS` environment variable.
        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

        for _ in 0..operations {
            // 25% keep, 25% reject, 50% edit (agent or user, coin flip).
            match rng.gen_range(0..100) {
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, cx)
                    });
                }
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            log.reject_edits_in_ranges(buffer.clone(), vec![range], cx)
                        })
                        .await
                        .unwrap();
                }
                _ => {
                    let is_agent_edit = rng.gen_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        // Only agent edits are reported to the log; user edits
                        // are left for the log's buffer subscription to notice.
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            // Occasionally verify mid-run, not just at the end.
            if rng.gen_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

        // Invariant check: applying the tracked unreviewed edits to the diff
        // base must reconstruct the buffer's current text.
        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(&buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    // Map the edit's new-row range back onto the diff base,
                    // clamping at the base's end.
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }
2001
    /// Simulates git commits advancing HEAD while agent edits are pending.
    /// Hunks whose content matches the new HEAD text are automatically marked
    /// as kept; hunks the commit changed differently or ignored stay
    /// unreviewed.
    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        // Initial HEAD matches the working copy exactly.
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt".into(), "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // One agent transaction containing five distinct kinds of edits.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        // All five edits are unreviewed initially.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt".into(), "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        // The two accepted edits are gone; the other three remain unreviewed.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Make another commit that accepts the NEW line but with different content
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[(
                "file.txt".into(),
                "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into(),
            )],
            "0000002",
        );
        cx.run_until_parked();
        // GGG now matches HEAD and is kept; the added line's content differs
        // ("DIFFERENT" vs "NEW"), so it stays unreviewed, as does "j" -> "J".
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt".into(), "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
2166
    /// Plain-data snapshot of one diff hunk, used by these tests to compare
    /// the hunks reported by `ActionLog::changed_buffers` against expected
    /// values with simple equality assertions.
    #[derive(Debug, Clone, PartialEq, Eq)]
    struct HunkStatus {
        /// Row/column range the hunk occupies in the current buffer text.
        range: Range<Point>,
        /// Whether the hunk is an addition, deletion, or modification.
        diff_status: DiffHunkStatusKind,
        /// Text this hunk replaced in the diff base ("" for additions).
        old_text: String,
    }
2173
2174 fn unreviewed_hunks(
2175 action_log: &Entity<ActionLog>,
2176 cx: &TestAppContext,
2177 ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
2178 cx.read(|cx| {
2179 action_log
2180 .read(cx)
2181 .changed_buffers(cx)
2182 .into_iter()
2183 .map(|(buffer, diff)| {
2184 let snapshot = buffer.read(cx).snapshot();
2185 (
2186 buffer,
2187 diff.read(cx)
2188 .hunks(&snapshot, cx)
2189 .map(|hunk| HunkStatus {
2190 diff_status: hunk.status().kind,
2191 range: hunk.range,
2192 old_text: diff
2193 .read(cx)
2194 .base_text()
2195 .text_for_range(hunk.diff_base_byte_range)
2196 .collect(),
2197 })
2198 .collect(),
2199 )
2200 })
2201 .collect()
2202 })
2203 }
2204}