1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use clock;
4use collections::BTreeMap;
5use futures::{FutureExt, StreamExt, channel::mpsc};
6use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
7use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
8use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
9use std::{cmp, ops::Range, sync::Arc};
10use text::{Edit, Patch, Rope};
11use util::{RangeExt, ResultExt as _};
12
/// Tracks actions performed by tools in an agent thread.
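///
/// A minimal usage sketch (marked `ignore` so it is not run as a doc-test;
/// it assumes `project`, `buffer`, and `cx` are available from the
/// surrounding GPUI application, as in the tests at the bottom of this file):
///
/// ```ignore
/// let action_log = cx.new(|_| ActionLog::new(project.clone()));
/// // The agent reads a buffer and then edits it through a tool.
/// action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
/// buffer.update(cx, |buffer, cx| buffer.set_text("new contents", cx));
/// action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
/// // Later, the user reviews the resulting hunks.
/// action_log.update(cx, |log, cx| log.keep_all_edits(cx));
/// ```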
14pub struct ActionLog {
15 /// Buffers that we want to notify the model about when they change.
16 tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
17 /// Has the model edited a file since it last checked diagnostics?
18 edited_since_project_diagnostics_check: bool,
19 /// The project this action log is associated with
20 project: Entity<Project>,
21 /// Tracks which buffer versions have already been notified as changed externally
22 notified_versions: BTreeMap<Entity<Buffer>, clock::Global>,
23}
24
25impl ActionLog {
26 /// Creates a new, empty action log associated with the given project.
27 pub fn new(project: Entity<Project>) -> Self {
28 Self {
29 tracked_buffers: BTreeMap::default(),
30 edited_since_project_diagnostics_check: false,
31 project,
32 notified_versions: BTreeMap::default(),
33 }
34 }
35
36 pub fn project(&self) -> &Entity<Project> {
37 &self.project
38 }
39
    /// Records that the project's diagnostics have been checked.
41 pub fn checked_project_diagnostics(&mut self) {
42 self.edited_since_project_diagnostics_check = false;
43 }
44
45 /// Returns true if any files have been edited since the last project diagnostics check
46 pub fn has_edited_files_since_project_diagnostics_check(&self) -> bool {
47 self.edited_since_project_diagnostics_check
48 }
49
50 pub fn latest_snapshot(&self, buffer: &Entity<Buffer>) -> Option<text::BufferSnapshot> {
51 Some(self.tracked_buffers.get(buffer)?.snapshot.clone())
52 }
53
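    /// Starts tracking `buffer` if it isn't tracked yet: registers it with the
    /// language servers, takes a snapshot to diff against, and spawns a task
    /// that keeps the diff up to date. `is_created` indicates the agent is
    /// (re)creating the file, in which case any pre-existing on-disk content
    /// is remembered so a rejection can restore it.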
54 fn track_buffer_internal(
55 &mut self,
56 buffer: Entity<Buffer>,
57 is_created: bool,
58 cx: &mut Context<Self>,
59 ) -> &mut TrackedBuffer {
60 let status = if is_created {
61 if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
62 self.notified_versions.remove(&buffer);
63 match tracked.status {
64 TrackedBufferStatus::Created {
65 existing_file_content,
66 } => TrackedBufferStatus::Created {
67 existing_file_content,
68 },
69 TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
70 TrackedBufferStatus::Created {
71 existing_file_content: Some(tracked.diff_base),
72 }
73 }
74 }
75 } else if buffer
76 .read(cx)
77 .file()
78 .map_or(false, |file| file.disk_state().exists())
79 {
80 TrackedBufferStatus::Created {
81 existing_file_content: Some(buffer.read(cx).as_rope().clone()),
82 }
83 } else {
84 TrackedBufferStatus::Created {
85 existing_file_content: None,
86 }
87 }
88 } else {
89 TrackedBufferStatus::Modified
90 };
91
92 let tracked_buffer = self
93 .tracked_buffers
94 .entry(buffer.clone())
95 .or_insert_with(|| {
96 let open_lsp_handle = self.project.update(cx, |project, cx| {
97 project.register_buffer_with_language_servers(&buffer, cx)
98 });
99
100 let text_snapshot = buffer.read(cx).text_snapshot();
101 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
102 let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
103 let diff_base;
104 let unreviewed_edits;
105 if is_created {
106 diff_base = Rope::default();
107 unreviewed_edits = Patch::new(vec![Edit {
108 old: 0..1,
109 new: 0..text_snapshot.max_point().row + 1,
110 }])
111 } else {
112 diff_base = buffer.read(cx).as_rope().clone();
113 unreviewed_edits = Patch::default();
114 }
115 TrackedBuffer {
116 buffer: buffer.clone(),
117 diff_base,
118 unreviewed_edits,
119 snapshot: text_snapshot.clone(),
120 status,
121 version: buffer.read(cx).version(),
122 diff,
123 diff_update: diff_update_tx,
124 _open_lsp_handle: open_lsp_handle,
125 _maintain_diff: cx.spawn({
126 let buffer = buffer.clone();
127 async move |this, cx| {
128 Self::maintain_diff(this, buffer, diff_update_rx, cx)
129 .await
130 .ok();
131 }
132 }),
133 _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
134 }
135 });
136 tracked_buffer.version = buffer.read(cx).version();
137 tracked_buffer
138 }
139
140 fn handle_buffer_event(
141 &mut self,
142 buffer: Entity<Buffer>,
143 event: &BufferEvent,
144 cx: &mut Context<Self>,
145 ) {
146 match event {
147 BufferEvent::Edited { .. } => self.handle_buffer_edited(buffer, cx),
148 BufferEvent::FileHandleChanged => {
149 self.handle_buffer_file_changed(buffer, cx);
150 }
151 _ => {}
152 };
153 }
154
155 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
156 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
157 return;
158 };
159 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
160 }
161
162 fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
163 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
164 return;
165 };
166
167 match tracked_buffer.status {
168 TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
169 if buffer
170 .read(cx)
171 .file()
172 .map_or(false, |file| file.disk_state() == DiskState::Deleted)
173 {
174 // If the buffer had been edited by a tool, but it got
175 // deleted externally, we want to stop tracking it.
176 self.tracked_buffers.remove(&buffer);
177 self.notified_versions.remove(&buffer);
178 }
179 cx.notify();
180 }
181 TrackedBufferStatus::Deleted => {
182 if buffer
183 .read(cx)
184 .file()
185 .map_or(false, |file| file.disk_state() != DiskState::Deleted)
186 {
187 // If the buffer had been deleted by a tool, but it got
188 // resurrected externally, we want to clear the edits we
189 // were tracking and reset the buffer's state.
190 self.tracked_buffers.remove(&buffer);
191 self.notified_versions.remove(&buffer);
192 self.track_buffer_internal(buffer, false, cx);
193 }
194 cx.notify();
195 }
196 }
197 }
198
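    /// Long-running task that keeps a tracked buffer's diff current. It
    /// rebases the diff base whenever the buffer changes and, when the git
    /// HEAD moves, folds edits that were committed into the reviewed state.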
199 async fn maintain_diff(
200 this: WeakEntity<Self>,
201 buffer: Entity<Buffer>,
202 mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
203 cx: &mut AsyncApp,
204 ) -> Result<()> {
205 let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
206 let git_diff = this
207 .update(cx, |this, cx| {
208 this.project.update(cx, |project, cx| {
209 project.open_uncommitted_diff(buffer.clone(), cx)
210 })
211 })?
212 .await
213 .ok();
214 let buffer_repo = git_store.read_with(cx, |git_store, cx| {
215 git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
216 })?;
217
218 let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
219 let _repo_subscription =
220 if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
221 cx.update(|cx| {
222 let mut old_head = buffer_repo.read(cx).head_commit.clone();
223 Some(cx.subscribe(git_diff, move |_, event, cx| match event {
224 buffer_diff::BufferDiffEvent::DiffChanged { .. } => {
225 let new_head = buffer_repo.read(cx).head_commit.clone();
226 if new_head != old_head {
227 old_head = new_head;
228 git_diff_updates_tx.send(()).ok();
229 }
230 }
231 _ => {}
232 }))
233 })?
234 } else {
235 None
236 };
237
238 loop {
239 futures::select_biased! {
240 buffer_update = buffer_updates.next() => {
241 if let Some((author, buffer_snapshot)) = buffer_update {
242 Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
243 } else {
244 break;
245 }
246 }
247 _ = git_diff_updates_rx.changed().fuse() => {
248 if let Some(git_diff) = git_diff.as_ref() {
249 Self::keep_committed_edits(&this, &buffer, &git_diff, cx).await?;
250 }
251 }
252 }
253 }
254
255 Ok(())
256 }
257
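    /// Recomputes the unreviewed diff after a buffer change. User edits that
    /// don't conflict with unreviewed agent edits are absorbed into the diff
    /// base so they aren't attributed to the agent.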
258 async fn track_edits(
259 this: &WeakEntity<ActionLog>,
260 buffer: &Entity<Buffer>,
261 author: ChangeAuthor,
262 buffer_snapshot: text::BufferSnapshot,
263 cx: &mut AsyncApp,
264 ) -> Result<()> {
265 let rebase = this.read_with(cx, |this, cx| {
266 let tracked_buffer = this
267 .tracked_buffers
268 .get(buffer)
269 .context("buffer not tracked")?;
270
271 let rebase = cx.background_spawn({
272 let mut base_text = tracked_buffer.diff_base.clone();
273 let old_snapshot = tracked_buffer.snapshot.clone();
274 let new_snapshot = buffer_snapshot.clone();
275 let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
276 async move {
277 let edits = diff_snapshots(&old_snapshot, &new_snapshot);
278 if let ChangeAuthor::User = author {
279 apply_non_conflicting_edits(
280 &unreviewed_edits,
281 edits,
282 &mut base_text,
283 new_snapshot.as_rope(),
284 );
285 }
286 (Arc::new(base_text.to_string()), base_text)
287 }
288 });
289
290 anyhow::Ok(rebase)
291 })??;
292 let (new_base_text, new_diff_base) = rebase.await;
293 Self::update_diff(
294 this,
295 buffer,
296 buffer_snapshot,
297 new_base_text,
298 new_diff_base,
299 cx,
300 )
301 .await
302 }
303
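    /// Called when the git HEAD changes: unreviewed agent edits whose content
    /// matches what was committed are treated as kept and folded into the
    /// diff base.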
304 async fn keep_committed_edits(
305 this: &WeakEntity<ActionLog>,
306 buffer: &Entity<Buffer>,
307 git_diff: &Entity<BufferDiff>,
308 cx: &mut AsyncApp,
309 ) -> Result<()> {
310 let buffer_snapshot = this.read_with(cx, |this, _cx| {
311 let tracked_buffer = this
312 .tracked_buffers
313 .get(buffer)
314 .context("buffer not tracked")?;
315 anyhow::Ok(tracked_buffer.snapshot.clone())
316 })??;
317 let (new_base_text, new_diff_base) = this
318 .read_with(cx, |this, cx| {
319 let tracked_buffer = this
320 .tracked_buffers
321 .get(buffer)
322 .context("buffer not tracked")?;
323 let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
324 let agent_diff_base = tracked_buffer.diff_base.clone();
325 let git_diff_base = git_diff.read(cx).base_text().as_rope().clone();
326 let buffer_text = tracked_buffer.snapshot.as_rope().clone();
327 anyhow::Ok(cx.background_spawn(async move {
328 let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
329 let committed_edits = language::line_diff(
330 &agent_diff_base.to_string(),
331 &git_diff_base.to_string(),
332 )
333 .into_iter()
334 .map(|(old, new)| Edit { old, new });
335
336 let mut new_agent_diff_base = agent_diff_base.clone();
337 let mut row_delta = 0i32;
338 for committed in committed_edits {
339 while let Some(unreviewed) = old_unreviewed_edits.peek() {
340 // If the committed edit matches the unreviewed
341 // edit, assume the user wants to keep it.
342 if committed.old == unreviewed.old {
343 let unreviewed_new =
344 buffer_text.slice_rows(unreviewed.new.clone()).to_string();
345 let committed_new =
346 git_diff_base.slice_rows(committed.new.clone()).to_string();
347 if unreviewed_new == committed_new {
348 let old_byte_start =
349 new_agent_diff_base.point_to_offset(Point::new(
350 (unreviewed.old.start as i32 + row_delta) as u32,
351 0,
352 ));
353 let old_byte_end =
354 new_agent_diff_base.point_to_offset(cmp::min(
355 Point::new(
356 (unreviewed.old.end as i32 + row_delta) as u32,
357 0,
358 ),
359 new_agent_diff_base.max_point(),
360 ));
361 new_agent_diff_base
362 .replace(old_byte_start..old_byte_end, &unreviewed_new);
363 row_delta +=
364 unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
365 }
366 } else if unreviewed.old.start >= committed.old.end {
367 break;
368 }
369
370 old_unreviewed_edits.next().unwrap();
371 }
372 }
373
374 (
375 Arc::new(new_agent_diff_base.to_string()),
376 new_agent_diff_base,
377 )
378 }))
379 })??
380 .await;
381
382 Self::update_diff(
383 this,
384 buffer,
385 buffer_snapshot,
386 new_base_text,
387 new_diff_base,
388 cx,
389 )
390 .await
391 }
392
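    /// Recomputes the buffer's diff against `new_base_text` and stores the
    /// resulting row-level unreviewed edits on the tracked buffer.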
393 async fn update_diff(
394 this: &WeakEntity<ActionLog>,
395 buffer: &Entity<Buffer>,
396 buffer_snapshot: text::BufferSnapshot,
397 new_base_text: Arc<String>,
398 new_diff_base: Rope,
399 cx: &mut AsyncApp,
400 ) -> Result<()> {
401 let (diff, language, language_registry) = this.read_with(cx, |this, cx| {
402 let tracked_buffer = this
403 .tracked_buffers
404 .get(buffer)
405 .context("buffer not tracked")?;
406 anyhow::Ok((
407 tracked_buffer.diff.clone(),
408 buffer.read(cx).language().cloned(),
409 buffer.read(cx).language_registry().clone(),
410 ))
411 })??;
412 let diff_snapshot = BufferDiff::update_diff(
413 diff.clone(),
414 buffer_snapshot.clone(),
415 Some(new_base_text),
416 true,
417 false,
418 language,
419 language_registry,
420 cx,
421 )
422 .await;
423 let mut unreviewed_edits = Patch::default();
424 if let Ok(diff_snapshot) = diff_snapshot {
425 unreviewed_edits = cx
426 .background_spawn({
427 let diff_snapshot = diff_snapshot.clone();
428 let buffer_snapshot = buffer_snapshot.clone();
429 let new_diff_base = new_diff_base.clone();
430 async move {
431 let mut unreviewed_edits = Patch::default();
432 for hunk in diff_snapshot
433 .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer_snapshot)
434 {
435 let old_range = new_diff_base
436 .offset_to_point(hunk.diff_base_byte_range.start)
437 ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
438 let new_range = hunk.range.start..hunk.range.end;
439 unreviewed_edits.push(point_to_row_edit(
440 Edit {
441 old: old_range,
442 new: new_range,
443 },
444 &new_diff_base,
445 &buffer_snapshot.as_rope(),
446 ));
447 }
448 unreviewed_edits
449 }
450 })
451 .await;
452
453 diff.update(cx, |diff, cx| {
454 diff.set_snapshot(diff_snapshot, &buffer_snapshot, cx);
455 })?;
456 }
457 this.update(cx, |this, cx| {
458 let tracked_buffer = this
459 .tracked_buffers
460 .get_mut(buffer)
461 .context("buffer not tracked")?;
462 tracked_buffer.diff_base = new_diff_base;
463 tracked_buffer.snapshot = buffer_snapshot;
464 tracked_buffer.unreviewed_edits = unreviewed_edits;
465 cx.notify();
466 anyhow::Ok(())
467 })?
468 }
469
    /// Tracks a buffer as read by the agent, so we can notify the model about user edits.
471 pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
472 self.track_buffer_internal(buffer, false, cx);
473 }
474
    /// Marks a buffer as created by the agent, so we can refresh it in the context.
476 pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
477 self.edited_since_project_diagnostics_check = true;
478 self.track_buffer_internal(buffer.clone(), true, cx);
479 }
480
    /// Marks a buffer as edited by the agent, so we can refresh it in the context.
482 pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
483 self.edited_since_project_diagnostics_check = true;
484
485 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
486 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
487 tracked_buffer.status = TrackedBufferStatus::Modified;
488 }
489 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
490 }
491
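    /// Marks a buffer as about to be deleted by the agent. Files the agent
    /// itself created are simply untracked; otherwise the buffer is emptied
    /// and tracked as deleted so the deletion can still be reviewed or
    /// rejected.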
492 pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
493 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
494 match tracked_buffer.status {
495 TrackedBufferStatus::Created { .. } => {
496 self.tracked_buffers.remove(&buffer);
497 self.notified_versions.remove(&buffer);
498 cx.notify();
499 }
500 TrackedBufferStatus::Modified => {
501 buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
502 tracked_buffer.status = TrackedBufferStatus::Deleted;
503 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
504 }
505 TrackedBufferStatus::Deleted => {}
506 }
507 cx.notify();
508 }
509
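    /// Marks the agent edits intersecting `buffer_range` as reviewed, folding
    /// them into the diff base so they no longer appear as unreviewed hunks.
    ///
    /// A sketch of a call site, mirroring the tests in this file (marked
    /// `ignore` so it is not run as a doc-test):
    ///
    /// ```ignore
    /// action_log.update(cx, |log, cx| {
    ///     log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), cx)
    /// });
    /// ```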
510 pub fn keep_edits_in_range(
511 &mut self,
512 buffer: Entity<Buffer>,
513 buffer_range: Range<impl language::ToPoint>,
514 cx: &mut Context<Self>,
515 ) {
516 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
517 return;
518 };
519
520 match tracked_buffer.status {
521 TrackedBufferStatus::Deleted => {
522 self.tracked_buffers.remove(&buffer);
523 self.notified_versions.remove(&buffer);
524 cx.notify();
525 }
526 _ => {
527 let buffer = buffer.read(cx);
528 let buffer_range =
529 buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
530 let mut delta = 0i32;
531
532 tracked_buffer.unreviewed_edits.retain_mut(|edit| {
533 edit.old.start = (edit.old.start as i32 + delta) as u32;
534 edit.old.end = (edit.old.end as i32 + delta) as u32;
535
536 if buffer_range.end.row < edit.new.start
537 || buffer_range.start.row > edit.new.end
538 {
539 true
540 } else {
541 let old_range = tracked_buffer
542 .diff_base
543 .point_to_offset(Point::new(edit.old.start, 0))
544 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
545 Point::new(edit.old.end, 0),
546 tracked_buffer.diff_base.max_point(),
547 ));
548 let new_range = tracked_buffer
549 .snapshot
550 .point_to_offset(Point::new(edit.new.start, 0))
551 ..tracked_buffer.snapshot.point_to_offset(cmp::min(
552 Point::new(edit.new.end, 0),
553 tracked_buffer.snapshot.max_point(),
554 ));
555 tracked_buffer.diff_base.replace(
556 old_range,
557 &tracked_buffer
558 .snapshot
559 .text_for_range(new_range)
560 .collect::<String>(),
561 );
562 delta += edit.new_len() as i32 - edit.old_len() as i32;
563 false
564 }
565 });
566 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
567 }
568 }
569 }
570
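    /// Reverts the agent edits intersecting `buffer_ranges`, writing the
    /// result back to disk. For buffers the agent created or deleted, the
    /// whole operation is undone (recreating or removing the file) regardless
    /// of the given ranges.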
571 pub fn reject_edits_in_ranges(
572 &mut self,
573 buffer: Entity<Buffer>,
574 buffer_ranges: Vec<Range<impl language::ToPoint>>,
575 cx: &mut Context<Self>,
576 ) -> Task<Result<()>> {
577 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
578 return Task::ready(Ok(()));
579 };
580
581 match &tracked_buffer.status {
582 TrackedBufferStatus::Created {
583 existing_file_content,
584 } => {
585 let task = if let Some(existing_file_content) = existing_file_content {
586 buffer.update(cx, |buffer, cx| {
587 buffer.start_transaction();
588 buffer.set_text("", cx);
589 for chunk in existing_file_content.chunks() {
590 buffer.append(chunk, cx);
591 }
592 buffer.end_transaction(cx);
593 });
594 self.project
595 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
596 } else {
597 // For a file created by AI with no pre-existing content,
598 // only delete the file if we're certain it contains only AI content
599 // with no edits from the user.
600
601 let initial_version = tracked_buffer.version.clone();
602 let current_version = buffer.read(cx).version();
603
604 let current_content = buffer.read(cx).text();
605 let tracked_content = tracked_buffer.snapshot.text();
606
607 let is_ai_only_content =
608 initial_version == current_version && current_content == tracked_content;
609
610 if is_ai_only_content {
611 buffer
612 .read(cx)
613 .entry_id(cx)
614 .and_then(|entry_id| {
615 self.project.update(cx, |project, cx| {
616 project.delete_entry(entry_id, false, cx)
617 })
618 })
619 .unwrap_or(Task::ready(Ok(())))
620 } else {
621 // Not sure how to disentangle edits made by the user
622 // from edits made by the AI at this point.
623 // For now, preserve both to avoid data loss.
624 //
625 // TODO: Better solution (disable "Reject" after user makes some
626 // edit or find a way to differentiate between AI and user edits)
627 Task::ready(Ok(()))
628 }
629 };
630
631 self.tracked_buffers.remove(&buffer);
632 self.notified_versions.remove(&buffer);
633 cx.notify();
634 task
635 }
636 TrackedBufferStatus::Deleted => {
637 buffer.update(cx, |buffer, cx| {
638 buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
639 });
640 let save = self
641 .project
642 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
643
644 // Clear all tracked edits for this buffer and start over as if we just read it.
645 self.tracked_buffers.remove(&buffer);
646 self.notified_versions.remove(&buffer);
647 self.buffer_read(buffer.clone(), cx);
648 cx.notify();
649 save
650 }
651 TrackedBufferStatus::Modified => {
652 buffer.update(cx, |buffer, cx| {
653 let mut buffer_row_ranges = buffer_ranges
654 .into_iter()
655 .map(|range| {
656 range.start.to_point(buffer).row..range.end.to_point(buffer).row
657 })
658 .peekable();
659
660 let mut edits_to_revert = Vec::new();
661 for edit in tracked_buffer.unreviewed_edits.edits() {
662 let new_range = tracked_buffer
663 .snapshot
664 .anchor_before(Point::new(edit.new.start, 0))
665 ..tracked_buffer.snapshot.anchor_after(cmp::min(
666 Point::new(edit.new.end, 0),
667 tracked_buffer.snapshot.max_point(),
668 ));
669 let new_row_range = new_range.start.to_point(buffer).row
670 ..new_range.end.to_point(buffer).row;
671
672 let mut revert = false;
673 while let Some(buffer_row_range) = buffer_row_ranges.peek() {
674 if buffer_row_range.end < new_row_range.start {
675 buffer_row_ranges.next();
676 } else if buffer_row_range.start > new_row_range.end {
677 break;
678 } else {
679 revert = true;
680 break;
681 }
682 }
683
684 if revert {
685 let old_range = tracked_buffer
686 .diff_base
687 .point_to_offset(Point::new(edit.old.start, 0))
688 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
689 Point::new(edit.old.end, 0),
690 tracked_buffer.diff_base.max_point(),
691 ));
692 let old_text = tracked_buffer
693 .diff_base
694 .chunks_in_range(old_range)
695 .collect::<String>();
696 edits_to_revert.push((new_range, old_text));
697 }
698 }
699
700 buffer.edit(edits_to_revert, None, cx);
701 });
702 self.project
703 .update(cx, |project, cx| project.save_buffer(buffer, cx))
704 }
705 }
706 }
707
708 pub fn keep_all_edits(&mut self, cx: &mut Context<Self>) {
709 self.tracked_buffers
710 .retain(|_buffer, tracked_buffer| match tracked_buffer.status {
711 TrackedBufferStatus::Deleted => false,
712 _ => {
713 tracked_buffer.unreviewed_edits.clear();
714 tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
715 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
716 true
717 }
718 });
719 cx.notify();
720 }
721
722 pub fn reject_all_edits(&mut self, cx: &mut Context<Self>) -> Task<()> {
723 let futures = self.changed_buffers(cx).into_keys().map(|buffer| {
724 let reject = self.reject_edits_in_ranges(buffer, vec![Anchor::MIN..Anchor::MAX], cx);
725
726 async move {
727 reject.await.log_err();
728 }
729 });
730
731 let task = futures::future::join_all(futures);
732
733 cx.spawn(async move |_, _| {
734 task.await;
735 })
736 }
737
738 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
739 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
740 self.tracked_buffers
741 .iter()
742 .filter(|(_, tracked)| tracked.has_edits(cx))
743 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
744 .collect()
745 }
746
    /// Returns stale buffers whose latest changes haven't been notified to the model yet.
748 pub fn unnotified_stale_buffers<'a>(
749 &'a self,
750 cx: &'a App,
751 ) -> impl Iterator<Item = &'a Entity<Buffer>> {
752 self.stale_buffers(cx).filter(|buffer| {
753 let buffer_entity = buffer.read(cx);
754 self.notified_versions
755 .get(buffer)
756 .map_or(true, |notified_version| {
757 *notified_version != buffer_entity.version
758 })
759 })
760 }
761
762 /// Marks the given buffers as notified at their current versions
763 pub fn mark_buffers_as_notified(
764 &mut self,
765 buffers: impl IntoIterator<Item = Entity<Buffer>>,
766 cx: &App,
767 ) {
768 for buffer in buffers {
769 let version = buffer.read(cx).version.clone();
770 self.notified_versions.insert(buffer, version);
771 }
772 }
773
    /// Iterates over buffers that have changed since the model last read or edited them.
775 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
776 self.tracked_buffers
777 .iter()
778 .filter(|(buffer, tracked)| {
779 let buffer = buffer.read(cx);
780
781 tracked.version != buffer.version
782 && buffer
783 .file()
784 .map_or(false, |file| file.disk_state() != DiskState::Deleted)
785 })
786 .map(|(buffer, _)| buffer)
787 }
788}
789
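/// Rebases `edits` (user edits, expressed as row ranges in the new buffer
/// text) onto `old_text`, applying only the ones that don't conflict with the
/// unreviewed agent edits in `patch`. Conflicting user edits are skipped, so
/// they remain part of the agent's unreviewed diff.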
790fn apply_non_conflicting_edits(
791 patch: &Patch<u32>,
792 edits: Vec<Edit<u32>>,
793 old_text: &mut Rope,
794 new_text: &Rope,
795) {
796 let mut old_edits = patch.edits().iter().cloned().peekable();
797 let mut new_edits = edits.into_iter().peekable();
798 let mut applied_delta = 0i32;
799 let mut rebased_delta = 0i32;
800
801 while let Some(mut new_edit) = new_edits.next() {
802 let mut conflict = false;
803
804 // Push all the old edits that are before this new edit or that intersect with it.
805 while let Some(old_edit) = old_edits.peek() {
806 if new_edit.old.end < old_edit.new.start
807 || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
808 {
809 break;
810 } else if new_edit.old.start > old_edit.new.end
811 || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
812 {
813 let old_edit = old_edits.next().unwrap();
814 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
815 } else {
816 conflict = true;
817 if new_edits
818 .peek()
819 .map_or(false, |next_edit| next_edit.old.overlaps(&old_edit.new))
820 {
821 new_edit = new_edits.next().unwrap();
822 } else {
823 let old_edit = old_edits.next().unwrap();
824 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
825 }
826 }
827 }
828
829 if !conflict {
830 // This edit doesn't intersect with any old edit, so we can apply it to the old text.
831 new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
832 new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
833 let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
834 ..old_text.point_to_offset(cmp::min(
835 Point::new(new_edit.old.end, 0),
836 old_text.max_point(),
837 ));
838 let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
839 ..new_text.point_to_offset(cmp::min(
840 Point::new(new_edit.new.end, 0),
841 new_text.max_point(),
842 ));
843
844 old_text.replace(
845 old_bytes,
846 &new_text.chunks_in_range(new_bytes).collect::<String>(),
847 );
848 applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
849 }
850 }
851}
852
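/// Computes row-level edits between two snapshots of the same buffer,
/// coalescing edits whose row ranges touch or overlap into a single edit.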
853fn diff_snapshots(
854 old_snapshot: &text::BufferSnapshot,
855 new_snapshot: &text::BufferSnapshot,
856) -> Vec<Edit<u32>> {
857 let mut edits = new_snapshot
858 .edits_since::<Point>(&old_snapshot.version)
859 .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
860 .peekable();
861 let mut row_edits = Vec::new();
862 while let Some(mut edit) = edits.next() {
863 while let Some(next_edit) = edits.peek() {
864 if edit.old.end >= next_edit.old.start {
865 edit.old.end = next_edit.old.end;
866 edit.new.end = next_edit.new.end;
867 edits.next();
868 } else {
869 break;
870 }
871 }
872 row_edits.push(edit);
873 }
874 row_edits
875}
876
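/// Converts a point-based edit into a whole-row edit, expanding partial-line
/// edits to cover the full rows they touch and shifting insertions that start
/// at the end of a line down to the following row.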
877fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
878 if edit.old.start.column == old_text.line_len(edit.old.start.row)
879 && new_text
880 .chars_at(new_text.point_to_offset(edit.new.start))
881 .next()
882 == Some('\n')
883 && edit.old.start != old_text.max_point()
884 {
885 Edit {
886 old: edit.old.start.row + 1..edit.old.end.row + 1,
887 new: edit.new.start.row + 1..edit.new.end.row + 1,
888 }
889 } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
890 Edit {
891 old: edit.old.start.row..edit.old.end.row,
892 new: edit.new.start.row..edit.new.end.row,
893 }
894 } else {
895 Edit {
896 old: edit.old.start.row..edit.old.end.row + 1,
897 new: edit.new.start.row..edit.new.end.row + 1,
898 }
899 }
900}
901
902#[derive(Copy, Clone, Debug)]
903enum ChangeAuthor {
904 User,
905 Agent,
906}
907
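/// How the agent has affected a tracked buffer. `Created` remembers the
/// file's pre-existing content (if any) so that rejecting the creation can
/// restore it.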
908enum TrackedBufferStatus {
909 Created { existing_file_content: Option<Rope> },
910 Modified,
911 Deleted,
912}
913
914struct TrackedBuffer {
915 buffer: Entity<Buffer>,
916 diff_base: Rope,
917 unreviewed_edits: Patch<u32>,
918 status: TrackedBufferStatus,
919 version: clock::Global,
920 diff: Entity<BufferDiff>,
921 snapshot: text::BufferSnapshot,
922 diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
923 _open_lsp_handle: OpenLspBufferHandle,
924 _maintain_diff: Task<()>,
925 _subscription: Subscription,
926}
927
928impl TrackedBuffer {
929 fn has_edits(&self, cx: &App) -> bool {
930 self.diff
931 .read(cx)
932 .hunks(&self.buffer.read(cx), cx)
933 .next()
934 .is_some()
935 }
936
937 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
938 self.diff_update
939 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
940 .ok();
941 }
942}
943
944pub struct ChangedBuffer {
945 pub diff: Entity<BufferDiff>,
946}
947
948#[cfg(test)]
949mod tests {
950 use super::*;
951 use buffer_diff::DiffHunkStatusKind;
952 use gpui::TestAppContext;
953 use language::Point;
954 use project::{FakeFs, Fs, Project, RemoveOptions};
955 use rand::prelude::*;
956 use serde_json::json;
957 use settings::SettingsStore;
958 use std::env;
959 use util::{RandomCharIter, path};
960
961 #[ctor::ctor]
962 fn init_logger() {
963 zlog::init_test();
964 }
965
966 fn init_test(cx: &mut TestAppContext) {
967 cx.update(|cx| {
968 let settings_store = SettingsStore::test(cx);
969 cx.set_global(settings_store);
970 language::init(cx);
971 Project::init_settings(cx);
972 });
973 }
974
975 #[gpui::test(iterations = 10)]
976 async fn test_keep_edits(cx: &mut TestAppContext) {
977 init_test(cx);
978
979 let fs = FakeFs::new(cx.executor());
980 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
981 .await;
982 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
983 let action_log = cx.new(|_| ActionLog::new(project.clone()));
984 let file_path = project
985 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
986 .unwrap();
987 let buffer = project
988 .update(cx, |project, cx| project.open_buffer(file_path, cx))
989 .await
990 .unwrap();
991
992 cx.update(|cx| {
993 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
994 buffer.update(cx, |buffer, cx| {
995 buffer
996 .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
997 .unwrap()
998 });
999 buffer.update(cx, |buffer, cx| {
1000 buffer
1001 .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
1002 .unwrap()
1003 });
1004 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1005 });
1006 cx.run_until_parked();
1007 assert_eq!(
1008 buffer.read_with(cx, |buffer, _| buffer.text()),
1009 "abc\ndEf\nghi\njkl\nmnO"
1010 );
1011 assert_eq!(
1012 unreviewed_hunks(&action_log, cx),
1013 vec![(
1014 buffer.clone(),
1015 vec![
1016 HunkStatus {
1017 range: Point::new(1, 0)..Point::new(2, 0),
1018 diff_status: DiffHunkStatusKind::Modified,
1019 old_text: "def\n".into(),
1020 },
1021 HunkStatus {
1022 range: Point::new(4, 0)..Point::new(4, 3),
1023 diff_status: DiffHunkStatusKind::Modified,
1024 old_text: "mno".into(),
1025 }
1026 ],
1027 )]
1028 );
1029
1030 action_log.update(cx, |log, cx| {
1031 log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), cx)
1032 });
1033 cx.run_until_parked();
1034 assert_eq!(
1035 unreviewed_hunks(&action_log, cx),
1036 vec![(
1037 buffer.clone(),
1038 vec![HunkStatus {
1039 range: Point::new(1, 0)..Point::new(2, 0),
1040 diff_status: DiffHunkStatusKind::Modified,
1041 old_text: "def\n".into(),
1042 }],
1043 )]
1044 );
1045
1046 action_log.update(cx, |log, cx| {
1047 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), cx)
1048 });
1049 cx.run_until_parked();
1050 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1051 }
1052
1053 #[gpui::test(iterations = 10)]
1054 async fn test_deletions(cx: &mut TestAppContext) {
1055 init_test(cx);
1056
1057 let fs = FakeFs::new(cx.executor());
1058 fs.insert_tree(
1059 path!("/dir"),
1060 json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
1061 )
1062 .await;
1063 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1064 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1065 let file_path = project
1066 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1067 .unwrap();
1068 let buffer = project
1069 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1070 .await
1071 .unwrap();
1072
1073 cx.update(|cx| {
1074 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1075 buffer.update(cx, |buffer, cx| {
1076 buffer
1077 .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
1078 .unwrap();
1079 buffer.finalize_last_transaction();
1080 });
1081 buffer.update(cx, |buffer, cx| {
1082 buffer
1083 .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
1084 .unwrap();
1085 buffer.finalize_last_transaction();
1086 });
1087 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1088 });
1089 cx.run_until_parked();
1090 assert_eq!(
1091 buffer.read_with(cx, |buffer, _| buffer.text()),
1092 "abc\nghi\njkl\npqr"
1093 );
1094 assert_eq!(
1095 unreviewed_hunks(&action_log, cx),
1096 vec![(
1097 buffer.clone(),
1098 vec![
1099 HunkStatus {
1100 range: Point::new(1, 0)..Point::new(1, 0),
1101 diff_status: DiffHunkStatusKind::Deleted,
1102 old_text: "def\n".into(),
1103 },
1104 HunkStatus {
1105 range: Point::new(3, 0)..Point::new(3, 0),
1106 diff_status: DiffHunkStatusKind::Deleted,
1107 old_text: "mno\n".into(),
1108 }
1109 ],
1110 )]
1111 );
1112
1113 buffer.update(cx, |buffer, cx| buffer.undo(cx));
1114 cx.run_until_parked();
1115 assert_eq!(
1116 buffer.read_with(cx, |buffer, _| buffer.text()),
1117 "abc\nghi\njkl\nmno\npqr"
1118 );
1119 assert_eq!(
1120 unreviewed_hunks(&action_log, cx),
1121 vec![(
1122 buffer.clone(),
1123 vec![HunkStatus {
1124 range: Point::new(1, 0)..Point::new(1, 0),
1125 diff_status: DiffHunkStatusKind::Deleted,
1126 old_text: "def\n".into(),
1127 }],
1128 )]
1129 );
1130
1131 action_log.update(cx, |log, cx| {
1132 log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), cx)
1133 });
1134 cx.run_until_parked();
1135 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1136 }
1137
1138 #[gpui::test(iterations = 10)]
1139 async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
1140 init_test(cx);
1141
1142 let fs = FakeFs::new(cx.executor());
1143 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1144 .await;
1145 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1146 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1147 let file_path = project
1148 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1149 .unwrap();
1150 let buffer = project
1151 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1152 .await
1153 .unwrap();
1154
1155 cx.update(|cx| {
1156 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1157 buffer.update(cx, |buffer, cx| {
1158 buffer
1159 .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
1160 .unwrap()
1161 });
1162 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1163 });
1164 cx.run_until_parked();
1165 assert_eq!(
1166 buffer.read_with(cx, |buffer, _| buffer.text()),
1167 "abc\ndeF\nGHI\njkl\nmno"
1168 );
1169 assert_eq!(
1170 unreviewed_hunks(&action_log, cx),
1171 vec![(
1172 buffer.clone(),
1173 vec![HunkStatus {
1174 range: Point::new(1, 0)..Point::new(3, 0),
1175 diff_status: DiffHunkStatusKind::Modified,
1176 old_text: "def\nghi\n".into(),
1177 }],
1178 )]
1179 );
1180
1181 buffer.update(cx, |buffer, cx| {
1182 buffer.edit(
1183 [
1184 (Point::new(0, 2)..Point::new(0, 2), "X"),
1185 (Point::new(3, 0)..Point::new(3, 0), "Y"),
1186 ],
1187 None,
1188 cx,
1189 )
1190 });
1191 cx.run_until_parked();
1192 assert_eq!(
1193 buffer.read_with(cx, |buffer, _| buffer.text()),
1194 "abXc\ndeF\nGHI\nYjkl\nmno"
1195 );
1196 assert_eq!(
1197 unreviewed_hunks(&action_log, cx),
1198 vec![(
1199 buffer.clone(),
1200 vec![HunkStatus {
1201 range: Point::new(1, 0)..Point::new(3, 0),
1202 diff_status: DiffHunkStatusKind::Modified,
1203 old_text: "def\nghi\n".into(),
1204 }],
1205 )]
1206 );
1207
1208 buffer.update(cx, |buffer, cx| {
1209 buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
1210 });
1211 cx.run_until_parked();
1212 assert_eq!(
1213 buffer.read_with(cx, |buffer, _| buffer.text()),
1214 "abXc\ndZeF\nGHI\nYjkl\nmno"
1215 );
1216 assert_eq!(
1217 unreviewed_hunks(&action_log, cx),
1218 vec![(
1219 buffer.clone(),
1220 vec![HunkStatus {
1221 range: Point::new(1, 0)..Point::new(3, 0),
1222 diff_status: DiffHunkStatusKind::Modified,
1223 old_text: "def\nghi\n".into(),
1224 }],
1225 )]
1226 );
1227
1228 action_log.update(cx, |log, cx| {
1229 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
1230 });
1231 cx.run_until_parked();
1232 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1233 }
1234
1235 #[gpui::test(iterations = 10)]
1236 async fn test_creating_files(cx: &mut TestAppContext) {
1237 init_test(cx);
1238
1239 let fs = FakeFs::new(cx.executor());
1240 fs.insert_tree(path!("/dir"), json!({})).await;
1241 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1242 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1243 let file_path = project
1244 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1245 .unwrap();
1246
1247 let buffer = project
1248 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1249 .await
1250 .unwrap();
1251 cx.update(|cx| {
1252 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1253 buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
1254 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1255 });
1256 project
1257 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1258 .await
1259 .unwrap();
1260 cx.run_until_parked();
1261 assert_eq!(
1262 unreviewed_hunks(&action_log, cx),
1263 vec![(
1264 buffer.clone(),
1265 vec![HunkStatus {
1266 range: Point::new(0, 0)..Point::new(0, 5),
1267 diff_status: DiffHunkStatusKind::Added,
1268 old_text: "".into(),
1269 }],
1270 )]
1271 );
1272
1273 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
1274 cx.run_until_parked();
1275 assert_eq!(
1276 unreviewed_hunks(&action_log, cx),
1277 vec![(
1278 buffer.clone(),
1279 vec![HunkStatus {
1280 range: Point::new(0, 0)..Point::new(0, 6),
1281 diff_status: DiffHunkStatusKind::Added,
1282 old_text: "".into(),
1283 }],
1284 )]
1285 );
1286
1287 action_log.update(cx, |log, cx| {
1288 log.keep_edits_in_range(buffer.clone(), 0..5, cx)
1289 });
1290 cx.run_until_parked();
1291 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1292 }
1293
1294 #[gpui::test(iterations = 10)]
1295 async fn test_overwriting_files(cx: &mut TestAppContext) {
1296 init_test(cx);
1297
1298 let fs = FakeFs::new(cx.executor());
1299 fs.insert_tree(
1300 path!("/dir"),
1301 json!({
1302 "file1": "Lorem ipsum dolor"
1303 }),
1304 )
1305 .await;
1306 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1307 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1308 let file_path = project
1309 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1310 .unwrap();
1311
1312 let buffer = project
1313 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1314 .await
1315 .unwrap();
1316 cx.update(|cx| {
1317 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1318 buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
1319 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1320 });
1321 project
1322 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1323 .await
1324 .unwrap();
1325 cx.run_until_parked();
1326 assert_eq!(
1327 unreviewed_hunks(&action_log, cx),
1328 vec![(
1329 buffer.clone(),
1330 vec![HunkStatus {
1331 range: Point::new(0, 0)..Point::new(0, 19),
1332 diff_status: DiffHunkStatusKind::Added,
1333 old_text: "".into(),
1334 }],
1335 )]
1336 );
1337
1338 action_log
1339 .update(cx, |log, cx| {
1340 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
1341 })
1342 .await
1343 .unwrap();
1344 cx.run_until_parked();
1345 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1346 assert_eq!(
1347 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1348 "Lorem ipsum dolor"
1349 );
1350 }
1351
1352 #[gpui::test(iterations = 10)]
1353 async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
1354 init_test(cx);
1355
1356 let fs = FakeFs::new(cx.executor());
1357 fs.insert_tree(
1358 path!("/dir"),
1359 json!({
1360 "file1": "Lorem ipsum dolor"
1361 }),
1362 )
1363 .await;
1364 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1365 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1366 let file_path = project
1367 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1368 .unwrap();
1369
1370 let buffer = project
1371 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1372 .await
1373 .unwrap();
1374 cx.update(|cx| {
1375 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1376 buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
1377 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1378 });
1379 project
1380 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1381 .await
1382 .unwrap();
1383 cx.run_until_parked();
1384 assert_eq!(
1385 unreviewed_hunks(&action_log, cx),
1386 vec![(
1387 buffer.clone(),
1388 vec![HunkStatus {
1389 range: Point::new(0, 0)..Point::new(0, 37),
1390 diff_status: DiffHunkStatusKind::Modified,
1391 old_text: "Lorem ipsum dolor".into(),
1392 }],
1393 )]
1394 );
1395
1396 cx.update(|cx| {
1397 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1398 buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
1399 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1400 });
1401 project
1402 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1403 .await
1404 .unwrap();
1405 cx.run_until_parked();
1406 assert_eq!(
1407 unreviewed_hunks(&action_log, cx),
1408 vec![(
1409 buffer.clone(),
1410 vec![HunkStatus {
1411 range: Point::new(0, 0)..Point::new(0, 9),
1412 diff_status: DiffHunkStatusKind::Added,
1413 old_text: "".into(),
1414 }],
1415 )]
1416 );
1417
1418 action_log
1419 .update(cx, |log, cx| {
1420 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
1421 })
1422 .await
1423 .unwrap();
1424 cx.run_until_parked();
1425 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1426 assert_eq!(
1427 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1428 "Lorem ipsum dolor"
1429 );
1430 }
1431
1432 #[gpui::test(iterations = 10)]
1433 async fn test_deleting_files(cx: &mut TestAppContext) {
1434 init_test(cx);
1435
1436 let fs = FakeFs::new(cx.executor());
1437 fs.insert_tree(
1438 path!("/dir"),
1439 json!({"file1": "lorem\n", "file2": "ipsum\n"}),
1440 )
1441 .await;
1442
1443 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1444 let file1_path = project
1445 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1446 .unwrap();
1447 let file2_path = project
1448 .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
1449 .unwrap();
1450
1451 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1452 let buffer1 = project
1453 .update(cx, |project, cx| {
1454 project.open_buffer(file1_path.clone(), cx)
1455 })
1456 .await
1457 .unwrap();
1458 let buffer2 = project
1459 .update(cx, |project, cx| {
1460 project.open_buffer(file2_path.clone(), cx)
1461 })
1462 .await
1463 .unwrap();
1464
1465 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
1466 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
1467 project
1468 .update(cx, |project, cx| {
1469 project.delete_file(file1_path.clone(), false, cx)
1470 })
1471 .unwrap()
1472 .await
1473 .unwrap();
1474 project
1475 .update(cx, |project, cx| {
1476 project.delete_file(file2_path.clone(), false, cx)
1477 })
1478 .unwrap()
1479 .await
1480 .unwrap();
1481 cx.run_until_parked();
1482 assert_eq!(
1483 unreviewed_hunks(&action_log, cx),
1484 vec![
1485 (
1486 buffer1.clone(),
1487 vec![HunkStatus {
1488 range: Point::new(0, 0)..Point::new(0, 0),
1489 diff_status: DiffHunkStatusKind::Deleted,
1490 old_text: "lorem\n".into(),
1491 }]
1492 ),
1493 (
1494 buffer2.clone(),
1495 vec![HunkStatus {
1496 range: Point::new(0, 0)..Point::new(0, 0),
1497 diff_status: DiffHunkStatusKind::Deleted,
1498 old_text: "ipsum\n".into(),
1499 }],
1500 )
1501 ]
1502 );
1503
1504 // Simulate file1 being recreated externally.
1505 fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
1506 .await;
1507
1508 // Simulate file2 being recreated by a tool.
1509 let buffer2 = project
1510 .update(cx, |project, cx| project.open_buffer(file2_path, cx))
1511 .await
1512 .unwrap();
1513 action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
1514 buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
1515 action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
1516 project
1517 .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
1518 .await
1519 .unwrap();
1520
1521 cx.run_until_parked();
1522 assert_eq!(
1523 unreviewed_hunks(&action_log, cx),
1524 vec![(
1525 buffer2.clone(),
1526 vec![HunkStatus {
1527 range: Point::new(0, 0)..Point::new(0, 5),
1528 diff_status: DiffHunkStatusKind::Added,
1529 old_text: "".into(),
1530 }],
1531 )]
1532 );
1533
1534 // Simulate file2 being deleted externally.
1535 fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
1536 .await
1537 .unwrap();
1538 cx.run_until_parked();
1539 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1540 }
1541
1542 #[gpui::test(iterations = 10)]
1543 async fn test_reject_edits(cx: &mut TestAppContext) {
1544 init_test(cx);
1545
1546 let fs = FakeFs::new(cx.executor());
1547 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1548 .await;
1549 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1550 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1551 let file_path = project
1552 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1553 .unwrap();
1554 let buffer = project
1555 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1556 .await
1557 .unwrap();
1558
1559 cx.update(|cx| {
1560 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1561 buffer.update(cx, |buffer, cx| {
1562 buffer
1563 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1564 .unwrap()
1565 });
1566 buffer.update(cx, |buffer, cx| {
1567 buffer
1568 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1569 .unwrap()
1570 });
1571 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1572 });
1573 cx.run_until_parked();
1574 assert_eq!(
1575 buffer.read_with(cx, |buffer, _| buffer.text()),
1576 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1577 );
1578 assert_eq!(
1579 unreviewed_hunks(&action_log, cx),
1580 vec![(
1581 buffer.clone(),
1582 vec![
1583 HunkStatus {
1584 range: Point::new(1, 0)..Point::new(3, 0),
1585 diff_status: DiffHunkStatusKind::Modified,
1586 old_text: "def\n".into(),
1587 },
1588 HunkStatus {
1589 range: Point::new(5, 0)..Point::new(5, 3),
1590 diff_status: DiffHunkStatusKind::Modified,
1591 old_text: "mno".into(),
1592 }
1593 ],
1594 )]
1595 );
1596
1597 // If the rejected range doesn't overlap with any hunk, we ignore it.
1598 action_log
1599 .update(cx, |log, cx| {
1600 log.reject_edits_in_ranges(
1601 buffer.clone(),
1602 vec![Point::new(4, 0)..Point::new(4, 0)],
1603 cx,
1604 )
1605 })
1606 .await
1607 .unwrap();
1608 cx.run_until_parked();
1609 assert_eq!(
1610 buffer.read_with(cx, |buffer, _| buffer.text()),
1611 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1612 );
1613 assert_eq!(
1614 unreviewed_hunks(&action_log, cx),
1615 vec![(
1616 buffer.clone(),
1617 vec![
1618 HunkStatus {
1619 range: Point::new(1, 0)..Point::new(3, 0),
1620 diff_status: DiffHunkStatusKind::Modified,
1621 old_text: "def\n".into(),
1622 },
1623 HunkStatus {
1624 range: Point::new(5, 0)..Point::new(5, 3),
1625 diff_status: DiffHunkStatusKind::Modified,
1626 old_text: "mno".into(),
1627 }
1628 ],
1629 )]
1630 );
1631
1632 action_log
1633 .update(cx, |log, cx| {
1634 log.reject_edits_in_ranges(
1635 buffer.clone(),
1636 vec![Point::new(0, 0)..Point::new(1, 0)],
1637 cx,
1638 )
1639 })
1640 .await
1641 .unwrap();
1642 cx.run_until_parked();
1643 assert_eq!(
1644 buffer.read_with(cx, |buffer, _| buffer.text()),
1645 "abc\ndef\nghi\njkl\nmnO"
1646 );
1647 assert_eq!(
1648 unreviewed_hunks(&action_log, cx),
1649 vec![(
1650 buffer.clone(),
1651 vec![HunkStatus {
1652 range: Point::new(4, 0)..Point::new(4, 3),
1653 diff_status: DiffHunkStatusKind::Modified,
1654 old_text: "mno".into(),
1655 }],
1656 )]
1657 );
1658
1659 action_log
1660 .update(cx, |log, cx| {
1661 log.reject_edits_in_ranges(
1662 buffer.clone(),
1663 vec![Point::new(4, 0)..Point::new(4, 0)],
1664 cx,
1665 )
1666 })
1667 .await
1668 .unwrap();
1669 cx.run_until_parked();
1670 assert_eq!(
1671 buffer.read_with(cx, |buffer, _| buffer.text()),
1672 "abc\ndef\nghi\njkl\nmno"
1673 );
1674 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1675 }
1676
1677 #[gpui::test(iterations = 10)]
1678 async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
1679 init_test(cx);
1680
1681 let fs = FakeFs::new(cx.executor());
1682 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1683 .await;
1684 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1685 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1686 let file_path = project
1687 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1688 .unwrap();
1689 let buffer = project
1690 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1691 .await
1692 .unwrap();
1693
1694 cx.update(|cx| {
1695 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1696 buffer.update(cx, |buffer, cx| {
1697 buffer
1698 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1699 .unwrap()
1700 });
1701 buffer.update(cx, |buffer, cx| {
1702 buffer
1703 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1704 .unwrap()
1705 });
1706 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1707 });
1708 cx.run_until_parked();
1709 assert_eq!(
1710 buffer.read_with(cx, |buffer, _| buffer.text()),
1711 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1712 );
1713 assert_eq!(
1714 unreviewed_hunks(&action_log, cx),
1715 vec![(
1716 buffer.clone(),
1717 vec![
1718 HunkStatus {
1719 range: Point::new(1, 0)..Point::new(3, 0),
1720 diff_status: DiffHunkStatusKind::Modified,
1721 old_text: "def\n".into(),
1722 },
1723 HunkStatus {
1724 range: Point::new(5, 0)..Point::new(5, 3),
1725 diff_status: DiffHunkStatusKind::Modified,
1726 old_text: "mno".into(),
1727 }
1728 ],
1729 )]
1730 );
1731
1732 action_log.update(cx, |log, cx| {
1733 let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
1734 ..buffer.read(cx).anchor_before(Point::new(1, 0));
1735 let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
1736 ..buffer.read(cx).anchor_before(Point::new(5, 3));
1737
1738 log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], cx)
1739 .detach();
1740 assert_eq!(
1741 buffer.read_with(cx, |buffer, _| buffer.text()),
1742 "abc\ndef\nghi\njkl\nmno"
1743 );
1744 });
1745 cx.run_until_parked();
1746 assert_eq!(
1747 buffer.read_with(cx, |buffer, _| buffer.text()),
1748 "abc\ndef\nghi\njkl\nmno"
1749 );
1750 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1751 }
1752
1753 #[gpui::test(iterations = 10)]
1754 async fn test_reject_deleted_file(cx: &mut TestAppContext) {
1755 init_test(cx);
1756
1757 let fs = FakeFs::new(cx.executor());
1758 fs.insert_tree(path!("/dir"), json!({"file": "content"}))
1759 .await;
1760 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1761 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1762 let file_path = project
1763 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1764 .unwrap();
1765 let buffer = project
1766 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
1767 .await
1768 .unwrap();
1769
1770 cx.update(|cx| {
1771 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
1772 });
1773 project
1774 .update(cx, |project, cx| {
1775 project.delete_file(file_path.clone(), false, cx)
1776 })
1777 .unwrap()
1778 .await
1779 .unwrap();
1780 cx.run_until_parked();
1781 assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
1782 assert_eq!(
1783 unreviewed_hunks(&action_log, cx),
1784 vec![(
1785 buffer.clone(),
1786 vec![HunkStatus {
1787 range: Point::new(0, 0)..Point::new(0, 0),
1788 diff_status: DiffHunkStatusKind::Deleted,
1789 old_text: "content".into(),
1790 }]
1791 )]
1792 );
1793
1794 action_log
1795 .update(cx, |log, cx| {
1796 log.reject_edits_in_ranges(
1797 buffer.clone(),
1798 vec![Point::new(0, 0)..Point::new(0, 0)],
1799 cx,
1800 )
1801 })
1802 .await
1803 .unwrap();
1804 cx.run_until_parked();
1805 assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
1806 assert!(fs.is_file(path!("/dir/file").as_ref()).await);
1807 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1808 }
1809
1810 #[gpui::test(iterations = 10)]
1811 async fn test_reject_created_file(cx: &mut TestAppContext) {
1812 init_test(cx);
1813
1814 let fs = FakeFs::new(cx.executor());
1815 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1816 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1817 let file_path = project
1818 .read_with(cx, |project, cx| {
1819 project.find_project_path("dir/new_file", cx)
1820 })
1821 .unwrap();
1822 let buffer = project
1823 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1824 .await
1825 .unwrap();
1826 cx.update(|cx| {
1827 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1828 buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
1829 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1830 });
1831 project
1832 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1833 .await
1834 .unwrap();
1835 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
1836 cx.run_until_parked();
1837 assert_eq!(
1838 unreviewed_hunks(&action_log, cx),
1839 vec![(
1840 buffer.clone(),
1841 vec![HunkStatus {
1842 range: Point::new(0, 0)..Point::new(0, 7),
1843 diff_status: DiffHunkStatusKind::Added,
1844 old_text: "".into(),
1845 }],
1846 )]
1847 );
1848
1849 action_log
1850 .update(cx, |log, cx| {
1851 log.reject_edits_in_ranges(
1852 buffer.clone(),
1853 vec![Point::new(0, 0)..Point::new(0, 11)],
1854 cx,
1855 )
1856 })
1857 .await
1858 .unwrap();
1859 cx.run_until_parked();
1860 assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
1861 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1862 }
1863
    #[gpui::test]
    async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // The agent creates the file with initial content.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();

        // The user makes additional edits.
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| {
                buffer.edit([(10..10, "\nuser added this line")], None, cx);
            });
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // Reject every hunk in the buffer.
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(100, 0)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();

        // Because the user edited the file after the agent created it, rejecting
        // must not delete the file or discard any of its content.
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        let content = buffer.read_with(cx, |buffer, _| buffer.text());
        assert_eq!(content, "ai content\nuser added this line");
    }

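    /// Fuzz test that interleaves random agent edits, user edits, keeps, and
    /// rejects, then verifies that replaying the unreviewed edits on top of the
    /// diff base reproduces the current buffer contents.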
    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        init_test(cx);

        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

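        // Each iteration: ~25% chance of keeping edits in a random range, ~25%
        // chance of rejecting edits in a random range, and ~50% chance of a
        // random buffer edit attributed to either the agent or the user.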
        for _ in 0..operations {
            match rng.gen_range(0..100) {
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, cx)
                    });
                }
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            log.reject_edits_in_ranges(buffer.clone(), vec![range], cx)
                        })
                        .await
                        .unwrap();
                }
                _ => {
                    let is_agent_edit = rng.gen_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            if rng.gen_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

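        // Waits for pending diff updates, then checks the invariant that the
        // diff base plus the unreviewed edits reproduces the current buffer text.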
        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }

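    /// Committing to the underlying git repository should mark agent edits that
    /// match the new HEAD as reviewed, leaving only the hunks the commit did not
    /// incorporate.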
    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt".into(), "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert a new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt".into(), "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Make another commit that accepts the GGG edit and inserts a line where
        // the agent added NEW, but with different content, so that hunk remains
        // unreviewed.
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[(
                "file.txt".into(),
                "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into(),
            )],
            "0000002",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits.
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt".into(), "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

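    /// A simplified view of a diff hunk, used to assert on unreviewed changes.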
    #[derive(Debug, Clone, PartialEq, Eq)]
    struct HunkStatus {
        range: Range<Point>,
        diff_status: DiffHunkStatusKind,
        old_text: String,
    }

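    /// Collects every changed buffer tracked by the action log along with the
    /// status, range, and base text of each of its unreviewed hunks.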
    fn unreviewed_hunks(
        action_log: &Entity<ActionLog>,
        cx: &TestAppContext,
    ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
        cx.read(|cx| {
            action_log
                .read(cx)
                .changed_buffers(cx)
                .into_iter()
                .map(|(buffer, diff)| {
                    let snapshot = buffer.read(cx).snapshot();
                    (
                        buffer,
                        diff.read(cx)
                            .hunks(&snapshot, cx)
                            .map(|hunk| HunkStatus {
                                diff_status: hunk.status().kind,
                                range: hunk.range,
                                old_text: diff
                                    .read(cx)
                                    .base_text()
                                    .text_for_range(hunk.diff_base_byte_range)
                                    .collect(),
                            })
                            .collect(),
                    )
                })
                .collect()
        })
    }
}