use anyhow::{Context as _, Result};
use buffer_diff::BufferDiff;
use clock;
use collections::BTreeMap;
use futures::{FutureExt, StreamExt, channel::mpsc};
use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
use std::{cmp, ops::Range, sync::Arc};
use text::{Edit, Patch, Rope};
use util::{
    RangeExt, ResultExt as _,
    paths::{PathStyle, RemotePathBuf},
};
15
16/// Tracks actions performed by tools in a thread
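///
/// A rough usage sketch (marked `ignore` so it isn't compiled as a doctest; `project`,
/// `buffer`, and `cx` are assumed to come from the surrounding GPUI application):
///
/// ```ignore
/// let action_log = cx.new(|_| ActionLog::new(project.clone()));
/// // The agent reads a buffer, then a tool edits it.
/// action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
/// action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
/// // Later, inspect the pending diffs and accept everything.
/// let _changed = action_log.read(cx).changed_buffers(cx);
/// action_log.update(cx, |log, cx| log.keep_all_edits(cx));
/// ```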
17pub struct ActionLog {
18 /// Buffers that we want to notify the model about when they change.
19 tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
20 /// Has the model edited a file since it last checked diagnostics?
21 edited_since_project_diagnostics_check: bool,
22 /// The project this action log is associated with
23 project: Entity<Project>,
24}
25
26impl ActionLog {
27 /// Creates a new, empty action log associated with the given project.
28 pub fn new(project: Entity<Project>) -> Self {
29 Self {
30 tracked_buffers: BTreeMap::default(),
31 edited_since_project_diagnostics_check: false,
32 project,
33 }
34 }
35
36 pub fn project(&self) -> &Entity<Project> {
37 &self.project
38 }
39
    /// Records that project diagnostics have been checked, resetting the edited-since-check flag.
41 pub fn checked_project_diagnostics(&mut self) {
42 self.edited_since_project_diagnostics_check = false;
43 }
44
45 /// Returns true if any files have been edited since the last project diagnostics check
46 pub fn has_edited_files_since_project_diagnostics_check(&self) -> bool {
47 self.edited_since_project_diagnostics_check
48 }
49
50 pub fn latest_snapshot(&self, buffer: &Entity<Buffer>) -> Option<text::BufferSnapshot> {
51 Some(self.tracked_buffers.get(buffer)?.snapshot.clone())
52 }
53
    /// Returns a unified diff of user edits made since the last read or notification.
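    /// The returned string concatenates one unified diff per changed buffer; an
    /// illustrative example (mirroring the format exercised in the tests below):
    ///
    /// ```text
    /// --- a/dir/file
    /// +++ b/dir/file
    /// @@ -1,5 +1,5 @@
    /// -abc
    /// +abXc
    ///  def
    ///  ghi
    /// -jkl
    /// +Yjkl
    ///  mno
    /// ```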
55 pub fn unnotified_user_edits(&self, cx: &Context<Self>) -> Option<String> {
56 let diffs = self
57 .tracked_buffers
58 .values()
59 .filter_map(|tracked| {
60 if !tracked.may_have_unnotified_user_edits {
61 return None;
62 }
63
64 let text_with_latest_user_edits = tracked.diff_base.to_string();
65 let text_with_last_seen_user_edits = tracked.last_seen_base.to_string();
66 if text_with_latest_user_edits == text_with_last_seen_user_edits {
67 return None;
68 }
69 let patch = language::unified_diff(
70 &text_with_last_seen_user_edits,
71 &text_with_latest_user_edits,
72 );
73
74 let buffer = tracked.buffer.clone();
75 let file_path = buffer
76 .read(cx)
77 .file()
78 .map(|file| RemotePathBuf::new(file.full_path(cx), PathStyle::Posix).to_proto())
79 .unwrap_or_else(|| format!("buffer_{}", buffer.entity_id()));
80
81 let mut result = String::new();
82 result.push_str(&format!("--- a/{}\n", file_path));
83 result.push_str(&format!("+++ b/{}\n", file_path));
84 result.push_str(&patch);
85
86 Some(result)
87 })
88 .collect::<Vec<_>>();
89
90 if diffs.is_empty() {
91 return None;
92 }
93
94 let unified_diff = diffs.join("\n\n");
95 Some(unified_diff)
96 }
97
    /// Returns a unified diff of user edits made since the last read or notification,
    /// and marks those edits as notified.
100 pub fn flush_unnotified_user_edits(&mut self, cx: &Context<Self>) -> Option<String> {
101 let patch = self.unnotified_user_edits(cx);
102 self.tracked_buffers.values_mut().for_each(|tracked| {
103 tracked.may_have_unnotified_user_edits = false;
104 tracked.last_seen_base = tracked.diff_base.clone();
105 });
106 patch
107 }
108
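    /// Starts tracking `buffer`, or refreshes an existing entry: registers the buffer with
    /// language servers, spawns a background task that maintains its diff, and records
    /// whether the agent created the file or is merely modifying it.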
109 fn track_buffer_internal(
110 &mut self,
111 buffer: Entity<Buffer>,
112 is_created: bool,
113 cx: &mut Context<Self>,
114 ) -> &mut TrackedBuffer {
115 let status = if is_created {
116 if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
117 match tracked.status {
118 TrackedBufferStatus::Created {
119 existing_file_content,
120 } => TrackedBufferStatus::Created {
121 existing_file_content,
122 },
123 TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
124 TrackedBufferStatus::Created {
125 existing_file_content: Some(tracked.diff_base),
126 }
127 }
128 }
129 } else if buffer
130 .read(cx)
131 .file()
132 .map_or(false, |file| file.disk_state().exists())
133 {
134 TrackedBufferStatus::Created {
135 existing_file_content: Some(buffer.read(cx).as_rope().clone()),
136 }
137 } else {
138 TrackedBufferStatus::Created {
139 existing_file_content: None,
140 }
141 }
142 } else {
143 TrackedBufferStatus::Modified
144 };
145
146 let tracked_buffer = self
147 .tracked_buffers
148 .entry(buffer.clone())
149 .or_insert_with(|| {
150 let open_lsp_handle = self.project.update(cx, |project, cx| {
151 project.register_buffer_with_language_servers(&buffer, cx)
152 });
153
154 let text_snapshot = buffer.read(cx).text_snapshot();
155 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
156 let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
157 let diff_base;
158 let last_seen_base;
159 let unreviewed_edits;
160 if is_created {
161 diff_base = Rope::default();
162 last_seen_base = Rope::default();
163 unreviewed_edits = Patch::new(vec![Edit {
164 old: 0..1,
165 new: 0..text_snapshot.max_point().row + 1,
166 }])
167 } else {
168 diff_base = buffer.read(cx).as_rope().clone();
169 last_seen_base = diff_base.clone();
170 unreviewed_edits = Patch::default();
171 }
172 TrackedBuffer {
173 buffer: buffer.clone(),
174 diff_base,
175 last_seen_base,
176 unreviewed_edits,
177 snapshot: text_snapshot.clone(),
178 status,
179 version: buffer.read(cx).version(),
180 diff,
181 diff_update: diff_update_tx,
182 may_have_unnotified_user_edits: false,
183 _open_lsp_handle: open_lsp_handle,
184 _maintain_diff: cx.spawn({
185 let buffer = buffer.clone();
186 async move |this, cx| {
187 Self::maintain_diff(this, buffer, diff_update_rx, cx)
188 .await
189 .ok();
190 }
191 }),
192 _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
193 }
194 });
195 tracked_buffer.version = buffer.read(cx).version();
196 tracked_buffer
197 }
198
199 fn handle_buffer_event(
200 &mut self,
201 buffer: Entity<Buffer>,
202 event: &BufferEvent,
203 cx: &mut Context<Self>,
204 ) {
205 match event {
206 BufferEvent::Edited { .. } => self.handle_buffer_edited(buffer, cx),
207 BufferEvent::FileHandleChanged => {
208 self.handle_buffer_file_changed(buffer, cx);
209 }
210 _ => {}
211 };
212 }
213
214 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
215 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
216 return;
217 };
218 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
219 }
220
221 fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
222 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
223 return;
224 };
225
226 match tracked_buffer.status {
227 TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
228 if buffer
229 .read(cx)
230 .file()
231 .map_or(false, |file| file.disk_state() == DiskState::Deleted)
232 {
233 // If the buffer had been edited by a tool, but it got
234 // deleted externally, we want to stop tracking it.
235 self.tracked_buffers.remove(&buffer);
236 }
237 cx.notify();
238 }
239 TrackedBufferStatus::Deleted => {
240 if buffer
241 .read(cx)
242 .file()
243 .map_or(false, |file| file.disk_state() != DiskState::Deleted)
244 {
245 // If the buffer had been deleted by a tool, but it got
246 // resurrected externally, we want to clear the edits we
247 // were tracking and reset the buffer's state.
248 self.tracked_buffers.remove(&buffer);
249 self.track_buffer_internal(buffer, false, cx);
250 }
251 cx.notify();
252 }
253 }
254 }
255
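    /// Background task that keeps a tracked buffer's diff up to date: it recomputes the
    /// diff whenever the buffer changes, and reconciles unreviewed edits with newly
    /// committed content whenever the buffer's repository HEAD moves.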
256 async fn maintain_diff(
257 this: WeakEntity<Self>,
258 buffer: Entity<Buffer>,
259 mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
260 cx: &mut AsyncApp,
261 ) -> Result<()> {
262 let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
263 let git_diff = this
264 .update(cx, |this, cx| {
265 this.project.update(cx, |project, cx| {
266 project.open_uncommitted_diff(buffer.clone(), cx)
267 })
268 })?
269 .await
270 .ok();
271 let buffer_repo = git_store.read_with(cx, |git_store, cx| {
272 git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
273 })?;
274
275 let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
276 let _repo_subscription =
277 if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
278 cx.update(|cx| {
279 let mut old_head = buffer_repo.read(cx).head_commit.clone();
280 Some(cx.subscribe(git_diff, move |_, event, cx| match event {
281 buffer_diff::BufferDiffEvent::DiffChanged { .. } => {
282 let new_head = buffer_repo.read(cx).head_commit.clone();
283 if new_head != old_head {
284 old_head = new_head;
285 git_diff_updates_tx.send(()).ok();
286 }
287 }
288 _ => {}
289 }))
290 })?
291 } else {
292 None
293 };
294
295 loop {
296 futures::select_biased! {
297 buffer_update = buffer_updates.next() => {
298 if let Some((author, buffer_snapshot)) = buffer_update {
299 Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
300 } else {
301 break;
302 }
303 }
304 _ = git_diff_updates_rx.changed().fuse() => {
305 if let Some(git_diff) = git_diff.as_ref() {
306 Self::keep_committed_edits(&this, &buffer, &git_diff, cx).await?;
307 }
308 }
309 }
310 }
311
312 Ok(())
313 }
314
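    /// Handles a buffer change: rebases the agent's diff base over any non-conflicting
    /// user edits (so they aren't attributed to the agent), flags the buffer as having
    /// unnotified user edits when appropriate, and recomputes the diff.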
315 async fn track_edits(
316 this: &WeakEntity<ActionLog>,
317 buffer: &Entity<Buffer>,
318 author: ChangeAuthor,
319 buffer_snapshot: text::BufferSnapshot,
320 cx: &mut AsyncApp,
321 ) -> Result<()> {
322 let rebase = this.update(cx, |this, cx| {
323 let tracked_buffer = this
324 .tracked_buffers
325 .get_mut(buffer)
326 .context("buffer not tracked")?;
327
328 let rebase = cx.background_spawn({
329 let mut base_text = tracked_buffer.diff_base.clone();
330 let old_snapshot = tracked_buffer.snapshot.clone();
331 let new_snapshot = buffer_snapshot.clone();
332 let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
333 let edits = diff_snapshots(&old_snapshot, &new_snapshot);
334 let mut has_user_changes = false;
335 async move {
336 if let ChangeAuthor::User = author {
337 has_user_changes = apply_non_conflicting_edits(
338 &unreviewed_edits,
339 edits,
340 &mut base_text,
341 new_snapshot.as_rope(),
342 );
343 }
344
345 (Arc::new(base_text.to_string()), base_text, has_user_changes)
346 }
347 });
348
349 anyhow::Ok(rebase)
350 })??;
351 let (new_base_text, new_diff_base, has_user_changes) = rebase.await;
352
353 this.update(cx, |this, _| {
354 let tracked_buffer = this
355 .tracked_buffers
356 .get_mut(buffer)
357 .context("buffer not tracked")
358 .unwrap();
359 tracked_buffer.may_have_unnotified_user_edits |= has_user_changes;
360 })?;
361
362 Self::update_diff(
363 this,
364 buffer,
365 buffer_snapshot,
366 new_base_text,
367 new_diff_base,
368 cx,
369 )
370 .await
371 }
372
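    /// Called when the repository HEAD changes: any unreviewed edit whose content now
    /// matches what was committed is folded into the agent's diff base, effectively
    /// treating it as kept by the user.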
373 async fn keep_committed_edits(
374 this: &WeakEntity<ActionLog>,
375 buffer: &Entity<Buffer>,
376 git_diff: &Entity<BufferDiff>,
377 cx: &mut AsyncApp,
378 ) -> Result<()> {
379 let buffer_snapshot = this.read_with(cx, |this, _cx| {
380 let tracked_buffer = this
381 .tracked_buffers
382 .get(buffer)
383 .context("buffer not tracked")?;
384 anyhow::Ok(tracked_buffer.snapshot.clone())
385 })??;
386 let (new_base_text, new_diff_base) = this
387 .read_with(cx, |this, cx| {
388 let tracked_buffer = this
389 .tracked_buffers
390 .get(buffer)
391 .context("buffer not tracked")?;
392 let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
393 let agent_diff_base = tracked_buffer.diff_base.clone();
394 let git_diff_base = git_diff.read(cx).base_text().as_rope().clone();
395 let buffer_text = tracked_buffer.snapshot.as_rope().clone();
396 anyhow::Ok(cx.background_spawn(async move {
397 let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
398 let committed_edits = language::line_diff(
399 &agent_diff_base.to_string(),
400 &git_diff_base.to_string(),
401 )
402 .into_iter()
403 .map(|(old, new)| Edit { old, new });
404
405 let mut new_agent_diff_base = agent_diff_base.clone();
406 let mut row_delta = 0i32;
407 for committed in committed_edits {
408 while let Some(unreviewed) = old_unreviewed_edits.peek() {
409 // If the committed edit matches the unreviewed
410 // edit, assume the user wants to keep it.
411 if committed.old == unreviewed.old {
412 let unreviewed_new =
413 buffer_text.slice_rows(unreviewed.new.clone()).to_string();
414 let committed_new =
415 git_diff_base.slice_rows(committed.new.clone()).to_string();
416 if unreviewed_new == committed_new {
417 let old_byte_start =
418 new_agent_diff_base.point_to_offset(Point::new(
419 (unreviewed.old.start as i32 + row_delta) as u32,
420 0,
421 ));
422 let old_byte_end =
423 new_agent_diff_base.point_to_offset(cmp::min(
424 Point::new(
425 (unreviewed.old.end as i32 + row_delta) as u32,
426 0,
427 ),
428 new_agent_diff_base.max_point(),
429 ));
430 new_agent_diff_base
431 .replace(old_byte_start..old_byte_end, &unreviewed_new);
432 row_delta +=
433 unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
434 }
435 } else if unreviewed.old.start >= committed.old.end {
436 break;
437 }
438
439 old_unreviewed_edits.next().unwrap();
440 }
441 }
442
443 (
444 Arc::new(new_agent_diff_base.to_string()),
445 new_agent_diff_base,
446 )
447 }))
448 })??
449 .await;
450
451 Self::update_diff(
452 this,
453 buffer,
454 buffer_snapshot,
455 new_base_text,
456 new_diff_base,
457 cx,
458 )
459 .await
460 }
461
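    /// Recomputes the buffer's diff against `new_base_text`, derives the row-level
    /// unreviewed edits from the resulting hunks, and stores the new diff base, snapshot,
    /// and edits on the tracked buffer.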
462 async fn update_diff(
463 this: &WeakEntity<ActionLog>,
464 buffer: &Entity<Buffer>,
465 buffer_snapshot: text::BufferSnapshot,
466 new_base_text: Arc<String>,
467 new_diff_base: Rope,
468 cx: &mut AsyncApp,
469 ) -> Result<()> {
470 let (diff, language, language_registry) = this.read_with(cx, |this, cx| {
471 let tracked_buffer = this
472 .tracked_buffers
473 .get(buffer)
474 .context("buffer not tracked")?;
475 anyhow::Ok((
476 tracked_buffer.diff.clone(),
477 buffer.read(cx).language().cloned(),
478 buffer.read(cx).language_registry().clone(),
479 ))
480 })??;
481 let diff_snapshot = BufferDiff::update_diff(
482 diff.clone(),
483 buffer_snapshot.clone(),
484 Some(new_base_text),
485 true,
486 false,
487 language,
488 language_registry,
489 cx,
490 )
491 .await;
492 let mut unreviewed_edits = Patch::default();
493 if let Ok(diff_snapshot) = diff_snapshot {
494 unreviewed_edits = cx
495 .background_spawn({
496 let diff_snapshot = diff_snapshot.clone();
497 let buffer_snapshot = buffer_snapshot.clone();
498 let new_diff_base = new_diff_base.clone();
499 async move {
500 let mut unreviewed_edits = Patch::default();
501 for hunk in diff_snapshot
502 .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer_snapshot)
503 {
504 let old_range = new_diff_base
505 .offset_to_point(hunk.diff_base_byte_range.start)
506 ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
507 let new_range = hunk.range.start..hunk.range.end;
508 unreviewed_edits.push(point_to_row_edit(
509 Edit {
510 old: old_range,
511 new: new_range,
512 },
513 &new_diff_base,
514 &buffer_snapshot.as_rope(),
515 ));
516 }
517 unreviewed_edits
518 }
519 })
520 .await;
521
522 diff.update(cx, |diff, cx| {
523 diff.set_snapshot(diff_snapshot, &buffer_snapshot, cx);
524 })?;
525 }
526 this.update(cx, |this, cx| {
527 let tracked_buffer = this
528 .tracked_buffers
529 .get_mut(buffer)
530 .context("buffer not tracked")?;
531 tracked_buffer.diff_base = new_diff_base;
532 tracked_buffer.snapshot = buffer_snapshot;
533 tracked_buffer.unreviewed_edits = unreviewed_edits;
534 cx.notify();
535 anyhow::Ok(())
536 })?
537 }
538
    /// Tracks a buffer as read by the agent, so the model can be notified about subsequent user edits.
540 pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
541 self.track_buffer_internal(buffer, false, cx);
542 }
543
    /// Marks a buffer as created by the agent, so it can be refreshed in the context.
545 pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
546 self.edited_since_project_diagnostics_check = true;
547 self.track_buffer_internal(buffer.clone(), true, cx);
548 }
549
    /// Marks a buffer as edited by the agent, so it can be refreshed in the context.
551 pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
552 self.edited_since_project_diagnostics_check = true;
553
554 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
555 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
556 tracked_buffer.status = TrackedBufferStatus::Modified;
557 }
558 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
559 }
560
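    /// Called before the agent deletes a buffer: files the agent itself created are simply
    /// untracked, while previously existing files are emptied and marked as deleted so the
    /// deletion can still be reviewed or rejected.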
561 pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
562 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
563 match tracked_buffer.status {
564 TrackedBufferStatus::Created { .. } => {
565 self.tracked_buffers.remove(&buffer);
566 cx.notify();
567 }
568 TrackedBufferStatus::Modified => {
569 buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
570 tracked_buffer.status = TrackedBufferStatus::Deleted;
571 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
572 }
573 TrackedBufferStatus::Deleted => {}
574 }
575 cx.notify();
576 }
577
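    /// Accepts ("keeps") all unreviewed edits intersecting the given range, folding them
    /// into the diff base so they no longer show up as pending changes.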
578 pub fn keep_edits_in_range(
579 &mut self,
580 buffer: Entity<Buffer>,
581 buffer_range: Range<impl language::ToPoint>,
582 cx: &mut Context<Self>,
583 ) {
584 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
585 return;
586 };
587
588 match tracked_buffer.status {
589 TrackedBufferStatus::Deleted => {
590 self.tracked_buffers.remove(&buffer);
591 cx.notify();
592 }
593 _ => {
594 let buffer = buffer.read(cx);
595 let buffer_range =
596 buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
597 let mut delta = 0i32;
598
599 tracked_buffer.unreviewed_edits.retain_mut(|edit| {
600 edit.old.start = (edit.old.start as i32 + delta) as u32;
601 edit.old.end = (edit.old.end as i32 + delta) as u32;
602
603 if buffer_range.end.row < edit.new.start
604 || buffer_range.start.row > edit.new.end
605 {
606 true
607 } else {
608 let old_range = tracked_buffer
609 .diff_base
610 .point_to_offset(Point::new(edit.old.start, 0))
611 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
612 Point::new(edit.old.end, 0),
613 tracked_buffer.diff_base.max_point(),
614 ));
615 let new_range = tracked_buffer
616 .snapshot
617 .point_to_offset(Point::new(edit.new.start, 0))
618 ..tracked_buffer.snapshot.point_to_offset(cmp::min(
619 Point::new(edit.new.end, 0),
620 tracked_buffer.snapshot.max_point(),
621 ));
622 tracked_buffer.diff_base.replace(
623 old_range,
624 &tracked_buffer
625 .snapshot
626 .text_for_range(new_range)
627 .collect::<String>(),
628 );
629 delta += edit.new_len() as i32 - edit.old_len() as i32;
630 false
631 }
632 });
633 if tracked_buffer.unreviewed_edits.is_empty() {
634 if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
635 tracked_buffer.status = TrackedBufferStatus::Modified;
636 }
637 }
638 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
639 }
640 }
641 }
642
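    /// Rejects unreviewed edits intersecting the given ranges: files created by the agent
    /// are deleted or restored to their previous content, files deleted by the agent are
    /// restored, and modified files have the intersecting hunks reverted to the diff base.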
643 pub fn reject_edits_in_ranges(
644 &mut self,
645 buffer: Entity<Buffer>,
646 buffer_ranges: Vec<Range<impl language::ToPoint>>,
647 cx: &mut Context<Self>,
648 ) -> Task<Result<()>> {
649 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
650 return Task::ready(Ok(()));
651 };
652
653 match &tracked_buffer.status {
654 TrackedBufferStatus::Created {
655 existing_file_content,
656 } => {
657 let task = if let Some(existing_file_content) = existing_file_content {
658 buffer.update(cx, |buffer, cx| {
659 buffer.start_transaction();
660 buffer.set_text("", cx);
661 for chunk in existing_file_content.chunks() {
662 buffer.append(chunk, cx);
663 }
664 buffer.end_transaction(cx);
665 });
666 self.project
667 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
668 } else {
669 // For a file created by AI with no pre-existing content,
670 // only delete the file if we're certain it contains only AI content
671 // with no edits from the user.
672
673 let initial_version = tracked_buffer.version.clone();
674 let current_version = buffer.read(cx).version();
675
676 let current_content = buffer.read(cx).text();
677 let tracked_content = tracked_buffer.snapshot.text();
678
679 let is_ai_only_content =
680 initial_version == current_version && current_content == tracked_content;
681
682 if is_ai_only_content {
683 buffer
684 .read(cx)
685 .entry_id(cx)
686 .and_then(|entry_id| {
687 self.project.update(cx, |project, cx| {
688 project.delete_entry(entry_id, false, cx)
689 })
690 })
691 .unwrap_or(Task::ready(Ok(())))
692 } else {
693 // Not sure how to disentangle edits made by the user
694 // from edits made by the AI at this point.
695 // For now, preserve both to avoid data loss.
696 //
697 // TODO: Better solution (disable "Reject" after user makes some
698 // edit or find a way to differentiate between AI and user edits)
699 Task::ready(Ok(()))
700 }
701 };
702
703 self.tracked_buffers.remove(&buffer);
704 cx.notify();
705 task
706 }
707 TrackedBufferStatus::Deleted => {
708 buffer.update(cx, |buffer, cx| {
709 buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
710 });
711 let save = self
712 .project
713 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
714
715 // Clear all tracked edits for this buffer and start over as if we just read it.
716 self.tracked_buffers.remove(&buffer);
717 self.buffer_read(buffer.clone(), cx);
718 cx.notify();
719 save
720 }
721 TrackedBufferStatus::Modified => {
722 buffer.update(cx, |buffer, cx| {
723 let mut buffer_row_ranges = buffer_ranges
724 .into_iter()
725 .map(|range| {
726 range.start.to_point(buffer).row..range.end.to_point(buffer).row
727 })
728 .peekable();
729
730 let mut edits_to_revert = Vec::new();
731 for edit in tracked_buffer.unreviewed_edits.edits() {
732 let new_range = tracked_buffer
733 .snapshot
734 .anchor_before(Point::new(edit.new.start, 0))
735 ..tracked_buffer.snapshot.anchor_after(cmp::min(
736 Point::new(edit.new.end, 0),
737 tracked_buffer.snapshot.max_point(),
738 ));
739 let new_row_range = new_range.start.to_point(buffer).row
740 ..new_range.end.to_point(buffer).row;
741
742 let mut revert = false;
743 while let Some(buffer_row_range) = buffer_row_ranges.peek() {
744 if buffer_row_range.end < new_row_range.start {
745 buffer_row_ranges.next();
746 } else if buffer_row_range.start > new_row_range.end {
747 break;
748 } else {
749 revert = true;
750 break;
751 }
752 }
753
754 if revert {
755 let old_range = tracked_buffer
756 .diff_base
757 .point_to_offset(Point::new(edit.old.start, 0))
758 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
759 Point::new(edit.old.end, 0),
760 tracked_buffer.diff_base.max_point(),
761 ));
762 let old_text = tracked_buffer
763 .diff_base
764 .chunks_in_range(old_range)
765 .collect::<String>();
766 edits_to_revert.push((new_range, old_text));
767 }
768 }
769
770 buffer.edit(edits_to_revert, None, cx);
771 });
772 self.project
773 .update(cx, |project, cx| project.save_buffer(buffer, cx))
774 }
775 }
776 }
777
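    /// Accepts every unreviewed edit in every tracked buffer, dropping buffers the agent
    /// deleted and resetting the remaining diff bases to the current buffer contents.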
778 pub fn keep_all_edits(&mut self, cx: &mut Context<Self>) {
779 self.tracked_buffers
780 .retain(|_buffer, tracked_buffer| match tracked_buffer.status {
781 TrackedBufferStatus::Deleted => false,
782 _ => {
783 if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
784 tracked_buffer.status = TrackedBufferStatus::Modified;
785 }
786 tracked_buffer.unreviewed_edits.clear();
787 tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
788 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
789 true
790 }
791 });
792 cx.notify();
793 }
794
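    /// Rejects every unreviewed edit in every changed buffer, returning a task that
    /// resolves once all rejections have completed (errors are logged and ignored).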
795 pub fn reject_all_edits(&mut self, cx: &mut Context<Self>) -> Task<()> {
796 let futures = self.changed_buffers(cx).into_keys().map(|buffer| {
797 let reject = self.reject_edits_in_ranges(buffer, vec![Anchor::MIN..Anchor::MAX], cx);
798
799 async move {
800 reject.await.log_err();
801 }
802 });
803
804 let task = futures::future::join_all(futures);
805
806 cx.spawn(async move |_, _| {
807 task.await;
808 })
809 }
810
811 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
812 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
813 self.tracked_buffers
814 .iter()
815 .filter(|(_, tracked)| tracked.has_edits(cx))
816 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
817 .collect()
818 }
819
    /// Iterates over tracked buffers that have changed since the model last read or edited them.
821 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
822 self.tracked_buffers
823 .iter()
824 .filter(|(buffer, tracked)| {
825 let buffer = buffer.read(cx);
826
827 tracked.version != buffer.version
828 && buffer
829 .file()
830 .map_or(false, |file| file.disk_state() != DiskState::Deleted)
831 })
832 .map(|(buffer, _)| buffer)
833 }
834}
835
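/// Applies the subset of `edits` that doesn't conflict with the existing edits in `patch`
/// to `old_text`, copying the replacement rows from `new_text`. Returns whether `old_text`
/// was modified.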
836fn apply_non_conflicting_edits(
837 patch: &Patch<u32>,
838 edits: Vec<Edit<u32>>,
839 old_text: &mut Rope,
840 new_text: &Rope,
841) -> bool {
842 let mut old_edits = patch.edits().iter().cloned().peekable();
843 let mut new_edits = edits.into_iter().peekable();
844 let mut applied_delta = 0i32;
845 let mut rebased_delta = 0i32;
846 let mut has_made_changes = false;
847
848 while let Some(mut new_edit) = new_edits.next() {
849 let mut conflict = false;
850
        // Consume all the old edits that are before this new edit, and flag a conflict for any that intersect it.
852 while let Some(old_edit) = old_edits.peek() {
853 if new_edit.old.end < old_edit.new.start
854 || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
855 {
856 break;
857 } else if new_edit.old.start > old_edit.new.end
858 || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
859 {
860 let old_edit = old_edits.next().unwrap();
861 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
862 } else {
863 conflict = true;
864 if new_edits
865 .peek()
866 .map_or(false, |next_edit| next_edit.old.overlaps(&old_edit.new))
867 {
868 new_edit = new_edits.next().unwrap();
869 } else {
870 let old_edit = old_edits.next().unwrap();
871 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
872 }
873 }
874 }
875
876 if !conflict {
877 // This edit doesn't intersect with any old edit, so we can apply it to the old text.
878 new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
879 new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
880 let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
881 ..old_text.point_to_offset(cmp::min(
882 Point::new(new_edit.old.end, 0),
883 old_text.max_point(),
884 ));
885 let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
886 ..new_text.point_to_offset(cmp::min(
887 Point::new(new_edit.new.end, 0),
888 new_text.max_point(),
889 ));
890
891 old_text.replace(
892 old_bytes,
893 &new_text.chunks_in_range(new_bytes).collect::<String>(),
894 );
895 applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
896 has_made_changes = true;
897 }
898 }
899 has_made_changes
900}
901
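/// Computes the row-level edits between two buffer snapshots, coalescing edits whose row
/// ranges touch or overlap into a single edit.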
902fn diff_snapshots(
903 old_snapshot: &text::BufferSnapshot,
904 new_snapshot: &text::BufferSnapshot,
905) -> Vec<Edit<u32>> {
906 let mut edits = new_snapshot
907 .edits_since::<Point>(&old_snapshot.version)
908 .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
909 .peekable();
910 let mut row_edits = Vec::new();
911 while let Some(mut edit) = edits.next() {
912 while let Some(next_edit) = edits.peek() {
913 if edit.old.end >= next_edit.old.start {
914 edit.old.end = next_edit.old.end;
915 edit.new.end = next_edit.new.end;
916 edits.next();
917 } else {
918 break;
919 }
920 }
921 row_edits.push(edit);
922 }
923 row_edits
924}
925
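/// Converts a point-based edit into an edit over whole rows, attributing insertions that
/// start at the end of a line to the following row.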
926fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
927 if edit.old.start.column == old_text.line_len(edit.old.start.row)
928 && new_text
929 .chars_at(new_text.point_to_offset(edit.new.start))
930 .next()
931 == Some('\n')
932 && edit.old.start != old_text.max_point()
933 {
934 Edit {
935 old: edit.old.start.row + 1..edit.old.end.row + 1,
936 new: edit.new.start.row + 1..edit.new.end.row + 1,
937 }
938 } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
939 Edit {
940 old: edit.old.start.row..edit.old.end.row,
941 new: edit.new.start.row..edit.new.end.row,
942 }
943 } else {
944 Edit {
945 old: edit.old.start.row..edit.old.end.row + 1,
946 new: edit.new.start.row..edit.new.end.row + 1,
947 }
948 }
949}
950
951#[derive(Copy, Clone, Debug)]
952enum ChangeAuthor {
953 User,
954 Agent,
955}
956
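/// How the agent has affected a tracked file: created it (possibly overwriting existing
/// content), modified it, or deleted it.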
957enum TrackedBufferStatus {
958 Created { existing_file_content: Option<Rope> },
959 Modified,
960 Deleted,
961}
962
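/// Per-buffer tracking state. `diff_base` is the content the agent's edits are diffed
/// against, `last_seen_base` is the base as of the last user-edit notification, and
/// `unreviewed_edits` are the row edits the user hasn't yet accepted or rejected.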
963struct TrackedBuffer {
964 buffer: Entity<Buffer>,
965 diff_base: Rope,
966 last_seen_base: Rope,
967 unreviewed_edits: Patch<u32>,
968 status: TrackedBufferStatus,
969 version: clock::Global,
970 diff: Entity<BufferDiff>,
971 snapshot: text::BufferSnapshot,
972 diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
973 may_have_unnotified_user_edits: bool,
974 _open_lsp_handle: OpenLspBufferHandle,
975 _maintain_diff: Task<()>,
976 _subscription: Subscription,
977}
978
979impl TrackedBuffer {
980 fn has_edits(&self, cx: &App) -> bool {
981 self.diff
982 .read(cx)
983 .hunks(&self.buffer.read(cx), cx)
984 .next()
985 .is_some()
986 }
987
988 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
989 self.diff_update
990 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
991 .ok();
992 }
993}
994
995pub struct ChangedBuffer {
996 pub diff: Entity<BufferDiff>,
997}
998
999#[cfg(test)]
1000mod tests {
1001 use super::*;
1002 use buffer_diff::DiffHunkStatusKind;
1003 use gpui::TestAppContext;
1004 use indoc::indoc;
1005 use language::Point;
1006 use project::{FakeFs, Fs, Project, RemoveOptions};
1007 use rand::prelude::*;
1008 use serde_json::json;
1009 use settings::SettingsStore;
1010 use std::env;
1011 use util::{RandomCharIter, path};
1012
1013 #[ctor::ctor]
1014 fn init_logger() {
1015 zlog::init_test();
1016 }
1017
1018 fn init_test(cx: &mut TestAppContext) {
1019 cx.update(|cx| {
1020 let settings_store = SettingsStore::test(cx);
1021 cx.set_global(settings_store);
1022 language::init(cx);
1023 Project::init_settings(cx);
1024 });
1025 }
1026
1027 #[gpui::test(iterations = 10)]
1028 async fn test_keep_edits(cx: &mut TestAppContext) {
1029 init_test(cx);
1030
1031 let fs = FakeFs::new(cx.executor());
1032 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1033 .await;
1034 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1035 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1036 let file_path = project
1037 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1038 .unwrap();
1039 let buffer = project
1040 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1041 .await
1042 .unwrap();
1043
1044 cx.update(|cx| {
1045 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1046 buffer.update(cx, |buffer, cx| {
1047 buffer
1048 .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
1049 .unwrap()
1050 });
1051 buffer.update(cx, |buffer, cx| {
1052 buffer
1053 .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
1054 .unwrap()
1055 });
1056 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1057 });
1058 cx.run_until_parked();
1059 assert_eq!(
1060 buffer.read_with(cx, |buffer, _| buffer.text()),
1061 "abc\ndEf\nghi\njkl\nmnO"
1062 );
1063 assert_eq!(
1064 unreviewed_hunks(&action_log, cx),
1065 vec![(
1066 buffer.clone(),
1067 vec![
1068 HunkStatus {
1069 range: Point::new(1, 0)..Point::new(2, 0),
1070 diff_status: DiffHunkStatusKind::Modified,
1071 old_text: "def\n".into(),
1072 },
1073 HunkStatus {
1074 range: Point::new(4, 0)..Point::new(4, 3),
1075 diff_status: DiffHunkStatusKind::Modified,
1076 old_text: "mno".into(),
1077 }
1078 ],
1079 )]
1080 );
1081
1082 action_log.update(cx, |log, cx| {
1083 log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), cx)
1084 });
1085 cx.run_until_parked();
1086 assert_eq!(
1087 unreviewed_hunks(&action_log, cx),
1088 vec![(
1089 buffer.clone(),
1090 vec![HunkStatus {
1091 range: Point::new(1, 0)..Point::new(2, 0),
1092 diff_status: DiffHunkStatusKind::Modified,
1093 old_text: "def\n".into(),
1094 }],
1095 )]
1096 );
1097
1098 action_log.update(cx, |log, cx| {
1099 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), cx)
1100 });
1101 cx.run_until_parked();
1102 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1103 }
1104
1105 #[gpui::test(iterations = 10)]
1106 async fn test_deletions(cx: &mut TestAppContext) {
1107 init_test(cx);
1108
1109 let fs = FakeFs::new(cx.executor());
1110 fs.insert_tree(
1111 path!("/dir"),
1112 json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
1113 )
1114 .await;
1115 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1116 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1117 let file_path = project
1118 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1119 .unwrap();
1120 let buffer = project
1121 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1122 .await
1123 .unwrap();
1124
1125 cx.update(|cx| {
1126 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1127 buffer.update(cx, |buffer, cx| {
1128 buffer
1129 .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
1130 .unwrap();
1131 buffer.finalize_last_transaction();
1132 });
1133 buffer.update(cx, |buffer, cx| {
1134 buffer
1135 .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
1136 .unwrap();
1137 buffer.finalize_last_transaction();
1138 });
1139 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1140 });
1141 cx.run_until_parked();
1142 assert_eq!(
1143 buffer.read_with(cx, |buffer, _| buffer.text()),
1144 "abc\nghi\njkl\npqr"
1145 );
1146 assert_eq!(
1147 unreviewed_hunks(&action_log, cx),
1148 vec![(
1149 buffer.clone(),
1150 vec![
1151 HunkStatus {
1152 range: Point::new(1, 0)..Point::new(1, 0),
1153 diff_status: DiffHunkStatusKind::Deleted,
1154 old_text: "def\n".into(),
1155 },
1156 HunkStatus {
1157 range: Point::new(3, 0)..Point::new(3, 0),
1158 diff_status: DiffHunkStatusKind::Deleted,
1159 old_text: "mno\n".into(),
1160 }
1161 ],
1162 )]
1163 );
1164
1165 buffer.update(cx, |buffer, cx| buffer.undo(cx));
1166 cx.run_until_parked();
1167 assert_eq!(
1168 buffer.read_with(cx, |buffer, _| buffer.text()),
1169 "abc\nghi\njkl\nmno\npqr"
1170 );
1171 assert_eq!(
1172 unreviewed_hunks(&action_log, cx),
1173 vec![(
1174 buffer.clone(),
1175 vec![HunkStatus {
1176 range: Point::new(1, 0)..Point::new(1, 0),
1177 diff_status: DiffHunkStatusKind::Deleted,
1178 old_text: "def\n".into(),
1179 }],
1180 )]
1181 );
1182
1183 action_log.update(cx, |log, cx| {
1184 log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), cx)
1185 });
1186 cx.run_until_parked();
1187 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1188 }
1189
1190 #[gpui::test(iterations = 10)]
1191 async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
1192 init_test(cx);
1193
1194 let fs = FakeFs::new(cx.executor());
1195 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1196 .await;
1197 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1198 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1199 let file_path = project
1200 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1201 .unwrap();
1202 let buffer = project
1203 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1204 .await
1205 .unwrap();
1206
1207 cx.update(|cx| {
1208 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1209 buffer.update(cx, |buffer, cx| {
1210 buffer
1211 .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
1212 .unwrap()
1213 });
1214 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1215 });
1216 cx.run_until_parked();
1217 assert_eq!(
1218 buffer.read_with(cx, |buffer, _| buffer.text()),
1219 "abc\ndeF\nGHI\njkl\nmno"
1220 );
1221 assert_eq!(
1222 unreviewed_hunks(&action_log, cx),
1223 vec![(
1224 buffer.clone(),
1225 vec![HunkStatus {
1226 range: Point::new(1, 0)..Point::new(3, 0),
1227 diff_status: DiffHunkStatusKind::Modified,
1228 old_text: "def\nghi\n".into(),
1229 }],
1230 )]
1231 );
1232
1233 buffer.update(cx, |buffer, cx| {
1234 buffer.edit(
1235 [
1236 (Point::new(0, 2)..Point::new(0, 2), "X"),
1237 (Point::new(3, 0)..Point::new(3, 0), "Y"),
1238 ],
1239 None,
1240 cx,
1241 )
1242 });
1243 cx.run_until_parked();
1244 assert_eq!(
1245 buffer.read_with(cx, |buffer, _| buffer.text()),
1246 "abXc\ndeF\nGHI\nYjkl\nmno"
1247 );
1248 assert_eq!(
1249 unreviewed_hunks(&action_log, cx),
1250 vec![(
1251 buffer.clone(),
1252 vec![HunkStatus {
1253 range: Point::new(1, 0)..Point::new(3, 0),
1254 diff_status: DiffHunkStatusKind::Modified,
1255 old_text: "def\nghi\n".into(),
1256 }],
1257 )]
1258 );
1259
1260 buffer.update(cx, |buffer, cx| {
1261 buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
1262 });
1263 cx.run_until_parked();
1264 assert_eq!(
1265 buffer.read_with(cx, |buffer, _| buffer.text()),
1266 "abXc\ndZeF\nGHI\nYjkl\nmno"
1267 );
1268 assert_eq!(
1269 unreviewed_hunks(&action_log, cx),
1270 vec![(
1271 buffer.clone(),
1272 vec![HunkStatus {
1273 range: Point::new(1, 0)..Point::new(3, 0),
1274 diff_status: DiffHunkStatusKind::Modified,
1275 old_text: "def\nghi\n".into(),
1276 }],
1277 )]
1278 );
1279
1280 action_log.update(cx, |log, cx| {
1281 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
1282 });
1283 cx.run_until_parked();
1284 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1285 }
1286
1287 #[gpui::test(iterations = 10)]
1288 async fn test_user_edits_notifications(cx: &mut TestAppContext) {
1289 init_test(cx);
1290
1291 let fs = FakeFs::new(cx.executor());
1292 fs.insert_tree(
1293 path!("/dir"),
1294 json!({"file": indoc! {"
1295 abc
1296 def
1297 ghi
1298 jkl
1299 mno"}}),
1300 )
1301 .await;
1302 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1303 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1304 let file_path = project
1305 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1306 .unwrap();
1307 let buffer = project
1308 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1309 .await
1310 .unwrap();
1311
1312 // Agent edits
1313 cx.update(|cx| {
1314 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1315 buffer.update(cx, |buffer, cx| {
1316 buffer
1317 .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
1318 .unwrap()
1319 });
1320 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1321 });
1322 cx.run_until_parked();
1323 assert_eq!(
1324 buffer.read_with(cx, |buffer, _| buffer.text()),
1325 indoc! {"
1326 abc
1327 deF
1328 GHI
1329 jkl
1330 mno"}
1331 );
1332 assert_eq!(
1333 unreviewed_hunks(&action_log, cx),
1334 vec![(
1335 buffer.clone(),
1336 vec![HunkStatus {
1337 range: Point::new(1, 0)..Point::new(3, 0),
1338 diff_status: DiffHunkStatusKind::Modified,
1339 old_text: "def\nghi\n".into(),
1340 }],
1341 )]
1342 );
1343
1344 // User edits
1345 buffer.update(cx, |buffer, cx| {
1346 buffer.edit(
1347 [
1348 (Point::new(0, 2)..Point::new(0, 2), "X"),
1349 (Point::new(3, 0)..Point::new(3, 0), "Y"),
1350 ],
1351 None,
1352 cx,
1353 )
1354 });
1355 cx.run_until_parked();
1356 assert_eq!(
1357 buffer.read_with(cx, |buffer, _| buffer.text()),
1358 indoc! {"
1359 abXc
1360 deF
1361 GHI
1362 Yjkl
1363 mno"}
1364 );
1365
1366 // User edits should be stored separately from agent's
1367 let user_edits = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));
1368 assert_eq!(
1369 user_edits.expect("should have some user edits"),
1370 indoc! {"
1371 --- a/dir/file
1372 +++ b/dir/file
1373 @@ -1,5 +1,5 @@
1374 -abc
1375 +abXc
1376 def
1377 ghi
1378 -jkl
1379 +Yjkl
1380 mno
1381 "}
1382 );
1383
1384 action_log.update(cx, |log, cx| {
1385 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
1386 });
1387 cx.run_until_parked();
1388 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1389 }
1390
1391 #[gpui::test(iterations = 10)]
1392 async fn test_creating_files(cx: &mut TestAppContext) {
1393 init_test(cx);
1394
1395 let fs = FakeFs::new(cx.executor());
1396 fs.insert_tree(path!("/dir"), json!({})).await;
1397 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1398 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1399 let file_path = project
1400 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1401 .unwrap();
1402
1403 let buffer = project
1404 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1405 .await
1406 .unwrap();
1407 cx.update(|cx| {
1408 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1409 buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
1410 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1411 });
1412 project
1413 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1414 .await
1415 .unwrap();
1416 cx.run_until_parked();
1417 assert_eq!(
1418 unreviewed_hunks(&action_log, cx),
1419 vec![(
1420 buffer.clone(),
1421 vec![HunkStatus {
1422 range: Point::new(0, 0)..Point::new(0, 5),
1423 diff_status: DiffHunkStatusKind::Added,
1424 old_text: "".into(),
1425 }],
1426 )]
1427 );
1428
1429 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
1430 cx.run_until_parked();
1431 assert_eq!(
1432 unreviewed_hunks(&action_log, cx),
1433 vec![(
1434 buffer.clone(),
1435 vec![HunkStatus {
1436 range: Point::new(0, 0)..Point::new(0, 6),
1437 diff_status: DiffHunkStatusKind::Added,
1438 old_text: "".into(),
1439 }],
1440 )]
1441 );
1442
1443 action_log.update(cx, |log, cx| {
1444 log.keep_edits_in_range(buffer.clone(), 0..5, cx)
1445 });
1446 cx.run_until_parked();
1447 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1448 }
1449
1450 #[gpui::test(iterations = 10)]
1451 async fn test_overwriting_files(cx: &mut TestAppContext) {
1452 init_test(cx);
1453
1454 let fs = FakeFs::new(cx.executor());
1455 fs.insert_tree(
1456 path!("/dir"),
1457 json!({
1458 "file1": "Lorem ipsum dolor"
1459 }),
1460 )
1461 .await;
1462 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1463 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1464 let file_path = project
1465 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1466 .unwrap();
1467
1468 let buffer = project
1469 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1470 .await
1471 .unwrap();
1472 cx.update(|cx| {
1473 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1474 buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
1475 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1476 });
1477 project
1478 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1479 .await
1480 .unwrap();
1481 cx.run_until_parked();
1482 assert_eq!(
1483 unreviewed_hunks(&action_log, cx),
1484 vec![(
1485 buffer.clone(),
1486 vec![HunkStatus {
1487 range: Point::new(0, 0)..Point::new(0, 19),
1488 diff_status: DiffHunkStatusKind::Added,
1489 old_text: "".into(),
1490 }],
1491 )]
1492 );
1493
1494 action_log
1495 .update(cx, |log, cx| {
1496 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
1497 })
1498 .await
1499 .unwrap();
1500 cx.run_until_parked();
1501 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1502 assert_eq!(
1503 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1504 "Lorem ipsum dolor"
1505 );
1506 }
1507
1508 #[gpui::test(iterations = 10)]
1509 async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
1510 init_test(cx);
1511
1512 let fs = FakeFs::new(cx.executor());
1513 fs.insert_tree(
1514 path!("/dir"),
1515 json!({
1516 "file1": "Lorem ipsum dolor"
1517 }),
1518 )
1519 .await;
1520 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1521 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1522 let file_path = project
1523 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1524 .unwrap();
1525
1526 let buffer = project
1527 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1528 .await
1529 .unwrap();
1530 cx.update(|cx| {
1531 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1532 buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
1533 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1534 });
1535 project
1536 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1537 .await
1538 .unwrap();
1539 cx.run_until_parked();
1540 assert_eq!(
1541 unreviewed_hunks(&action_log, cx),
1542 vec![(
1543 buffer.clone(),
1544 vec![HunkStatus {
1545 range: Point::new(0, 0)..Point::new(0, 37),
1546 diff_status: DiffHunkStatusKind::Modified,
1547 old_text: "Lorem ipsum dolor".into(),
1548 }],
1549 )]
1550 );
1551
1552 cx.update(|cx| {
1553 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1554 buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
1555 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1556 });
1557 project
1558 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1559 .await
1560 .unwrap();
1561 cx.run_until_parked();
1562 assert_eq!(
1563 unreviewed_hunks(&action_log, cx),
1564 vec![(
1565 buffer.clone(),
1566 vec![HunkStatus {
1567 range: Point::new(0, 0)..Point::new(0, 9),
1568 diff_status: DiffHunkStatusKind::Added,
1569 old_text: "".into(),
1570 }],
1571 )]
1572 );
1573
1574 action_log
1575 .update(cx, |log, cx| {
1576 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
1577 })
1578 .await
1579 .unwrap();
1580 cx.run_until_parked();
1581 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1582 assert_eq!(
1583 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1584 "Lorem ipsum dolor"
1585 );
1586 }
1587
1588 #[gpui::test(iterations = 10)]
1589 async fn test_deleting_files(cx: &mut TestAppContext) {
1590 init_test(cx);
1591
1592 let fs = FakeFs::new(cx.executor());
1593 fs.insert_tree(
1594 path!("/dir"),
1595 json!({"file1": "lorem\n", "file2": "ipsum\n"}),
1596 )
1597 .await;
1598
1599 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1600 let file1_path = project
1601 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1602 .unwrap();
1603 let file2_path = project
1604 .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
1605 .unwrap();
1606
1607 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1608 let buffer1 = project
1609 .update(cx, |project, cx| {
1610 project.open_buffer(file1_path.clone(), cx)
1611 })
1612 .await
1613 .unwrap();
1614 let buffer2 = project
1615 .update(cx, |project, cx| {
1616 project.open_buffer(file2_path.clone(), cx)
1617 })
1618 .await
1619 .unwrap();
1620
1621 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
1622 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
1623 project
1624 .update(cx, |project, cx| {
1625 project.delete_file(file1_path.clone(), false, cx)
1626 })
1627 .unwrap()
1628 .await
1629 .unwrap();
1630 project
1631 .update(cx, |project, cx| {
1632 project.delete_file(file2_path.clone(), false, cx)
1633 })
1634 .unwrap()
1635 .await
1636 .unwrap();
1637 cx.run_until_parked();
1638 assert_eq!(
1639 unreviewed_hunks(&action_log, cx),
1640 vec![
1641 (
1642 buffer1.clone(),
1643 vec![HunkStatus {
1644 range: Point::new(0, 0)..Point::new(0, 0),
1645 diff_status: DiffHunkStatusKind::Deleted,
1646 old_text: "lorem\n".into(),
1647 }]
1648 ),
1649 (
1650 buffer2.clone(),
1651 vec![HunkStatus {
1652 range: Point::new(0, 0)..Point::new(0, 0),
1653 diff_status: DiffHunkStatusKind::Deleted,
1654 old_text: "ipsum\n".into(),
1655 }],
1656 )
1657 ]
1658 );
1659
1660 // Simulate file1 being recreated externally.
1661 fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
1662 .await;
1663
1664 // Simulate file2 being recreated by a tool.
1665 let buffer2 = project
1666 .update(cx, |project, cx| project.open_buffer(file2_path, cx))
1667 .await
1668 .unwrap();
1669 action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
1670 buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
1671 action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
1672 project
1673 .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
1674 .await
1675 .unwrap();
1676
1677 cx.run_until_parked();
1678 assert_eq!(
1679 unreviewed_hunks(&action_log, cx),
1680 vec![(
1681 buffer2.clone(),
1682 vec![HunkStatus {
1683 range: Point::new(0, 0)..Point::new(0, 5),
1684 diff_status: DiffHunkStatusKind::Added,
1685 old_text: "".into(),
1686 }],
1687 )]
1688 );
1689
1690 // Simulate file2 being deleted externally.
1691 fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
1692 .await
1693 .unwrap();
1694 cx.run_until_parked();
1695 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1696 }
1697
1698 #[gpui::test(iterations = 10)]
1699 async fn test_reject_edits(cx: &mut TestAppContext) {
1700 init_test(cx);
1701
1702 let fs = FakeFs::new(cx.executor());
1703 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1704 .await;
1705 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1706 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1707 let file_path = project
1708 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1709 .unwrap();
1710 let buffer = project
1711 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1712 .await
1713 .unwrap();
1714
1715 cx.update(|cx| {
1716 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1717 buffer.update(cx, |buffer, cx| {
1718 buffer
1719 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1720 .unwrap()
1721 });
1722 buffer.update(cx, |buffer, cx| {
1723 buffer
1724 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1725 .unwrap()
1726 });
1727 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1728 });
1729 cx.run_until_parked();
1730 assert_eq!(
1731 buffer.read_with(cx, |buffer, _| buffer.text()),
1732 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1733 );
1734 assert_eq!(
1735 unreviewed_hunks(&action_log, cx),
1736 vec![(
1737 buffer.clone(),
1738 vec![
1739 HunkStatus {
1740 range: Point::new(1, 0)..Point::new(3, 0),
1741 diff_status: DiffHunkStatusKind::Modified,
1742 old_text: "def\n".into(),
1743 },
1744 HunkStatus {
1745 range: Point::new(5, 0)..Point::new(5, 3),
1746 diff_status: DiffHunkStatusKind::Modified,
1747 old_text: "mno".into(),
1748 }
1749 ],
1750 )]
1751 );
1752
1753 // If the rejected range doesn't overlap with any hunk, we ignore it.
1754 action_log
1755 .update(cx, |log, cx| {
1756 log.reject_edits_in_ranges(
1757 buffer.clone(),
1758 vec![Point::new(4, 0)..Point::new(4, 0)],
1759 cx,
1760 )
1761 })
1762 .await
1763 .unwrap();
1764 cx.run_until_parked();
1765 assert_eq!(
1766 buffer.read_with(cx, |buffer, _| buffer.text()),
1767 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1768 );
1769 assert_eq!(
1770 unreviewed_hunks(&action_log, cx),
1771 vec![(
1772 buffer.clone(),
1773 vec![
1774 HunkStatus {
1775 range: Point::new(1, 0)..Point::new(3, 0),
1776 diff_status: DiffHunkStatusKind::Modified,
1777 old_text: "def\n".into(),
1778 },
1779 HunkStatus {
1780 range: Point::new(5, 0)..Point::new(5, 3),
1781 diff_status: DiffHunkStatusKind::Modified,
1782 old_text: "mno".into(),
1783 }
1784 ],
1785 )]
1786 );
1787
1788 action_log
1789 .update(cx, |log, cx| {
1790 log.reject_edits_in_ranges(
1791 buffer.clone(),
1792 vec![Point::new(0, 0)..Point::new(1, 0)],
1793 cx,
1794 )
1795 })
1796 .await
1797 .unwrap();
1798 cx.run_until_parked();
1799 assert_eq!(
1800 buffer.read_with(cx, |buffer, _| buffer.text()),
1801 "abc\ndef\nghi\njkl\nmnO"
1802 );
1803 assert_eq!(
1804 unreviewed_hunks(&action_log, cx),
1805 vec![(
1806 buffer.clone(),
1807 vec![HunkStatus {
1808 range: Point::new(4, 0)..Point::new(4, 3),
1809 diff_status: DiffHunkStatusKind::Modified,
1810 old_text: "mno".into(),
1811 }],
1812 )]
1813 );
1814
1815 action_log
1816 .update(cx, |log, cx| {
1817 log.reject_edits_in_ranges(
1818 buffer.clone(),
1819 vec![Point::new(4, 0)..Point::new(4, 0)],
1820 cx,
1821 )
1822 })
1823 .await
1824 .unwrap();
1825 cx.run_until_parked();
1826 assert_eq!(
1827 buffer.read_with(cx, |buffer, _| buffer.text()),
1828 "abc\ndef\nghi\njkl\nmno"
1829 );
1830 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1831 }
1832
1833 #[gpui::test(iterations = 10)]
1834 async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
1835 init_test(cx);
1836
1837 let fs = FakeFs::new(cx.executor());
1838 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1839 .await;
1840 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1841 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1842 let file_path = project
1843 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1844 .unwrap();
1845 let buffer = project
1846 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1847 .await
1848 .unwrap();
1849
1850 cx.update(|cx| {
1851 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1852 buffer.update(cx, |buffer, cx| {
1853 buffer
1854 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1855 .unwrap()
1856 });
1857 buffer.update(cx, |buffer, cx| {
1858 buffer
1859 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1860 .unwrap()
1861 });
1862 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1863 });
1864 cx.run_until_parked();
1865 assert_eq!(
1866 buffer.read_with(cx, |buffer, _| buffer.text()),
1867 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1868 );
1869 assert_eq!(
1870 unreviewed_hunks(&action_log, cx),
1871 vec![(
1872 buffer.clone(),
1873 vec![
1874 HunkStatus {
1875 range: Point::new(1, 0)..Point::new(3, 0),
1876 diff_status: DiffHunkStatusKind::Modified,
1877 old_text: "def\n".into(),
1878 },
1879 HunkStatus {
1880 range: Point::new(5, 0)..Point::new(5, 3),
1881 diff_status: DiffHunkStatusKind::Modified,
1882 old_text: "mno".into(),
1883 }
1884 ],
1885 )]
1886 );
1887
1888 action_log.update(cx, |log, cx| {
1889 let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
1890 ..buffer.read(cx).anchor_before(Point::new(1, 0));
1891 let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
1892 ..buffer.read(cx).anchor_before(Point::new(5, 3));
1893
1894 log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], cx)
1895 .detach();
1896 assert_eq!(
1897 buffer.read_with(cx, |buffer, _| buffer.text()),
1898 "abc\ndef\nghi\njkl\nmno"
1899 );
1900 });
1901 cx.run_until_parked();
1902 assert_eq!(
1903 buffer.read_with(cx, |buffer, _| buffer.text()),
1904 "abc\ndef\nghi\njkl\nmno"
1905 );
1906 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1907 }
1908
1909 #[gpui::test(iterations = 10)]
1910 async fn test_reject_deleted_file(cx: &mut TestAppContext) {
1911 init_test(cx);
1912
1913 let fs = FakeFs::new(cx.executor());
1914 fs.insert_tree(path!("/dir"), json!({"file": "content"}))
1915 .await;
1916 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1917 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1918 let file_path = project
1919 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1920 .unwrap();
1921 let buffer = project
1922 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
1923 .await
1924 .unwrap();
1925
1926 cx.update(|cx| {
1927 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
1928 });
1929 project
1930 .update(cx, |project, cx| {
1931 project.delete_file(file_path.clone(), false, cx)
1932 })
1933 .unwrap()
1934 .await
1935 .unwrap();
1936 cx.run_until_parked();
1937 assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
1938 assert_eq!(
1939 unreviewed_hunks(&action_log, cx),
1940 vec![(
1941 buffer.clone(),
1942 vec![HunkStatus {
1943 range: Point::new(0, 0)..Point::new(0, 0),
1944 diff_status: DiffHunkStatusKind::Deleted,
1945 old_text: "content".into(),
1946 }]
1947 )]
1948 );
1949
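        // Rejecting the deletion hunk should recreate the file with its original content.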
1950 action_log
1951 .update(cx, |log, cx| {
1952 log.reject_edits_in_ranges(
1953 buffer.clone(),
1954 vec![Point::new(0, 0)..Point::new(0, 0)],
1955 cx,
1956 )
1957 })
1958 .await
1959 .unwrap();
1960 cx.run_until_parked();
1961 assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
1962 assert!(fs.is_file(path!("/dir/file").as_ref()).await);
1963 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1964 }
1965
1966 #[gpui::test(iterations = 10)]
1967 async fn test_reject_created_file(cx: &mut TestAppContext) {
1968 init_test(cx);
1969
1970 let fs = FakeFs::new(cx.executor());
1971 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1972 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1973 let file_path = project
1974 .read_with(cx, |project, cx| {
1975 project.find_project_path("dir/new_file", cx)
1976 })
1977 .unwrap();
1978 let buffer = project
1979 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1980 .await
1981 .unwrap();
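        // The agent creates the file, writes its content, and saves it to disk.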
1982 cx.update(|cx| {
1983 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1984 buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
1985 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1986 });
1987 project
1988 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1989 .await
1990 .unwrap();
1991 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
1992 cx.run_until_parked();
1993 assert_eq!(
1994 unreviewed_hunks(&action_log, cx),
1995 vec![(
1996 buffer.clone(),
1997 vec![HunkStatus {
1998 range: Point::new(0, 0)..Point::new(0, 7),
1999 diff_status: DiffHunkStatusKind::Added,
2000 old_text: "".into(),
2001 }],
2002 )]
2003 );
2004
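        // Rejecting the creation hunk should remove the new file from disk.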
2005 action_log
2006 .update(cx, |log, cx| {
2007 log.reject_edits_in_ranges(
2008 buffer.clone(),
2009 vec![Point::new(0, 0)..Point::new(0, 11)],
2010 cx,
2011 )
2012 })
2013 .await
2014 .unwrap();
2015 cx.run_until_parked();
2016 assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
2017 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2018 }
2019
2020 #[gpui::test]
2021 async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
2022 init_test(cx);
2023
2024 let fs = FakeFs::new(cx.executor());
2025 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2026 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2027
2028 let file_path = project
2029 .read_with(cx, |project, cx| {
2030 project.find_project_path("dir/new_file", cx)
2031 })
2032 .unwrap();
2033 let buffer = project
2034 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2035 .await
2036 .unwrap();
2037
2038 // AI creates file with initial content
2039 cx.update(|cx| {
2040 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2041 buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
2042 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2043 });
2044
2045 project
2046 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2047 .await
2048 .unwrap();
2049
2050 cx.run_until_parked();
2051
2052 // User makes additional edits
2053 cx.update(|cx| {
2054 buffer.update(cx, |buffer, cx| {
2055 buffer.edit([(10..10, "\nuser added this line")], None, cx);
2056 });
2057 });
2058
2059 project
2060 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2061 .await
2062 .unwrap();
2063
2064 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2065
2066 // Reject all
2067 action_log
2068 .update(cx, |log, cx| {
2069 log.reject_edits_in_ranges(
2070 buffer.clone(),
2071 vec![Point::new(0, 0)..Point::new(100, 0)],
2072 cx,
2073 )
2074 })
2075 .await
2076 .unwrap();
2077 cx.run_until_parked();
2078
        // Because the user edited the file, rejecting keeps it on disk with its content intact
2080 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2081
2082 let content = buffer.read_with(cx, |buffer, _| buffer.text());
2083 assert_eq!(content, "ai content\nuser added this line");
2084 }
2085
2086 #[gpui::test]
2087 async fn test_reject_after_accepting_hunk_on_created_file(cx: &mut TestAppContext) {
2088 init_test(cx);
2089
2090 let fs = FakeFs::new(cx.executor());
2091 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2092 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2093
2094 let file_path = project
2095 .read_with(cx, |project, cx| {
2096 project.find_project_path("dir/new_file", cx)
2097 })
2098 .unwrap();
2099 let buffer = project
2100 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
2101 .await
2102 .unwrap();
2103
2104 // AI creates file with initial content
2105 cx.update(|cx| {
2106 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2107 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
2108 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2109 });
2110 project
2111 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2112 .await
2113 .unwrap();
2114 cx.run_until_parked();
2115 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2116
2117 // User accepts the single hunk
2118 action_log.update(cx, |log, cx| {
2119 log.keep_edits_in_range(buffer.clone(), Anchor::MIN..Anchor::MAX, cx)
2120 });
2121 cx.run_until_parked();
2122 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2123 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2124
2125 // AI modifies the file
2126 cx.update(|cx| {
2127 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
2128 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2129 });
2130 project
2131 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2132 .await
2133 .unwrap();
2134 cx.run_until_parked();
2135 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2136
2137 // User rejects the hunk
2138 action_log
2139 .update(cx, |log, cx| {
2140 log.reject_edits_in_ranges(buffer.clone(), vec![Anchor::MIN..Anchor::MAX], cx)
2141 })
2142 .await
2143 .unwrap();
2144 cx.run_until_parked();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2146 assert_eq!(
2147 buffer.read_with(cx, |buffer, _| buffer.text()),
2148 "ai content v1"
2149 );
2150 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2151 }
2152
2153 #[gpui::test]
2154 async fn test_reject_edits_on_previously_accepted_created_file(cx: &mut TestAppContext) {
2155 init_test(cx);
2156
2157 let fs = FakeFs::new(cx.executor());
2158 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2159 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2160
2161 let file_path = project
2162 .read_with(cx, |project, cx| {
2163 project.find_project_path("dir/new_file", cx)
2164 })
2165 .unwrap();
2166 let buffer = project
2167 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
2168 .await
2169 .unwrap();
2170
2171 // AI creates file with initial content
2172 cx.update(|cx| {
2173 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
2174 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v1", cx));
2175 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2176 });
2177 project
2178 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2179 .await
2180 .unwrap();
2181 cx.run_until_parked();
2182
2183 // User clicks "Accept All"
2184 action_log.update(cx, |log, cx| log.keep_all_edits(cx));
2185 cx.run_until_parked();
2186 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2187 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared
2188
2189 // AI modifies file again
2190 cx.update(|cx| {
2191 buffer.update(cx, |buffer, cx| buffer.set_text("ai content v2", cx));
2192 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2193 });
2194 project
2195 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2196 .await
2197 .unwrap();
2198 cx.run_until_parked();
2199 assert_ne!(unreviewed_hunks(&action_log, cx), vec![]);
2200
2201 // User clicks "Reject All"
2202 action_log
2203 .update(cx, |log, cx| log.reject_all_edits(cx))
2204 .await;
2205 cx.run_until_parked();
2206 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
2207 assert_eq!(
2208 buffer.read_with(cx, |buffer, _| buffer.text()),
2209 "ai content v1"
2210 );
2211 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2212 }
2213
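    // Fuzz test: randomly interleaves agent edits, user edits, keeps, and rejects,
    // then verifies that the diff base plus unreviewed edits reproduces the buffer.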
2214 #[gpui::test(iterations = 100)]
2215 async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
2216 init_test(cx);
2217
2218 let operations = env::var("OPERATIONS")
2219 .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
2220 .unwrap_or(20);
2221
2222 let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
2223 let fs = FakeFs::new(cx.executor());
2224 fs.insert_tree(path!("/dir"), json!({"file": text})).await;
2225 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2226 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2227 let file_path = project
2228 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
2229 .unwrap();
2230 let buffer = project
2231 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2232 .await
2233 .unwrap();
2234
2235 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
2236
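        // Randomly keep, reject, or edit, occasionally quiescing to check invariants.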
2237 for _ in 0..operations {
2238 match rng.gen_range(0..100) {
2239 0..25 => {
2240 action_log.update(cx, |log, cx| {
2241 let range = buffer.read(cx).random_byte_range(0, &mut rng);
2242 log::info!("keeping edits in range {:?}", range);
2243 log.keep_edits_in_range(buffer.clone(), range, cx)
2244 });
2245 }
2246 25..50 => {
2247 action_log
2248 .update(cx, |log, cx| {
2249 let range = buffer.read(cx).random_byte_range(0, &mut rng);
2250 log::info!("rejecting edits in range {:?}", range);
2251 log.reject_edits_in_ranges(buffer.clone(), vec![range], cx)
2252 })
2253 .await
2254 .unwrap();
2255 }
2256 _ => {
2257 let is_agent_edit = rng.gen_bool(0.5);
2258 if is_agent_edit {
2259 log::info!("agent edit");
2260 } else {
2261 log::info!("user edit");
2262 }
2263 cx.update(|cx| {
2264 buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
2265 if is_agent_edit {
2266 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2267 }
2268 });
2269 }
2270 }
2271
2272 if rng.gen_bool(0.2) {
2273 quiesce(&action_log, &buffer, cx);
2274 }
2275 }
2276
2277 quiesce(&action_log, &buffer, cx);
2278
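        // Waits for pending work, then checks that replaying the unreviewed edits
        // on top of the diff base yields the current buffer text.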
2279 fn quiesce(
2280 action_log: &Entity<ActionLog>,
2281 buffer: &Entity<Buffer>,
2282 cx: &mut TestAppContext,
2283 ) {
2284 log::info!("quiescing...");
2285 cx.run_until_parked();
2286 action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
2288 let mut old_text = tracked_buffer.diff_base.clone();
2289 let new_text = buffer.read(cx).as_rope();
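                // Apply each row edit using `new` coordinates: earlier replacements have
                // already realigned `old_text`, so new-space positions stay valid.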
2290 for edit in tracked_buffer.unreviewed_edits.edits() {
2291 let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
2292 let old_end = old_text.point_to_offset(cmp::min(
2293 Point::new(edit.new.start + edit.old_len(), 0),
2294 old_text.max_point(),
2295 ));
2296 old_text.replace(
2297 old_start..old_end,
2298 &new_text.slice_rows(edit.new.clone()).to_string(),
2299 );
2300 }
2301 pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
2302 })
2303 }
2304 }
2305
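    // Committing content to the repository HEAD should mark matching agent edits
    // as kept, leaving only hunks whose content still differs from HEAD.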
2306 #[gpui::test]
2307 async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
2308 init_test(cx);
2309
2310 let fs = FakeFs::new(cx.background_executor.clone());
2311 fs.insert_tree(
2312 path!("/project"),
2313 json!({
2314 ".git": {},
2315 "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
2316 }),
2317 )
2318 .await;
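        // Seed the repository HEAD so the file starts out unmodified relative to git.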
2319 fs.set_head_for_repo(
2320 path!("/project/.git").as_ref(),
2321 &[("file.txt".into(), "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
2322 "0000000",
2323 );
2324 cx.run_until_parked();
2325
2326 let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
2327 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2328
2329 let file_path = project
2330 .read_with(cx, |project, cx| {
2331 project.find_project_path(path!("/project/file.txt"), cx)
2332 })
2333 .unwrap();
2334 let buffer = project
2335 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2336 .await
2337 .unwrap();
2338
2339 cx.update(|cx| {
2340 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
2341 buffer.update(cx, |buffer, cx| {
2342 buffer.edit(
2343 [
2344 // Edit at the very start: a -> A
2345 (Point::new(0, 0)..Point::new(0, 1), "A"),
2346 // Deletion in the middle: remove lines d and e
2347 (Point::new(3, 0)..Point::new(5, 0), ""),
2348 // Modification: g -> GGG
2349 (Point::new(6, 0)..Point::new(6, 1), "GGG"),
2350 // Addition: insert new line after h
2351 (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
2352 // Edit the very last character: j -> J
2353 (Point::new(9, 0)..Point::new(9, 1), "J"),
2354 ],
2355 None,
2356 cx,
2357 );
2358 });
2359 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
2360 });
2361 cx.run_until_parked();
2362 assert_eq!(
2363 unreviewed_hunks(&action_log, cx),
2364 vec![(
2365 buffer.clone(),
2366 vec![
2367 HunkStatus {
2368 range: Point::new(0, 0)..Point::new(1, 0),
2369 diff_status: DiffHunkStatusKind::Modified,
2370 old_text: "a\n".into()
2371 },
2372 HunkStatus {
2373 range: Point::new(3, 0)..Point::new(3, 0),
2374 diff_status: DiffHunkStatusKind::Deleted,
2375 old_text: "d\ne\n".into()
2376 },
2377 HunkStatus {
2378 range: Point::new(4, 0)..Point::new(5, 0),
2379 diff_status: DiffHunkStatusKind::Modified,
2380 old_text: "g\n".into()
2381 },
2382 HunkStatus {
2383 range: Point::new(6, 0)..Point::new(7, 0),
2384 diff_status: DiffHunkStatusKind::Added,
2385 old_text: "".into()
2386 },
2387 HunkStatus {
2388 range: Point::new(8, 0)..Point::new(8, 1),
2389 diff_status: DiffHunkStatusKind::Modified,
2390 old_text: "j".into()
2391 }
2392 ]
2393 )]
2394 );
2395
2396 // Simulate a git commit that matches some edits but not others:
2397 // - Accepts the first edit (a -> A)
2398 // - Accepts the deletion (remove d and e)
2399 // - Makes a different change to g (g -> G instead of GGG)
2400 // - Ignores the NEW line addition
2401 // - Ignores the last line edit (j stays as j)
2402 fs.set_head_for_repo(
2403 path!("/project/.git").as_ref(),
2404 &[("file.txt".into(), "A\nb\nc\nf\nG\nh\ni\nj".into())],
2405 "0000001",
2406 );
2407 cx.run_until_parked();
2408 assert_eq!(
2409 unreviewed_hunks(&action_log, cx),
2410 vec![(
2411 buffer.clone(),
2412 vec![
2413 HunkStatus {
2414 range: Point::new(4, 0)..Point::new(5, 0),
2415 diff_status: DiffHunkStatusKind::Modified,
2416 old_text: "g\n".into()
2417 },
2418 HunkStatus {
2419 range: Point::new(6, 0)..Point::new(7, 0),
2420 diff_status: DiffHunkStatusKind::Added,
2421 old_text: "".into()
2422 },
2423 HunkStatus {
2424 range: Point::new(8, 0)..Point::new(8, 1),
2425 diff_status: DiffHunkStatusKind::Modified,
2426 old_text: "j".into()
2427 }
2428 ]
2429 )]
2430 );
2431
2432 // Make another commit that accepts the NEW line but with different content
2433 fs.set_head_for_repo(
2434 path!("/project/.git").as_ref(),
2435 &[(
2436 "file.txt".into(),
2437 "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into(),
2438 )],
2439 "0000002",
2440 );
2441 cx.run_until_parked();
2442 assert_eq!(
2443 unreviewed_hunks(&action_log, cx),
2444 vec![(
2445 buffer.clone(),
2446 vec![
2447 HunkStatus {
2448 range: Point::new(6, 0)..Point::new(7, 0),
2449 diff_status: DiffHunkStatusKind::Added,
2450 old_text: "".into()
2451 },
2452 HunkStatus {
2453 range: Point::new(8, 0)..Point::new(8, 1),
2454 diff_status: DiffHunkStatusKind::Modified,
2455 old_text: "j".into()
2456 }
2457 ]
2458 )]
2459 );
2460
2461 // Final commit that accepts all remaining edits
2462 fs.set_head_for_repo(
2463 path!("/project/.git").as_ref(),
2464 &[("file.txt".into(), "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
2465 "0000003",
2466 );
2467 cx.run_until_parked();
2468 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
2469 }
2470
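    /// A simplified, comparable view of a diff hunk used in test assertions.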
2471 #[derive(Debug, Clone, PartialEq, Eq)]
2472 struct HunkStatus {
2473 range: Range<Point>,
2474 diff_status: DiffHunkStatusKind,
2475 old_text: String,
2476 }
2477
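    /// Collects each changed buffer's unreviewed hunks as `HunkStatus` values.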
2478 fn unreviewed_hunks(
2479 action_log: &Entity<ActionLog>,
2480 cx: &TestAppContext,
2481 ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
2482 cx.read(|cx| {
2483 action_log
2484 .read(cx)
2485 .changed_buffers(cx)
2486 .into_iter()
2487 .map(|(buffer, diff)| {
2488 let snapshot = buffer.read(cx).snapshot();
2489 (
2490 buffer,
2491 diff.read(cx)
2492 .hunks(&snapshot, cx)
2493 .map(|hunk| HunkStatus {
2494 diff_status: hunk.status().kind,
2495 range: hunk.range,
2496 old_text: diff
2497 .read(cx)
2498 .base_text()
2499 .text_for_range(hunk.diff_base_byte_range)
2500 .collect(),
2501 })
2502 .collect(),
2503 )
2504 })
2505 .collect()
2506 })
2507 }
2508
2509 #[gpui::test]
2510 async fn test_format_patch(cx: &mut TestAppContext) {
2511 init_test(cx);
2512
2513 let fs = FakeFs::new(cx.executor());
2514 fs.insert_tree(
2515 path!("/dir"),
2516 json!({"test.txt": "line 1\nline 2\nline 3\n"}),
2517 )
2518 .await;
2519 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
2520 let action_log = cx.new(|_| ActionLog::new(project.clone()));
2521
2522 let file_path = project
2523 .read_with(cx, |project, cx| {
2524 project.find_project_path("dir/test.txt", cx)
2525 })
2526 .unwrap();
2527 let buffer = project
2528 .update(cx, |project, cx| project.open_buffer(file_path, cx))
2529 .await
2530 .unwrap();
2531
2532 cx.update(|cx| {
2533 // Track the buffer and mark it as read first
2534 action_log.update(cx, |log, cx| {
2535 log.buffer_read(buffer.clone(), cx);
2536 });
2537
2538 // Make some edits to create a patch
2539 buffer.update(cx, |buffer, cx| {
2540 buffer
2541 .edit([(Point::new(1, 0)..Point::new(1, 6), "CHANGED")], None, cx)
                    .unwrap(); // Replace "line 2" with "CHANGED"
2543 });
2544 });
2545
2546 cx.run_until_parked();
2547
        // Collect the unnotified user edits as a unified diff
2549 let patch = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));
2550
        // The patch should be a complete unified diff with file headers and the changed hunk
2552 assert_eq!(
2553 patch.unwrap(),
2554 indoc! {"
2555 --- a/dir/test.txt
2556 +++ b/dir/test.txt
2557 @@ -1,3 +1,3 @@
2558 line 1
2559 -line 2
2560 +CHANGED
2561 line 3
2562 "}
2563 );
2564 }
2565}