1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use clock;
4use collections::BTreeMap;
5use futures::{FutureExt, StreamExt, channel::mpsc};
6use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
7use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
8use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
9use std::{cmp, ops::Range, sync::Arc};
10use text::{Edit, Patch, Rope};
11use util::{
12 RangeExt, ResultExt as _,
13 paths::{PathStyle, RemotePathBuf},
14};
15
/// Tracks actions performed by tools in a thread.
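///
/// A rough usage sketch (illustrative only, not compiled as a doctest). It
/// assumes a `project: Entity<Project>` and an already-open
/// `buffer: Entity<Buffer>` are available:
///
/// ```ignore
/// let action_log = cx.new(|_| ActionLog::new(project.clone()));
/// // The agent reads the buffer, then edits it through some tool...
/// action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
/// action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
/// // Later, fetch the diffs that still need user review.
/// let changed = action_log.read(cx).changed_buffers(cx);
/// ```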
17pub struct ActionLog {
18 /// Buffers that we want to notify the model about when they change.
19 tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
20 /// Has the model edited a file since it last checked diagnostics?
21 edited_since_project_diagnostics_check: bool,
22 /// The project this action log is associated with
23 project: Entity<Project>,
24}
25
26impl ActionLog {
27 /// Creates a new, empty action log associated with the given project.
28 pub fn new(project: Entity<Project>) -> Self {
29 Self {
30 tracked_buffers: BTreeMap::default(),
31 edited_since_project_diagnostics_check: false,
32 project,
33 }
34 }
35
36 pub fn project(&self) -> &Entity<Project> {
37 &self.project
38 }
39
    /// Notifies the log that project diagnostics have been checked.
41 pub fn checked_project_diagnostics(&mut self) {
42 self.edited_since_project_diagnostics_check = false;
43 }
44
45 /// Returns true if any files have been edited since the last project diagnostics check
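    ///
    /// A minimal sketch of the flag's lifecycle (illustrative only, not
    /// compiled as a doctest), assuming `action_log` already tracks `buffer`:
    ///
    /// ```ignore
    /// action_log.update(cx, |log, cx| {
    ///     log.buffer_edited(buffer.clone(), cx);
    ///     assert!(log.has_edited_files_since_project_diagnostics_check());
    ///     log.checked_project_diagnostics();
    ///     assert!(!log.has_edited_files_since_project_diagnostics_check());
    /// });
    /// ```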
46 pub fn has_edited_files_since_project_diagnostics_check(&self) -> bool {
47 self.edited_since_project_diagnostics_check
48 }
49
50 pub fn latest_snapshot(&self, buffer: &Entity<Buffer>) -> Option<text::BufferSnapshot> {
51 Some(self.tracked_buffers.get(buffer)?.snapshot.clone())
52 }
53
    /// Returns a unified diff patch containing the user edits made since the last read or notification.
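    ///
    /// The result is one patch per buffer, joined by blank lines, roughly in
    /// this shape (illustrative path and hunk):
    ///
    /// ```text
    /// --- a/dir/file
    /// +++ b/dir/file
    /// @@ -1,2 +1,2 @@
    /// -abc
    /// +abXc
    ///  def
    /// ```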
55 pub fn unnotified_user_edits(&self, cx: &Context<Self>) -> Option<String> {
56 let diffs = self
57 .tracked_buffers
58 .values()
59 .filter_map(|tracked| {
60 if !tracked.may_have_unnotified_user_edits {
61 return None;
62 }
63
64 let text_with_latest_user_edits = tracked.diff_base.to_string();
65 let text_with_last_seen_user_edits = tracked.last_seen_base.to_string();
66 if text_with_latest_user_edits == text_with_last_seen_user_edits {
67 return None;
68 }
69 let patch = language::unified_diff(
70 &text_with_last_seen_user_edits,
71 &text_with_latest_user_edits,
72 );
73
74 let buffer = tracked.buffer.clone();
75 let file_path = buffer
76 .read(cx)
77 .file()
78 .map(|file| RemotePathBuf::new(file.full_path(cx), PathStyle::Posix).to_proto())
79 .unwrap_or_else(|| format!("buffer_{}", buffer.entity_id()));
80
81 let mut result = String::new();
82 result.push_str(&format!("--- a/{}\n", file_path));
83 result.push_str(&format!("+++ b/{}\n", file_path));
84 result.push_str(&patch);
85
86 Some(result)
87 })
88 .collect::<Vec<_>>();
89
90 if diffs.is_empty() {
91 return None;
92 }
93
94 let unified_diff = diffs.join("\n\n");
95 Some(unified_diff)
96 }
97
    /// Returns a unified diff patch with the user edits made since the last read or
    /// notification, and marks them as notified.
100 pub fn flush_unnotified_user_edits(&mut self, cx: &Context<Self>) -> Option<String> {
101 let patch = self.unnotified_user_edits(cx);
102 self.tracked_buffers.values_mut().for_each(|tracked| {
103 tracked.may_have_unnotified_user_edits = false;
104 tracked.last_seen_base = tracked.diff_base.clone();
105 });
106 patch
107 }
108
109 fn track_buffer_internal(
110 &mut self,
111 buffer: Entity<Buffer>,
112 is_created: bool,
113 cx: &mut Context<Self>,
114 ) -> &mut TrackedBuffer {
115 let status = if is_created {
116 if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
117 match tracked.status {
118 TrackedBufferStatus::Created {
119 existing_file_content,
120 } => TrackedBufferStatus::Created {
121 existing_file_content,
122 },
123 TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
124 TrackedBufferStatus::Created {
125 existing_file_content: Some(tracked.diff_base),
126 }
127 }
128 }
129 } else if buffer
130 .read(cx)
131 .file()
132 .map_or(false, |file| file.disk_state().exists())
133 {
134 TrackedBufferStatus::Created {
135 existing_file_content: Some(buffer.read(cx).as_rope().clone()),
136 }
137 } else {
138 TrackedBufferStatus::Created {
139 existing_file_content: None,
140 }
141 }
142 } else {
143 TrackedBufferStatus::Modified
144 };
145
146 let tracked_buffer = self
147 .tracked_buffers
148 .entry(buffer.clone())
149 .or_insert_with(|| {
150 let open_lsp_handle = self.project.update(cx, |project, cx| {
151 project.register_buffer_with_language_servers(&buffer, cx)
152 });
153
154 let text_snapshot = buffer.read(cx).text_snapshot();
155 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
156 let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
157 let diff_base;
158 let last_seen_base;
159 let unreviewed_edits;
160 if is_created {
161 diff_base = Rope::default();
162 last_seen_base = Rope::default();
163 unreviewed_edits = Patch::new(vec![Edit {
164 old: 0..1,
165 new: 0..text_snapshot.max_point().row + 1,
166 }])
167 } else {
168 diff_base = buffer.read(cx).as_rope().clone();
169 last_seen_base = diff_base.clone();
170 unreviewed_edits = Patch::default();
171 }
172 TrackedBuffer {
173 buffer: buffer.clone(),
174 diff_base,
175 last_seen_base,
176 unreviewed_edits,
177 snapshot: text_snapshot.clone(),
178 status,
179 version: buffer.read(cx).version(),
180 diff,
181 diff_update: diff_update_tx,
182 may_have_unnotified_user_edits: false,
183 _open_lsp_handle: open_lsp_handle,
184 _maintain_diff: cx.spawn({
185 let buffer = buffer.clone();
186 async move |this, cx| {
187 Self::maintain_diff(this, buffer, diff_update_rx, cx)
188 .await
189 .ok();
190 }
191 }),
192 _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
193 }
194 });
195 tracked_buffer.version = buffer.read(cx).version();
196 tracked_buffer
197 }
198
199 fn handle_buffer_event(
200 &mut self,
201 buffer: Entity<Buffer>,
202 event: &BufferEvent,
203 cx: &mut Context<Self>,
204 ) {
205 match event {
206 BufferEvent::Edited { .. } => self.handle_buffer_edited(buffer, cx),
207 BufferEvent::FileHandleChanged => {
208 self.handle_buffer_file_changed(buffer, cx);
209 }
210 _ => {}
211 };
212 }
213
214 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
215 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
216 return;
217 };
218 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
219 }
220
221 fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
222 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
223 return;
224 };
225
226 match tracked_buffer.status {
227 TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
228 if buffer
229 .read(cx)
230 .file()
231 .map_or(false, |file| file.disk_state() == DiskState::Deleted)
232 {
233 // If the buffer had been edited by a tool, but it got
234 // deleted externally, we want to stop tracking it.
235 self.tracked_buffers.remove(&buffer);
236 }
237 cx.notify();
238 }
239 TrackedBufferStatus::Deleted => {
240 if buffer
241 .read(cx)
242 .file()
243 .map_or(false, |file| file.disk_state() != DiskState::Deleted)
244 {
245 // If the buffer had been deleted by a tool, but it got
246 // resurrected externally, we want to clear the edits we
247 // were tracking and reset the buffer's state.
248 self.tracked_buffers.remove(&buffer);
249 self.track_buffer_internal(buffer, false, cx);
250 }
251 cx.notify();
252 }
253 }
254 }
255
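    /// Long-running task that keeps a tracked buffer's diff current: it
    /// rebases the agent's diff base whenever the buffer changes, and folds in
    /// committed edits whenever the buffer's git repository moves to a new
    /// HEAD commit.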
256 async fn maintain_diff(
257 this: WeakEntity<Self>,
258 buffer: Entity<Buffer>,
259 mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
260 cx: &mut AsyncApp,
261 ) -> Result<()> {
262 let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
263 let git_diff = this
264 .update(cx, |this, cx| {
265 this.project.update(cx, |project, cx| {
266 project.open_uncommitted_diff(buffer.clone(), cx)
267 })
268 })?
269 .await
270 .ok();
271 let buffer_repo = git_store.read_with(cx, |git_store, cx| {
272 git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
273 })?;
274
275 let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
276 let _repo_subscription =
277 if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
278 cx.update(|cx| {
279 let mut old_head = buffer_repo.read(cx).head_commit.clone();
280 Some(cx.subscribe(git_diff, move |_, event, cx| match event {
281 buffer_diff::BufferDiffEvent::DiffChanged { .. } => {
282 let new_head = buffer_repo.read(cx).head_commit.clone();
283 if new_head != old_head {
284 old_head = new_head;
285 git_diff_updates_tx.send(()).ok();
286 }
287 }
288 _ => {}
289 }))
290 })?
291 } else {
292 None
293 };
294
295 loop {
296 futures::select_biased! {
297 buffer_update = buffer_updates.next() => {
298 if let Some((author, buffer_snapshot)) = buffer_update {
299 Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
300 } else {
301 break;
302 }
303 }
304 _ = git_diff_updates_rx.changed().fuse() => {
305 if let Some(git_diff) = git_diff.as_ref() {
306 Self::keep_committed_edits(&this, &buffer, &git_diff, cx).await?;
307 }
308 }
309 }
310 }
311
312 Ok(())
313 }
314
315 async fn track_edits(
316 this: &WeakEntity<ActionLog>,
317 buffer: &Entity<Buffer>,
318 author: ChangeAuthor,
319 buffer_snapshot: text::BufferSnapshot,
320 cx: &mut AsyncApp,
321 ) -> Result<()> {
322 let rebase = this.update(cx, |this, cx| {
323 let tracked_buffer = this
324 .tracked_buffers
325 .get_mut(buffer)
326 .context("buffer not tracked")?;
327
328 let rebase = cx.background_spawn({
329 let mut base_text = tracked_buffer.diff_base.clone();
330 let old_snapshot = tracked_buffer.snapshot.clone();
331 let new_snapshot = buffer_snapshot.clone();
332 let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
333 let edits = diff_snapshots(&old_snapshot, &new_snapshot);
334 let mut has_user_changes = false;
335 async move {
336 if let ChangeAuthor::User = author {
337 has_user_changes = apply_non_conflicting_edits(
338 &unreviewed_edits,
339 edits,
340 &mut base_text,
341 new_snapshot.as_rope(),
342 );
343 }
344
345 (Arc::new(base_text.to_string()), base_text, has_user_changes)
346 }
347 });
348
349 anyhow::Ok(rebase)
350 })??;
351 let (new_base_text, new_diff_base, has_user_changes) = rebase.await;
352
353 this.update(cx, |this, _| {
354 let tracked_buffer = this
355 .tracked_buffers
356 .get_mut(buffer)
357 .context("buffer not tracked")
358 .unwrap();
359 tracked_buffer.may_have_unnotified_user_edits |= has_user_changes;
360 })?;
361
362 Self::update_diff(
363 this,
364 buffer,
365 buffer_snapshot,
366 new_base_text,
367 new_diff_base,
368 cx,
369 )
370 .await
371 }
372
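    /// Called when the git HEAD changes: compares the newly committed changes
    /// with the unreviewed edits and, where the committed content matches the
    /// buffer's content, advances the agent's diff base so those edits no
    /// longer show up as pending review.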
373 async fn keep_committed_edits(
374 this: &WeakEntity<ActionLog>,
375 buffer: &Entity<Buffer>,
376 git_diff: &Entity<BufferDiff>,
377 cx: &mut AsyncApp,
378 ) -> Result<()> {
379 let buffer_snapshot = this.read_with(cx, |this, _cx| {
380 let tracked_buffer = this
381 .tracked_buffers
382 .get(buffer)
383 .context("buffer not tracked")?;
384 anyhow::Ok(tracked_buffer.snapshot.clone())
385 })??;
386 let (new_base_text, new_diff_base) = this
387 .read_with(cx, |this, cx| {
388 let tracked_buffer = this
389 .tracked_buffers
390 .get(buffer)
391 .context("buffer not tracked")?;
392 let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
393 let agent_diff_base = tracked_buffer.diff_base.clone();
394 let git_diff_base = git_diff.read(cx).base_text().as_rope().clone();
395 let buffer_text = tracked_buffer.snapshot.as_rope().clone();
396 anyhow::Ok(cx.background_spawn(async move {
397 let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
398 let committed_edits = language::line_diff(
399 &agent_diff_base.to_string(),
400 &git_diff_base.to_string(),
401 )
402 .into_iter()
403 .map(|(old, new)| Edit { old, new });
404
405 let mut new_agent_diff_base = agent_diff_base.clone();
406 let mut row_delta = 0i32;
407 for committed in committed_edits {
408 while let Some(unreviewed) = old_unreviewed_edits.peek() {
409 // If the committed edit matches the unreviewed
410 // edit, assume the user wants to keep it.
411 if committed.old == unreviewed.old {
412 let unreviewed_new =
413 buffer_text.slice_rows(unreviewed.new.clone()).to_string();
414 let committed_new =
415 git_diff_base.slice_rows(committed.new.clone()).to_string();
416 if unreviewed_new == committed_new {
417 let old_byte_start =
418 new_agent_diff_base.point_to_offset(Point::new(
419 (unreviewed.old.start as i32 + row_delta) as u32,
420 0,
421 ));
422 let old_byte_end =
423 new_agent_diff_base.point_to_offset(cmp::min(
424 Point::new(
425 (unreviewed.old.end as i32 + row_delta) as u32,
426 0,
427 ),
428 new_agent_diff_base.max_point(),
429 ));
430 new_agent_diff_base
431 .replace(old_byte_start..old_byte_end, &unreviewed_new);
432 row_delta +=
433 unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
434 }
435 } else if unreviewed.old.start >= committed.old.end {
436 break;
437 }
438
439 old_unreviewed_edits.next().unwrap();
440 }
441 }
442
443 (
444 Arc::new(new_agent_diff_base.to_string()),
445 new_agent_diff_base,
446 )
447 }))
448 })??
449 .await;
450
451 Self::update_diff(
452 this,
453 buffer,
454 buffer_snapshot,
455 new_base_text,
456 new_diff_base,
457 cx,
458 )
459 .await
460 }
461
462 async fn update_diff(
463 this: &WeakEntity<ActionLog>,
464 buffer: &Entity<Buffer>,
465 buffer_snapshot: text::BufferSnapshot,
466 new_base_text: Arc<String>,
467 new_diff_base: Rope,
468 cx: &mut AsyncApp,
469 ) -> Result<()> {
470 let (diff, language, language_registry) = this.read_with(cx, |this, cx| {
471 let tracked_buffer = this
472 .tracked_buffers
473 .get(buffer)
474 .context("buffer not tracked")?;
475 anyhow::Ok((
476 tracked_buffer.diff.clone(),
477 buffer.read(cx).language().cloned(),
478 buffer.read(cx).language_registry().clone(),
479 ))
480 })??;
481 let diff_snapshot = BufferDiff::update_diff(
482 diff.clone(),
483 buffer_snapshot.clone(),
484 Some(new_base_text),
485 true,
486 false,
487 language,
488 language_registry,
489 cx,
490 )
491 .await;
492 let mut unreviewed_edits = Patch::default();
493 if let Ok(diff_snapshot) = diff_snapshot {
494 unreviewed_edits = cx
495 .background_spawn({
496 let diff_snapshot = diff_snapshot.clone();
497 let buffer_snapshot = buffer_snapshot.clone();
498 let new_diff_base = new_diff_base.clone();
499 async move {
500 let mut unreviewed_edits = Patch::default();
501 for hunk in diff_snapshot
502 .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer_snapshot)
503 {
504 let old_range = new_diff_base
505 .offset_to_point(hunk.diff_base_byte_range.start)
506 ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
507 let new_range = hunk.range.start..hunk.range.end;
508 unreviewed_edits.push(point_to_row_edit(
509 Edit {
510 old: old_range,
511 new: new_range,
512 },
513 &new_diff_base,
514 &buffer_snapshot.as_rope(),
515 ));
516 }
517 unreviewed_edits
518 }
519 })
520 .await;
521
522 diff.update(cx, |diff, cx| {
523 diff.set_snapshot(diff_snapshot, &buffer_snapshot, cx);
524 })?;
525 }
526 this.update(cx, |this, cx| {
527 let tracked_buffer = this
528 .tracked_buffers
529 .get_mut(buffer)
530 .context("buffer not tracked")?;
531 tracked_buffer.diff_base = new_diff_base;
532 tracked_buffer.snapshot = buffer_snapshot;
533 tracked_buffer.unreviewed_edits = unreviewed_edits;
534 cx.notify();
535 anyhow::Ok(())
536 })?
537 }
538
    /// Tracks a buffer as read by the agent, so we can notify the model about user edits.
540 pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
541 self.track_buffer_internal(buffer, false, cx);
542 }
543
    /// Marks a buffer as created by the agent, so we can refresh it in the context.
545 pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
546 self.edited_since_project_diagnostics_check = true;
547 self.track_buffer_internal(buffer.clone(), true, cx);
548 }
549
    /// Marks a buffer as edited by the agent, so we can refresh it in the context.
551 pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
552 self.edited_since_project_diagnostics_check = true;
553
554 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
555 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
556 tracked_buffer.status = TrackedBufferStatus::Modified;
557 }
558 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
559 }
560
561 pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
562 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
563 match tracked_buffer.status {
564 TrackedBufferStatus::Created { .. } => {
565 self.tracked_buffers.remove(&buffer);
566 cx.notify();
567 }
568 TrackedBufferStatus::Modified => {
569 buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
570 tracked_buffer.status = TrackedBufferStatus::Deleted;
571 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
572 }
573 TrackedBufferStatus::Deleted => {}
574 }
575 cx.notify();
576 }
577
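    /// Accepts ("keeps") the unreviewed edits whose rows intersect the given
    /// range, folding them into the diff base so they no longer appear as
    /// pending hunks.
    ///
    /// A hedged sketch of typical usage (illustrative only, not compiled as a
    /// doctest):
    ///
    /// ```ignore
    /// action_log.update(cx, |log, cx| {
    ///     log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 0), cx)
    /// });
    /// ```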
578 pub fn keep_edits_in_range(
579 &mut self,
580 buffer: Entity<Buffer>,
581 buffer_range: Range<impl language::ToPoint>,
582 cx: &mut Context<Self>,
583 ) {
584 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
585 return;
586 };
587
588 match tracked_buffer.status {
589 TrackedBufferStatus::Deleted => {
590 self.tracked_buffers.remove(&buffer);
591 cx.notify();
592 }
593 _ => {
594 let buffer = buffer.read(cx);
595 let buffer_range =
596 buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
597 let mut delta = 0i32;
598
599 tracked_buffer.unreviewed_edits.retain_mut(|edit| {
600 edit.old.start = (edit.old.start as i32 + delta) as u32;
601 edit.old.end = (edit.old.end as i32 + delta) as u32;
602
603 if buffer_range.end.row < edit.new.start
604 || buffer_range.start.row > edit.new.end
605 {
606 true
607 } else {
608 let old_range = tracked_buffer
609 .diff_base
610 .point_to_offset(Point::new(edit.old.start, 0))
611 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
612 Point::new(edit.old.end, 0),
613 tracked_buffer.diff_base.max_point(),
614 ));
615 let new_range = tracked_buffer
616 .snapshot
617 .point_to_offset(Point::new(edit.new.start, 0))
618 ..tracked_buffer.snapshot.point_to_offset(cmp::min(
619 Point::new(edit.new.end, 0),
620 tracked_buffer.snapshot.max_point(),
621 ));
622 tracked_buffer.diff_base.replace(
623 old_range,
624 &tracked_buffer
625 .snapshot
626 .text_for_range(new_range)
627 .collect::<String>(),
628 );
629 delta += edit.new_len() as i32 - edit.old_len() as i32;
630 false
631 }
632 });
633 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
634 }
635 }
636 }
637
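    /// Rejects the agent's changes that intersect the given ranges: a modified
    /// buffer has the intersecting hunks reverted to the diff base, while a
    /// buffer the agent created or deleted is restored to its previous state.
    /// The returned task resolves once the resulting file operation (save or
    /// delete) completes.
    ///
    /// A hedged sketch of typical usage (illustrative only, not compiled as a
    /// doctest):
    ///
    /// ```ignore
    /// action_log
    ///     .update(cx, |log, cx| {
    ///         log.reject_edits_in_ranges(
    ///             buffer.clone(),
    ///             vec![Point::new(0, 0)..Point::new(1, 0)],
    ///             cx,
    ///         )
    ///     })
    ///     .await
    ///     .unwrap();
    /// ```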
638 pub fn reject_edits_in_ranges(
639 &mut self,
640 buffer: Entity<Buffer>,
641 buffer_ranges: Vec<Range<impl language::ToPoint>>,
642 cx: &mut Context<Self>,
643 ) -> Task<Result<()>> {
644 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
645 return Task::ready(Ok(()));
646 };
647
648 match &tracked_buffer.status {
649 TrackedBufferStatus::Created {
650 existing_file_content,
651 } => {
652 let task = if let Some(existing_file_content) = existing_file_content {
653 buffer.update(cx, |buffer, cx| {
654 buffer.start_transaction();
655 buffer.set_text("", cx);
656 for chunk in existing_file_content.chunks() {
657 buffer.append(chunk, cx);
658 }
659 buffer.end_transaction(cx);
660 });
661 self.project
662 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
663 } else {
664 // For a file created by AI with no pre-existing content,
665 // only delete the file if we're certain it contains only AI content
666 // with no edits from the user.
667
668 let initial_version = tracked_buffer.version.clone();
669 let current_version = buffer.read(cx).version();
670
671 let current_content = buffer.read(cx).text();
672 let tracked_content = tracked_buffer.snapshot.text();
673
674 let is_ai_only_content =
675 initial_version == current_version && current_content == tracked_content;
676
677 if is_ai_only_content {
678 buffer
679 .read(cx)
680 .entry_id(cx)
681 .and_then(|entry_id| {
682 self.project.update(cx, |project, cx| {
683 project.delete_entry(entry_id, false, cx)
684 })
685 })
686 .unwrap_or(Task::ready(Ok(())))
687 } else {
688 // Not sure how to disentangle edits made by the user
689 // from edits made by the AI at this point.
690 // For now, preserve both to avoid data loss.
691 //
692 // TODO: Better solution (disable "Reject" after user makes some
693 // edit or find a way to differentiate between AI and user edits)
694 Task::ready(Ok(()))
695 }
696 };
697
698 self.tracked_buffers.remove(&buffer);
699 cx.notify();
700 task
701 }
702 TrackedBufferStatus::Deleted => {
703 buffer.update(cx, |buffer, cx| {
704 buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
705 });
706 let save = self
707 .project
708 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
709
710 // Clear all tracked edits for this buffer and start over as if we just read it.
711 self.tracked_buffers.remove(&buffer);
712 self.buffer_read(buffer.clone(), cx);
713 cx.notify();
714 save
715 }
716 TrackedBufferStatus::Modified => {
717 buffer.update(cx, |buffer, cx| {
718 let mut buffer_row_ranges = buffer_ranges
719 .into_iter()
720 .map(|range| {
721 range.start.to_point(buffer).row..range.end.to_point(buffer).row
722 })
723 .peekable();
724
725 let mut edits_to_revert = Vec::new();
726 for edit in tracked_buffer.unreviewed_edits.edits() {
727 let new_range = tracked_buffer
728 .snapshot
729 .anchor_before(Point::new(edit.new.start, 0))
730 ..tracked_buffer.snapshot.anchor_after(cmp::min(
731 Point::new(edit.new.end, 0),
732 tracked_buffer.snapshot.max_point(),
733 ));
734 let new_row_range = new_range.start.to_point(buffer).row
735 ..new_range.end.to_point(buffer).row;
736
737 let mut revert = false;
738 while let Some(buffer_row_range) = buffer_row_ranges.peek() {
739 if buffer_row_range.end < new_row_range.start {
740 buffer_row_ranges.next();
741 } else if buffer_row_range.start > new_row_range.end {
742 break;
743 } else {
744 revert = true;
745 break;
746 }
747 }
748
749 if revert {
750 let old_range = tracked_buffer
751 .diff_base
752 .point_to_offset(Point::new(edit.old.start, 0))
753 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
754 Point::new(edit.old.end, 0),
755 tracked_buffer.diff_base.max_point(),
756 ));
757 let old_text = tracked_buffer
758 .diff_base
759 .chunks_in_range(old_range)
760 .collect::<String>();
761 edits_to_revert.push((new_range, old_text));
762 }
763 }
764
765 buffer.edit(edits_to_revert, None, cx);
766 });
767 self.project
768 .update(cx, |project, cx| project.save_buffer(buffer, cx))
769 }
770 }
771 }
772
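    /// Accepts every unreviewed edit in every tracked buffer: buffers the
    /// agent deleted stop being tracked, and each remaining diff base is reset
    /// to the buffer's current contents.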
773 pub fn keep_all_edits(&mut self, cx: &mut Context<Self>) {
774 self.tracked_buffers
775 .retain(|_buffer, tracked_buffer| match tracked_buffer.status {
776 TrackedBufferStatus::Deleted => false,
777 _ => {
778 tracked_buffer.unreviewed_edits.clear();
779 tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
780 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
781 true
782 }
783 });
784 cx.notify();
785 }
786
787 pub fn reject_all_edits(&mut self, cx: &mut Context<Self>) -> Task<()> {
788 let futures = self.changed_buffers(cx).into_keys().map(|buffer| {
789 let reject = self.reject_edits_in_ranges(buffer, vec![Anchor::MIN..Anchor::MAX], cx);
790
791 async move {
792 reject.await.log_err();
793 }
794 });
795
796 let task = futures::future::join_all(futures);
797
798 cx.spawn(async move |_, _| {
799 task.await;
800 })
801 }
802
803 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
804 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
805 self.tracked_buffers
806 .iter()
807 .filter(|(_, tracked)| tracked.has_edits(cx))
808 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
809 .collect()
810 }
811
    /// Iterates over buffers that have changed since the model last read or edited them.
813 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
814 self.tracked_buffers
815 .iter()
816 .filter(|(buffer, tracked)| {
817 let buffer = buffer.read(cx);
818
819 tracked.version != buffer.version
820 && buffer
821 .file()
822 .map_or(false, |file| file.disk_state() != DiskState::Deleted)
823 })
824 .map(|(buffer, _)| buffer)
825 }
826}
827
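/// Applies the user's `edits` (expressed against `new_text`) to `old_text`,
/// skipping any edit that conflicts with an unreviewed agent edit in `patch`.
/// Returns true if at least one user edit was applied.
///
/// For example (rows, illustrative): if the agent's unreviewed hunk covers
/// buffer rows 2..3 and the user edits rows 0..1 and 2..3, the 0..1 edit is
/// copied into `old_text`, while the 2..3 edit is treated as a conflict and
/// left for review.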
828fn apply_non_conflicting_edits(
829 patch: &Patch<u32>,
830 edits: Vec<Edit<u32>>,
831 old_text: &mut Rope,
832 new_text: &Rope,
833) -> bool {
834 let mut old_edits = patch.edits().iter().cloned().peekable();
835 let mut new_edits = edits.into_iter().peekable();
836 let mut applied_delta = 0i32;
837 let mut rebased_delta = 0i32;
838 let mut has_made_changes = false;
839
840 while let Some(mut new_edit) = new_edits.next() {
841 let mut conflict = false;
842
        // Advance past all the old edits that come before this new edit, or that intersect with it.
844 while let Some(old_edit) = old_edits.peek() {
845 if new_edit.old.end < old_edit.new.start
846 || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
847 {
848 break;
849 } else if new_edit.old.start > old_edit.new.end
850 || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
851 {
852 let old_edit = old_edits.next().unwrap();
853 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
854 } else {
855 conflict = true;
856 if new_edits
857 .peek()
858 .map_or(false, |next_edit| next_edit.old.overlaps(&old_edit.new))
859 {
860 new_edit = new_edits.next().unwrap();
861 } else {
862 let old_edit = old_edits.next().unwrap();
863 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
864 }
865 }
866 }
867
868 if !conflict {
869 // This edit doesn't intersect with any old edit, so we can apply it to the old text.
870 new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
871 new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
872 let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
873 ..old_text.point_to_offset(cmp::min(
874 Point::new(new_edit.old.end, 0),
875 old_text.max_point(),
876 ));
877 let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
878 ..new_text.point_to_offset(cmp::min(
879 Point::new(new_edit.new.end, 0),
880 new_text.max_point(),
881 ));
882
883 old_text.replace(
884 old_bytes,
885 &new_text.chunks_in_range(new_bytes).collect::<String>(),
886 );
887 applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
888 has_made_changes = true;
889 }
890 }
891 has_made_changes
892}
893
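/// Computes the edits between two snapshots of the same buffer as row-based
/// edits, merging adjacent or overlapping row ranges into a single edit.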
894fn diff_snapshots(
895 old_snapshot: &text::BufferSnapshot,
896 new_snapshot: &text::BufferSnapshot,
897) -> Vec<Edit<u32>> {
898 let mut edits = new_snapshot
899 .edits_since::<Point>(&old_snapshot.version)
900 .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
901 .peekable();
902 let mut row_edits = Vec::new();
903 while let Some(mut edit) = edits.next() {
904 while let Some(next_edit) = edits.peek() {
905 if edit.old.end >= next_edit.old.start {
906 edit.old.end = next_edit.old.end;
907 edit.new.end = next_edit.new.end;
908 edits.next();
909 } else {
910 break;
911 }
912 }
913 row_edits.push(edit);
914 }
915 row_edits
916}
917
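/// Converts a point-based edit into a row-based edit: an edit that starts at
/// the end of a line and whose new text begins with a newline is shifted down
/// to start on the next row; an edit that spans whole lines (all columns zero)
/// maps directly onto those rows; any other edit is widened to cover the full
/// rows it touches. For example, inserting "\nfoo" at the end of row 1 (when
/// row 1 is not the last line) becomes the row edit 2..2 -> 2..3.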
918fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
919 if edit.old.start.column == old_text.line_len(edit.old.start.row)
920 && new_text
921 .chars_at(new_text.point_to_offset(edit.new.start))
922 .next()
923 == Some('\n')
924 && edit.old.start != old_text.max_point()
925 {
926 Edit {
927 old: edit.old.start.row + 1..edit.old.end.row + 1,
928 new: edit.new.start.row + 1..edit.new.end.row + 1,
929 }
930 } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
931 Edit {
932 old: edit.old.start.row..edit.old.end.row,
933 new: edit.new.start.row..edit.new.end.row,
934 }
935 } else {
936 Edit {
937 old: edit.old.start.row..edit.old.end.row + 1,
938 new: edit.new.start.row..edit.new.end.row + 1,
939 }
940 }
941}
942
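/// Identifies whether a change to a tracked buffer was made by the user or by the agent.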
943#[derive(Copy, Clone, Debug)]
944enum ChangeAuthor {
945 User,
946 Agent,
947}
948
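/// How the agent has affected a tracked buffer: `Created` carries the prior
/// on-disk content (if the file already existed) so a rejection can restore it,
/// `Modified` means the agent edited an existing file, and `Deleted` means the
/// agent deleted it.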
949enum TrackedBufferStatus {
950 Created { existing_file_content: Option<Rope> },
951 Modified,
952 Deleted,
953}
954
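/// Per-buffer tracking state: the diff base the agent last agreed on, the
/// agent's edits the user hasn't reviewed yet, and the tasks that keep the
/// diff up to date.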
955struct TrackedBuffer {
956 buffer: Entity<Buffer>,
957 diff_base: Rope,
958 last_seen_base: Rope,
959 unreviewed_edits: Patch<u32>,
960 status: TrackedBufferStatus,
961 version: clock::Global,
962 diff: Entity<BufferDiff>,
963 snapshot: text::BufferSnapshot,
964 diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
965 may_have_unnotified_user_edits: bool,
966 _open_lsp_handle: OpenLspBufferHandle,
967 _maintain_diff: Task<()>,
968 _subscription: Subscription,
969}
970
971impl TrackedBuffer {
972 fn has_edits(&self, cx: &App) -> bool {
973 self.diff
974 .read(cx)
975 .hunks(&self.buffer.read(cx), cx)
976 .next()
977 .is_some()
978 }
979
980 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
981 self.diff_update
982 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
983 .ok();
984 }
985}
986
987pub struct ChangedBuffer {
988 pub diff: Entity<BufferDiff>,
989}
990
991#[cfg(test)]
992mod tests {
993 use super::*;
994 use buffer_diff::DiffHunkStatusKind;
995 use gpui::TestAppContext;
996 use indoc::indoc;
997 use language::Point;
998 use project::{FakeFs, Fs, Project, RemoveOptions};
999 use rand::prelude::*;
1000 use serde_json::json;
1001 use settings::SettingsStore;
1002 use std::env;
1003 use util::{RandomCharIter, path};
1004
1005 #[ctor::ctor]
1006 fn init_logger() {
1007 zlog::init_test();
1008 }
1009
1010 fn init_test(cx: &mut TestAppContext) {
1011 cx.update(|cx| {
1012 let settings_store = SettingsStore::test(cx);
1013 cx.set_global(settings_store);
1014 language::init(cx);
1015 Project::init_settings(cx);
1016 });
1017 }
1018
1019 #[gpui::test(iterations = 10)]
1020 async fn test_keep_edits(cx: &mut TestAppContext) {
1021 init_test(cx);
1022
1023 let fs = FakeFs::new(cx.executor());
1024 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1025 .await;
1026 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1027 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1028 let file_path = project
1029 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1030 .unwrap();
1031 let buffer = project
1032 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1033 .await
1034 .unwrap();
1035
1036 cx.update(|cx| {
1037 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1038 buffer.update(cx, |buffer, cx| {
1039 buffer
1040 .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
1041 .unwrap()
1042 });
1043 buffer.update(cx, |buffer, cx| {
1044 buffer
1045 .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
1046 .unwrap()
1047 });
1048 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1049 });
1050 cx.run_until_parked();
1051 assert_eq!(
1052 buffer.read_with(cx, |buffer, _| buffer.text()),
1053 "abc\ndEf\nghi\njkl\nmnO"
1054 );
1055 assert_eq!(
1056 unreviewed_hunks(&action_log, cx),
1057 vec![(
1058 buffer.clone(),
1059 vec![
1060 HunkStatus {
1061 range: Point::new(1, 0)..Point::new(2, 0),
1062 diff_status: DiffHunkStatusKind::Modified,
1063 old_text: "def\n".into(),
1064 },
1065 HunkStatus {
1066 range: Point::new(4, 0)..Point::new(4, 3),
1067 diff_status: DiffHunkStatusKind::Modified,
1068 old_text: "mno".into(),
1069 }
1070 ],
1071 )]
1072 );
1073
1074 action_log.update(cx, |log, cx| {
1075 log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), cx)
1076 });
1077 cx.run_until_parked();
1078 assert_eq!(
1079 unreviewed_hunks(&action_log, cx),
1080 vec![(
1081 buffer.clone(),
1082 vec![HunkStatus {
1083 range: Point::new(1, 0)..Point::new(2, 0),
1084 diff_status: DiffHunkStatusKind::Modified,
1085 old_text: "def\n".into(),
1086 }],
1087 )]
1088 );
1089
1090 action_log.update(cx, |log, cx| {
1091 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), cx)
1092 });
1093 cx.run_until_parked();
1094 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1095 }
1096
1097 #[gpui::test(iterations = 10)]
1098 async fn test_deletions(cx: &mut TestAppContext) {
1099 init_test(cx);
1100
1101 let fs = FakeFs::new(cx.executor());
1102 fs.insert_tree(
1103 path!("/dir"),
1104 json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
1105 )
1106 .await;
1107 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1108 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1109 let file_path = project
1110 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1111 .unwrap();
1112 let buffer = project
1113 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1114 .await
1115 .unwrap();
1116
1117 cx.update(|cx| {
1118 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1119 buffer.update(cx, |buffer, cx| {
1120 buffer
1121 .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
1122 .unwrap();
1123 buffer.finalize_last_transaction();
1124 });
1125 buffer.update(cx, |buffer, cx| {
1126 buffer
1127 .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
1128 .unwrap();
1129 buffer.finalize_last_transaction();
1130 });
1131 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1132 });
1133 cx.run_until_parked();
1134 assert_eq!(
1135 buffer.read_with(cx, |buffer, _| buffer.text()),
1136 "abc\nghi\njkl\npqr"
1137 );
1138 assert_eq!(
1139 unreviewed_hunks(&action_log, cx),
1140 vec![(
1141 buffer.clone(),
1142 vec![
1143 HunkStatus {
1144 range: Point::new(1, 0)..Point::new(1, 0),
1145 diff_status: DiffHunkStatusKind::Deleted,
1146 old_text: "def\n".into(),
1147 },
1148 HunkStatus {
1149 range: Point::new(3, 0)..Point::new(3, 0),
1150 diff_status: DiffHunkStatusKind::Deleted,
1151 old_text: "mno\n".into(),
1152 }
1153 ],
1154 )]
1155 );
1156
1157 buffer.update(cx, |buffer, cx| buffer.undo(cx));
1158 cx.run_until_parked();
1159 assert_eq!(
1160 buffer.read_with(cx, |buffer, _| buffer.text()),
1161 "abc\nghi\njkl\nmno\npqr"
1162 );
1163 assert_eq!(
1164 unreviewed_hunks(&action_log, cx),
1165 vec![(
1166 buffer.clone(),
1167 vec![HunkStatus {
1168 range: Point::new(1, 0)..Point::new(1, 0),
1169 diff_status: DiffHunkStatusKind::Deleted,
1170 old_text: "def\n".into(),
1171 }],
1172 )]
1173 );
1174
1175 action_log.update(cx, |log, cx| {
1176 log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), cx)
1177 });
1178 cx.run_until_parked();
1179 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1180 }
1181
1182 #[gpui::test(iterations = 10)]
1183 async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
1184 init_test(cx);
1185
1186 let fs = FakeFs::new(cx.executor());
1187 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1188 .await;
1189 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1190 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1191 let file_path = project
1192 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1193 .unwrap();
1194 let buffer = project
1195 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1196 .await
1197 .unwrap();
1198
1199 cx.update(|cx| {
1200 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1201 buffer.update(cx, |buffer, cx| {
1202 buffer
1203 .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
1204 .unwrap()
1205 });
1206 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1207 });
1208 cx.run_until_parked();
1209 assert_eq!(
1210 buffer.read_with(cx, |buffer, _| buffer.text()),
1211 "abc\ndeF\nGHI\njkl\nmno"
1212 );
1213 assert_eq!(
1214 unreviewed_hunks(&action_log, cx),
1215 vec![(
1216 buffer.clone(),
1217 vec![HunkStatus {
1218 range: Point::new(1, 0)..Point::new(3, 0),
1219 diff_status: DiffHunkStatusKind::Modified,
1220 old_text: "def\nghi\n".into(),
1221 }],
1222 )]
1223 );
1224
1225 buffer.update(cx, |buffer, cx| {
1226 buffer.edit(
1227 [
1228 (Point::new(0, 2)..Point::new(0, 2), "X"),
1229 (Point::new(3, 0)..Point::new(3, 0), "Y"),
1230 ],
1231 None,
1232 cx,
1233 )
1234 });
1235 cx.run_until_parked();
1236 assert_eq!(
1237 buffer.read_with(cx, |buffer, _| buffer.text()),
1238 "abXc\ndeF\nGHI\nYjkl\nmno"
1239 );
1240 assert_eq!(
1241 unreviewed_hunks(&action_log, cx),
1242 vec![(
1243 buffer.clone(),
1244 vec![HunkStatus {
1245 range: Point::new(1, 0)..Point::new(3, 0),
1246 diff_status: DiffHunkStatusKind::Modified,
1247 old_text: "def\nghi\n".into(),
1248 }],
1249 )]
1250 );
1251
1252 buffer.update(cx, |buffer, cx| {
1253 buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
1254 });
1255 cx.run_until_parked();
1256 assert_eq!(
1257 buffer.read_with(cx, |buffer, _| buffer.text()),
1258 "abXc\ndZeF\nGHI\nYjkl\nmno"
1259 );
1260 assert_eq!(
1261 unreviewed_hunks(&action_log, cx),
1262 vec![(
1263 buffer.clone(),
1264 vec![HunkStatus {
1265 range: Point::new(1, 0)..Point::new(3, 0),
1266 diff_status: DiffHunkStatusKind::Modified,
1267 old_text: "def\nghi\n".into(),
1268 }],
1269 )]
1270 );
1271
1272 action_log.update(cx, |log, cx| {
1273 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
1274 });
1275 cx.run_until_parked();
1276 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1277 }
1278
1279 #[gpui::test(iterations = 10)]
1280 async fn test_user_edits_notifications(cx: &mut TestAppContext) {
1281 init_test(cx);
1282
1283 let fs = FakeFs::new(cx.executor());
1284 fs.insert_tree(
1285 path!("/dir"),
1286 json!({"file": indoc! {"
1287 abc
1288 def
1289 ghi
1290 jkl
1291 mno"}}),
1292 )
1293 .await;
1294 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1295 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1296 let file_path = project
1297 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1298 .unwrap();
1299 let buffer = project
1300 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1301 .await
1302 .unwrap();
1303
1304 // Agent edits
1305 cx.update(|cx| {
1306 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1307 buffer.update(cx, |buffer, cx| {
1308 buffer
1309 .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
1310 .unwrap()
1311 });
1312 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1313 });
1314 cx.run_until_parked();
1315 assert_eq!(
1316 buffer.read_with(cx, |buffer, _| buffer.text()),
1317 indoc! {"
1318 abc
1319 deF
1320 GHI
1321 jkl
1322 mno"}
1323 );
1324 assert_eq!(
1325 unreviewed_hunks(&action_log, cx),
1326 vec![(
1327 buffer.clone(),
1328 vec![HunkStatus {
1329 range: Point::new(1, 0)..Point::new(3, 0),
1330 diff_status: DiffHunkStatusKind::Modified,
1331 old_text: "def\nghi\n".into(),
1332 }],
1333 )]
1334 );
1335
1336 // User edits
1337 buffer.update(cx, |buffer, cx| {
1338 buffer.edit(
1339 [
1340 (Point::new(0, 2)..Point::new(0, 2), "X"),
1341 (Point::new(3, 0)..Point::new(3, 0), "Y"),
1342 ],
1343 None,
1344 cx,
1345 )
1346 });
1347 cx.run_until_parked();
1348 assert_eq!(
1349 buffer.read_with(cx, |buffer, _| buffer.text()),
1350 indoc! {"
1351 abXc
1352 deF
1353 GHI
1354 Yjkl
1355 mno"}
1356 );
1357
        // User edits should be stored separately from the agent's.
1359 let user_edits = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));
1360 assert_eq!(
1361 user_edits.expect("should have some user edits"),
1362 indoc! {"
1363 --- a/dir/file
1364 +++ b/dir/file
1365 @@ -1,5 +1,5 @@
1366 -abc
1367 +abXc
1368 def
1369 ghi
1370 -jkl
1371 +Yjkl
1372 mno
1373 "}
1374 );
1375
1376 action_log.update(cx, |log, cx| {
1377 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
1378 });
1379 cx.run_until_parked();
1380 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1381 }
1382
1383 #[gpui::test(iterations = 10)]
1384 async fn test_creating_files(cx: &mut TestAppContext) {
1385 init_test(cx);
1386
1387 let fs = FakeFs::new(cx.executor());
1388 fs.insert_tree(path!("/dir"), json!({})).await;
1389 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1390 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1391 let file_path = project
1392 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1393 .unwrap();
1394
1395 let buffer = project
1396 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1397 .await
1398 .unwrap();
1399 cx.update(|cx| {
1400 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1401 buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
1402 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1403 });
1404 project
1405 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1406 .await
1407 .unwrap();
1408 cx.run_until_parked();
1409 assert_eq!(
1410 unreviewed_hunks(&action_log, cx),
1411 vec![(
1412 buffer.clone(),
1413 vec![HunkStatus {
1414 range: Point::new(0, 0)..Point::new(0, 5),
1415 diff_status: DiffHunkStatusKind::Added,
1416 old_text: "".into(),
1417 }],
1418 )]
1419 );
1420
1421 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
1422 cx.run_until_parked();
1423 assert_eq!(
1424 unreviewed_hunks(&action_log, cx),
1425 vec![(
1426 buffer.clone(),
1427 vec![HunkStatus {
1428 range: Point::new(0, 0)..Point::new(0, 6),
1429 diff_status: DiffHunkStatusKind::Added,
1430 old_text: "".into(),
1431 }],
1432 )]
1433 );
1434
1435 action_log.update(cx, |log, cx| {
1436 log.keep_edits_in_range(buffer.clone(), 0..5, cx)
1437 });
1438 cx.run_until_parked();
1439 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1440 }
1441
1442 #[gpui::test(iterations = 10)]
1443 async fn test_overwriting_files(cx: &mut TestAppContext) {
1444 init_test(cx);
1445
1446 let fs = FakeFs::new(cx.executor());
1447 fs.insert_tree(
1448 path!("/dir"),
1449 json!({
1450 "file1": "Lorem ipsum dolor"
1451 }),
1452 )
1453 .await;
1454 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1455 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1456 let file_path = project
1457 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1458 .unwrap();
1459
1460 let buffer = project
1461 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1462 .await
1463 .unwrap();
1464 cx.update(|cx| {
1465 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1466 buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
1467 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1468 });
1469 project
1470 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1471 .await
1472 .unwrap();
1473 cx.run_until_parked();
1474 assert_eq!(
1475 unreviewed_hunks(&action_log, cx),
1476 vec![(
1477 buffer.clone(),
1478 vec![HunkStatus {
1479 range: Point::new(0, 0)..Point::new(0, 19),
1480 diff_status: DiffHunkStatusKind::Added,
1481 old_text: "".into(),
1482 }],
1483 )]
1484 );
1485
1486 action_log
1487 .update(cx, |log, cx| {
1488 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
1489 })
1490 .await
1491 .unwrap();
1492 cx.run_until_parked();
1493 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1494 assert_eq!(
1495 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1496 "Lorem ipsum dolor"
1497 );
1498 }
1499
1500 #[gpui::test(iterations = 10)]
1501 async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
1502 init_test(cx);
1503
1504 let fs = FakeFs::new(cx.executor());
1505 fs.insert_tree(
1506 path!("/dir"),
1507 json!({
1508 "file1": "Lorem ipsum dolor"
1509 }),
1510 )
1511 .await;
1512 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1513 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1514 let file_path = project
1515 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1516 .unwrap();
1517
1518 let buffer = project
1519 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1520 .await
1521 .unwrap();
1522 cx.update(|cx| {
1523 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1524 buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
1525 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1526 });
1527 project
1528 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1529 .await
1530 .unwrap();
1531 cx.run_until_parked();
1532 assert_eq!(
1533 unreviewed_hunks(&action_log, cx),
1534 vec![(
1535 buffer.clone(),
1536 vec![HunkStatus {
1537 range: Point::new(0, 0)..Point::new(0, 37),
1538 diff_status: DiffHunkStatusKind::Modified,
1539 old_text: "Lorem ipsum dolor".into(),
1540 }],
1541 )]
1542 );
1543
1544 cx.update(|cx| {
1545 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1546 buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
1547 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1548 });
1549 project
1550 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1551 .await
1552 .unwrap();
1553 cx.run_until_parked();
1554 assert_eq!(
1555 unreviewed_hunks(&action_log, cx),
1556 vec![(
1557 buffer.clone(),
1558 vec![HunkStatus {
1559 range: Point::new(0, 0)..Point::new(0, 9),
1560 diff_status: DiffHunkStatusKind::Added,
1561 old_text: "".into(),
1562 }],
1563 )]
1564 );
1565
1566 action_log
1567 .update(cx, |log, cx| {
1568 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
1569 })
1570 .await
1571 .unwrap();
1572 cx.run_until_parked();
1573 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1574 assert_eq!(
1575 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1576 "Lorem ipsum dolor"
1577 );
1578 }
1579
1580 #[gpui::test(iterations = 10)]
1581 async fn test_deleting_files(cx: &mut TestAppContext) {
1582 init_test(cx);
1583
1584 let fs = FakeFs::new(cx.executor());
1585 fs.insert_tree(
1586 path!("/dir"),
1587 json!({"file1": "lorem\n", "file2": "ipsum\n"}),
1588 )
1589 .await;
1590
1591 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1592 let file1_path = project
1593 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1594 .unwrap();
1595 let file2_path = project
1596 .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
1597 .unwrap();
1598
1599 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1600 let buffer1 = project
1601 .update(cx, |project, cx| {
1602 project.open_buffer(file1_path.clone(), cx)
1603 })
1604 .await
1605 .unwrap();
1606 let buffer2 = project
1607 .update(cx, |project, cx| {
1608 project.open_buffer(file2_path.clone(), cx)
1609 })
1610 .await
1611 .unwrap();
1612
1613 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
1614 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
1615 project
1616 .update(cx, |project, cx| {
1617 project.delete_file(file1_path.clone(), false, cx)
1618 })
1619 .unwrap()
1620 .await
1621 .unwrap();
1622 project
1623 .update(cx, |project, cx| {
1624 project.delete_file(file2_path.clone(), false, cx)
1625 })
1626 .unwrap()
1627 .await
1628 .unwrap();
1629 cx.run_until_parked();
1630 assert_eq!(
1631 unreviewed_hunks(&action_log, cx),
1632 vec![
1633 (
1634 buffer1.clone(),
1635 vec![HunkStatus {
1636 range: Point::new(0, 0)..Point::new(0, 0),
1637 diff_status: DiffHunkStatusKind::Deleted,
1638 old_text: "lorem\n".into(),
1639 }]
1640 ),
1641 (
1642 buffer2.clone(),
1643 vec![HunkStatus {
1644 range: Point::new(0, 0)..Point::new(0, 0),
1645 diff_status: DiffHunkStatusKind::Deleted,
1646 old_text: "ipsum\n".into(),
1647 }],
1648 )
1649 ]
1650 );
1651
1652 // Simulate file1 being recreated externally.
1653 fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
1654 .await;
1655
1656 // Simulate file2 being recreated by a tool.
1657 let buffer2 = project
1658 .update(cx, |project, cx| project.open_buffer(file2_path, cx))
1659 .await
1660 .unwrap();
1661 action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
1662 buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
1663 action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
1664 project
1665 .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
1666 .await
1667 .unwrap();
1668
1669 cx.run_until_parked();
1670 assert_eq!(
1671 unreviewed_hunks(&action_log, cx),
1672 vec![(
1673 buffer2.clone(),
1674 vec![HunkStatus {
1675 range: Point::new(0, 0)..Point::new(0, 5),
1676 diff_status: DiffHunkStatusKind::Added,
1677 old_text: "".into(),
1678 }],
1679 )]
1680 );
1681
1682 // Simulate file2 being deleted externally.
1683 fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
1684 .await
1685 .unwrap();
1686 cx.run_until_parked();
1687 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1688 }
1689
1690 #[gpui::test(iterations = 10)]
1691 async fn test_reject_edits(cx: &mut TestAppContext) {
1692 init_test(cx);
1693
1694 let fs = FakeFs::new(cx.executor());
1695 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1696 .await;
1697 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1698 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1699 let file_path = project
1700 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1701 .unwrap();
1702 let buffer = project
1703 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1704 .await
1705 .unwrap();
1706
1707 cx.update(|cx| {
1708 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1709 buffer.update(cx, |buffer, cx| {
1710 buffer
1711 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1712 .unwrap()
1713 });
1714 buffer.update(cx, |buffer, cx| {
1715 buffer
1716 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1717 .unwrap()
1718 });
1719 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1720 });
1721 cx.run_until_parked();
1722 assert_eq!(
1723 buffer.read_with(cx, |buffer, _| buffer.text()),
1724 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1725 );
1726 assert_eq!(
1727 unreviewed_hunks(&action_log, cx),
1728 vec![(
1729 buffer.clone(),
1730 vec![
1731 HunkStatus {
1732 range: Point::new(1, 0)..Point::new(3, 0),
1733 diff_status: DiffHunkStatusKind::Modified,
1734 old_text: "def\n".into(),
1735 },
1736 HunkStatus {
1737 range: Point::new(5, 0)..Point::new(5, 3),
1738 diff_status: DiffHunkStatusKind::Modified,
1739 old_text: "mno".into(),
1740 }
1741 ],
1742 )]
1743 );
1744
1745 // If the rejected range doesn't overlap with any hunk, we ignore it.
1746 action_log
1747 .update(cx, |log, cx| {
1748 log.reject_edits_in_ranges(
1749 buffer.clone(),
1750 vec![Point::new(4, 0)..Point::new(4, 0)],
1751 cx,
1752 )
1753 })
1754 .await
1755 .unwrap();
1756 cx.run_until_parked();
1757 assert_eq!(
1758 buffer.read_with(cx, |buffer, _| buffer.text()),
1759 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1760 );
1761 assert_eq!(
1762 unreviewed_hunks(&action_log, cx),
1763 vec![(
1764 buffer.clone(),
1765 vec![
1766 HunkStatus {
1767 range: Point::new(1, 0)..Point::new(3, 0),
1768 diff_status: DiffHunkStatusKind::Modified,
1769 old_text: "def\n".into(),
1770 },
1771 HunkStatus {
1772 range: Point::new(5, 0)..Point::new(5, 3),
1773 diff_status: DiffHunkStatusKind::Modified,
1774 old_text: "mno".into(),
1775 }
1776 ],
1777 )]
1778 );
1779
1780 action_log
1781 .update(cx, |log, cx| {
1782 log.reject_edits_in_ranges(
1783 buffer.clone(),
1784 vec![Point::new(0, 0)..Point::new(1, 0)],
1785 cx,
1786 )
1787 })
1788 .await
1789 .unwrap();
1790 cx.run_until_parked();
1791 assert_eq!(
1792 buffer.read_with(cx, |buffer, _| buffer.text()),
1793 "abc\ndef\nghi\njkl\nmnO"
1794 );
1795 assert_eq!(
1796 unreviewed_hunks(&action_log, cx),
1797 vec![(
1798 buffer.clone(),
1799 vec![HunkStatus {
1800 range: Point::new(4, 0)..Point::new(4, 3),
1801 diff_status: DiffHunkStatusKind::Modified,
1802 old_text: "mno".into(),
1803 }],
1804 )]
1805 );
1806
1807 action_log
1808 .update(cx, |log, cx| {
1809 log.reject_edits_in_ranges(
1810 buffer.clone(),
1811 vec![Point::new(4, 0)..Point::new(4, 0)],
1812 cx,
1813 )
1814 })
1815 .await
1816 .unwrap();
1817 cx.run_until_parked();
1818 assert_eq!(
1819 buffer.read_with(cx, |buffer, _| buffer.text()),
1820 "abc\ndef\nghi\njkl\nmno"
1821 );
1822 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1823 }
1824
1825 #[gpui::test(iterations = 10)]
1826 async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
1827 init_test(cx);
1828
1829 let fs = FakeFs::new(cx.executor());
1830 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1831 .await;
1832 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1833 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1834 let file_path = project
1835 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1836 .unwrap();
1837 let buffer = project
1838 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1839 .await
1840 .unwrap();
1841
1842 cx.update(|cx| {
1843 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1844 buffer.update(cx, |buffer, cx| {
1845 buffer
1846 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1847 .unwrap()
1848 });
1849 buffer.update(cx, |buffer, cx| {
1850 buffer
1851 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1852 .unwrap()
1853 });
1854 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1855 });
1856 cx.run_until_parked();
1857 assert_eq!(
1858 buffer.read_with(cx, |buffer, _| buffer.text()),
1859 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1860 );
1861 assert_eq!(
1862 unreviewed_hunks(&action_log, cx),
1863 vec![(
1864 buffer.clone(),
1865 vec![
1866 HunkStatus {
1867 range: Point::new(1, 0)..Point::new(3, 0),
1868 diff_status: DiffHunkStatusKind::Modified,
1869 old_text: "def\n".into(),
1870 },
1871 HunkStatus {
1872 range: Point::new(5, 0)..Point::new(5, 3),
1873 diff_status: DiffHunkStatusKind::Modified,
1874 old_text: "mno".into(),
1875 }
1876 ],
1877 )]
1878 );
1879
1880 action_log.update(cx, |log, cx| {
1881 let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
1882 ..buffer.read(cx).anchor_before(Point::new(1, 0));
1883 let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
1884 ..buffer.read(cx).anchor_before(Point::new(5, 3));
1885
1886 log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], cx)
1887 .detach();
1888 assert_eq!(
1889 buffer.read_with(cx, |buffer, _| buffer.text()),
1890 "abc\ndef\nghi\njkl\nmno"
1891 );
1892 });
1893 cx.run_until_parked();
1894 assert_eq!(
1895 buffer.read_with(cx, |buffer, _| buffer.text()),
1896 "abc\ndef\nghi\njkl\nmno"
1897 );
1898 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1899 }
1900
1901 #[gpui::test(iterations = 10)]
1902 async fn test_reject_deleted_file(cx: &mut TestAppContext) {
1903 init_test(cx);
1904
1905 let fs = FakeFs::new(cx.executor());
1906 fs.insert_tree(path!("/dir"), json!({"file": "content"}))
1907 .await;
1908 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1909 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1910 let file_path = project
1911 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1912 .unwrap();
1913 let buffer = project
1914 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
1915 .await
1916 .unwrap();
1917
1918 cx.update(|cx| {
1919 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
1920 });
1921 project
1922 .update(cx, |project, cx| {
1923 project.delete_file(file_path.clone(), false, cx)
1924 })
1925 .unwrap()
1926 .await
1927 .unwrap();
1928 cx.run_until_parked();
1929 assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
1930 assert_eq!(
1931 unreviewed_hunks(&action_log, cx),
1932 vec![(
1933 buffer.clone(),
1934 vec![HunkStatus {
1935 range: Point::new(0, 0)..Point::new(0, 0),
1936 diff_status: DiffHunkStatusKind::Deleted,
1937 old_text: "content".into(),
1938 }]
1939 )]
1940 );
1941
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 0)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
        assert!(fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

    #[gpui::test(iterations = 10)]
    async fn test_reject_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
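        // The agent creates the buffer, writes its contents, reports the edit, and saves to disk.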
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 7),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

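        // Rejecting the creation should remove the file from disk again.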
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 11)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

    #[gpui::test]
    async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // The agent creates the file with its initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();

        // User makes additional edits
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| {
                buffer.edit([(10..10, "\nuser added this line")], None, cx);
            });
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // Reject the agent's edits across the entire buffer
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(100, 0)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();

        // Since the user edited the file after the agent created it, rejection must not
        // delete the file or discard any of its contents
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        let content = buffer.read_with(cx, |buffer, _| buffer.text());
        assert_eq!(content, "ai content\nuser added this line");
    }

    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        init_test(cx);

        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

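        // Randomly interleave keeps, rejects, agent edits, and user edits, checking the
        // tracked state whenever we quiesce.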
        for _ in 0..operations {
            match rng.gen_range(0..100) {
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, cx)
                    });
                }
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            log.reject_edits_in_ranges(buffer.clone(), vec![range], cx)
                        })
                        .await
                        .unwrap();
                }
                _ => {
                    let is_agent_edit = rng.gen_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            if rng.gen_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
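            // Invariant check: replaying each unreviewed edit onto the diff base (mapping the
            // edit's new rows back via `edit.new.start + edit.old_len()`) must reproduce the
            // buffer's current text.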
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }

    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt".into(), "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

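        // The agent makes a batch of edits touching the start, middle, and end of the file.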
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt".into(), "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Make another commit that accepts the GGG edit but puts different content where the
        // NEW line was added, so the NEW addition and the last-line edit remain unreviewed
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[(
                "file.txt".into(),
                "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into(),
            )],
            "0000002",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt".into(), "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

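    /// A simplified view of a diff hunk, used to assert on the action log's tracked state.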
    #[derive(Debug, Clone, PartialEq, Eq)]
    struct HunkStatus {
        range: Range<Point>,
        diff_status: DiffHunkStatusKind,
        old_text: String,
    }

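    /// Collects every changed buffer tracked by the action log along with its unreviewed hunks.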
    fn unreviewed_hunks(
        action_log: &Entity<ActionLog>,
        cx: &TestAppContext,
    ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
        cx.read(|cx| {
            action_log
                .read(cx)
                .changed_buffers(cx)
                .into_iter()
                .map(|(buffer, diff)| {
                    let snapshot = buffer.read(cx).snapshot();
                    (
                        buffer,
                        diff.read(cx)
                            .hunks(&snapshot, cx)
                            .map(|hunk| HunkStatus {
                                diff_status: hunk.status().kind,
                                range: hunk.range,
                                old_text: diff
                                    .read(cx)
                                    .base_text()
                                    .text_for_range(hunk.diff_base_byte_range)
                                    .collect(),
                            })
                            .collect(),
                    )
                })
                .collect()
        })
    }

    #[gpui::test]
    async fn test_format_patch(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"test.txt": "line 1\nline 2\nline 3\n"}),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/test.txt", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            // Track the buffer and mark it as read first
            action_log.update(cx, |log, cx| {
                log.buffer_read(buffer.clone(), cx);
            });

            // Make a user edit that isn't reported via `buffer_edited`, so it counts as an
            // unnotified user edit
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 0)..Point::new(1, 6), "CHANGED")], None, cx)
                    .unwrap(); // Replace "line 2" with "CHANGED"
            });
        });

        cx.run_until_parked();

        // Collect the unnotified user edits as a unified diff
        let patch = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));

        // Verify the exact unified diff output, including the file headers and hunk
        assert_eq!(
            patch.unwrap(),
            indoc! {"
                --- a/dir/test.txt
                +++ b/dir/test.txt
                @@ -1,3 +1,3 @@
                 line 1
                -line 2
                +CHANGED
                 line 3
            "}
        );
    }
}