1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use clock;
4use collections::BTreeMap;
5use futures::{FutureExt, StreamExt, channel::mpsc};
6use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
7use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
8use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
9use std::{cmp, ops::Range, sync::Arc};
10use text::{Edit, Patch, Rope};
11use util::{
12 RangeExt, ResultExt as _,
13 paths::{PathStyle, RemotePathBuf},
14};
15
16/// Tracks actions performed by tools in a thread
17pub struct ActionLog {
18 /// Buffers that we want to notify the model about when they change.
19 tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
20 /// Has the model edited a file since it last checked diagnostics?
21 edited_since_project_diagnostics_check: bool,
22 /// The project this action log is associated with
23 project: Entity<Project>,
24}
25
26impl ActionLog {
27 /// Creates a new, empty action log associated with the given project.
28 pub fn new(project: Entity<Project>) -> Self {
29 Self {
30 tracked_buffers: BTreeMap::default(),
31 edited_since_project_diagnostics_check: false,
32 project,
33 }
34 }
35
36 pub fn project(&self) -> &Entity<Project> {
37 &self.project
38 }
39
40 /// Notifies a diagnostics check
41 pub fn checked_project_diagnostics(&mut self) {
42 self.edited_since_project_diagnostics_check = false;
43 }
44
45 /// Returns true if any files have been edited since the last project diagnostics check
46 pub fn has_edited_files_since_project_diagnostics_check(&self) -> bool {
47 self.edited_since_project_diagnostics_check
48 }
49
50 pub fn latest_snapshot(&self, buffer: &Entity<Buffer>) -> Option<text::BufferSnapshot> {
51 Some(self.tracked_buffers.get(buffer)?.snapshot.clone())
52 }
53
54 pub fn has_unnotified_user_edits(&self) -> bool {
55 self.tracked_buffers
56 .values()
57 .any(|tracked| tracked.has_unnotified_user_edits)
58 }
59
60 /// Return a unified diff patch with user edits made since last read or notification
61 pub fn unnotified_user_edits(&self, cx: &Context<Self>) -> Option<String> {
62 if !self.has_unnotified_user_edits() {
63 return None;
64 }
65
66 let unified_diff = self
67 .tracked_buffers
68 .values()
69 .filter_map(|tracked| {
70 if !tracked.has_unnotified_user_edits {
71 return None;
72 }
73
74 let text_with_latest_user_edits = tracked.diff_base.to_string();
75 let text_with_last_seen_user_edits = tracked.last_seen_base.to_string();
76 if text_with_latest_user_edits == text_with_last_seen_user_edits {
77 return None;
78 }
79 let patch = language::unified_diff(
80 &text_with_last_seen_user_edits,
81 &text_with_latest_user_edits,
82 );
83
84 let buffer = tracked.buffer.clone();
85 let file_path = buffer
86 .read(cx)
87 .file()
88 .map(|file| RemotePathBuf::new(file.full_path(cx), PathStyle::Posix).to_proto())
89 .unwrap_or_else(|| format!("buffer_{}", buffer.entity_id()));
90
91 let mut result = String::new();
92 result.push_str(&format!("--- a/{}\n", file_path));
93 result.push_str(&format!("+++ b/{}\n", file_path));
94 result.push_str(&patch);
95
96 Some(result)
97 })
98 .collect::<Vec<_>>()
99 .join("\n\n");
100
101 Some(unified_diff)
102 }
103
104 /// Return a unified diff patch with user edits made since last read/notification
105 /// and mark them as notified
106 pub fn flush_unnotified_user_edits(&mut self, cx: &Context<Self>) -> Option<String> {
107 let patch = self.unnotified_user_edits(cx);
108 self.tracked_buffers.values_mut().for_each(|tracked| {
109 tracked.has_unnotified_user_edits = false;
110 tracked.last_seen_base = tracked.diff_base.clone();
111 });
112 patch
113 }
114
115 fn track_buffer_internal(
116 &mut self,
117 buffer: Entity<Buffer>,
118 is_created: bool,
119 cx: &mut Context<Self>,
120 ) -> &mut TrackedBuffer {
121 let status = if is_created {
122 if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
123 match tracked.status {
124 TrackedBufferStatus::Created {
125 existing_file_content,
126 } => TrackedBufferStatus::Created {
127 existing_file_content,
128 },
129 TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
130 TrackedBufferStatus::Created {
131 existing_file_content: Some(tracked.diff_base),
132 }
133 }
134 }
135 } else if buffer
136 .read(cx)
137 .file()
138 .map_or(false, |file| file.disk_state().exists())
139 {
140 TrackedBufferStatus::Created {
141 existing_file_content: Some(buffer.read(cx).as_rope().clone()),
142 }
143 } else {
144 TrackedBufferStatus::Created {
145 existing_file_content: None,
146 }
147 }
148 } else {
149 TrackedBufferStatus::Modified
150 };
151
152 let tracked_buffer = self
153 .tracked_buffers
154 .entry(buffer.clone())
155 .or_insert_with(|| {
156 let open_lsp_handle = self.project.update(cx, |project, cx| {
157 project.register_buffer_with_language_servers(&buffer, cx)
158 });
159
160 let text_snapshot = buffer.read(cx).text_snapshot();
161 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
162 let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
163 let diff_base;
164 let last_seen_base;
165 let unreviewed_edits;
166 if is_created {
167 diff_base = Rope::default();
168 last_seen_base = Rope::default();
169 unreviewed_edits = Patch::new(vec![Edit {
170 old: 0..1,
171 new: 0..text_snapshot.max_point().row + 1,
172 }])
173 } else {
174 diff_base = buffer.read(cx).as_rope().clone();
175 last_seen_base = diff_base.clone();
176 unreviewed_edits = Patch::default();
177 }
178 TrackedBuffer {
179 buffer: buffer.clone(),
180 diff_base,
181 last_seen_base,
182 unreviewed_edits,
183 snapshot: text_snapshot.clone(),
184 status,
185 version: buffer.read(cx).version(),
186 diff,
187 diff_update: diff_update_tx,
188 has_unnotified_user_edits: false,
189 _open_lsp_handle: open_lsp_handle,
190 _maintain_diff: cx.spawn({
191 let buffer = buffer.clone();
192 async move |this, cx| {
193 Self::maintain_diff(this, buffer, diff_update_rx, cx)
194 .await
195 .ok();
196 }
197 }),
198 _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
199 }
200 });
201 tracked_buffer.version = buffer.read(cx).version();
202 tracked_buffer
203 }
204
205 fn handle_buffer_event(
206 &mut self,
207 buffer: Entity<Buffer>,
208 event: &BufferEvent,
209 cx: &mut Context<Self>,
210 ) {
211 match event {
212 BufferEvent::Edited { .. } => self.handle_buffer_edited(buffer, cx),
213 BufferEvent::FileHandleChanged => {
214 self.handle_buffer_file_changed(buffer, cx);
215 }
216 _ => {}
217 };
218 }
219
220 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
221 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
222 return;
223 };
224 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
225 }
226
227 fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
228 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
229 return;
230 };
231
232 match tracked_buffer.status {
233 TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
234 if buffer
235 .read(cx)
236 .file()
237 .map_or(false, |file| file.disk_state() == DiskState::Deleted)
238 {
239 // If the buffer had been edited by a tool, but it got
240 // deleted externally, we want to stop tracking it.
241 self.tracked_buffers.remove(&buffer);
242 }
243 cx.notify();
244 }
245 TrackedBufferStatus::Deleted => {
246 if buffer
247 .read(cx)
248 .file()
249 .map_or(false, |file| file.disk_state() != DiskState::Deleted)
250 {
251 // If the buffer had been deleted by a tool, but it got
252 // resurrected externally, we want to clear the edits we
253 // were tracking and reset the buffer's state.
254 self.tracked_buffers.remove(&buffer);
255 self.track_buffer_internal(buffer, false, cx);
256 }
257 cx.notify();
258 }
259 }
260 }
261
262 async fn maintain_diff(
263 this: WeakEntity<Self>,
264 buffer: Entity<Buffer>,
265 mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
266 cx: &mut AsyncApp,
267 ) -> Result<()> {
268 let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
269 let git_diff = this
270 .update(cx, |this, cx| {
271 this.project.update(cx, |project, cx| {
272 project.open_uncommitted_diff(buffer.clone(), cx)
273 })
274 })?
275 .await
276 .ok();
277 let buffer_repo = git_store.read_with(cx, |git_store, cx| {
278 git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
279 })?;
280
281 let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
282 let _repo_subscription =
283 if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
284 cx.update(|cx| {
285 let mut old_head = buffer_repo.read(cx).head_commit.clone();
286 Some(cx.subscribe(git_diff, move |_, event, cx| match event {
287 buffer_diff::BufferDiffEvent::DiffChanged { .. } => {
288 let new_head = buffer_repo.read(cx).head_commit.clone();
289 if new_head != old_head {
290 old_head = new_head;
291 git_diff_updates_tx.send(()).ok();
292 }
293 }
294 _ => {}
295 }))
296 })?
297 } else {
298 None
299 };
300
301 loop {
302 futures::select_biased! {
303 buffer_update = buffer_updates.next() => {
304 if let Some((author, buffer_snapshot)) = buffer_update {
305 Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
306 } else {
307 break;
308 }
309 }
310 _ = git_diff_updates_rx.changed().fuse() => {
311 if let Some(git_diff) = git_diff.as_ref() {
312 Self::keep_committed_edits(&this, &buffer, &git_diff, cx).await?;
313 }
314 }
315 }
316 }
317
318 Ok(())
319 }
320
321 async fn track_edits(
322 this: &WeakEntity<ActionLog>,
323 buffer: &Entity<Buffer>,
324 author: ChangeAuthor,
325 buffer_snapshot: text::BufferSnapshot,
326 cx: &mut AsyncApp,
327 ) -> Result<()> {
328 let rebase = this.update(cx, |this, cx| {
329 let tracked_buffer = this
330 .tracked_buffers
331 .get_mut(buffer)
332 .context("buffer not tracked")?;
333
334 let rebase = cx.background_spawn({
335 let mut base_text = tracked_buffer.diff_base.clone();
336 let old_snapshot = tracked_buffer.snapshot.clone();
337 let new_snapshot = buffer_snapshot.clone();
338 let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
339 let edits = diff_snapshots(&old_snapshot, &new_snapshot);
340 if let ChangeAuthor::User = author
341 && !edits.is_empty()
342 {
343 tracked_buffer.has_unnotified_user_edits = true;
344 }
345 async move {
346 if let ChangeAuthor::User = author {
347 apply_non_conflicting_edits(
348 &unreviewed_edits,
349 edits,
350 &mut base_text,
351 new_snapshot.as_rope(),
352 );
353 }
354 (Arc::new(base_text.to_string()), base_text)
355 }
356 });
357
358 anyhow::Ok(rebase)
359 })??;
360 let (new_base_text, new_diff_base) = rebase.await;
361 Self::update_diff(
362 this,
363 buffer,
364 buffer_snapshot,
365 new_base_text,
366 new_diff_base,
367 cx,
368 )
369 .await
370 }
371
372 async fn keep_committed_edits(
373 this: &WeakEntity<ActionLog>,
374 buffer: &Entity<Buffer>,
375 git_diff: &Entity<BufferDiff>,
376 cx: &mut AsyncApp,
377 ) -> Result<()> {
378 let buffer_snapshot = this.read_with(cx, |this, _cx| {
379 let tracked_buffer = this
380 .tracked_buffers
381 .get(buffer)
382 .context("buffer not tracked")?;
383 anyhow::Ok(tracked_buffer.snapshot.clone())
384 })??;
385 let (new_base_text, new_diff_base) = this
386 .read_with(cx, |this, cx| {
387 let tracked_buffer = this
388 .tracked_buffers
389 .get(buffer)
390 .context("buffer not tracked")?;
391 let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
392 let agent_diff_base = tracked_buffer.diff_base.clone();
393 let git_diff_base = git_diff.read(cx).base_text().as_rope().clone();
394 let buffer_text = tracked_buffer.snapshot.as_rope().clone();
395 anyhow::Ok(cx.background_spawn(async move {
396 let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
397 let committed_edits = language::line_diff(
398 &agent_diff_base.to_string(),
399 &git_diff_base.to_string(),
400 )
401 .into_iter()
402 .map(|(old, new)| Edit { old, new });
403
404 let mut new_agent_diff_base = agent_diff_base.clone();
405 let mut row_delta = 0i32;
406 for committed in committed_edits {
407 while let Some(unreviewed) = old_unreviewed_edits.peek() {
408 // If the committed edit matches the unreviewed
409 // edit, assume the user wants to keep it.
410 if committed.old == unreviewed.old {
411 let unreviewed_new =
412 buffer_text.slice_rows(unreviewed.new.clone()).to_string();
413 let committed_new =
414 git_diff_base.slice_rows(committed.new.clone()).to_string();
415 if unreviewed_new == committed_new {
416 let old_byte_start =
417 new_agent_diff_base.point_to_offset(Point::new(
418 (unreviewed.old.start as i32 + row_delta) as u32,
419 0,
420 ));
421 let old_byte_end =
422 new_agent_diff_base.point_to_offset(cmp::min(
423 Point::new(
424 (unreviewed.old.end as i32 + row_delta) as u32,
425 0,
426 ),
427 new_agent_diff_base.max_point(),
428 ));
429 new_agent_diff_base
430 .replace(old_byte_start..old_byte_end, &unreviewed_new);
431 row_delta +=
432 unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
433 }
434 } else if unreviewed.old.start >= committed.old.end {
435 break;
436 }
437
438 old_unreviewed_edits.next().unwrap();
439 }
440 }
441
442 (
443 Arc::new(new_agent_diff_base.to_string()),
444 new_agent_diff_base,
445 )
446 }))
447 })??
448 .await;
449
450 Self::update_diff(
451 this,
452 buffer,
453 buffer_snapshot,
454 new_base_text,
455 new_diff_base,
456 cx,
457 )
458 .await
459 }
460
461 async fn update_diff(
462 this: &WeakEntity<ActionLog>,
463 buffer: &Entity<Buffer>,
464 buffer_snapshot: text::BufferSnapshot,
465 new_base_text: Arc<String>,
466 new_diff_base: Rope,
467 cx: &mut AsyncApp,
468 ) -> Result<()> {
469 let (diff, language, language_registry) = this.read_with(cx, |this, cx| {
470 let tracked_buffer = this
471 .tracked_buffers
472 .get(buffer)
473 .context("buffer not tracked")?;
474 anyhow::Ok((
475 tracked_buffer.diff.clone(),
476 buffer.read(cx).language().cloned(),
477 buffer.read(cx).language_registry().clone(),
478 ))
479 })??;
480 let diff_snapshot = BufferDiff::update_diff(
481 diff.clone(),
482 buffer_snapshot.clone(),
483 Some(new_base_text),
484 true,
485 false,
486 language,
487 language_registry,
488 cx,
489 )
490 .await;
491 let mut unreviewed_edits = Patch::default();
492 if let Ok(diff_snapshot) = diff_snapshot {
493 unreviewed_edits = cx
494 .background_spawn({
495 let diff_snapshot = diff_snapshot.clone();
496 let buffer_snapshot = buffer_snapshot.clone();
497 let new_diff_base = new_diff_base.clone();
498 async move {
499 let mut unreviewed_edits = Patch::default();
500 for hunk in diff_snapshot
501 .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer_snapshot)
502 {
503 let old_range = new_diff_base
504 .offset_to_point(hunk.diff_base_byte_range.start)
505 ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
506 let new_range = hunk.range.start..hunk.range.end;
507 unreviewed_edits.push(point_to_row_edit(
508 Edit {
509 old: old_range,
510 new: new_range,
511 },
512 &new_diff_base,
513 &buffer_snapshot.as_rope(),
514 ));
515 }
516 unreviewed_edits
517 }
518 })
519 .await;
520
521 diff.update(cx, |diff, cx| {
522 diff.set_snapshot(diff_snapshot, &buffer_snapshot, cx);
523 })?;
524 }
525 this.update(cx, |this, cx| {
526 let tracked_buffer = this
527 .tracked_buffers
528 .get_mut(buffer)
529 .context("buffer not tracked")?;
530 tracked_buffer.diff_base = new_diff_base;
531 tracked_buffer.snapshot = buffer_snapshot;
532 tracked_buffer.unreviewed_edits = unreviewed_edits;
533 cx.notify();
534 anyhow::Ok(())
535 })?
536 }
537
538 /// Track a buffer as read by agent, so we can notify the model about user edits.
539 pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
540 self.track_buffer_internal(buffer, false, cx);
541 }
542
543 /// Mark a buffer as created by agent, so we can refresh it in the context
544 pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
545 self.edited_since_project_diagnostics_check = true;
546 self.track_buffer_internal(buffer.clone(), true, cx);
547 }
548
549 /// Mark a buffer as edited by agent, so we can refresh it in the context
550 pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
551 self.edited_since_project_diagnostics_check = true;
552
553 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
554 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
555 tracked_buffer.status = TrackedBufferStatus::Modified;
556 }
557 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
558 }
559
560 pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
561 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
562 match tracked_buffer.status {
563 TrackedBufferStatus::Created { .. } => {
564 self.tracked_buffers.remove(&buffer);
565 cx.notify();
566 }
567 TrackedBufferStatus::Modified => {
568 buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
569 tracked_buffer.status = TrackedBufferStatus::Deleted;
570 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
571 }
572 TrackedBufferStatus::Deleted => {}
573 }
574 cx.notify();
575 }
576
577 pub fn keep_edits_in_range(
578 &mut self,
579 buffer: Entity<Buffer>,
580 buffer_range: Range<impl language::ToPoint>,
581 cx: &mut Context<Self>,
582 ) {
583 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
584 return;
585 };
586
587 match tracked_buffer.status {
588 TrackedBufferStatus::Deleted => {
589 self.tracked_buffers.remove(&buffer);
590 cx.notify();
591 }
592 _ => {
593 let buffer = buffer.read(cx);
594 let buffer_range =
595 buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
596 let mut delta = 0i32;
597
598 tracked_buffer.unreviewed_edits.retain_mut(|edit| {
599 edit.old.start = (edit.old.start as i32 + delta) as u32;
600 edit.old.end = (edit.old.end as i32 + delta) as u32;
601
602 if buffer_range.end.row < edit.new.start
603 || buffer_range.start.row > edit.new.end
604 {
605 true
606 } else {
607 let old_range = tracked_buffer
608 .diff_base
609 .point_to_offset(Point::new(edit.old.start, 0))
610 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
611 Point::new(edit.old.end, 0),
612 tracked_buffer.diff_base.max_point(),
613 ));
614 let new_range = tracked_buffer
615 .snapshot
616 .point_to_offset(Point::new(edit.new.start, 0))
617 ..tracked_buffer.snapshot.point_to_offset(cmp::min(
618 Point::new(edit.new.end, 0),
619 tracked_buffer.snapshot.max_point(),
620 ));
621 tracked_buffer.diff_base.replace(
622 old_range,
623 &tracked_buffer
624 .snapshot
625 .text_for_range(new_range)
626 .collect::<String>(),
627 );
628 delta += edit.new_len() as i32 - edit.old_len() as i32;
629 false
630 }
631 });
632 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
633 }
634 }
635 }
636
637 pub fn reject_edits_in_ranges(
638 &mut self,
639 buffer: Entity<Buffer>,
640 buffer_ranges: Vec<Range<impl language::ToPoint>>,
641 cx: &mut Context<Self>,
642 ) -> Task<Result<()>> {
643 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
644 return Task::ready(Ok(()));
645 };
646
647 match &tracked_buffer.status {
648 TrackedBufferStatus::Created {
649 existing_file_content,
650 } => {
651 let task = if let Some(existing_file_content) = existing_file_content {
652 buffer.update(cx, |buffer, cx| {
653 buffer.start_transaction();
654 buffer.set_text("", cx);
655 for chunk in existing_file_content.chunks() {
656 buffer.append(chunk, cx);
657 }
658 buffer.end_transaction(cx);
659 });
660 self.project
661 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
662 } else {
663 // For a file created by AI with no pre-existing content,
664 // only delete the file if we're certain it contains only AI content
665 // with no edits from the user.
666
667 let initial_version = tracked_buffer.version.clone();
668 let current_version = buffer.read(cx).version();
669
670 let current_content = buffer.read(cx).text();
671 let tracked_content = tracked_buffer.snapshot.text();
672
673 let is_ai_only_content =
674 initial_version == current_version && current_content == tracked_content;
675
676 if is_ai_only_content {
677 buffer
678 .read(cx)
679 .entry_id(cx)
680 .and_then(|entry_id| {
681 self.project.update(cx, |project, cx| {
682 project.delete_entry(entry_id, false, cx)
683 })
684 })
685 .unwrap_or(Task::ready(Ok(())))
686 } else {
687 // Not sure how to disentangle edits made by the user
688 // from edits made by the AI at this point.
689 // For now, preserve both to avoid data loss.
690 //
691 // TODO: Better solution (disable "Reject" after user makes some
692 // edit or find a way to differentiate between AI and user edits)
693 Task::ready(Ok(()))
694 }
695 };
696
697 self.tracked_buffers.remove(&buffer);
698 cx.notify();
699 task
700 }
701 TrackedBufferStatus::Deleted => {
702 buffer.update(cx, |buffer, cx| {
703 buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
704 });
705 let save = self
706 .project
707 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
708
709 // Clear all tracked edits for this buffer and start over as if we just read it.
710 self.tracked_buffers.remove(&buffer);
711 self.buffer_read(buffer.clone(), cx);
712 cx.notify();
713 save
714 }
715 TrackedBufferStatus::Modified => {
716 buffer.update(cx, |buffer, cx| {
717 let mut buffer_row_ranges = buffer_ranges
718 .into_iter()
719 .map(|range| {
720 range.start.to_point(buffer).row..range.end.to_point(buffer).row
721 })
722 .peekable();
723
724 let mut edits_to_revert = Vec::new();
725 for edit in tracked_buffer.unreviewed_edits.edits() {
726 let new_range = tracked_buffer
727 .snapshot
728 .anchor_before(Point::new(edit.new.start, 0))
729 ..tracked_buffer.snapshot.anchor_after(cmp::min(
730 Point::new(edit.new.end, 0),
731 tracked_buffer.snapshot.max_point(),
732 ));
733 let new_row_range = new_range.start.to_point(buffer).row
734 ..new_range.end.to_point(buffer).row;
735
736 let mut revert = false;
737 while let Some(buffer_row_range) = buffer_row_ranges.peek() {
738 if buffer_row_range.end < new_row_range.start {
739 buffer_row_ranges.next();
740 } else if buffer_row_range.start > new_row_range.end {
741 break;
742 } else {
743 revert = true;
744 break;
745 }
746 }
747
748 if revert {
749 let old_range = tracked_buffer
750 .diff_base
751 .point_to_offset(Point::new(edit.old.start, 0))
752 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
753 Point::new(edit.old.end, 0),
754 tracked_buffer.diff_base.max_point(),
755 ));
756 let old_text = tracked_buffer
757 .diff_base
758 .chunks_in_range(old_range)
759 .collect::<String>();
760 edits_to_revert.push((new_range, old_text));
761 }
762 }
763
764 buffer.edit(edits_to_revert, None, cx);
765 });
766 self.project
767 .update(cx, |project, cx| project.save_buffer(buffer, cx))
768 }
769 }
770 }
771
772 pub fn keep_all_edits(&mut self, cx: &mut Context<Self>) {
773 self.tracked_buffers
774 .retain(|_buffer, tracked_buffer| match tracked_buffer.status {
775 TrackedBufferStatus::Deleted => false,
776 _ => {
777 tracked_buffer.unreviewed_edits.clear();
778 tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
779 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
780 true
781 }
782 });
783 cx.notify();
784 }
785
786 pub fn reject_all_edits(&mut self, cx: &mut Context<Self>) -> Task<()> {
787 let futures = self.changed_buffers(cx).into_keys().map(|buffer| {
788 let reject = self.reject_edits_in_ranges(buffer, vec![Anchor::MIN..Anchor::MAX], cx);
789
790 async move {
791 reject.await.log_err();
792 }
793 });
794
795 let task = futures::future::join_all(futures);
796
797 cx.spawn(async move |_, _| {
798 task.await;
799 })
800 }
801
802 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
803 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
804 self.tracked_buffers
805 .iter()
806 .filter(|(_, tracked)| tracked.has_edits(cx))
807 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
808 .collect()
809 }
810
811 /// Iterate over buffers changed since last read or edited by the model
812 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
813 self.tracked_buffers
814 .iter()
815 .filter(|(buffer, tracked)| {
816 let buffer = buffer.read(cx);
817
818 tracked.version != buffer.version
819 && buffer
820 .file()
821 .map_or(false, |file| file.disk_state() != DiskState::Deleted)
822 })
823 .map(|(buffer, _)| buffer)
824 }
825}
826
827fn apply_non_conflicting_edits(
828 patch: &Patch<u32>,
829 edits: Vec<Edit<u32>>,
830 old_text: &mut Rope,
831 new_text: &Rope,
832) {
833 let mut old_edits = patch.edits().iter().cloned().peekable();
834 let mut new_edits = edits.into_iter().peekable();
835 let mut applied_delta = 0i32;
836 let mut rebased_delta = 0i32;
837
838 while let Some(mut new_edit) = new_edits.next() {
839 let mut conflict = false;
840
841 // Push all the old edits that are before this new edit or that intersect with it.
842 while let Some(old_edit) = old_edits.peek() {
843 if new_edit.old.end < old_edit.new.start
844 || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
845 {
846 break;
847 } else if new_edit.old.start > old_edit.new.end
848 || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
849 {
850 let old_edit = old_edits.next().unwrap();
851 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
852 } else {
853 conflict = true;
854 if new_edits
855 .peek()
856 .map_or(false, |next_edit| next_edit.old.overlaps(&old_edit.new))
857 {
858 new_edit = new_edits.next().unwrap();
859 } else {
860 let old_edit = old_edits.next().unwrap();
861 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
862 }
863 }
864 }
865
866 if !conflict {
867 // This edit doesn't intersect with any old edit, so we can apply it to the old text.
868 new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
869 new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
870 let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
871 ..old_text.point_to_offset(cmp::min(
872 Point::new(new_edit.old.end, 0),
873 old_text.max_point(),
874 ));
875 let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
876 ..new_text.point_to_offset(cmp::min(
877 Point::new(new_edit.new.end, 0),
878 new_text.max_point(),
879 ));
880
881 old_text.replace(
882 old_bytes,
883 &new_text.chunks_in_range(new_bytes).collect::<String>(),
884 );
885 applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
886 }
887 }
888}
889
890fn diff_snapshots(
891 old_snapshot: &text::BufferSnapshot,
892 new_snapshot: &text::BufferSnapshot,
893) -> Vec<Edit<u32>> {
894 let mut edits = new_snapshot
895 .edits_since::<Point>(&old_snapshot.version)
896 .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
897 .peekable();
898 let mut row_edits = Vec::new();
899 while let Some(mut edit) = edits.next() {
900 while let Some(next_edit) = edits.peek() {
901 if edit.old.end >= next_edit.old.start {
902 edit.old.end = next_edit.old.end;
903 edit.new.end = next_edit.new.end;
904 edits.next();
905 } else {
906 break;
907 }
908 }
909 row_edits.push(edit);
910 }
911 row_edits
912}
913
914fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
915 if edit.old.start.column == old_text.line_len(edit.old.start.row)
916 && new_text
917 .chars_at(new_text.point_to_offset(edit.new.start))
918 .next()
919 == Some('\n')
920 && edit.old.start != old_text.max_point()
921 {
922 Edit {
923 old: edit.old.start.row + 1..edit.old.end.row + 1,
924 new: edit.new.start.row + 1..edit.new.end.row + 1,
925 }
926 } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
927 Edit {
928 old: edit.old.start.row..edit.old.end.row,
929 new: edit.new.start.row..edit.new.end.row,
930 }
931 } else {
932 Edit {
933 old: edit.old.start.row..edit.old.end.row + 1,
934 new: edit.new.start.row..edit.new.end.row + 1,
935 }
936 }
937}
938
939#[derive(Copy, Clone, Debug)]
940enum ChangeAuthor {
941 User,
942 Agent,
943}
944
945enum TrackedBufferStatus {
946 Created { existing_file_content: Option<Rope> },
947 Modified,
948 Deleted,
949}
950
951struct TrackedBuffer {
952 buffer: Entity<Buffer>,
953 diff_base: Rope,
954 last_seen_base: Rope,
955 unreviewed_edits: Patch<u32>,
956 status: TrackedBufferStatus,
957 version: clock::Global,
958 diff: Entity<BufferDiff>,
959 snapshot: text::BufferSnapshot,
960 diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
961 has_unnotified_user_edits: bool,
962 _open_lsp_handle: OpenLspBufferHandle,
963 _maintain_diff: Task<()>,
964 _subscription: Subscription,
965}
966
967impl TrackedBuffer {
968 fn has_edits(&self, cx: &App) -> bool {
969 self.diff
970 .read(cx)
971 .hunks(&self.buffer.read(cx), cx)
972 .next()
973 .is_some()
974 }
975
976 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
977 self.diff_update
978 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
979 .ok();
980 }
981}
982
983pub struct ChangedBuffer {
984 pub diff: Entity<BufferDiff>,
985}
986
987#[cfg(test)]
988mod tests {
989 use super::*;
990 use buffer_diff::DiffHunkStatusKind;
991 use gpui::TestAppContext;
992 use indoc::indoc;
993 use language::Point;
994 use project::{FakeFs, Fs, Project, RemoveOptions};
995 use rand::prelude::*;
996 use serde_json::json;
997 use settings::SettingsStore;
998 use std::env;
999 use util::{RandomCharIter, path};
1000
1001 #[ctor::ctor]
1002 fn init_logger() {
1003 zlog::init_test();
1004 }
1005
1006 fn init_test(cx: &mut TestAppContext) {
1007 cx.update(|cx| {
1008 let settings_store = SettingsStore::test(cx);
1009 cx.set_global(settings_store);
1010 language::init(cx);
1011 Project::init_settings(cx);
1012 });
1013 }
1014
1015 #[gpui::test(iterations = 10)]
1016 async fn test_keep_edits(cx: &mut TestAppContext) {
1017 init_test(cx);
1018
1019 let fs = FakeFs::new(cx.executor());
1020 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1021 .await;
1022 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1023 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1024 let file_path = project
1025 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1026 .unwrap();
1027 let buffer = project
1028 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1029 .await
1030 .unwrap();
1031
1032 cx.update(|cx| {
1033 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1034 buffer.update(cx, |buffer, cx| {
1035 buffer
1036 .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
1037 .unwrap()
1038 });
1039 buffer.update(cx, |buffer, cx| {
1040 buffer
1041 .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
1042 .unwrap()
1043 });
1044 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1045 });
1046 cx.run_until_parked();
1047 assert_eq!(
1048 buffer.read_with(cx, |buffer, _| buffer.text()),
1049 "abc\ndEf\nghi\njkl\nmnO"
1050 );
1051 assert_eq!(
1052 unreviewed_hunks(&action_log, cx),
1053 vec![(
1054 buffer.clone(),
1055 vec![
1056 HunkStatus {
1057 range: Point::new(1, 0)..Point::new(2, 0),
1058 diff_status: DiffHunkStatusKind::Modified,
1059 old_text: "def\n".into(),
1060 },
1061 HunkStatus {
1062 range: Point::new(4, 0)..Point::new(4, 3),
1063 diff_status: DiffHunkStatusKind::Modified,
1064 old_text: "mno".into(),
1065 }
1066 ],
1067 )]
1068 );
1069
1070 action_log.update(cx, |log, cx| {
1071 log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), cx)
1072 });
1073 cx.run_until_parked();
1074 assert_eq!(
1075 unreviewed_hunks(&action_log, cx),
1076 vec![(
1077 buffer.clone(),
1078 vec![HunkStatus {
1079 range: Point::new(1, 0)..Point::new(2, 0),
1080 diff_status: DiffHunkStatusKind::Modified,
1081 old_text: "def\n".into(),
1082 }],
1083 )]
1084 );
1085
1086 action_log.update(cx, |log, cx| {
1087 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), cx)
1088 });
1089 cx.run_until_parked();
1090 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1091 }
1092
1093 #[gpui::test(iterations = 10)]
1094 async fn test_deletions(cx: &mut TestAppContext) {
1095 init_test(cx);
1096
1097 let fs = FakeFs::new(cx.executor());
1098 fs.insert_tree(
1099 path!("/dir"),
1100 json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
1101 )
1102 .await;
1103 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1104 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1105 let file_path = project
1106 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1107 .unwrap();
1108 let buffer = project
1109 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1110 .await
1111 .unwrap();
1112
1113 cx.update(|cx| {
1114 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1115 buffer.update(cx, |buffer, cx| {
1116 buffer
1117 .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
1118 .unwrap();
1119 buffer.finalize_last_transaction();
1120 });
1121 buffer.update(cx, |buffer, cx| {
1122 buffer
1123 .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
1124 .unwrap();
1125 buffer.finalize_last_transaction();
1126 });
1127 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1128 });
1129 cx.run_until_parked();
1130 assert_eq!(
1131 buffer.read_with(cx, |buffer, _| buffer.text()),
1132 "abc\nghi\njkl\npqr"
1133 );
1134 assert_eq!(
1135 unreviewed_hunks(&action_log, cx),
1136 vec![(
1137 buffer.clone(),
1138 vec![
1139 HunkStatus {
1140 range: Point::new(1, 0)..Point::new(1, 0),
1141 diff_status: DiffHunkStatusKind::Deleted,
1142 old_text: "def\n".into(),
1143 },
1144 HunkStatus {
1145 range: Point::new(3, 0)..Point::new(3, 0),
1146 diff_status: DiffHunkStatusKind::Deleted,
1147 old_text: "mno\n".into(),
1148 }
1149 ],
1150 )]
1151 );
1152
1153 buffer.update(cx, |buffer, cx| buffer.undo(cx));
1154 cx.run_until_parked();
1155 assert_eq!(
1156 buffer.read_with(cx, |buffer, _| buffer.text()),
1157 "abc\nghi\njkl\nmno\npqr"
1158 );
1159 assert_eq!(
1160 unreviewed_hunks(&action_log, cx),
1161 vec![(
1162 buffer.clone(),
1163 vec![HunkStatus {
1164 range: Point::new(1, 0)..Point::new(1, 0),
1165 diff_status: DiffHunkStatusKind::Deleted,
1166 old_text: "def\n".into(),
1167 }],
1168 )]
1169 );
1170
1171 action_log.update(cx, |log, cx| {
1172 log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), cx)
1173 });
1174 cx.run_until_parked();
1175 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1176 }
1177
1178 #[gpui::test(iterations = 10)]
1179 async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
1180 init_test(cx);
1181
1182 let fs = FakeFs::new(cx.executor());
1183 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1184 .await;
1185 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1186 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1187 let file_path = project
1188 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1189 .unwrap();
1190 let buffer = project
1191 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1192 .await
1193 .unwrap();
1194
1195 cx.update(|cx| {
1196 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1197 buffer.update(cx, |buffer, cx| {
1198 buffer
1199 .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
1200 .unwrap()
1201 });
1202 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1203 });
1204 cx.run_until_parked();
1205 assert_eq!(
1206 buffer.read_with(cx, |buffer, _| buffer.text()),
1207 "abc\ndeF\nGHI\njkl\nmno"
1208 );
1209 assert_eq!(
1210 unreviewed_hunks(&action_log, cx),
1211 vec![(
1212 buffer.clone(),
1213 vec![HunkStatus {
1214 range: Point::new(1, 0)..Point::new(3, 0),
1215 diff_status: DiffHunkStatusKind::Modified,
1216 old_text: "def\nghi\n".into(),
1217 }],
1218 )]
1219 );
1220
1221 buffer.update(cx, |buffer, cx| {
1222 buffer.edit(
1223 [
1224 (Point::new(0, 2)..Point::new(0, 2), "X"),
1225 (Point::new(3, 0)..Point::new(3, 0), "Y"),
1226 ],
1227 None,
1228 cx,
1229 )
1230 });
1231 cx.run_until_parked();
1232 assert_eq!(
1233 buffer.read_with(cx, |buffer, _| buffer.text()),
1234 "abXc\ndeF\nGHI\nYjkl\nmno"
1235 );
1236 assert_eq!(
1237 unreviewed_hunks(&action_log, cx),
1238 vec![(
1239 buffer.clone(),
1240 vec![HunkStatus {
1241 range: Point::new(1, 0)..Point::new(3, 0),
1242 diff_status: DiffHunkStatusKind::Modified,
1243 old_text: "def\nghi\n".into(),
1244 }],
1245 )]
1246 );
1247
1248 buffer.update(cx, |buffer, cx| {
1249 buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
1250 });
1251 cx.run_until_parked();
1252 assert_eq!(
1253 buffer.read_with(cx, |buffer, _| buffer.text()),
1254 "abXc\ndZeF\nGHI\nYjkl\nmno"
1255 );
1256 assert_eq!(
1257 unreviewed_hunks(&action_log, cx),
1258 vec![(
1259 buffer.clone(),
1260 vec![HunkStatus {
1261 range: Point::new(1, 0)..Point::new(3, 0),
1262 diff_status: DiffHunkStatusKind::Modified,
1263 old_text: "def\nghi\n".into(),
1264 }],
1265 )]
1266 );
1267
1268 action_log.update(cx, |log, cx| {
1269 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
1270 });
1271 cx.run_until_parked();
1272 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1273 }
1274
1275 #[gpui::test(iterations = 10)]
1276 async fn test_user_edits_notifications(cx: &mut TestAppContext) {
1277 init_test(cx);
1278
1279 let fs = FakeFs::new(cx.executor());
1280 fs.insert_tree(
1281 path!("/dir"),
1282 json!({"file": indoc! {"
1283 abc
1284 def
1285 ghi
1286 jkl
1287 mno"}}),
1288 )
1289 .await;
1290 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1291 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1292 let file_path = project
1293 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1294 .unwrap();
1295 let buffer = project
1296 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1297 .await
1298 .unwrap();
1299
1300 // Agent edits
1301 cx.update(|cx| {
1302 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1303 buffer.update(cx, |buffer, cx| {
1304 buffer
1305 .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
1306 .unwrap()
1307 });
1308 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1309 });
1310 cx.run_until_parked();
1311 assert_eq!(
1312 buffer.read_with(cx, |buffer, _| buffer.text()),
1313 indoc! {"
1314 abc
1315 deF
1316 GHI
1317 jkl
1318 mno"}
1319 );
1320 assert_eq!(
1321 unreviewed_hunks(&action_log, cx),
1322 vec![(
1323 buffer.clone(),
1324 vec![HunkStatus {
1325 range: Point::new(1, 0)..Point::new(3, 0),
1326 diff_status: DiffHunkStatusKind::Modified,
1327 old_text: "def\nghi\n".into(),
1328 }],
1329 )]
1330 );
1331
1332 // User edits
1333 buffer.update(cx, |buffer, cx| {
1334 buffer.edit(
1335 [
1336 (Point::new(0, 2)..Point::new(0, 2), "X"),
1337 (Point::new(3, 0)..Point::new(3, 0), "Y"),
1338 ],
1339 None,
1340 cx,
1341 )
1342 });
1343 cx.run_until_parked();
1344 assert_eq!(
1345 buffer.read_with(cx, |buffer, _| buffer.text()),
1346 indoc! {"
1347 abXc
1348 deF
1349 GHI
1350 Yjkl
1351 mno"}
1352 );
1353
1354 // User edits should be stored separately from agent's
1355 let user_edits = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));
1356 assert_eq!(
1357 user_edits.expect("should have some user edits"),
1358 indoc! {"
1359 --- a/dir/file
1360 +++ b/dir/file
1361 @@ -1,5 +1,5 @@
1362 -abc
1363 +abXc
1364 def
1365 ghi
1366 -jkl
1367 +Yjkl
1368 mno
1369 "}
1370 );
1371
1372 action_log.update(cx, |log, cx| {
1373 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
1374 });
1375 cx.run_until_parked();
1376 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1377 }
1378
1379 #[gpui::test(iterations = 10)]
1380 async fn test_creating_files(cx: &mut TestAppContext) {
1381 init_test(cx);
1382
1383 let fs = FakeFs::new(cx.executor());
1384 fs.insert_tree(path!("/dir"), json!({})).await;
1385 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1386 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1387 let file_path = project
1388 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1389 .unwrap();
1390
1391 let buffer = project
1392 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1393 .await
1394 .unwrap();
1395 cx.update(|cx| {
1396 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1397 buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
1398 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1399 });
1400 project
1401 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1402 .await
1403 .unwrap();
1404 cx.run_until_parked();
1405 assert_eq!(
1406 unreviewed_hunks(&action_log, cx),
1407 vec![(
1408 buffer.clone(),
1409 vec![HunkStatus {
1410 range: Point::new(0, 0)..Point::new(0, 5),
1411 diff_status: DiffHunkStatusKind::Added,
1412 old_text: "".into(),
1413 }],
1414 )]
1415 );
1416
1417 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
1418 cx.run_until_parked();
1419 assert_eq!(
1420 unreviewed_hunks(&action_log, cx),
1421 vec![(
1422 buffer.clone(),
1423 vec![HunkStatus {
1424 range: Point::new(0, 0)..Point::new(0, 6),
1425 diff_status: DiffHunkStatusKind::Added,
1426 old_text: "".into(),
1427 }],
1428 )]
1429 );
1430
1431 action_log.update(cx, |log, cx| {
1432 log.keep_edits_in_range(buffer.clone(), 0..5, cx)
1433 });
1434 cx.run_until_parked();
1435 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1436 }
1437
1438 #[gpui::test(iterations = 10)]
1439 async fn test_overwriting_files(cx: &mut TestAppContext) {
1440 init_test(cx);
1441
1442 let fs = FakeFs::new(cx.executor());
1443 fs.insert_tree(
1444 path!("/dir"),
1445 json!({
1446 "file1": "Lorem ipsum dolor"
1447 }),
1448 )
1449 .await;
1450 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1451 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1452 let file_path = project
1453 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1454 .unwrap();
1455
1456 let buffer = project
1457 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1458 .await
1459 .unwrap();
1460 cx.update(|cx| {
1461 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1462 buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
1463 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1464 });
1465 project
1466 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1467 .await
1468 .unwrap();
1469 cx.run_until_parked();
1470 assert_eq!(
1471 unreviewed_hunks(&action_log, cx),
1472 vec![(
1473 buffer.clone(),
1474 vec![HunkStatus {
1475 range: Point::new(0, 0)..Point::new(0, 19),
1476 diff_status: DiffHunkStatusKind::Added,
1477 old_text: "".into(),
1478 }],
1479 )]
1480 );
1481
1482 action_log
1483 .update(cx, |log, cx| {
1484 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
1485 })
1486 .await
1487 .unwrap();
1488 cx.run_until_parked();
1489 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1490 assert_eq!(
1491 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1492 "Lorem ipsum dolor"
1493 );
1494 }
1495
1496 #[gpui::test(iterations = 10)]
1497 async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
1498 init_test(cx);
1499
1500 let fs = FakeFs::new(cx.executor());
1501 fs.insert_tree(
1502 path!("/dir"),
1503 json!({
1504 "file1": "Lorem ipsum dolor"
1505 }),
1506 )
1507 .await;
1508 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1509 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1510 let file_path = project
1511 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1512 .unwrap();
1513
1514 let buffer = project
1515 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1516 .await
1517 .unwrap();
1518 cx.update(|cx| {
1519 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1520 buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
1521 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1522 });
1523 project
1524 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1525 .await
1526 .unwrap();
1527 cx.run_until_parked();
1528 assert_eq!(
1529 unreviewed_hunks(&action_log, cx),
1530 vec![(
1531 buffer.clone(),
1532 vec![HunkStatus {
1533 range: Point::new(0, 0)..Point::new(0, 37),
1534 diff_status: DiffHunkStatusKind::Modified,
1535 old_text: "Lorem ipsum dolor".into(),
1536 }],
1537 )]
1538 );
1539
1540 cx.update(|cx| {
1541 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1542 buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
1543 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1544 });
1545 project
1546 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1547 .await
1548 .unwrap();
1549 cx.run_until_parked();
1550 assert_eq!(
1551 unreviewed_hunks(&action_log, cx),
1552 vec![(
1553 buffer.clone(),
1554 vec![HunkStatus {
1555 range: Point::new(0, 0)..Point::new(0, 9),
1556 diff_status: DiffHunkStatusKind::Added,
1557 old_text: "".into(),
1558 }],
1559 )]
1560 );
1561
1562 action_log
1563 .update(cx, |log, cx| {
1564 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
1565 })
1566 .await
1567 .unwrap();
1568 cx.run_until_parked();
1569 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1570 assert_eq!(
1571 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1572 "Lorem ipsum dolor"
1573 );
1574 }
1575
1576 #[gpui::test(iterations = 10)]
1577 async fn test_deleting_files(cx: &mut TestAppContext) {
1578 init_test(cx);
1579
1580 let fs = FakeFs::new(cx.executor());
1581 fs.insert_tree(
1582 path!("/dir"),
1583 json!({"file1": "lorem\n", "file2": "ipsum\n"}),
1584 )
1585 .await;
1586
1587 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1588 let file1_path = project
1589 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1590 .unwrap();
1591 let file2_path = project
1592 .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
1593 .unwrap();
1594
1595 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1596 let buffer1 = project
1597 .update(cx, |project, cx| {
1598 project.open_buffer(file1_path.clone(), cx)
1599 })
1600 .await
1601 .unwrap();
1602 let buffer2 = project
1603 .update(cx, |project, cx| {
1604 project.open_buffer(file2_path.clone(), cx)
1605 })
1606 .await
1607 .unwrap();
1608
1609 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
1610 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
1611 project
1612 .update(cx, |project, cx| {
1613 project.delete_file(file1_path.clone(), false, cx)
1614 })
1615 .unwrap()
1616 .await
1617 .unwrap();
1618 project
1619 .update(cx, |project, cx| {
1620 project.delete_file(file2_path.clone(), false, cx)
1621 })
1622 .unwrap()
1623 .await
1624 .unwrap();
1625 cx.run_until_parked();
1626 assert_eq!(
1627 unreviewed_hunks(&action_log, cx),
1628 vec![
1629 (
1630 buffer1.clone(),
1631 vec![HunkStatus {
1632 range: Point::new(0, 0)..Point::new(0, 0),
1633 diff_status: DiffHunkStatusKind::Deleted,
1634 old_text: "lorem\n".into(),
1635 }]
1636 ),
1637 (
1638 buffer2.clone(),
1639 vec![HunkStatus {
1640 range: Point::new(0, 0)..Point::new(0, 0),
1641 diff_status: DiffHunkStatusKind::Deleted,
1642 old_text: "ipsum\n".into(),
1643 }],
1644 )
1645 ]
1646 );
1647
1648 // Simulate file1 being recreated externally.
1649 fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
1650 .await;
1651
1652 // Simulate file2 being recreated by a tool.
1653 let buffer2 = project
1654 .update(cx, |project, cx| project.open_buffer(file2_path, cx))
1655 .await
1656 .unwrap();
1657 action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
1658 buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
1659 action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
1660 project
1661 .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
1662 .await
1663 .unwrap();
1664
1665 cx.run_until_parked();
1666 assert_eq!(
1667 unreviewed_hunks(&action_log, cx),
1668 vec![(
1669 buffer2.clone(),
1670 vec![HunkStatus {
1671 range: Point::new(0, 0)..Point::new(0, 5),
1672 diff_status: DiffHunkStatusKind::Added,
1673 old_text: "".into(),
1674 }],
1675 )]
1676 );
1677
1678 // Simulate file2 being deleted externally.
1679 fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
1680 .await
1681 .unwrap();
1682 cx.run_until_parked();
1683 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1684 }
1685
1686 #[gpui::test(iterations = 10)]
1687 async fn test_reject_edits(cx: &mut TestAppContext) {
1688 init_test(cx);
1689
1690 let fs = FakeFs::new(cx.executor());
1691 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1692 .await;
1693 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1694 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1695 let file_path = project
1696 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1697 .unwrap();
1698 let buffer = project
1699 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1700 .await
1701 .unwrap();
1702
1703 cx.update(|cx| {
1704 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1705 buffer.update(cx, |buffer, cx| {
1706 buffer
1707 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1708 .unwrap()
1709 });
1710 buffer.update(cx, |buffer, cx| {
1711 buffer
1712 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1713 .unwrap()
1714 });
1715 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1716 });
1717 cx.run_until_parked();
1718 assert_eq!(
1719 buffer.read_with(cx, |buffer, _| buffer.text()),
1720 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1721 );
1722 assert_eq!(
1723 unreviewed_hunks(&action_log, cx),
1724 vec![(
1725 buffer.clone(),
1726 vec![
1727 HunkStatus {
1728 range: Point::new(1, 0)..Point::new(3, 0),
1729 diff_status: DiffHunkStatusKind::Modified,
1730 old_text: "def\n".into(),
1731 },
1732 HunkStatus {
1733 range: Point::new(5, 0)..Point::new(5, 3),
1734 diff_status: DiffHunkStatusKind::Modified,
1735 old_text: "mno".into(),
1736 }
1737 ],
1738 )]
1739 );
1740
1741 // If the rejected range doesn't overlap with any hunk, we ignore it.
1742 action_log
1743 .update(cx, |log, cx| {
1744 log.reject_edits_in_ranges(
1745 buffer.clone(),
1746 vec![Point::new(4, 0)..Point::new(4, 0)],
1747 cx,
1748 )
1749 })
1750 .await
1751 .unwrap();
1752 cx.run_until_parked();
1753 assert_eq!(
1754 buffer.read_with(cx, |buffer, _| buffer.text()),
1755 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1756 );
1757 assert_eq!(
1758 unreviewed_hunks(&action_log, cx),
1759 vec![(
1760 buffer.clone(),
1761 vec![
1762 HunkStatus {
1763 range: Point::new(1, 0)..Point::new(3, 0),
1764 diff_status: DiffHunkStatusKind::Modified,
1765 old_text: "def\n".into(),
1766 },
1767 HunkStatus {
1768 range: Point::new(5, 0)..Point::new(5, 3),
1769 diff_status: DiffHunkStatusKind::Modified,
1770 old_text: "mno".into(),
1771 }
1772 ],
1773 )]
1774 );
1775
1776 action_log
1777 .update(cx, |log, cx| {
1778 log.reject_edits_in_ranges(
1779 buffer.clone(),
1780 vec![Point::new(0, 0)..Point::new(1, 0)],
1781 cx,
1782 )
1783 })
1784 .await
1785 .unwrap();
1786 cx.run_until_parked();
1787 assert_eq!(
1788 buffer.read_with(cx, |buffer, _| buffer.text()),
1789 "abc\ndef\nghi\njkl\nmnO"
1790 );
1791 assert_eq!(
1792 unreviewed_hunks(&action_log, cx),
1793 vec![(
1794 buffer.clone(),
1795 vec![HunkStatus {
1796 range: Point::new(4, 0)..Point::new(4, 3),
1797 diff_status: DiffHunkStatusKind::Modified,
1798 old_text: "mno".into(),
1799 }],
1800 )]
1801 );
1802
1803 action_log
1804 .update(cx, |log, cx| {
1805 log.reject_edits_in_ranges(
1806 buffer.clone(),
1807 vec![Point::new(4, 0)..Point::new(4, 0)],
1808 cx,
1809 )
1810 })
1811 .await
1812 .unwrap();
1813 cx.run_until_parked();
1814 assert_eq!(
1815 buffer.read_with(cx, |buffer, _| buffer.text()),
1816 "abc\ndef\nghi\njkl\nmno"
1817 );
1818 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1819 }
1820
1821 #[gpui::test(iterations = 10)]
1822 async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
1823 init_test(cx);
1824
1825 let fs = FakeFs::new(cx.executor());
1826 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1827 .await;
1828 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1829 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1830 let file_path = project
1831 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1832 .unwrap();
1833 let buffer = project
1834 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1835 .await
1836 .unwrap();
1837
1838 cx.update(|cx| {
1839 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1840 buffer.update(cx, |buffer, cx| {
1841 buffer
1842 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1843 .unwrap()
1844 });
1845 buffer.update(cx, |buffer, cx| {
1846 buffer
1847 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1848 .unwrap()
1849 });
1850 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1851 });
1852 cx.run_until_parked();
1853 assert_eq!(
1854 buffer.read_with(cx, |buffer, _| buffer.text()),
1855 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1856 );
1857 assert_eq!(
1858 unreviewed_hunks(&action_log, cx),
1859 vec![(
1860 buffer.clone(),
1861 vec![
1862 HunkStatus {
1863 range: Point::new(1, 0)..Point::new(3, 0),
1864 diff_status: DiffHunkStatusKind::Modified,
1865 old_text: "def\n".into(),
1866 },
1867 HunkStatus {
1868 range: Point::new(5, 0)..Point::new(5, 3),
1869 diff_status: DiffHunkStatusKind::Modified,
1870 old_text: "mno".into(),
1871 }
1872 ],
1873 )]
1874 );
1875
1876 action_log.update(cx, |log, cx| {
1877 let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
1878 ..buffer.read(cx).anchor_before(Point::new(1, 0));
1879 let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
1880 ..buffer.read(cx).anchor_before(Point::new(5, 3));
1881
1882 log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], cx)
1883 .detach();
1884 assert_eq!(
1885 buffer.read_with(cx, |buffer, _| buffer.text()),
1886 "abc\ndef\nghi\njkl\nmno"
1887 );
1888 });
1889 cx.run_until_parked();
1890 assert_eq!(
1891 buffer.read_with(cx, |buffer, _| buffer.text()),
1892 "abc\ndef\nghi\njkl\nmno"
1893 );
1894 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1895 }
1896
1897 #[gpui::test(iterations = 10)]
1898 async fn test_reject_deleted_file(cx: &mut TestAppContext) {
1899 init_test(cx);
1900
1901 let fs = FakeFs::new(cx.executor());
1902 fs.insert_tree(path!("/dir"), json!({"file": "content"}))
1903 .await;
1904 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1905 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1906 let file_path = project
1907 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1908 .unwrap();
1909 let buffer = project
1910 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
1911 .await
1912 .unwrap();
1913
1914 cx.update(|cx| {
1915 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
1916 });
1917 project
1918 .update(cx, |project, cx| {
1919 project.delete_file(file_path.clone(), false, cx)
1920 })
1921 .unwrap()
1922 .await
1923 .unwrap();
1924 cx.run_until_parked();
1925 assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
1926 assert_eq!(
1927 unreviewed_hunks(&action_log, cx),
1928 vec![(
1929 buffer.clone(),
1930 vec![HunkStatus {
1931 range: Point::new(0, 0)..Point::new(0, 0),
1932 diff_status: DiffHunkStatusKind::Deleted,
1933 old_text: "content".into(),
1934 }]
1935 )]
1936 );
1937
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 0)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
        assert!(fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

    #[gpui::test(iterations = 10)]
    async fn test_reject_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
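        // The agent creates the buffer, writes its contents, and saves it,
        // producing a single "added" hunk.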
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 7),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

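        // Rejecting the creation reverts the buffer and deletes the file
        // from disk again.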
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 11)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

    #[gpui::test]
    async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // The agent creates the file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();

        // User makes additional edits
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| {
                buffer.edit([(10..10, "\nuser added this line")], None, cx);
            });
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // Reject a range that covers the entire buffer
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(100, 0)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();

        // Because the user edited the file after the agent created it, rejecting
        // must not delete the file or discard its contents
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        let content = buffer.read_with(cx, |buffer, _| buffer.text());
        assert_eq!(content, "ai content\nuser added this line");
    }

    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        init_test(cx);

        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

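        // Each iteration either keeps edits in a random range (~25%), rejects
        // edits in a random range (~25%), or performs a random edit that is
        // attributed to the agent half of the time.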
        for _ in 0..operations {
            match rng.gen_range(0..100) {
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, cx)
                    });
                }
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            log.reject_edits_in_ranges(buffer.clone(), vec![range], cx)
                        })
                        .await
                        .unwrap();
                }
                _ => {
                    let is_agent_edit = rng.gen_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            if rng.gen_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
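                // Replay the unreviewed edits on top of the diff base; the
                // result must equal the buffer's current text.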
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }

    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt".into(), "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

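        // The agent makes five edits spread across the file; each one should
        // show up as an unreviewed hunk.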
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt".into(), "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Another commit accepts the GGG edit and adds a line where NEW was
        // inserted, but with different content, so the NEW hunk stays unreviewed
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[(
                "file.txt".into(),
                "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into(),
            )],
            "0000002",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt".into(), "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

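    /// A simplified view of a single unreviewed hunk, used to compare the
    /// action log's state against expectations in tests.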
    #[derive(Debug, Clone, PartialEq, Eq)]
    struct HunkStatus {
        range: Range<Point>,
        diff_status: DiffHunkStatusKind,
        old_text: String,
    }

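    /// Collects every buffer the action log reports as changed, along with
    /// its unreviewed hunks.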
    fn unreviewed_hunks(
        action_log: &Entity<ActionLog>,
        cx: &TestAppContext,
    ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
        cx.read(|cx| {
            action_log
                .read(cx)
                .changed_buffers(cx)
                .into_iter()
                .map(|(buffer, diff)| {
                    let snapshot = buffer.read(cx).snapshot();
                    (
                        buffer,
                        diff.read(cx)
                            .hunks(&snapshot, cx)
                            .map(|hunk| HunkStatus {
                                diff_status: hunk.status().kind,
                                range: hunk.range,
                                old_text: diff
                                    .read(cx)
                                    .base_text()
                                    .text_for_range(hunk.diff_base_byte_range)
                                    .collect(),
                            })
                            .collect(),
                    )
                })
                .collect()
        })
    }

    #[gpui::test]
    async fn test_format_patch(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"test.txt": "line 1\nline 2\nline 3\n"}),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/test.txt", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

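        // Edit the buffer directly, without calling `buffer_edited`, so the
        // change is tracked as an unnotified user edit.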
        cx.update(|cx| {
            // Track the buffer and mark it as read first
            action_log.update(cx, |log, cx| {
                log.buffer_read(buffer.clone(), cx);
            });

            // Make some edits to create a patch
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 0)..Point::new(1, 6), "CHANGED")], None, cx)
                    .unwrap(); // Replace "line 2" with "CHANGED"
            });
        });

        cx.run_until_parked();

        // Get the patch
        let patch = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));

        // The patch should be a complete unified diff: file headers followed by a single hunk
        assert_eq!(
            patch.unwrap(),
            indoc! {"
                --- a/dir/test.txt
                +++ b/dir/test.txt
                @@ -1,3 +1,3 @@
                 line 1
                -line 2
                +CHANGED
                 line 3
            "}
        );
    }
}