1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use clock;
4use collections::BTreeMap;
5use futures::{FutureExt, StreamExt, channel::mpsc};
6use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
7use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
8use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
9use std::{cmp, ops::Range, sync::Arc};
10use text::{Edit, Patch, Rope};
11use util::{
12 RangeExt, ResultExt as _,
13 paths::{PathStyle, RemotePathBuf},
14};
15
16/// Tracks actions performed by tools in a thread
17pub struct ActionLog {
18 /// Buffers that we want to notify the model about when they change.
19 tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
20 /// Has the model edited a file since it last checked diagnostics?
21 edited_since_project_diagnostics_check: bool,
22 /// The project this action log is associated with
23 project: Entity<Project>,
24}
25
26impl ActionLog {
27 /// Creates a new, empty action log associated with the given project.
28 pub fn new(project: Entity<Project>) -> Self {
29 Self {
30 tracked_buffers: BTreeMap::default(),
31 edited_since_project_diagnostics_check: false,
32 project,
33 }
34 }
35
36 pub fn project(&self) -> &Entity<Project> {
37 &self.project
38 }
39
40 /// Notifies a diagnostics check
41 pub fn checked_project_diagnostics(&mut self) {
42 self.edited_since_project_diagnostics_check = false;
43 }
44
45 /// Returns true if any files have been edited since the last project diagnostics check
46 pub fn has_edited_files_since_project_diagnostics_check(&self) -> bool {
47 self.edited_since_project_diagnostics_check
48 }
49
50 pub fn latest_snapshot(&self, buffer: &Entity<Buffer>) -> Option<text::BufferSnapshot> {
51 Some(self.tracked_buffers.get(buffer)?.snapshot.clone())
52 }
53
54 pub fn has_unnotified_user_edits(&self) -> bool {
55 self.tracked_buffers
56 .values()
57 .any(|tracked| tracked.has_unnotified_user_edits)
58 }
59
60 /// Return a unified diff patch with user edits made since last read or notification
61 pub fn unnotified_user_edits(&self, cx: &Context<Self>) -> Option<String> {
62 if !self.has_unnotified_user_edits() {
63 return None;
64 }
65
66 let unified_diff = self
67 .tracked_buffers
68 .values()
69 .filter_map(|tracked| {
70 if !tracked.has_unnotified_user_edits {
71 return None;
72 }
73
74 let text_with_latest_user_edits = tracked.diff_base.to_string();
75 let text_with_last_seen_user_edits = tracked.last_seen_base.to_string();
76 if text_with_latest_user_edits == text_with_last_seen_user_edits {
77 return None;
78 }
79 let patch = language::unified_diff(
80 &text_with_last_seen_user_edits,
81 &text_with_latest_user_edits,
82 );
83
84 let buffer = tracked.buffer.clone();
85 let file_path = buffer
86 .read(cx)
87 .file()
88 .map(|file| RemotePathBuf::new(file.full_path(cx), PathStyle::Posix).to_proto())
89 .unwrap_or_else(|| format!("buffer_{}", buffer.entity_id()));
90
91 let mut result = String::new();
92 result.push_str(&format!("--- a/{}\n", file_path));
93 result.push_str(&format!("+++ b/{}\n", file_path));
94 result.push_str(&patch);
95
96 Some(result)
97 })
98 .collect::<Vec<_>>()
99 .join("\n\n");
100
101 Some(unified_diff)
102 }
103
104 /// Return a unified diff patch with user edits made since last read/notification
105 /// and mark them as notified
106 pub fn flush_unnotified_user_edits(&mut self, cx: &Context<Self>) -> Option<String> {
107 let patch = self.unnotified_user_edits(cx);
108 self.tracked_buffers.values_mut().for_each(|tracked| {
109 tracked.has_unnotified_user_edits = false;
110 tracked.last_seen_base = tracked.diff_base.clone();
111 });
112 patch
113 }
114
115 fn track_buffer_internal(
116 &mut self,
117 buffer: Entity<Buffer>,
118 is_created: bool,
119 cx: &mut Context<Self>,
120 ) -> &mut TrackedBuffer {
121 let status = if is_created {
122 if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
123 match tracked.status {
124 TrackedBufferStatus::Created {
125 existing_file_content,
126 } => TrackedBufferStatus::Created {
127 existing_file_content,
128 },
129 TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
130 TrackedBufferStatus::Created {
131 existing_file_content: Some(tracked.diff_base),
132 }
133 }
134 }
135 } else if buffer
136 .read(cx)
137 .file()
138 .map_or(false, |file| file.disk_state().exists())
139 {
140 TrackedBufferStatus::Created {
141 existing_file_content: Some(buffer.read(cx).as_rope().clone()),
142 }
143 } else {
144 TrackedBufferStatus::Created {
145 existing_file_content: None,
146 }
147 }
148 } else {
149 TrackedBufferStatus::Modified
150 };
151
152 let tracked_buffer = self
153 .tracked_buffers
154 .entry(buffer.clone())
155 .or_insert_with(|| {
156 let open_lsp_handle = self.project.update(cx, |project, cx| {
157 project.register_buffer_with_language_servers(&buffer, cx)
158 });
159
160 let text_snapshot = buffer.read(cx).text_snapshot();
161 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
162 let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
163 let diff_base;
164 let last_seen_base;
165 let unreviewed_edits;
166 if is_created {
167 diff_base = Rope::default();
168 last_seen_base = Rope::default();
169 unreviewed_edits = Patch::new(vec![Edit {
170 old: 0..1,
171 new: 0..text_snapshot.max_point().row + 1,
172 }])
173 } else {
174 diff_base = buffer.read(cx).as_rope().clone();
175 last_seen_base = diff_base.clone();
176 unreviewed_edits = Patch::default();
177 }
178 TrackedBuffer {
179 buffer: buffer.clone(),
180 diff_base,
181 last_seen_base,
182 unreviewed_edits,
183 snapshot: text_snapshot.clone(),
184 status,
185 version: buffer.read(cx).version(),
186 diff,
187 diff_update: diff_update_tx,
188 has_unnotified_user_edits: false,
189 _open_lsp_handle: open_lsp_handle,
190 _maintain_diff: cx.spawn({
191 let buffer = buffer.clone();
192 async move |this, cx| {
193 Self::maintain_diff(this, buffer, diff_update_rx, cx)
194 .await
195 .ok();
196 }
197 }),
198 _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
199 }
200 });
201 tracked_buffer.version = buffer.read(cx).version();
202 tracked_buffer
203 }
204
205 fn handle_buffer_event(
206 &mut self,
207 buffer: Entity<Buffer>,
208 event: &BufferEvent,
209 cx: &mut Context<Self>,
210 ) {
211 match event {
212 BufferEvent::Edited { .. } => self.handle_buffer_edited(buffer, cx),
213 BufferEvent::FileHandleChanged => {
214 self.handle_buffer_file_changed(buffer, cx);
215 }
216 _ => {}
217 };
218 }
219
220 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
221 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
222 return;
223 };
224 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
225 }
226
227 fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
228 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
229 return;
230 };
231
232 match tracked_buffer.status {
233 TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
234 if buffer
235 .read(cx)
236 .file()
237 .map_or(false, |file| file.disk_state() == DiskState::Deleted)
238 {
239 // If the buffer had been edited by a tool, but it got
240 // deleted externally, we want to stop tracking it.
241 self.tracked_buffers.remove(&buffer);
242 }
243 cx.notify();
244 }
245 TrackedBufferStatus::Deleted => {
246 if buffer
247 .read(cx)
248 .file()
249 .map_or(false, |file| file.disk_state() != DiskState::Deleted)
250 {
251 // If the buffer had been deleted by a tool, but it got
252 // resurrected externally, we want to clear the edits we
253 // were tracking and reset the buffer's state.
254 self.tracked_buffers.remove(&buffer);
255 self.track_buffer_internal(buffer, false, cx);
256 }
257 cx.notify();
258 }
259 }
260 }
261
262 async fn maintain_diff(
263 this: WeakEntity<Self>,
264 buffer: Entity<Buffer>,
265 mut buffer_updates: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
266 cx: &mut AsyncApp,
267 ) -> Result<()> {
268 let git_store = this.read_with(cx, |this, cx| this.project.read(cx).git_store().clone())?;
269 let git_diff = this
270 .update(cx, |this, cx| {
271 this.project.update(cx, |project, cx| {
272 project.open_uncommitted_diff(buffer.clone(), cx)
273 })
274 })?
275 .await
276 .ok();
277 let buffer_repo = git_store.read_with(cx, |git_store, cx| {
278 git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
279 })?;
280
281 let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
282 let _repo_subscription =
283 if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
284 cx.update(|cx| {
285 let mut old_head = buffer_repo.read(cx).head_commit.clone();
286 Some(cx.subscribe(git_diff, move |_, event, cx| match event {
287 buffer_diff::BufferDiffEvent::DiffChanged { .. } => {
288 let new_head = buffer_repo.read(cx).head_commit.clone();
289 if new_head != old_head {
290 old_head = new_head;
291 git_diff_updates_tx.send(()).ok();
292 }
293 }
294 _ => {}
295 }))
296 })?
297 } else {
298 None
299 };
300
301 loop {
302 futures::select_biased! {
303 buffer_update = buffer_updates.next() => {
304 if let Some((author, buffer_snapshot)) = buffer_update {
305 Self::track_edits(&this, &buffer, author, buffer_snapshot, cx).await?;
306 } else {
307 break;
308 }
309 }
310 _ = git_diff_updates_rx.changed().fuse() => {
311 if let Some(git_diff) = git_diff.as_ref() {
312 Self::keep_committed_edits(&this, &buffer, &git_diff, cx).await?;
313 }
314 }
315 }
316 }
317
318 Ok(())
319 }
320
321 async fn track_edits(
322 this: &WeakEntity<ActionLog>,
323 buffer: &Entity<Buffer>,
324 author: ChangeAuthor,
325 buffer_snapshot: text::BufferSnapshot,
326 cx: &mut AsyncApp,
327 ) -> Result<()> {
328 let rebase = this.update(cx, |this, cx| {
329 let tracked_buffer = this
330 .tracked_buffers
331 .get_mut(buffer)
332 .context("buffer not tracked")?;
333
334 if let ChangeAuthor::User = author {
335 tracked_buffer.has_unnotified_user_edits = true;
336 }
337
338 let rebase = cx.background_spawn({
339 let mut base_text = tracked_buffer.diff_base.clone();
340 let old_snapshot = tracked_buffer.snapshot.clone();
341 let new_snapshot = buffer_snapshot.clone();
342 let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
343 let edits = diff_snapshots(&old_snapshot, &new_snapshot);
344 async move {
345 if let ChangeAuthor::User = author {
346 apply_non_conflicting_edits(
347 &unreviewed_edits,
348 edits,
349 &mut base_text,
350 new_snapshot.as_rope(),
351 );
352 }
353 (Arc::new(base_text.to_string()), base_text)
354 }
355 });
356
357 anyhow::Ok(rebase)
358 })??;
359 let (new_base_text, new_diff_base) = rebase.await;
360 Self::update_diff(
361 this,
362 buffer,
363 buffer_snapshot,
364 new_base_text,
365 new_diff_base,
366 cx,
367 )
368 .await
369 }
370
371 async fn keep_committed_edits(
372 this: &WeakEntity<ActionLog>,
373 buffer: &Entity<Buffer>,
374 git_diff: &Entity<BufferDiff>,
375 cx: &mut AsyncApp,
376 ) -> Result<()> {
377 let buffer_snapshot = this.read_with(cx, |this, _cx| {
378 let tracked_buffer = this
379 .tracked_buffers
380 .get(buffer)
381 .context("buffer not tracked")?;
382 anyhow::Ok(tracked_buffer.snapshot.clone())
383 })??;
384 let (new_base_text, new_diff_base) = this
385 .read_with(cx, |this, cx| {
386 let tracked_buffer = this
387 .tracked_buffers
388 .get(buffer)
389 .context("buffer not tracked")?;
390 let old_unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
391 let agent_diff_base = tracked_buffer.diff_base.clone();
392 let git_diff_base = git_diff.read(cx).base_text().as_rope().clone();
393 let buffer_text = tracked_buffer.snapshot.as_rope().clone();
394 anyhow::Ok(cx.background_spawn(async move {
395 let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
396 let committed_edits = language::line_diff(
397 &agent_diff_base.to_string(),
398 &git_diff_base.to_string(),
399 )
400 .into_iter()
401 .map(|(old, new)| Edit { old, new });
402
403 let mut new_agent_diff_base = agent_diff_base.clone();
404 let mut row_delta = 0i32;
405 for committed in committed_edits {
406 while let Some(unreviewed) = old_unreviewed_edits.peek() {
407 // If the committed edit matches the unreviewed
408 // edit, assume the user wants to keep it.
409 if committed.old == unreviewed.old {
410 let unreviewed_new =
411 buffer_text.slice_rows(unreviewed.new.clone()).to_string();
412 let committed_new =
413 git_diff_base.slice_rows(committed.new.clone()).to_string();
414 if unreviewed_new == committed_new {
415 let old_byte_start =
416 new_agent_diff_base.point_to_offset(Point::new(
417 (unreviewed.old.start as i32 + row_delta) as u32,
418 0,
419 ));
420 let old_byte_end =
421 new_agent_diff_base.point_to_offset(cmp::min(
422 Point::new(
423 (unreviewed.old.end as i32 + row_delta) as u32,
424 0,
425 ),
426 new_agent_diff_base.max_point(),
427 ));
428 new_agent_diff_base
429 .replace(old_byte_start..old_byte_end, &unreviewed_new);
430 row_delta +=
431 unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
432 }
433 } else if unreviewed.old.start >= committed.old.end {
434 break;
435 }
436
437 old_unreviewed_edits.next().unwrap();
438 }
439 }
440
441 (
442 Arc::new(new_agent_diff_base.to_string()),
443 new_agent_diff_base,
444 )
445 }))
446 })??
447 .await;
448
449 Self::update_diff(
450 this,
451 buffer,
452 buffer_snapshot,
453 new_base_text,
454 new_diff_base,
455 cx,
456 )
457 .await
458 }
459
460 async fn update_diff(
461 this: &WeakEntity<ActionLog>,
462 buffer: &Entity<Buffer>,
463 buffer_snapshot: text::BufferSnapshot,
464 new_base_text: Arc<String>,
465 new_diff_base: Rope,
466 cx: &mut AsyncApp,
467 ) -> Result<()> {
468 let (diff, language, language_registry) = this.read_with(cx, |this, cx| {
469 let tracked_buffer = this
470 .tracked_buffers
471 .get(buffer)
472 .context("buffer not tracked")?;
473 anyhow::Ok((
474 tracked_buffer.diff.clone(),
475 buffer.read(cx).language().cloned(),
476 buffer.read(cx).language_registry().clone(),
477 ))
478 })??;
479 let diff_snapshot = BufferDiff::update_diff(
480 diff.clone(),
481 buffer_snapshot.clone(),
482 Some(new_base_text),
483 true,
484 false,
485 language,
486 language_registry,
487 cx,
488 )
489 .await;
490 let mut unreviewed_edits = Patch::default();
491 if let Ok(diff_snapshot) = diff_snapshot {
492 unreviewed_edits = cx
493 .background_spawn({
494 let diff_snapshot = diff_snapshot.clone();
495 let buffer_snapshot = buffer_snapshot.clone();
496 let new_diff_base = new_diff_base.clone();
497 async move {
498 let mut unreviewed_edits = Patch::default();
499 for hunk in diff_snapshot
500 .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer_snapshot)
501 {
502 let old_range = new_diff_base
503 .offset_to_point(hunk.diff_base_byte_range.start)
504 ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
505 let new_range = hunk.range.start..hunk.range.end;
506 unreviewed_edits.push(point_to_row_edit(
507 Edit {
508 old: old_range,
509 new: new_range,
510 },
511 &new_diff_base,
512 &buffer_snapshot.as_rope(),
513 ));
514 }
515 unreviewed_edits
516 }
517 })
518 .await;
519
520 diff.update(cx, |diff, cx| {
521 diff.set_snapshot(diff_snapshot, &buffer_snapshot, cx);
522 })?;
523 }
524 this.update(cx, |this, cx| {
525 let tracked_buffer = this
526 .tracked_buffers
527 .get_mut(buffer)
528 .context("buffer not tracked")?;
529 tracked_buffer.diff_base = new_diff_base;
530 tracked_buffer.snapshot = buffer_snapshot;
531 tracked_buffer.unreviewed_edits = unreviewed_edits;
532 cx.notify();
533 anyhow::Ok(())
534 })?
535 }
536
537 /// Track a buffer as read by agent, so we can notify the model about user edits.
538 pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
539 self.track_buffer_internal(buffer, false, cx);
540 }
541
542 /// Mark a buffer as created by agent, so we can refresh it in the context
543 pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
544 self.edited_since_project_diagnostics_check = true;
545 self.track_buffer_internal(buffer.clone(), true, cx);
546 }
547
548 /// Mark a buffer as edited by agent, so we can refresh it in the context
549 pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
550 self.edited_since_project_diagnostics_check = true;
551
552 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
553 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
554 tracked_buffer.status = TrackedBufferStatus::Modified;
555 }
556 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
557 }
558
559 pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
560 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
561 match tracked_buffer.status {
562 TrackedBufferStatus::Created { .. } => {
563 self.tracked_buffers.remove(&buffer);
564 cx.notify();
565 }
566 TrackedBufferStatus::Modified => {
567 buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
568 tracked_buffer.status = TrackedBufferStatus::Deleted;
569 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
570 }
571 TrackedBufferStatus::Deleted => {}
572 }
573 cx.notify();
574 }
575
576 pub fn keep_edits_in_range(
577 &mut self,
578 buffer: Entity<Buffer>,
579 buffer_range: Range<impl language::ToPoint>,
580 cx: &mut Context<Self>,
581 ) {
582 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
583 return;
584 };
585
586 match tracked_buffer.status {
587 TrackedBufferStatus::Deleted => {
588 self.tracked_buffers.remove(&buffer);
589 cx.notify();
590 }
591 _ => {
592 let buffer = buffer.read(cx);
593 let buffer_range =
594 buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
595 let mut delta = 0i32;
596
597 tracked_buffer.unreviewed_edits.retain_mut(|edit| {
598 edit.old.start = (edit.old.start as i32 + delta) as u32;
599 edit.old.end = (edit.old.end as i32 + delta) as u32;
600
601 if buffer_range.end.row < edit.new.start
602 || buffer_range.start.row > edit.new.end
603 {
604 true
605 } else {
606 let old_range = tracked_buffer
607 .diff_base
608 .point_to_offset(Point::new(edit.old.start, 0))
609 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
610 Point::new(edit.old.end, 0),
611 tracked_buffer.diff_base.max_point(),
612 ));
613 let new_range = tracked_buffer
614 .snapshot
615 .point_to_offset(Point::new(edit.new.start, 0))
616 ..tracked_buffer.snapshot.point_to_offset(cmp::min(
617 Point::new(edit.new.end, 0),
618 tracked_buffer.snapshot.max_point(),
619 ));
620 tracked_buffer.diff_base.replace(
621 old_range,
622 &tracked_buffer
623 .snapshot
624 .text_for_range(new_range)
625 .collect::<String>(),
626 );
627 delta += edit.new_len() as i32 - edit.old_len() as i32;
628 false
629 }
630 });
631 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
632 }
633 }
634 }
635
636 pub fn reject_edits_in_ranges(
637 &mut self,
638 buffer: Entity<Buffer>,
639 buffer_ranges: Vec<Range<impl language::ToPoint>>,
640 cx: &mut Context<Self>,
641 ) -> Task<Result<()>> {
642 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
643 return Task::ready(Ok(()));
644 };
645
646 match &tracked_buffer.status {
647 TrackedBufferStatus::Created {
648 existing_file_content,
649 } => {
650 let task = if let Some(existing_file_content) = existing_file_content {
651 buffer.update(cx, |buffer, cx| {
652 buffer.start_transaction();
653 buffer.set_text("", cx);
654 for chunk in existing_file_content.chunks() {
655 buffer.append(chunk, cx);
656 }
657 buffer.end_transaction(cx);
658 });
659 self.project
660 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
661 } else {
662 // For a file created by AI with no pre-existing content,
663 // only delete the file if we're certain it contains only AI content
664 // with no edits from the user.
665
666 let initial_version = tracked_buffer.version.clone();
667 let current_version = buffer.read(cx).version();
668
669 let current_content = buffer.read(cx).text();
670 let tracked_content = tracked_buffer.snapshot.text();
671
672 let is_ai_only_content =
673 initial_version == current_version && current_content == tracked_content;
674
675 if is_ai_only_content {
676 buffer
677 .read(cx)
678 .entry_id(cx)
679 .and_then(|entry_id| {
680 self.project.update(cx, |project, cx| {
681 project.delete_entry(entry_id, false, cx)
682 })
683 })
684 .unwrap_or(Task::ready(Ok(())))
685 } else {
686 // Not sure how to disentangle edits made by the user
687 // from edits made by the AI at this point.
688 // For now, preserve both to avoid data loss.
689 //
690 // TODO: Better solution (disable "Reject" after user makes some
691 // edit or find a way to differentiate between AI and user edits)
692 Task::ready(Ok(()))
693 }
694 };
695
696 self.tracked_buffers.remove(&buffer);
697 cx.notify();
698 task
699 }
700 TrackedBufferStatus::Deleted => {
701 buffer.update(cx, |buffer, cx| {
702 buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
703 });
704 let save = self
705 .project
706 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
707
708 // Clear all tracked edits for this buffer and start over as if we just read it.
709 self.tracked_buffers.remove(&buffer);
710 self.buffer_read(buffer.clone(), cx);
711 cx.notify();
712 save
713 }
714 TrackedBufferStatus::Modified => {
715 buffer.update(cx, |buffer, cx| {
716 let mut buffer_row_ranges = buffer_ranges
717 .into_iter()
718 .map(|range| {
719 range.start.to_point(buffer).row..range.end.to_point(buffer).row
720 })
721 .peekable();
722
723 let mut edits_to_revert = Vec::new();
724 for edit in tracked_buffer.unreviewed_edits.edits() {
725 let new_range = tracked_buffer
726 .snapshot
727 .anchor_before(Point::new(edit.new.start, 0))
728 ..tracked_buffer.snapshot.anchor_after(cmp::min(
729 Point::new(edit.new.end, 0),
730 tracked_buffer.snapshot.max_point(),
731 ));
732 let new_row_range = new_range.start.to_point(buffer).row
733 ..new_range.end.to_point(buffer).row;
734
735 let mut revert = false;
736 while let Some(buffer_row_range) = buffer_row_ranges.peek() {
737 if buffer_row_range.end < new_row_range.start {
738 buffer_row_ranges.next();
739 } else if buffer_row_range.start > new_row_range.end {
740 break;
741 } else {
742 revert = true;
743 break;
744 }
745 }
746
747 if revert {
748 let old_range = tracked_buffer
749 .diff_base
750 .point_to_offset(Point::new(edit.old.start, 0))
751 ..tracked_buffer.diff_base.point_to_offset(cmp::min(
752 Point::new(edit.old.end, 0),
753 tracked_buffer.diff_base.max_point(),
754 ));
755 let old_text = tracked_buffer
756 .diff_base
757 .chunks_in_range(old_range)
758 .collect::<String>();
759 edits_to_revert.push((new_range, old_text));
760 }
761 }
762
763 buffer.edit(edits_to_revert, None, cx);
764 });
765 self.project
766 .update(cx, |project, cx| project.save_buffer(buffer, cx))
767 }
768 }
769 }
770
771 pub fn keep_all_edits(&mut self, cx: &mut Context<Self>) {
772 self.tracked_buffers
773 .retain(|_buffer, tracked_buffer| match tracked_buffer.status {
774 TrackedBufferStatus::Deleted => false,
775 _ => {
776 tracked_buffer.unreviewed_edits.clear();
777 tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
778 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
779 true
780 }
781 });
782 cx.notify();
783 }
784
785 pub fn reject_all_edits(&mut self, cx: &mut Context<Self>) -> Task<()> {
786 let futures = self.changed_buffers(cx).into_keys().map(|buffer| {
787 let reject = self.reject_edits_in_ranges(buffer, vec![Anchor::MIN..Anchor::MAX], cx);
788
789 async move {
790 reject.await.log_err();
791 }
792 });
793
794 let task = futures::future::join_all(futures);
795
796 cx.spawn(async move |_, _| {
797 task.await;
798 })
799 }
800
801 /// Returns the set of buffers that contain edits that haven't been reviewed by the user.
802 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
803 self.tracked_buffers
804 .iter()
805 .filter(|(_, tracked)| tracked.has_edits(cx))
806 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
807 .collect()
808 }
809
810 /// Iterate over buffers changed since last read or edited by the model
811 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
812 self.tracked_buffers
813 .iter()
814 .filter(|(buffer, tracked)| {
815 let buffer = buffer.read(cx);
816
817 tracked.version != buffer.version
818 && buffer
819 .file()
820 .map_or(false, |file| file.disk_state() != DiskState::Deleted)
821 })
822 .map(|(buffer, _)| buffer)
823 }
824}
825
826fn apply_non_conflicting_edits(
827 patch: &Patch<u32>,
828 edits: Vec<Edit<u32>>,
829 old_text: &mut Rope,
830 new_text: &Rope,
831) {
832 let mut old_edits = patch.edits().iter().cloned().peekable();
833 let mut new_edits = edits.into_iter().peekable();
834 let mut applied_delta = 0i32;
835 let mut rebased_delta = 0i32;
836
837 while let Some(mut new_edit) = new_edits.next() {
838 let mut conflict = false;
839
840 // Push all the old edits that are before this new edit or that intersect with it.
841 while let Some(old_edit) = old_edits.peek() {
842 if new_edit.old.end < old_edit.new.start
843 || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
844 {
845 break;
846 } else if new_edit.old.start > old_edit.new.end
847 || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
848 {
849 let old_edit = old_edits.next().unwrap();
850 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
851 } else {
852 conflict = true;
853 if new_edits
854 .peek()
855 .map_or(false, |next_edit| next_edit.old.overlaps(&old_edit.new))
856 {
857 new_edit = new_edits.next().unwrap();
858 } else {
859 let old_edit = old_edits.next().unwrap();
860 rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
861 }
862 }
863 }
864
865 if !conflict {
866 // This edit doesn't intersect with any old edit, so we can apply it to the old text.
867 new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
868 new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
869 let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
870 ..old_text.point_to_offset(cmp::min(
871 Point::new(new_edit.old.end, 0),
872 old_text.max_point(),
873 ));
874 let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
875 ..new_text.point_to_offset(cmp::min(
876 Point::new(new_edit.new.end, 0),
877 new_text.max_point(),
878 ));
879
880 old_text.replace(
881 old_bytes,
882 &new_text.chunks_in_range(new_bytes).collect::<String>(),
883 );
884 applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
885 }
886 }
887}
888
889fn diff_snapshots(
890 old_snapshot: &text::BufferSnapshot,
891 new_snapshot: &text::BufferSnapshot,
892) -> Vec<Edit<u32>> {
893 let mut edits = new_snapshot
894 .edits_since::<Point>(&old_snapshot.version)
895 .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
896 .peekable();
897 let mut row_edits = Vec::new();
898 while let Some(mut edit) = edits.next() {
899 while let Some(next_edit) = edits.peek() {
900 if edit.old.end >= next_edit.old.start {
901 edit.old.end = next_edit.old.end;
902 edit.new.end = next_edit.new.end;
903 edits.next();
904 } else {
905 break;
906 }
907 }
908 row_edits.push(edit);
909 }
910 row_edits
911}
912
913fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
914 if edit.old.start.column == old_text.line_len(edit.old.start.row)
915 && new_text
916 .chars_at(new_text.point_to_offset(edit.new.start))
917 .next()
918 == Some('\n')
919 && edit.old.start != old_text.max_point()
920 {
921 Edit {
922 old: edit.old.start.row + 1..edit.old.end.row + 1,
923 new: edit.new.start.row + 1..edit.new.end.row + 1,
924 }
925 } else if edit.old.start.column == 0 && edit.old.end.column == 0 && edit.new.end.column == 0 {
926 Edit {
927 old: edit.old.start.row..edit.old.end.row,
928 new: edit.new.start.row..edit.new.end.row,
929 }
930 } else {
931 Edit {
932 old: edit.old.start.row..edit.old.end.row + 1,
933 new: edit.new.start.row..edit.new.end.row + 1,
934 }
935 }
936}
937
938#[derive(Copy, Clone, Debug)]
939enum ChangeAuthor {
940 User,
941 Agent,
942}
943
944enum TrackedBufferStatus {
945 Created { existing_file_content: Option<Rope> },
946 Modified,
947 Deleted,
948}
949
950struct TrackedBuffer {
951 buffer: Entity<Buffer>,
952 diff_base: Rope,
953 last_seen_base: Rope,
954 unreviewed_edits: Patch<u32>,
955 status: TrackedBufferStatus,
956 version: clock::Global,
957 diff: Entity<BufferDiff>,
958 snapshot: text::BufferSnapshot,
959 diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
960 has_unnotified_user_edits: bool,
961 _open_lsp_handle: OpenLspBufferHandle,
962 _maintain_diff: Task<()>,
963 _subscription: Subscription,
964}
965
966impl TrackedBuffer {
967 fn has_edits(&self, cx: &App) -> bool {
968 self.diff
969 .read(cx)
970 .hunks(&self.buffer.read(cx), cx)
971 .next()
972 .is_some()
973 }
974
975 fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
976 self.diff_update
977 .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
978 .ok();
979 }
980}
981
982pub struct ChangedBuffer {
983 pub diff: Entity<BufferDiff>,
984}
985
986#[cfg(test)]
987mod tests {
988 use super::*;
989 use buffer_diff::DiffHunkStatusKind;
990 use gpui::TestAppContext;
991 use indoc::indoc;
992 use language::Point;
993 use project::{FakeFs, Fs, Project, RemoveOptions};
994 use rand::prelude::*;
995 use serde_json::json;
996 use settings::SettingsStore;
997 use std::env;
998 use util::{RandomCharIter, path};
999
1000 #[ctor::ctor]
1001 fn init_logger() {
1002 zlog::init_test();
1003 }
1004
1005 fn init_test(cx: &mut TestAppContext) {
1006 cx.update(|cx| {
1007 let settings_store = SettingsStore::test(cx);
1008 cx.set_global(settings_store);
1009 language::init(cx);
1010 Project::init_settings(cx);
1011 });
1012 }
1013
1014 #[gpui::test(iterations = 10)]
1015 async fn test_keep_edits(cx: &mut TestAppContext) {
1016 init_test(cx);
1017
1018 let fs = FakeFs::new(cx.executor());
1019 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1020 .await;
1021 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1022 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1023 let file_path = project
1024 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1025 .unwrap();
1026 let buffer = project
1027 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1028 .await
1029 .unwrap();
1030
1031 cx.update(|cx| {
1032 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1033 buffer.update(cx, |buffer, cx| {
1034 buffer
1035 .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
1036 .unwrap()
1037 });
1038 buffer.update(cx, |buffer, cx| {
1039 buffer
1040 .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
1041 .unwrap()
1042 });
1043 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1044 });
1045 cx.run_until_parked();
1046 assert_eq!(
1047 buffer.read_with(cx, |buffer, _| buffer.text()),
1048 "abc\ndEf\nghi\njkl\nmnO"
1049 );
1050 assert_eq!(
1051 unreviewed_hunks(&action_log, cx),
1052 vec![(
1053 buffer.clone(),
1054 vec![
1055 HunkStatus {
1056 range: Point::new(1, 0)..Point::new(2, 0),
1057 diff_status: DiffHunkStatusKind::Modified,
1058 old_text: "def\n".into(),
1059 },
1060 HunkStatus {
1061 range: Point::new(4, 0)..Point::new(4, 3),
1062 diff_status: DiffHunkStatusKind::Modified,
1063 old_text: "mno".into(),
1064 }
1065 ],
1066 )]
1067 );
1068
1069 action_log.update(cx, |log, cx| {
1070 log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), cx)
1071 });
1072 cx.run_until_parked();
1073 assert_eq!(
1074 unreviewed_hunks(&action_log, cx),
1075 vec![(
1076 buffer.clone(),
1077 vec![HunkStatus {
1078 range: Point::new(1, 0)..Point::new(2, 0),
1079 diff_status: DiffHunkStatusKind::Modified,
1080 old_text: "def\n".into(),
1081 }],
1082 )]
1083 );
1084
1085 action_log.update(cx, |log, cx| {
1086 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), cx)
1087 });
1088 cx.run_until_parked();
1089 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1090 }
1091
1092 #[gpui::test(iterations = 10)]
1093 async fn test_deletions(cx: &mut TestAppContext) {
1094 init_test(cx);
1095
1096 let fs = FakeFs::new(cx.executor());
1097 fs.insert_tree(
1098 path!("/dir"),
1099 json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
1100 )
1101 .await;
1102 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1103 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1104 let file_path = project
1105 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1106 .unwrap();
1107 let buffer = project
1108 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1109 .await
1110 .unwrap();
1111
1112 cx.update(|cx| {
1113 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1114 buffer.update(cx, |buffer, cx| {
1115 buffer
1116 .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
1117 .unwrap();
1118 buffer.finalize_last_transaction();
1119 });
1120 buffer.update(cx, |buffer, cx| {
1121 buffer
1122 .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
1123 .unwrap();
1124 buffer.finalize_last_transaction();
1125 });
1126 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1127 });
1128 cx.run_until_parked();
1129 assert_eq!(
1130 buffer.read_with(cx, |buffer, _| buffer.text()),
1131 "abc\nghi\njkl\npqr"
1132 );
1133 assert_eq!(
1134 unreviewed_hunks(&action_log, cx),
1135 vec![(
1136 buffer.clone(),
1137 vec![
1138 HunkStatus {
1139 range: Point::new(1, 0)..Point::new(1, 0),
1140 diff_status: DiffHunkStatusKind::Deleted,
1141 old_text: "def\n".into(),
1142 },
1143 HunkStatus {
1144 range: Point::new(3, 0)..Point::new(3, 0),
1145 diff_status: DiffHunkStatusKind::Deleted,
1146 old_text: "mno\n".into(),
1147 }
1148 ],
1149 )]
1150 );
1151
1152 buffer.update(cx, |buffer, cx| buffer.undo(cx));
1153 cx.run_until_parked();
1154 assert_eq!(
1155 buffer.read_with(cx, |buffer, _| buffer.text()),
1156 "abc\nghi\njkl\nmno\npqr"
1157 );
1158 assert_eq!(
1159 unreviewed_hunks(&action_log, cx),
1160 vec![(
1161 buffer.clone(),
1162 vec![HunkStatus {
1163 range: Point::new(1, 0)..Point::new(1, 0),
1164 diff_status: DiffHunkStatusKind::Deleted,
1165 old_text: "def\n".into(),
1166 }],
1167 )]
1168 );
1169
1170 action_log.update(cx, |log, cx| {
1171 log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), cx)
1172 });
1173 cx.run_until_parked();
1174 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1175 }
1176
1177 #[gpui::test(iterations = 10)]
1178 async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
1179 init_test(cx);
1180
1181 let fs = FakeFs::new(cx.executor());
1182 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1183 .await;
1184 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1185 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1186 let file_path = project
1187 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1188 .unwrap();
1189 let buffer = project
1190 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1191 .await
1192 .unwrap();
1193
1194 cx.update(|cx| {
1195 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1196 buffer.update(cx, |buffer, cx| {
1197 buffer
1198 .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
1199 .unwrap()
1200 });
1201 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1202 });
1203 cx.run_until_parked();
1204 assert_eq!(
1205 buffer.read_with(cx, |buffer, _| buffer.text()),
1206 "abc\ndeF\nGHI\njkl\nmno"
1207 );
1208 assert_eq!(
1209 unreviewed_hunks(&action_log, cx),
1210 vec![(
1211 buffer.clone(),
1212 vec![HunkStatus {
1213 range: Point::new(1, 0)..Point::new(3, 0),
1214 diff_status: DiffHunkStatusKind::Modified,
1215 old_text: "def\nghi\n".into(),
1216 }],
1217 )]
1218 );
1219
1220 buffer.update(cx, |buffer, cx| {
1221 buffer.edit(
1222 [
1223 (Point::new(0, 2)..Point::new(0, 2), "X"),
1224 (Point::new(3, 0)..Point::new(3, 0), "Y"),
1225 ],
1226 None,
1227 cx,
1228 )
1229 });
1230 cx.run_until_parked();
1231 assert_eq!(
1232 buffer.read_with(cx, |buffer, _| buffer.text()),
1233 "abXc\ndeF\nGHI\nYjkl\nmno"
1234 );
1235 assert_eq!(
1236 unreviewed_hunks(&action_log, cx),
1237 vec![(
1238 buffer.clone(),
1239 vec![HunkStatus {
1240 range: Point::new(1, 0)..Point::new(3, 0),
1241 diff_status: DiffHunkStatusKind::Modified,
1242 old_text: "def\nghi\n".into(),
1243 }],
1244 )]
1245 );
1246
1247 buffer.update(cx, |buffer, cx| {
1248 buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
1249 });
1250 cx.run_until_parked();
1251 assert_eq!(
1252 buffer.read_with(cx, |buffer, _| buffer.text()),
1253 "abXc\ndZeF\nGHI\nYjkl\nmno"
1254 );
1255 assert_eq!(
1256 unreviewed_hunks(&action_log, cx),
1257 vec![(
1258 buffer.clone(),
1259 vec![HunkStatus {
1260 range: Point::new(1, 0)..Point::new(3, 0),
1261 diff_status: DiffHunkStatusKind::Modified,
1262 old_text: "def\nghi\n".into(),
1263 }],
1264 )]
1265 );
1266
1267 action_log.update(cx, |log, cx| {
1268 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
1269 });
1270 cx.run_until_parked();
1271 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1272 }
1273
1274 #[gpui::test(iterations = 10)]
1275 async fn test_user_edits_notifications(cx: &mut TestAppContext) {
1276 init_test(cx);
1277
1278 let fs = FakeFs::new(cx.executor());
1279 fs.insert_tree(
1280 path!("/dir"),
1281 json!({"file": indoc! {"
1282 abc
1283 def
1284 ghi
1285 jkl
1286 mno"}}),
1287 )
1288 .await;
1289 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1290 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1291 let file_path = project
1292 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1293 .unwrap();
1294 let buffer = project
1295 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1296 .await
1297 .unwrap();
1298
1299 // Agent edits
1300 cx.update(|cx| {
1301 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1302 buffer.update(cx, |buffer, cx| {
1303 buffer
1304 .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
1305 .unwrap()
1306 });
1307 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1308 });
1309 cx.run_until_parked();
1310 assert_eq!(
1311 buffer.read_with(cx, |buffer, _| buffer.text()),
1312 indoc! {"
1313 abc
1314 deF
1315 GHI
1316 jkl
1317 mno"}
1318 );
1319 assert_eq!(
1320 unreviewed_hunks(&action_log, cx),
1321 vec![(
1322 buffer.clone(),
1323 vec![HunkStatus {
1324 range: Point::new(1, 0)..Point::new(3, 0),
1325 diff_status: DiffHunkStatusKind::Modified,
1326 old_text: "def\nghi\n".into(),
1327 }],
1328 )]
1329 );
1330
1331 // User edits
1332 buffer.update(cx, |buffer, cx| {
1333 buffer.edit(
1334 [
1335 (Point::new(0, 2)..Point::new(0, 2), "X"),
1336 (Point::new(3, 0)..Point::new(3, 0), "Y"),
1337 ],
1338 None,
1339 cx,
1340 )
1341 });
1342 cx.run_until_parked();
1343 assert_eq!(
1344 buffer.read_with(cx, |buffer, _| buffer.text()),
1345 indoc! {"
1346 abXc
1347 deF
1348 GHI
1349 Yjkl
1350 mno"}
1351 );
1352
1353 // User edits should be stored separately from agent's
1354 let user_edits = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));
1355 assert_eq!(
1356 user_edits.expect("should have some user edits"),
1357 indoc! {"
1358 --- a/dir/file
1359 +++ b/dir/file
1360 @@ -1,5 +1,5 @@
1361 -abc
1362 +abXc
1363 def
1364 ghi
1365 -jkl
1366 +Yjkl
1367 mno
1368 "}
1369 );
1370
1371 action_log.update(cx, |log, cx| {
1372 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
1373 });
1374 cx.run_until_parked();
1375 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1376 }
1377
1378 #[gpui::test(iterations = 10)]
1379 async fn test_creating_files(cx: &mut TestAppContext) {
1380 init_test(cx);
1381
1382 let fs = FakeFs::new(cx.executor());
1383 fs.insert_tree(path!("/dir"), json!({})).await;
1384 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1385 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1386 let file_path = project
1387 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1388 .unwrap();
1389
1390 let buffer = project
1391 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1392 .await
1393 .unwrap();
1394 cx.update(|cx| {
1395 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1396 buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
1397 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1398 });
1399 project
1400 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1401 .await
1402 .unwrap();
1403 cx.run_until_parked();
1404 assert_eq!(
1405 unreviewed_hunks(&action_log, cx),
1406 vec![(
1407 buffer.clone(),
1408 vec![HunkStatus {
1409 range: Point::new(0, 0)..Point::new(0, 5),
1410 diff_status: DiffHunkStatusKind::Added,
1411 old_text: "".into(),
1412 }],
1413 )]
1414 );
1415
1416 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
1417 cx.run_until_parked();
1418 assert_eq!(
1419 unreviewed_hunks(&action_log, cx),
1420 vec![(
1421 buffer.clone(),
1422 vec![HunkStatus {
1423 range: Point::new(0, 0)..Point::new(0, 6),
1424 diff_status: DiffHunkStatusKind::Added,
1425 old_text: "".into(),
1426 }],
1427 )]
1428 );
1429
1430 action_log.update(cx, |log, cx| {
1431 log.keep_edits_in_range(buffer.clone(), 0..5, cx)
1432 });
1433 cx.run_until_parked();
1434 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1435 }
1436
1437 #[gpui::test(iterations = 10)]
1438 async fn test_overwriting_files(cx: &mut TestAppContext) {
1439 init_test(cx);
1440
1441 let fs = FakeFs::new(cx.executor());
1442 fs.insert_tree(
1443 path!("/dir"),
1444 json!({
1445 "file1": "Lorem ipsum dolor"
1446 }),
1447 )
1448 .await;
1449 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1450 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1451 let file_path = project
1452 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1453 .unwrap();
1454
1455 let buffer = project
1456 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1457 .await
1458 .unwrap();
1459 cx.update(|cx| {
1460 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1461 buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
1462 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1463 });
1464 project
1465 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1466 .await
1467 .unwrap();
1468 cx.run_until_parked();
1469 assert_eq!(
1470 unreviewed_hunks(&action_log, cx),
1471 vec![(
1472 buffer.clone(),
1473 vec![HunkStatus {
1474 range: Point::new(0, 0)..Point::new(0, 19),
1475 diff_status: DiffHunkStatusKind::Added,
1476 old_text: "".into(),
1477 }],
1478 )]
1479 );
1480
1481 action_log
1482 .update(cx, |log, cx| {
1483 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
1484 })
1485 .await
1486 .unwrap();
1487 cx.run_until_parked();
1488 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1489 assert_eq!(
1490 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1491 "Lorem ipsum dolor"
1492 );
1493 }
1494
1495 #[gpui::test(iterations = 10)]
1496 async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
1497 init_test(cx);
1498
1499 let fs = FakeFs::new(cx.executor());
1500 fs.insert_tree(
1501 path!("/dir"),
1502 json!({
1503 "file1": "Lorem ipsum dolor"
1504 }),
1505 )
1506 .await;
1507 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1508 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1509 let file_path = project
1510 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1511 .unwrap();
1512
1513 let buffer = project
1514 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1515 .await
1516 .unwrap();
1517 cx.update(|cx| {
1518 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1519 buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
1520 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1521 });
1522 project
1523 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1524 .await
1525 .unwrap();
1526 cx.run_until_parked();
1527 assert_eq!(
1528 unreviewed_hunks(&action_log, cx),
1529 vec![(
1530 buffer.clone(),
1531 vec![HunkStatus {
1532 range: Point::new(0, 0)..Point::new(0, 37),
1533 diff_status: DiffHunkStatusKind::Modified,
1534 old_text: "Lorem ipsum dolor".into(),
1535 }],
1536 )]
1537 );
1538
1539 cx.update(|cx| {
1540 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1541 buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
1542 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1543 });
1544 project
1545 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1546 .await
1547 .unwrap();
1548 cx.run_until_parked();
1549 assert_eq!(
1550 unreviewed_hunks(&action_log, cx),
1551 vec![(
1552 buffer.clone(),
1553 vec![HunkStatus {
1554 range: Point::new(0, 0)..Point::new(0, 9),
1555 diff_status: DiffHunkStatusKind::Added,
1556 old_text: "".into(),
1557 }],
1558 )]
1559 );
1560
1561 action_log
1562 .update(cx, |log, cx| {
1563 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
1564 })
1565 .await
1566 .unwrap();
1567 cx.run_until_parked();
1568 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1569 assert_eq!(
1570 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1571 "Lorem ipsum dolor"
1572 );
1573 }
1574
1575 #[gpui::test(iterations = 10)]
1576 async fn test_deleting_files(cx: &mut TestAppContext) {
1577 init_test(cx);
1578
1579 let fs = FakeFs::new(cx.executor());
1580 fs.insert_tree(
1581 path!("/dir"),
1582 json!({"file1": "lorem\n", "file2": "ipsum\n"}),
1583 )
1584 .await;
1585
1586 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1587 let file1_path = project
1588 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1589 .unwrap();
1590 let file2_path = project
1591 .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
1592 .unwrap();
1593
1594 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1595 let buffer1 = project
1596 .update(cx, |project, cx| {
1597 project.open_buffer(file1_path.clone(), cx)
1598 })
1599 .await
1600 .unwrap();
1601 let buffer2 = project
1602 .update(cx, |project, cx| {
1603 project.open_buffer(file2_path.clone(), cx)
1604 })
1605 .await
1606 .unwrap();
1607
1608 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
1609 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
1610 project
1611 .update(cx, |project, cx| {
1612 project.delete_file(file1_path.clone(), false, cx)
1613 })
1614 .unwrap()
1615 .await
1616 .unwrap();
1617 project
1618 .update(cx, |project, cx| {
1619 project.delete_file(file2_path.clone(), false, cx)
1620 })
1621 .unwrap()
1622 .await
1623 .unwrap();
1624 cx.run_until_parked();
1625 assert_eq!(
1626 unreviewed_hunks(&action_log, cx),
1627 vec![
1628 (
1629 buffer1.clone(),
1630 vec![HunkStatus {
1631 range: Point::new(0, 0)..Point::new(0, 0),
1632 diff_status: DiffHunkStatusKind::Deleted,
1633 old_text: "lorem\n".into(),
1634 }]
1635 ),
1636 (
1637 buffer2.clone(),
1638 vec![HunkStatus {
1639 range: Point::new(0, 0)..Point::new(0, 0),
1640 diff_status: DiffHunkStatusKind::Deleted,
1641 old_text: "ipsum\n".into(),
1642 }],
1643 )
1644 ]
1645 );
1646
1647 // Simulate file1 being recreated externally.
1648 fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
1649 .await;
1650
1651 // Simulate file2 being recreated by a tool.
1652 let buffer2 = project
1653 .update(cx, |project, cx| project.open_buffer(file2_path, cx))
1654 .await
1655 .unwrap();
1656 action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
1657 buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
1658 action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
1659 project
1660 .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
1661 .await
1662 .unwrap();
1663
1664 cx.run_until_parked();
1665 assert_eq!(
1666 unreviewed_hunks(&action_log, cx),
1667 vec![(
1668 buffer2.clone(),
1669 vec![HunkStatus {
1670 range: Point::new(0, 0)..Point::new(0, 5),
1671 diff_status: DiffHunkStatusKind::Added,
1672 old_text: "".into(),
1673 }],
1674 )]
1675 );
1676
1677 // Simulate file2 being deleted externally.
1678 fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
1679 .await
1680 .unwrap();
1681 cx.run_until_parked();
1682 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1683 }
1684
1685 #[gpui::test(iterations = 10)]
1686 async fn test_reject_edits(cx: &mut TestAppContext) {
1687 init_test(cx);
1688
1689 let fs = FakeFs::new(cx.executor());
1690 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1691 .await;
1692 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1693 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1694 let file_path = project
1695 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1696 .unwrap();
1697 let buffer = project
1698 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1699 .await
1700 .unwrap();
1701
1702 cx.update(|cx| {
1703 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1704 buffer.update(cx, |buffer, cx| {
1705 buffer
1706 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1707 .unwrap()
1708 });
1709 buffer.update(cx, |buffer, cx| {
1710 buffer
1711 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1712 .unwrap()
1713 });
1714 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1715 });
1716 cx.run_until_parked();
1717 assert_eq!(
1718 buffer.read_with(cx, |buffer, _| buffer.text()),
1719 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1720 );
1721 assert_eq!(
1722 unreviewed_hunks(&action_log, cx),
1723 vec![(
1724 buffer.clone(),
1725 vec![
1726 HunkStatus {
1727 range: Point::new(1, 0)..Point::new(3, 0),
1728 diff_status: DiffHunkStatusKind::Modified,
1729 old_text: "def\n".into(),
1730 },
1731 HunkStatus {
1732 range: Point::new(5, 0)..Point::new(5, 3),
1733 diff_status: DiffHunkStatusKind::Modified,
1734 old_text: "mno".into(),
1735 }
1736 ],
1737 )]
1738 );
1739
1740 // If the rejected range doesn't overlap with any hunk, we ignore it.
1741 action_log
1742 .update(cx, |log, cx| {
1743 log.reject_edits_in_ranges(
1744 buffer.clone(),
1745 vec![Point::new(4, 0)..Point::new(4, 0)],
1746 cx,
1747 )
1748 })
1749 .await
1750 .unwrap();
1751 cx.run_until_parked();
1752 assert_eq!(
1753 buffer.read_with(cx, |buffer, _| buffer.text()),
1754 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1755 );
1756 assert_eq!(
1757 unreviewed_hunks(&action_log, cx),
1758 vec![(
1759 buffer.clone(),
1760 vec![
1761 HunkStatus {
1762 range: Point::new(1, 0)..Point::new(3, 0),
1763 diff_status: DiffHunkStatusKind::Modified,
1764 old_text: "def\n".into(),
1765 },
1766 HunkStatus {
1767 range: Point::new(5, 0)..Point::new(5, 3),
1768 diff_status: DiffHunkStatusKind::Modified,
1769 old_text: "mno".into(),
1770 }
1771 ],
1772 )]
1773 );
1774
1775 action_log
1776 .update(cx, |log, cx| {
1777 log.reject_edits_in_ranges(
1778 buffer.clone(),
1779 vec![Point::new(0, 0)..Point::new(1, 0)],
1780 cx,
1781 )
1782 })
1783 .await
1784 .unwrap();
1785 cx.run_until_parked();
1786 assert_eq!(
1787 buffer.read_with(cx, |buffer, _| buffer.text()),
1788 "abc\ndef\nghi\njkl\nmnO"
1789 );
1790 assert_eq!(
1791 unreviewed_hunks(&action_log, cx),
1792 vec![(
1793 buffer.clone(),
1794 vec![HunkStatus {
1795 range: Point::new(4, 0)..Point::new(4, 3),
1796 diff_status: DiffHunkStatusKind::Modified,
1797 old_text: "mno".into(),
1798 }],
1799 )]
1800 );
1801
1802 action_log
1803 .update(cx, |log, cx| {
1804 log.reject_edits_in_ranges(
1805 buffer.clone(),
1806 vec![Point::new(4, 0)..Point::new(4, 0)],
1807 cx,
1808 )
1809 })
1810 .await
1811 .unwrap();
1812 cx.run_until_parked();
1813 assert_eq!(
1814 buffer.read_with(cx, |buffer, _| buffer.text()),
1815 "abc\ndef\nghi\njkl\nmno"
1816 );
1817 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1818 }
1819
1820 #[gpui::test(iterations = 10)]
1821 async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
1822 init_test(cx);
1823
1824 let fs = FakeFs::new(cx.executor());
1825 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
1826 .await;
1827 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1828 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1829 let file_path = project
1830 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1831 .unwrap();
1832 let buffer = project
1833 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1834 .await
1835 .unwrap();
1836
1837 cx.update(|cx| {
1838 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
1839 buffer.update(cx, |buffer, cx| {
1840 buffer
1841 .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
1842 .unwrap()
1843 });
1844 buffer.update(cx, |buffer, cx| {
1845 buffer
1846 .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
1847 .unwrap()
1848 });
1849 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1850 });
1851 cx.run_until_parked();
1852 assert_eq!(
1853 buffer.read_with(cx, |buffer, _| buffer.text()),
1854 "abc\ndE\nXYZf\nghi\njkl\nmnO"
1855 );
1856 assert_eq!(
1857 unreviewed_hunks(&action_log, cx),
1858 vec![(
1859 buffer.clone(),
1860 vec![
1861 HunkStatus {
1862 range: Point::new(1, 0)..Point::new(3, 0),
1863 diff_status: DiffHunkStatusKind::Modified,
1864 old_text: "def\n".into(),
1865 },
1866 HunkStatus {
1867 range: Point::new(5, 0)..Point::new(5, 3),
1868 diff_status: DiffHunkStatusKind::Modified,
1869 old_text: "mno".into(),
1870 }
1871 ],
1872 )]
1873 );
1874
1875 action_log.update(cx, |log, cx| {
1876 let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
1877 ..buffer.read(cx).anchor_before(Point::new(1, 0));
1878 let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
1879 ..buffer.read(cx).anchor_before(Point::new(5, 3));
1880
1881 log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], cx)
1882 .detach();
1883 assert_eq!(
1884 buffer.read_with(cx, |buffer, _| buffer.text()),
1885 "abc\ndef\nghi\njkl\nmno"
1886 );
1887 });
1888 cx.run_until_parked();
1889 assert_eq!(
1890 buffer.read_with(cx, |buffer, _| buffer.text()),
1891 "abc\ndef\nghi\njkl\nmno"
1892 );
1893 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1894 }
1895
1896 #[gpui::test(iterations = 10)]
1897 async fn test_reject_deleted_file(cx: &mut TestAppContext) {
1898 init_test(cx);
1899
1900 let fs = FakeFs::new(cx.executor());
1901 fs.insert_tree(path!("/dir"), json!({"file": "content"}))
1902 .await;
1903 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1904 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1905 let file_path = project
1906 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1907 .unwrap();
1908 let buffer = project
1909 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
1910 .await
1911 .unwrap();
1912
1913 cx.update(|cx| {
1914 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
1915 });
1916 project
1917 .update(cx, |project, cx| {
1918 project.delete_file(file_path.clone(), false, cx)
1919 })
1920 .unwrap()
1921 .await
1922 .unwrap();
1923 cx.run_until_parked();
1924 assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
1925 assert_eq!(
1926 unreviewed_hunks(&action_log, cx),
1927 vec![(
1928 buffer.clone(),
1929 vec![HunkStatus {
1930 range: Point::new(0, 0)..Point::new(0, 0),
1931 diff_status: DiffHunkStatusKind::Deleted,
1932 old_text: "content".into(),
1933 }]
1934 )]
1935 );
1936
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 0)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
        assert!(fs.is_file(path!("/dir/file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

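    // Rejecting the edits in a file the agent created should delete the file from disk.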
    #[gpui::test(iterations = 10)]
    async fn test_reject_created_file(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
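        // Save the buffer so the new file is written to disk.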
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 7),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

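        // Rejecting the creation should remove the file from disk and clear the hunk.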
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(0, 11)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

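    // Rejecting an agent-created file must not delete it once the user has edited it too.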
    #[gpui::test]
    async fn test_reject_created_file_with_user_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/new_file", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // The agent creates the file with initial content
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("ai content", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();

        // User makes additional edits
        cx.update(|cx| {
            buffer.update(cx, |buffer, cx| {
                buffer.edit([(10..10, "\nuser added this line")], None, cx);
            });
        });

        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();

        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        // Reject everything, using a range that extends past the end of the buffer
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(100, 0)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();

        // The file should remain on disk, keeping both the agent's and the user's content
        assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);

        let content = buffer.read_with(cx, |buffer, _| buffer.text());
        assert_eq!(content, "ai content\nuser added this line");
    }

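    // Randomized test: interleave keeps, rejects, agent edits, and user edits, then check
    // that the tracked diff base stays consistent with the buffer.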
    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        init_test(cx);

        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

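        // Each iteration keeps a random range, rejects a random range, or makes a random
        // edit attributed to either the agent or the user.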
        for _ in 0..operations {
            match rng.gen_range(0..100) {
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, cx)
                    });
                }
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            log.reject_edits_in_ranges(buffer.clone(), vec![range], cx)
                        })
                        .await
                        .unwrap();
                }
                _ => {
                    let is_agent_edit = rng.gen_bool(0.5);
                    if is_agent_edit {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        if is_agent_edit {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            if rng.gen_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

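        // Replays the unreviewed edits onto the diff base and asserts that the result
        // matches the current buffer text.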
        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(&buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
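                // Apply the edits in order; after earlier edits are applied, each edit's
                // new coordinates give its position in the partially rebuilt text.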
                for edit in tracked_buffer.unreviewed_edits.edits() {
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }

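    // When a commit's content matches the agent's edits, the corresponding hunks count as
    // reviewed; hunks the commit does not match stay unreviewed.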
    #[gpui::test]
    async fn test_keep_edits_on_commit(cx: &mut gpui::TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.background_executor.clone());
        fs.insert_tree(
            path!("/project"),
            json!({
                ".git": {},
                "file.txt": "a\nb\nc\nd\ne\nf\ng\nh\ni\nj",
            }),
        )
        .await;
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt".into(), "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
            "0000000",
        );
        cx.run_until_parked();

        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path(path!("/project/file.txt"), cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer.edit(
                    [
                        // Edit at the very start: a -> A
                        (Point::new(0, 0)..Point::new(0, 1), "A"),
                        // Deletion in the middle: remove lines d and e
                        (Point::new(3, 0)..Point::new(5, 0), ""),
                        // Modification: g -> GGG
                        (Point::new(6, 0)..Point::new(6, 1), "GGG"),
                        // Addition: insert new line after h
                        (Point::new(7, 1)..Point::new(7, 1), "\nNEW"),
                        // Edit the very last character: j -> J
                        (Point::new(9, 0)..Point::new(9, 1), "J"),
                    ],
                    None,
                    cx,
                );
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(0, 0)..Point::new(1, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "a\n".into()
                    },
                    HunkStatus {
                        range: Point::new(3, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "d\ne\n".into()
                    },
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Simulate a git commit that matches some edits but not others:
        // - Accepts the first edit (a -> A)
        // - Accepts the deletion (remove d and e)
        // - Makes a different change to g (g -> G instead of GGG)
        // - Ignores the NEW line addition
        // - Ignores the last line edit (j stays as j)
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt".into(), "A\nb\nc\nf\nG\nh\ni\nj".into())],
            "0000001",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(4, 0)..Point::new(5, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "g\n".into()
                    },
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Another commit matches the GGG edit and adds a different line where NEW was inserted
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[(
                "file.txt".into(),
                "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into(),
            )],
            "0000002",
        );
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(6, 0)..Point::new(7, 0),
                        diff_status: DiffHunkStatusKind::Added,
                        old_text: "".into()
                    },
                    HunkStatus {
                        range: Point::new(8, 0)..Point::new(8, 1),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "j".into()
                    }
                ]
            )]
        );

        // Final commit that accepts all remaining edits
        fs.set_head_for_repo(
            path!("/project/.git").as_ref(),
            &[("file.txt".into(), "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
            "0000003",
        );
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }

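    // Simplified view of a diff hunk, used for assertions in these tests.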
    #[derive(Debug, Clone, PartialEq, Eq)]
    struct HunkStatus {
        range: Range<Point>,
        diff_status: DiffHunkStatusKind,
        old_text: String,
    }

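    // Collects the unreviewed hunks for every buffer the action log reports as changed.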
    fn unreviewed_hunks(
        action_log: &Entity<ActionLog>,
        cx: &TestAppContext,
    ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
        cx.read(|cx| {
            action_log
                .read(cx)
                .changed_buffers(cx)
                .into_iter()
                .map(|(buffer, diff)| {
                    let snapshot = buffer.read(cx).snapshot();
                    (
                        buffer,
                        diff.read(cx)
                            .hunks(&snapshot, cx)
                            .map(|hunk| HunkStatus {
                                diff_status: hunk.status().kind,
                                range: hunk.range,
                                old_text: diff
                                    .read(cx)
                                    .base_text()
                                    .text_for_range(hunk.diff_base_byte_range)
                                    .collect(),
                            })
                            .collect(),
                    )
                })
                .collect()
        })
    }

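    // Unnotified user edits should be rendered as a unified diff with a/ and b/ headers.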
    #[gpui::test]
    async fn test_format_patch(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"test.txt": "line 1\nline 2\nline 3\n"}),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));

        let file_path = project
            .read_with(cx, |project, cx| {
                project.find_project_path("dir/test.txt", cx)
            })
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        cx.update(|cx| {
            // Track the buffer and mark it as read first
            action_log.update(cx, |log, cx| {
                log.buffer_read(buffer.clone(), cx);
            });

            // Make some edits to create a patch
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 0)..Point::new(1, 6), "CHANGED")], None, cx)
                    .unwrap(); // Replace "line 2" with "CHANGED"
            });
        });

        cx.run_until_parked();

        // Get the patch
        let patch = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));

        // Verify the full unified diff output, including the file headers and hunk
        assert_eq!(
            patch.unwrap(),
            indoc! {"
                --- a/dir/test.txt
                +++ b/dir/test.txt
                @@ -1,3 +1,3 @@
                 line 1
                -line 2
                +CHANGED
                 line 3
            "}
        );
    }
}