1use anyhow::{Context as _, Result};
2use buffer_diff::BufferDiff;
3use collections::BTreeMap;
4use futures::{StreamExt, channel::mpsc};
5use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
6use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
7use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
8use std::{cmp, ops::Range, sync::Arc};
9use text::{Edit, Patch, Rope};
10use util::RangeExt;
11
/// Tracks actions performed by tools in a thread, so the agent's edits can be
/// reviewed (kept/rejected) by the user and stale buffers can be re-read.
pub struct ActionLog {
    /// Buffers that we want to notify the model about when they change.
    tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
    /// Has the model edited a file since it last checked diagnostics?
    edited_since_project_diagnostics_check: bool,
    /// The project this action log is associated with
    project: Entity<Project>,
}
21
22impl ActionLog {
23 /// Creates a new, empty action log associated with the given project.
24 pub fn new(project: Entity<Project>) -> Self {
25 Self {
26 tracked_buffers: BTreeMap::default(),
27 edited_since_project_diagnostics_check: false,
28 project,
29 }
30 }
31
    /// Returns the project this action log is associated with.
    pub fn project(&self) -> &Entity<Project> {
        &self.project
    }
35
    /// Notifies a diagnostics check, clearing the "edited since last
    /// diagnostics check" flag.
    pub fn checked_project_diagnostics(&mut self) {
        self.edited_since_project_diagnostics_check = false;
    }
40
    /// Returns true if any files have been edited since the last project diagnostics check
    /// (set by `buffer_created`/`buffer_edited`, cleared by `checked_project_diagnostics`).
    pub fn has_edited_files_since_project_diagnostics_check(&self) -> bool {
        self.edited_since_project_diagnostics_check
    }
45
    /// Starts (or refreshes) tracking of `buffer`, returning its tracking entry.
    ///
    /// `is_created` means the buffer is being reported as newly created by a
    /// tool. In that case any previous tracking state is folded into a
    /// `Created` status so that rejecting the creation can restore the file's
    /// prior content (if there was any).
    fn track_buffer_internal(
        &mut self,
        buffer: Entity<Buffer>,
        is_created: bool,
        cx: &mut Context<Self>,
    ) -> &mut TrackedBuffer {
        let status = if is_created {
            if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
                match tracked.status {
                    // Already tracked as created: keep the original file
                    // content captured when the creation was first seen.
                    TrackedBufferStatus::Created {
                        existing_file_content,
                    } => TrackedBufferStatus::Created {
                        existing_file_content,
                    },
                    // Previously modified/deleted and now overwritten by a
                    // "create": remember the old diff base so a rejection can
                    // restore it.
                    TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
                        TrackedBufferStatus::Created {
                            existing_file_content: Some(tracked.diff_base),
                        }
                    }
                }
            } else if buffer
                .read(cx)
                .file()
                .map_or(false, |file| file.disk_state().exists())
            {
                // The file already exists on disk; capture its current
                // content before the tool overwrites it.
                TrackedBufferStatus::Created {
                    existing_file_content: Some(buffer.read(cx).as_rope().clone()),
                }
            } else {
                // Brand-new file: nothing to restore on rejection.
                TrackedBufferStatus::Created {
                    existing_file_content: None,
                }
            }
        } else {
            TrackedBufferStatus::Modified
        };

        let tracked_buffer = self
            .tracked_buffers
            .entry(buffer.clone())
            .or_insert_with(|| {
                // Keep language servers aware of this buffer while we track it.
                let open_lsp_handle = self.project.update(cx, |project, cx| {
                    project.register_buffer_with_language_servers(&buffer, cx)
                });

                let text_snapshot = buffer.read(cx).text_snapshot();
                let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
                let diff_base;
                let unreviewed_changes;
                if is_created {
                    // Created buffers diff against empty content; the whole
                    // buffer counts as one unreviewed insertion.
                    diff_base = Rope::default();
                    unreviewed_changes = Patch::new(vec![Edit {
                        old: 0..1,
                        new: 0..text_snapshot.max_point().row + 1,
                    }])
                } else {
                    // Otherwise the current content is the baseline: nothing
                    // is unreviewed yet.
                    diff_base = buffer.read(cx).as_rope().clone();
                    unreviewed_changes = Patch::default();
                }
                TrackedBuffer {
                    buffer: buffer.clone(),
                    diff_base,
                    unreviewed_changes,
                    snapshot: text_snapshot.clone(),
                    status,
                    version: buffer.read(cx).version(),
                    diff,
                    diff_update: diff_update_tx,
                    _open_lsp_handle: open_lsp_handle,
                    // Background task that recomputes the diff whenever a new
                    // snapshot is sent on `diff_update`.
                    _maintain_diff: cx.spawn({
                        let buffer = buffer.clone();
                        async move |this, cx| {
                            Self::maintain_diff(this, buffer, diff_update_rx, cx)
                                .await
                                .ok();
                        }
                    }),
                    _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
                }
            });
        // Record the version we last observed; `stale_buffers` compares
        // against it to find buffers changed since the model saw them.
        tracked_buffer.version = buffer.read(cx).version();
        tracked_buffer
    }
130
131 fn handle_buffer_event(
132 &mut self,
133 buffer: Entity<Buffer>,
134 event: &BufferEvent,
135 cx: &mut Context<Self>,
136 ) {
137 match event {
138 BufferEvent::Edited { .. } => self.handle_buffer_edited(buffer, cx),
139 BufferEvent::FileHandleChanged => {
140 self.handle_buffer_file_changed(buffer, cx);
141 }
142 _ => {}
143 };
144 }
145
146 fn handle_buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
147 let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
148 return;
149 };
150 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
151 }
152
    /// Reconciles tracking state when a buffer's underlying file changes on
    /// disk (deleted or recreated outside the tool's control).
    fn handle_buffer_file_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        match tracked_buffer.status {
            TrackedBufferStatus::Created { .. } | TrackedBufferStatus::Modified => {
                if buffer
                    .read(cx)
                    .file()
                    .map_or(false, |file| file.disk_state() == DiskState::Deleted)
                {
                    // If the buffer had been edited by a tool, but it got
                    // deleted externally, we want to stop tracking it.
                    self.tracked_buffers.remove(&buffer);
                }
                cx.notify();
            }
            TrackedBufferStatus::Deleted => {
                if buffer
                    .read(cx)
                    .file()
                    .map_or(false, |file| file.disk_state() != DiskState::Deleted)
                {
                    // If the buffer had been deleted by a tool, but it got
                    // resurrected externally, we want to clear the changes we
                    // were tracking and reset the buffer's state.
                    self.tracked_buffers.remove(&buffer);
                    self.track_buffer_internal(buffer, false, cx);
                }
                cx.notify();
            }
        }
    }
187
    /// Long-running task that keeps a tracked buffer's diff up to date.
    ///
    /// For each `(author, snapshot)` received on `diff_update`:
    /// 1. Rebase the diff base on a background thread — user edits that do
    ///    not conflict with unreviewed changes are absorbed into the base,
    ///    while agent edits are left as unreviewed.
    /// 2. Recompute the `BufferDiff` against the rebased base text.
    /// 3. Re-derive the row-level unreviewed changes from the new hunks and
    ///    write the updated state back onto the `ActionLog`.
    ///
    /// Returns an error once the log entity or the tracked buffer is gone.
    async fn maintain_diff(
        this: WeakEntity<Self>,
        buffer: Entity<Buffer>,
        mut diff_update: mpsc::UnboundedReceiver<(ChangeAuthor, text::BufferSnapshot)>,
        cx: &mut AsyncApp,
    ) -> Result<()> {
        while let Some((author, buffer_snapshot)) = diff_update.next().await {
            let (rebase, diff, language, language_registry) =
                this.read_with(cx, |this, cx| {
                    let tracked_buffer = this
                        .tracked_buffers
                        .get(&buffer)
                        .context("buffer not tracked")?;

                    // Rebase the diff base off the main thread; only user
                    // edits are folded into it (see `apply_non_conflicting_edits`).
                    let rebase = cx.background_spawn({
                        let mut base_text = tracked_buffer.diff_base.clone();
                        let old_snapshot = tracked_buffer.snapshot.clone();
                        let new_snapshot = buffer_snapshot.clone();
                        let unreviewed_changes = tracked_buffer.unreviewed_changes.clone();
                        async move {
                            let edits = diff_snapshots(&old_snapshot, &new_snapshot);
                            if let ChangeAuthor::User = author {
                                apply_non_conflicting_edits(
                                    &unreviewed_changes,
                                    edits,
                                    &mut base_text,
                                    new_snapshot.as_rope(),
                                );
                            }
                            (Arc::new(base_text.to_string()), base_text)
                        }
                    });

                    anyhow::Ok((
                        rebase,
                        tracked_buffer.diff.clone(),
                        tracked_buffer.buffer.read(cx).language().cloned(),
                        tracked_buffer.buffer.read(cx).language_registry(),
                    ))
                })??;

            let (new_base_text, new_diff_base) = rebase.await;
            let diff_snapshot = BufferDiff::update_diff(
                diff.clone(),
                buffer_snapshot.clone(),
                Some(new_base_text),
                true,
                false,
                language,
                language_registry,
                cx,
            )
            .await;

            // Convert the fresh diff hunks back into a row-level patch so the
            // keep/reject operations can work in row coordinates.
            let mut unreviewed_changes = Patch::default();
            if let Ok(diff_snapshot) = diff_snapshot {
                unreviewed_changes = cx
                    .background_spawn({
                        let diff_snapshot = diff_snapshot.clone();
                        let buffer_snapshot = buffer_snapshot.clone();
                        let new_diff_base = new_diff_base.clone();
                        async move {
                            let mut unreviewed_changes = Patch::default();
                            for hunk in diff_snapshot.hunks_intersecting_range(
                                Anchor::MIN..Anchor::MAX,
                                &buffer_snapshot,
                            ) {
                                let old_range = new_diff_base
                                    .offset_to_point(hunk.diff_base_byte_range.start)
                                    ..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
                                let new_range = hunk.range.start..hunk.range.end;
                                unreviewed_changes.push(point_to_row_edit(
                                    Edit {
                                        old: old_range,
                                        new: new_range,
                                    },
                                    &new_diff_base,
                                    &buffer_snapshot.as_rope(),
                                ));
                            }
                            unreviewed_changes
                        }
                    })
                    .await;

                diff.update(cx, |diff, cx| {
                    diff.set_snapshot(diff_snapshot, &buffer_snapshot, cx)
                })?;
            }
            // Commit the rebased state back onto the tracked buffer entry.
            this.update(cx, |this, cx| {
                let tracked_buffer = this
                    .tracked_buffers
                    .get_mut(&buffer)
                    .context("buffer not tracked")?;
                tracked_buffer.diff_base = new_diff_base;
                tracked_buffer.snapshot = buffer_snapshot;
                tracked_buffer.unreviewed_changes = unreviewed_changes;
                cx.notify();
                anyhow::Ok(())
            })??;
        }

        Ok(())
    }
292
    /// Track a buffer as read, so we can notify the model about user edits.
    /// The buffer's current content becomes the diff base.
    pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.track_buffer_internal(buffer, false, cx);
    }
297
298 /// Mark a buffer as edited, so we can refresh it in the context
299 pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
300 self.edited_since_project_diagnostics_check = true;
301 self.track_buffer_internal(buffer.clone(), true, cx);
302 }
303
304 /// Mark a buffer as edited, so we can refresh it in the context
305 pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
306 self.edited_since_project_diagnostics_check = true;
307
308 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
309 if let TrackedBufferStatus::Deleted = tracked_buffer.status {
310 tracked_buffer.status = TrackedBufferStatus::Modified;
311 }
312 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
313 }
314
315 pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
316 let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
317 match tracked_buffer.status {
318 TrackedBufferStatus::Created { .. } => {
319 self.tracked_buffers.remove(&buffer);
320 cx.notify();
321 }
322 TrackedBufferStatus::Modified => {
323 buffer.update(cx, |buffer, cx| buffer.set_text("", cx));
324 tracked_buffer.status = TrackedBufferStatus::Deleted;
325 tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);
326 }
327 TrackedBufferStatus::Deleted => {}
328 }
329 cx.notify();
330 }
331
    /// Accepts ("keeps") all unreviewed edits whose rows in the current buffer
    /// intersect `buffer_range`, folding their new text into the diff base so
    /// they no longer appear as pending changes.
    pub fn keep_edits_in_range(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_range: Range<impl language::ToPoint>,
        cx: &mut Context<Self>,
    ) {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return;
        };

        match tracked_buffer.status {
            TrackedBufferStatus::Deleted => {
                // Keeping a deletion accepts it wholesale: stop tracking.
                self.tracked_buffers.remove(&buffer);
                cx.notify();
            }
            _ => {
                let buffer = buffer.read(cx);
                let buffer_range =
                    buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
                // Row delta that previously-accepted edits have introduced
                // into the diff base; later edits' old ranges must shift by it.
                let mut delta = 0i32;

                tracked_buffer.unreviewed_changes.retain_mut(|edit| {
                    // Rebase this edit's old (diff-base) rows past the edits
                    // accepted earlier in this pass.
                    edit.old.start = (edit.old.start as i32 + delta) as u32;
                    edit.old.end = (edit.old.end as i32 + delta) as u32;

                    if buffer_range.end.row < edit.new.start
                        || buffer_range.start.row > edit.new.end
                    {
                        // Outside the kept range: leave it unreviewed.
                        true
                    } else {
                        // Copy this edit's new text into the diff base,
                        // effectively accepting it, then drop it.
                        let old_range = tracked_buffer
                            .diff_base
                            .point_to_offset(Point::new(edit.old.start, 0))
                            ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                Point::new(edit.old.end, 0),
                                tracked_buffer.diff_base.max_point(),
                            ));
                        let new_range = tracked_buffer
                            .snapshot
                            .point_to_offset(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.point_to_offset(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        tracked_buffer.diff_base.replace(
                            old_range,
                            &tracked_buffer
                                .snapshot
                                .text_for_range(new_range)
                                .collect::<String>(),
                        );
                        delta += edit.new_len() as i32 - edit.old_len() as i32;
                        false
                    }
                });
                tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
            }
        }
    }
391
    /// Rejects unreviewed edits whose rows intersect `buffer_ranges`,
    /// restoring the affected content from the diff base and saving the
    /// buffer.
    ///
    /// - `Created`: the whole creation is undone — the prior file content is
    ///   restored if there was any, otherwise the file entry is deleted.
    /// - `Deleted`: the file is recreated from the diff base and tracking is
    ///   reset as if the buffer had just been read.
    /// - `Modified`: only edits intersecting the given ranges are reverted.
    pub fn reject_edits_in_ranges(
        &mut self,
        buffer: Entity<Buffer>,
        buffer_ranges: Vec<Range<impl language::ToPoint>>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
            return Task::ready(Ok(()));
        };

        match &tracked_buffer.status {
            TrackedBufferStatus::Created {
                existing_file_content,
            } => {
                let task = if let Some(existing_file_content) = existing_file_content {
                    // The tool overwrote an existing file: restore its prior
                    // content and save.
                    buffer.update(cx, |buffer, cx| {
                        buffer.start_transaction();
                        buffer.set_text("", cx);
                        for chunk in existing_file_content.chunks() {
                            buffer.append(chunk, cx);
                        }
                        buffer.end_transaction(cx);
                    });
                    self.project
                        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
                } else {
                    // The file didn't exist before: delete the created entry.
                    buffer
                        .read(cx)
                        .entry_id(cx)
                        .and_then(|entry_id| {
                            self.project
                                .update(cx, |project, cx| project.delete_entry(entry_id, false, cx))
                        })
                        .unwrap_or(Task::ready(Ok(())))
                };

                self.tracked_buffers.remove(&buffer);
                cx.notify();
                task
            }
            TrackedBufferStatus::Deleted => {
                // Undo the deletion: write the diff base back out.
                buffer.update(cx, |buffer, cx| {
                    buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
                });
                let save = self
                    .project
                    .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));

                // Clear all tracked changes for this buffer and start over as if we just read it.
                self.tracked_buffers.remove(&buffer);
                self.buffer_read(buffer.clone(), cx);
                cx.notify();
                save
            }
            TrackedBufferStatus::Modified => {
                buffer.update(cx, |buffer, cx| {
                    // Row ranges the caller wants reverted, in buffer order.
                    let mut buffer_row_ranges = buffer_ranges
                        .into_iter()
                        .map(|range| {
                            range.start.to_point(buffer).row..range.end.to_point(buffer).row
                        })
                        .peekable();

                    let mut edits_to_revert = Vec::new();
                    for edit in tracked_buffer.unreviewed_changes.edits() {
                        // Anchor the edit's new rows so positions survive the
                        // reverting edits applied below.
                        let new_range = tracked_buffer
                            .snapshot
                            .anchor_before(Point::new(edit.new.start, 0))
                            ..tracked_buffer.snapshot.anchor_after(cmp::min(
                                Point::new(edit.new.end, 0),
                                tracked_buffer.snapshot.max_point(),
                            ));
                        let new_row_range = new_range.start.to_point(buffer).row
                            ..new_range.end.to_point(buffer).row;

                        // Advance past caller ranges strictly before this
                        // edit; revert if any range intersects it.
                        let mut revert = false;
                        while let Some(buffer_row_range) = buffer_row_ranges.peek() {
                            if buffer_row_range.end < new_row_range.start {
                                buffer_row_ranges.next();
                            } else if buffer_row_range.start > new_row_range.end {
                                break;
                            } else {
                                revert = true;
                                break;
                            }
                        }

                        if revert {
                            // Replace the edited rows with the corresponding
                            // diff-base text.
                            let old_range = tracked_buffer
                                .diff_base
                                .point_to_offset(Point::new(edit.old.start, 0))
                                ..tracked_buffer.diff_base.point_to_offset(cmp::min(
                                    Point::new(edit.old.end, 0),
                                    tracked_buffer.diff_base.max_point(),
                                ));
                            let old_text = tracked_buffer
                                .diff_base
                                .chunks_in_range(old_range)
                                .collect::<String>();
                            edits_to_revert.push((new_range, old_text));
                        }
                    }

                    buffer.edit(edits_to_revert, None, cx);
                });
                self.project
                    .update(cx, |project, cx| project.save_buffer(buffer, cx))
            }
        }
    }
502
503 pub fn keep_all_edits(&mut self, cx: &mut Context<Self>) {
504 self.tracked_buffers
505 .retain(|_buffer, tracked_buffer| match tracked_buffer.status {
506 TrackedBufferStatus::Deleted => false,
507 _ => {
508 tracked_buffer.unreviewed_changes.clear();
509 tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
510 tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
511 true
512 }
513 });
514 cx.notify();
515 }
516
517 /// Returns the set of buffers that contain changes that haven't been reviewed by the user.
518 pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
519 self.tracked_buffers
520 .iter()
521 .filter(|(_, tracked)| tracked.has_changes(cx))
522 .map(|(buffer, tracked)| (buffer.clone(), tracked.diff.clone()))
523 .collect()
524 }
525
526 /// Iterate over buffers changed since last read or edited by the model
527 pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
528 self.tracked_buffers
529 .iter()
530 .filter(|(buffer, tracked)| {
531 let buffer = buffer.read(cx);
532
533 tracked.version != buffer.version
534 && buffer
535 .file()
536 .map_or(false, |file| file.disk_state() != DiskState::Deleted)
537 })
538 .map(|(buffer, _)| buffer)
539 }
540}
541
/// Applies `edits` (row-level edits from the old snapshot to `new_text`) to
/// `old_text`, skipping any edit that conflicts with an unreviewed edit in
/// `patch`. Non-conflicting user edits are thereby absorbed into the diff
/// base, while conflicting ones remain visible as pending changes.
fn apply_non_conflicting_edits(
    patch: &Patch<u32>,
    edits: Vec<Edit<u32>>,
    old_text: &mut Rope,
    new_text: &Rope,
) {
    let mut old_edits = patch.edits().iter().cloned().peekable();
    let mut new_edits = edits.into_iter().peekable();
    // Row delta introduced into `old_text` by the edits applied so far.
    let mut applied_delta = 0i32;
    // Row delta of the unreviewed edits we have stepped past (their new text
    // is not in `old_text`, so subsequent coordinates must be shifted back).
    let mut rebased_delta = 0i32;

    while let Some(mut new_edit) = new_edits.next() {
        let mut conflict = false;

        // Push all the old edits that are before this new edit or that intersect with it.
        while let Some(old_edit) = old_edits.peek() {
            if new_edit.old.end < old_edit.new.start
                || (!old_edit.new.is_empty() && new_edit.old.end == old_edit.new.start)
            {
                break;
            } else if new_edit.old.start > old_edit.new.end
                || (!old_edit.new.is_empty() && new_edit.old.start == old_edit.new.end)
            {
                let old_edit = old_edits.next().unwrap();
                rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
            } else {
                conflict = true;
                if new_edits
                    .peek()
                    .map_or(false, |next_edit| next_edit.old.overlaps(&old_edit.new))
                {
                    // The following new edit also touches this unreviewed
                    // edit; consume it so it is skipped as part of the
                    // conflict as well.
                    new_edit = new_edits.next().unwrap();
                } else {
                    let old_edit = old_edits.next().unwrap();
                    rebased_delta += old_edit.new_len() as i32 - old_edit.old_len() as i32;
                }
            }
        }

        if !conflict {
            // This edit doesn't intersect with any old edit, so we can apply it to the old text.
            new_edit.old.start = (new_edit.old.start as i32 + applied_delta - rebased_delta) as u32;
            new_edit.old.end = (new_edit.old.end as i32 + applied_delta - rebased_delta) as u32;
            let old_bytes = old_text.point_to_offset(Point::new(new_edit.old.start, 0))
                ..old_text.point_to_offset(cmp::min(
                    Point::new(new_edit.old.end, 0),
                    old_text.max_point(),
                ));
            let new_bytes = new_text.point_to_offset(Point::new(new_edit.new.start, 0))
                ..new_text.point_to_offset(cmp::min(
                    Point::new(new_edit.new.end, 0),
                    new_text.max_point(),
                ));

            old_text.replace(
                old_bytes,
                &new_text.chunks_in_range(new_bytes).collect::<String>(),
            );
            applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
        }
    }
}
604
/// Computes row-level edits between two snapshots of the same buffer,
/// coalescing edits whose old row ranges touch or overlap into single edits.
fn diff_snapshots(
    old_snapshot: &text::BufferSnapshot,
    new_snapshot: &text::BufferSnapshot,
) -> Vec<Edit<u32>> {
    let mut edits = new_snapshot
        .edits_since::<Point>(&old_snapshot.version)
        .map(|edit| point_to_row_edit(edit, old_snapshot.as_rope(), new_snapshot.as_rope()))
        .peekable();
    let mut row_edits = Vec::new();
    while let Some(mut edit) = edits.next() {
        // Merge following edits while their old row ranges touch this one.
        while let Some(next_edit) = edits.peek() {
            if edit.old.end >= next_edit.old.start {
                edit.old.end = next_edit.old.end;
                edit.new.end = next_edit.new.end;
                edits.next();
            } else {
                break;
            }
        }
        row_edits.push(edit);
    }
    row_edits
}
628
/// Converts a point-based edit into a row-based (whole-line) edit.
///
/// An edit that starts at the end of a line and inserts a leading newline is
/// treated as starting on the following row; an edit already aligned to line
/// starts keeps its row bounds as a half-open range; anything else is widened
/// to include the row containing its endpoint.
fn point_to_row_edit(edit: Edit<Point>, old_text: &Rope, new_text: &Rope) -> Edit<u32> {
    if edit.old.start.column == old_text.line_len(edit.old.start.row)
        && new_text
            .chars_at(new_text.point_to_offset(edit.new.start))
            .next()
            == Some('\n')
        && edit.old.start != old_text.max_point()
    {
        // Insertion at end-of-line beginning with '\n': belongs to next row.
        Edit {
            old: edit.old.start.row + 1..edit.old.end.row + 1,
            new: edit.new.start.row + 1..edit.new.end.row + 1,
        }
    } else if edit.old.start.column == 0
        && edit.old.end.column == 0
        && edit.new.end.column == 0
        && edit.old.end != old_text.max_point()
    {
        // Edit already spans whole lines.
        Edit {
            old: edit.old.start.row..edit.old.end.row,
            new: edit.new.start.row..edit.new.end.row,
        }
    } else {
        // Partial-line edit: widen to include the final row on both sides.
        Edit {
            old: edit.old.start.row..edit.old.end.row + 1,
            new: edit.new.start.row..edit.new.end.row + 1,
        }
    }
}
657
/// Who caused a buffer change. The diff-maintenance task folds non-conflicting
/// `User` edits into the diff base, while `Agent` edits stay unreviewed.
#[derive(Copy, Clone, Debug)]
enum ChangeAuthor {
    /// A change made by the user in the editor.
    User,
    /// A change made by a tool on the model's behalf.
    Agent,
}
663
/// How a tracked buffer entered tracking; determines how keeping or rejecting
/// its changes behaves.
enum TrackedBufferStatus {
    /// Created (or overwritten) by a tool. `existing_file_content` holds the
    /// prior content when the file existed before, so rejection can restore it.
    Created { existing_file_content: Option<Rope> },
    /// An existing file modified by a tool.
    Modified,
    /// A file deleted by a tool.
    Deleted,
}
669
/// Per-buffer tracking state kept by `ActionLog`.
struct TrackedBuffer {
    buffer: Entity<Buffer>,
    /// Baseline content the diff is computed against; accepted and
    /// non-conflicting user edits get folded into it over time.
    diff_base: Rope,
    /// Row-level edits (diff base -> snapshot) not yet reviewed by the user.
    unreviewed_changes: Patch<u32>,
    /// How this buffer entered tracking (created/modified/deleted by a tool).
    status: TrackedBufferStatus,
    /// Buffer version last observed via `track_buffer_internal`; compared by
    /// `stale_buffers`.
    version: clock::Global,
    /// The diff between `diff_base` and the buffer, exposed for review UI.
    diff: Entity<BufferDiff>,
    /// Buffer snapshot corresponding to the current `unreviewed_changes`.
    snapshot: text::BufferSnapshot,
    /// Channel feeding `(author, snapshot)` pairs to the `maintain_diff` task.
    diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
    /// Keeps the buffer registered with language servers while tracked.
    _open_lsp_handle: OpenLspBufferHandle,
    /// Background task recomputing the diff on `diff_update` messages.
    _maintain_diff: Task<()>,
    /// Subscription delivering buffer events to `handle_buffer_event`.
    _subscription: Subscription,
}
683
impl TrackedBuffer {
    /// Whether the tracked diff currently contains at least one hunk.
    fn has_changes(&self, cx: &App) -> bool {
        self.diff
            .read(cx)
            .hunks(&self.buffer.read(cx), cx)
            .next()
            .is_some()
    }

    /// Sends the buffer's current snapshot to the `maintain_diff` task so the
    /// diff is recomputed asynchronously. A send failure (task already gone)
    /// is deliberately ignored.
    fn schedule_diff_update(&self, author: ChangeAuthor, cx: &App) {
        self.diff_update
            .unbounded_send((author, self.buffer.read(cx).text_snapshot()))
            .ok();
    }
}
699
/// A buffer with unreviewed changes, paired with the diff to review.
pub struct ChangedBuffer {
    pub diff: Entity<BufferDiff>,
}
703
704#[cfg(test)]
705mod tests {
706 use std::env;
707
708 use super::*;
709 use buffer_diff::DiffHunkStatusKind;
710 use gpui::TestAppContext;
711 use language::Point;
712 use project::{FakeFs, Fs, Project, RemoveOptions};
713 use rand::prelude::*;
714 use serde_json::json;
715 use settings::SettingsStore;
716 use util::{RandomCharIter, path};
717
718 #[ctor::ctor]
719 fn init_logger() {
720 if std::env::var("RUST_LOG").is_ok() {
721 env_logger::init();
722 }
723 }
724
725 fn init_test(cx: &mut TestAppContext) {
726 cx.update(|cx| {
727 let settings_store = SettingsStore::test(cx);
728 cx.set_global(settings_store);
729 language::init(cx);
730 Project::init_settings(cx);
731 });
732 }
733
734 #[gpui::test(iterations = 10)]
735 async fn test_keep_edits(cx: &mut TestAppContext) {
736 init_test(cx);
737
738 let fs = FakeFs::new(cx.executor());
739 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
740 .await;
741 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
742 let action_log = cx.new(|_| ActionLog::new(project.clone()));
743 let file_path = project
744 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
745 .unwrap();
746 let buffer = project
747 .update(cx, |project, cx| project.open_buffer(file_path, cx))
748 .await
749 .unwrap();
750
751 cx.update(|cx| {
752 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
753 buffer.update(cx, |buffer, cx| {
754 buffer
755 .edit([(Point::new(1, 1)..Point::new(1, 2), "E")], None, cx)
756 .unwrap()
757 });
758 buffer.update(cx, |buffer, cx| {
759 buffer
760 .edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
761 .unwrap()
762 });
763 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
764 });
765 cx.run_until_parked();
766 assert_eq!(
767 buffer.read_with(cx, |buffer, _| buffer.text()),
768 "abc\ndEf\nghi\njkl\nmnO"
769 );
770 assert_eq!(
771 unreviewed_hunks(&action_log, cx),
772 vec![(
773 buffer.clone(),
774 vec![
775 HunkStatus {
776 range: Point::new(1, 0)..Point::new(2, 0),
777 diff_status: DiffHunkStatusKind::Modified,
778 old_text: "def\n".into(),
779 },
780 HunkStatus {
781 range: Point::new(4, 0)..Point::new(4, 3),
782 diff_status: DiffHunkStatusKind::Modified,
783 old_text: "mno".into(),
784 }
785 ],
786 )]
787 );
788
789 action_log.update(cx, |log, cx| {
790 log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), cx)
791 });
792 cx.run_until_parked();
793 assert_eq!(
794 unreviewed_hunks(&action_log, cx),
795 vec![(
796 buffer.clone(),
797 vec![HunkStatus {
798 range: Point::new(1, 0)..Point::new(2, 0),
799 diff_status: DiffHunkStatusKind::Modified,
800 old_text: "def\n".into(),
801 }],
802 )]
803 );
804
805 action_log.update(cx, |log, cx| {
806 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), cx)
807 });
808 cx.run_until_parked();
809 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
810 }
811
812 #[gpui::test(iterations = 10)]
813 async fn test_deletions(cx: &mut TestAppContext) {
814 init_test(cx);
815
816 let fs = FakeFs::new(cx.executor());
817 fs.insert_tree(
818 path!("/dir"),
819 json!({"file": "abc\ndef\nghi\njkl\nmno\npqr"}),
820 )
821 .await;
822 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
823 let action_log = cx.new(|_| ActionLog::new(project.clone()));
824 let file_path = project
825 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
826 .unwrap();
827 let buffer = project
828 .update(cx, |project, cx| project.open_buffer(file_path, cx))
829 .await
830 .unwrap();
831
832 cx.update(|cx| {
833 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
834 buffer.update(cx, |buffer, cx| {
835 buffer
836 .edit([(Point::new(1, 0)..Point::new(2, 0), "")], None, cx)
837 .unwrap();
838 buffer.finalize_last_transaction();
839 });
840 buffer.update(cx, |buffer, cx| {
841 buffer
842 .edit([(Point::new(3, 0)..Point::new(4, 0), "")], None, cx)
843 .unwrap();
844 buffer.finalize_last_transaction();
845 });
846 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
847 });
848 cx.run_until_parked();
849 assert_eq!(
850 buffer.read_with(cx, |buffer, _| buffer.text()),
851 "abc\nghi\njkl\npqr"
852 );
853 assert_eq!(
854 unreviewed_hunks(&action_log, cx),
855 vec![(
856 buffer.clone(),
857 vec![
858 HunkStatus {
859 range: Point::new(1, 0)..Point::new(1, 0),
860 diff_status: DiffHunkStatusKind::Deleted,
861 old_text: "def\n".into(),
862 },
863 HunkStatus {
864 range: Point::new(3, 0)..Point::new(3, 0),
865 diff_status: DiffHunkStatusKind::Deleted,
866 old_text: "mno\n".into(),
867 }
868 ],
869 )]
870 );
871
872 buffer.update(cx, |buffer, cx| buffer.undo(cx));
873 cx.run_until_parked();
874 assert_eq!(
875 buffer.read_with(cx, |buffer, _| buffer.text()),
876 "abc\nghi\njkl\nmno\npqr"
877 );
878 assert_eq!(
879 unreviewed_hunks(&action_log, cx),
880 vec![(
881 buffer.clone(),
882 vec![HunkStatus {
883 range: Point::new(1, 0)..Point::new(1, 0),
884 diff_status: DiffHunkStatusKind::Deleted,
885 old_text: "def\n".into(),
886 }],
887 )]
888 );
889
890 action_log.update(cx, |log, cx| {
891 log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), cx)
892 });
893 cx.run_until_parked();
894 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
895 }
896
897 #[gpui::test(iterations = 10)]
898 async fn test_overlapping_user_edits(cx: &mut TestAppContext) {
899 init_test(cx);
900
901 let fs = FakeFs::new(cx.executor());
902 fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
903 .await;
904 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
905 let action_log = cx.new(|_| ActionLog::new(project.clone()));
906 let file_path = project
907 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
908 .unwrap();
909 let buffer = project
910 .update(cx, |project, cx| project.open_buffer(file_path, cx))
911 .await
912 .unwrap();
913
914 cx.update(|cx| {
915 action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
916 buffer.update(cx, |buffer, cx| {
917 buffer
918 .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
919 .unwrap()
920 });
921 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
922 });
923 cx.run_until_parked();
924 assert_eq!(
925 buffer.read_with(cx, |buffer, _| buffer.text()),
926 "abc\ndeF\nGHI\njkl\nmno"
927 );
928 assert_eq!(
929 unreviewed_hunks(&action_log, cx),
930 vec![(
931 buffer.clone(),
932 vec![HunkStatus {
933 range: Point::new(1, 0)..Point::new(3, 0),
934 diff_status: DiffHunkStatusKind::Modified,
935 old_text: "def\nghi\n".into(),
936 }],
937 )]
938 );
939
940 buffer.update(cx, |buffer, cx| {
941 buffer.edit(
942 [
943 (Point::new(0, 2)..Point::new(0, 2), "X"),
944 (Point::new(3, 0)..Point::new(3, 0), "Y"),
945 ],
946 None,
947 cx,
948 )
949 });
950 cx.run_until_parked();
951 assert_eq!(
952 buffer.read_with(cx, |buffer, _| buffer.text()),
953 "abXc\ndeF\nGHI\nYjkl\nmno"
954 );
955 assert_eq!(
956 unreviewed_hunks(&action_log, cx),
957 vec![(
958 buffer.clone(),
959 vec![HunkStatus {
960 range: Point::new(1, 0)..Point::new(3, 0),
961 diff_status: DiffHunkStatusKind::Modified,
962 old_text: "def\nghi\n".into(),
963 }],
964 )]
965 );
966
967 buffer.update(cx, |buffer, cx| {
968 buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "Z")], None, cx)
969 });
970 cx.run_until_parked();
971 assert_eq!(
972 buffer.read_with(cx, |buffer, _| buffer.text()),
973 "abXc\ndZeF\nGHI\nYjkl\nmno"
974 );
975 assert_eq!(
976 unreviewed_hunks(&action_log, cx),
977 vec![(
978 buffer.clone(),
979 vec![HunkStatus {
980 range: Point::new(1, 0)..Point::new(3, 0),
981 diff_status: DiffHunkStatusKind::Modified,
982 old_text: "def\nghi\n".into(),
983 }],
984 )]
985 );
986
987 action_log.update(cx, |log, cx| {
988 log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
989 });
990 cx.run_until_parked();
991 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
992 }
993
994 #[gpui::test(iterations = 10)]
995 async fn test_creating_files(cx: &mut TestAppContext) {
996 init_test(cx);
997
998 let fs = FakeFs::new(cx.executor());
999 fs.insert_tree(path!("/dir"), json!({})).await;
1000 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1001 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1002 let file_path = project
1003 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1004 .unwrap();
1005
1006 let buffer = project
1007 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1008 .await
1009 .unwrap();
1010 cx.update(|cx| {
1011 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1012 buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
1013 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1014 });
1015 project
1016 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1017 .await
1018 .unwrap();
1019 cx.run_until_parked();
1020 assert_eq!(
1021 unreviewed_hunks(&action_log, cx),
1022 vec![(
1023 buffer.clone(),
1024 vec![HunkStatus {
1025 range: Point::new(0, 0)..Point::new(0, 5),
1026 diff_status: DiffHunkStatusKind::Added,
1027 old_text: "".into(),
1028 }],
1029 )]
1030 );
1031
1032 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "X")], None, cx));
1033 cx.run_until_parked();
1034 assert_eq!(
1035 unreviewed_hunks(&action_log, cx),
1036 vec![(
1037 buffer.clone(),
1038 vec![HunkStatus {
1039 range: Point::new(0, 0)..Point::new(0, 6),
1040 diff_status: DiffHunkStatusKind::Added,
1041 old_text: "".into(),
1042 }],
1043 )]
1044 );
1045
1046 action_log.update(cx, |log, cx| {
1047 log.keep_edits_in_range(buffer.clone(), 0..5, cx)
1048 });
1049 cx.run_until_parked();
1050 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1051 }
1052
1053 #[gpui::test(iterations = 10)]
1054 async fn test_overwriting_files(cx: &mut TestAppContext) {
1055 init_test(cx);
1056
1057 let fs = FakeFs::new(cx.executor());
1058 fs.insert_tree(
1059 path!("/dir"),
1060 json!({
1061 "file1": "Lorem ipsum dolor"
1062 }),
1063 )
1064 .await;
1065 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1066 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1067 let file_path = project
1068 .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
1069 .unwrap();
1070
1071 let buffer = project
1072 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1073 .await
1074 .unwrap();
1075 cx.update(|cx| {
1076 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1077 buffer.update(cx, |buffer, cx| buffer.set_text("sit amet consecteur", cx));
1078 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1079 });
1080 project
1081 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1082 .await
1083 .unwrap();
1084 cx.run_until_parked();
1085 assert_eq!(
1086 unreviewed_hunks(&action_log, cx),
1087 vec![(
1088 buffer.clone(),
1089 vec![HunkStatus {
1090 range: Point::new(0, 0)..Point::new(0, 19),
1091 diff_status: DiffHunkStatusKind::Added,
1092 old_text: "".into(),
1093 }],
1094 )]
1095 );
1096
1097 action_log
1098 .update(cx, |log, cx| {
1099 log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
1100 })
1101 .await
1102 .unwrap();
1103 cx.run_until_parked();
1104 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1105 assert_eq!(
1106 buffer.read_with(cx, |buffer, _cx| buffer.text()),
1107 "Lorem ipsum dolor"
1108 );
1109 }
1110
    #[gpui::test(iterations = 10)]
    async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "file1": "Lorem ipsum dolor"
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();

        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();
        // First pass: the agent reads the buffer and appends to it, which
        // should be tracked as a modification of the original disk content.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        // One "modified" hunk covering the whole (now 37-column) line, with
        // the original text as the diff base.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 37),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "Lorem ipsum dolor".into(),
                }],
            )]
        );

        // Second pass: the agent overwrites the file from scratch. Calling
        // `buffer_created` on an already-tracked buffer means the hunk below
        // is reported as "added" (empty diff base) rather than "modified".
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        project
            .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 9),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Rejecting the rewrite restores the content that existed on disk
        // before the agent's first edit, not the intermediate edited state.
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
        assert_eq!(
            buffer.read_with(cx, |buffer, _cx| buffer.text()),
            "Lorem ipsum dolor"
        );
    }
1190
    #[gpui::test(iterations = 10)]
    async fn test_deleting_files(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({"file1": "lorem\n", "file2": "ipsum\n"}),
        )
        .await;

        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let file1_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
            .unwrap();
        let file2_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
            .unwrap();

        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let buffer1 = project
            .update(cx, |project, cx| {
                project.open_buffer(file1_path.clone(), cx)
            })
            .await
            .unwrap();
        let buffer2 = project
            .update(cx, |project, cx| {
                project.open_buffer(file2_path.clone(), cx)
            })
            .await
            .unwrap();

        // The agent announces and then performs deletion of both files.
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer1.clone(), cx));
        action_log.update(cx, |log, cx| log.will_delete_buffer(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| {
                project.delete_file(file1_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        project
            .update(cx, |project, cx| {
                project.delete_file(file2_path.clone(), false, cx)
            })
            .unwrap()
            .await
            .unwrap();
        cx.run_until_parked();
        // Each deletion is reported as a single "deleted" hunk carrying the
        // file's former content as the diff base.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![
                (
                    buffer1.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "lorem\n".into(),
                    }]
                ),
                (
                    buffer2.clone(),
                    vec![HunkStatus {
                        range: Point::new(0, 0)..Point::new(0, 0),
                        diff_status: DiffHunkStatusKind::Deleted,
                        old_text: "ipsum\n".into(),
                    }],
                )
            ]
        );

        // Simulate file1 being recreated externally.
        fs.insert_file(path!("/dir/file1"), "LOREM".as_bytes().to_vec())
            .await;

        // Simulate file2 being recreated by a tool.
        let buffer2 = project
            .update(cx, |project, cx| project.open_buffer(file2_path, cx))
            .await
            .unwrap();
        action_log.update(cx, |log, cx| log.buffer_created(buffer2.clone(), cx));
        buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
        action_log.update(cx, |log, cx| log.buffer_edited(buffer2.clone(), cx));
        project
            .update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
            .await
            .unwrap();

        cx.run_until_parked();
        // After both recreations, only the tool-recreated file2 still has an
        // unreviewed hunk; file1's externally-recreated state left nothing
        // for the model to review.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer2.clone(),
                vec![HunkStatus {
                    range: Point::new(0, 0)..Point::new(0, 5),
                    diff_status: DiffHunkStatusKind::Added,
                    old_text: "".into(),
                }],
            )]
        );

        // Simulate file2 being deleted externally.
        fs.remove_file(path!("/dir/file2").as_ref(), RemoveOptions::default())
            .await
            .unwrap();
        cx.run_until_parked();
        // The external deletion clears file2's remaining unreviewed hunk.
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1300
    #[gpui::test(iterations = 10)]
    async fn test_reject_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // The agent makes two separate edits: one that splits line 1 into two
        // lines ("def" -> "dE\nXYZf") and one at the end ("mno" -> "mnO").
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        // Both edits are tracked as separate unreviewed "modified" hunks.
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // If the rejected range doesn't overlap with any hunk, we ignore it.
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        // Buffer text and hunks are unchanged by the no-op rejection.
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // A range that touches the first hunk reverts just that hunk; the
        // second edit remains (its hunk shifts up to line 4 because the
        // reverted edit removed one line).
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(0, 0)..Point::new(1, 0)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![HunkStatus {
                    range: Point::new(4, 0)..Point::new(4, 3),
                    diff_status: DiffHunkStatusKind::Modified,
                    old_text: "mno".into(),
                }],
            )]
        );

        // The same empty range at (4, 0) now touches the remaining hunk, so
        // rejecting it restores the original file content entirely.
        action_log
            .update(cx, |log, cx| {
                log.reject_edits_in_ranges(
                    buffer.clone(),
                    vec![Point::new(4, 0)..Point::new(4, 0)],
                    cx,
                )
            })
            .await
            .unwrap();
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1435
    #[gpui::test(iterations = 10)]
    async fn test_reject_multiple_edits(cx: &mut TestAppContext) {
        init_test(cx);

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
            .await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        // Same two agent edits as in `test_reject_edits`: split line 1 and
        // modify the last line.
        cx.update(|cx| {
            action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(1, 1)..Point::new(1, 2), "E\nXYZ")], None, cx)
                    .unwrap()
            });
            buffer.update(cx, |buffer, cx| {
                buffer
                    .edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
                    .unwrap()
            });
            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
        });
        cx.run_until_parked();
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndE\nXYZf\nghi\njkl\nmnO"
        );
        assert_eq!(
            unreviewed_hunks(&action_log, cx),
            vec![(
                buffer.clone(),
                vec![
                    HunkStatus {
                        range: Point::new(1, 0)..Point::new(3, 0),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "def\n".into(),
                    },
                    HunkStatus {
                        range: Point::new(5, 0)..Point::new(5, 3),
                        diff_status: DiffHunkStatusKind::Modified,
                        old_text: "mno".into(),
                    }
                ],
            )]
        );

        // Reject both hunks in a single call using anchor ranges.
        action_log.update(cx, |log, cx| {
            let range_1 = buffer.read(cx).anchor_before(Point::new(0, 0))
                ..buffer.read(cx).anchor_before(Point::new(1, 0));
            let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
                ..buffer.read(cx).anchor_before(Point::new(5, 3));

            log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], cx)
                .detach();
            // The buffer revert is applied synchronously: the text is already
            // restored inside this same update, before the detached task runs.
            assert_eq!(
                buffer.read_with(cx, |buffer, _| buffer.text()),
                "abc\ndef\nghi\njkl\nmno"
            );
        });
        cx.run_until_parked();
        // After everything settles, the text is still restored and no
        // unreviewed hunks remain.
        assert_eq!(
            buffer.read_with(cx, |buffer, _| buffer.text()),
            "abc\ndef\nghi\njkl\nmno"
        );
        assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
    }
1511
1512 #[gpui::test(iterations = 10)]
1513 async fn test_reject_deleted_file(cx: &mut TestAppContext) {
1514 init_test(cx);
1515
1516 let fs = FakeFs::new(cx.executor());
1517 fs.insert_tree(path!("/dir"), json!({"file": "content"}))
1518 .await;
1519 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1520 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1521 let file_path = project
1522 .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
1523 .unwrap();
1524 let buffer = project
1525 .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx))
1526 .await
1527 .unwrap();
1528
1529 cx.update(|cx| {
1530 action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
1531 });
1532 project
1533 .update(cx, |project, cx| {
1534 project.delete_file(file_path.clone(), false, cx)
1535 })
1536 .unwrap()
1537 .await
1538 .unwrap();
1539 cx.run_until_parked();
1540 assert!(!fs.is_file(path!("/dir/file").as_ref()).await);
1541 assert_eq!(
1542 unreviewed_hunks(&action_log, cx),
1543 vec![(
1544 buffer.clone(),
1545 vec![HunkStatus {
1546 range: Point::new(0, 0)..Point::new(0, 0),
1547 diff_status: DiffHunkStatusKind::Deleted,
1548 old_text: "content".into(),
1549 }]
1550 )]
1551 );
1552
1553 action_log
1554 .update(cx, |log, cx| {
1555 log.reject_edits_in_ranges(
1556 buffer.clone(),
1557 vec![Point::new(0, 0)..Point::new(0, 0)],
1558 cx,
1559 )
1560 })
1561 .await
1562 .unwrap();
1563 cx.run_until_parked();
1564 assert_eq!(buffer.read_with(cx, |buffer, _| buffer.text()), "content");
1565 assert!(fs.is_file(path!("/dir/file").as_ref()).await);
1566 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1567 }
1568
1569 #[gpui::test(iterations = 10)]
1570 async fn test_reject_created_file(cx: &mut TestAppContext) {
1571 init_test(cx);
1572
1573 let fs = FakeFs::new(cx.executor());
1574 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1575 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1576 let file_path = project
1577 .read_with(cx, |project, cx| {
1578 project.find_project_path("dir/new_file", cx)
1579 })
1580 .unwrap();
1581
1582 let buffer = project
1583 .update(cx, |project, cx| project.open_buffer(file_path, cx))
1584 .await
1585 .unwrap();
1586 cx.update(|cx| {
1587 action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
1588 buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
1589 action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
1590 });
1591 project
1592 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
1593 .await
1594 .unwrap();
1595 assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
1596 cx.run_until_parked();
1597 assert_eq!(
1598 unreviewed_hunks(&action_log, cx),
1599 vec![(
1600 buffer.clone(),
1601 vec![HunkStatus {
1602 range: Point::new(0, 0)..Point::new(0, 7),
1603 diff_status: DiffHunkStatusKind::Added,
1604 old_text: "".into(),
1605 }],
1606 )]
1607 );
1608
1609 action_log
1610 .update(cx, |log, cx| {
1611 log.reject_edits_in_ranges(
1612 buffer.clone(),
1613 vec![Point::new(0, 0)..Point::new(0, 11)],
1614 cx,
1615 )
1616 })
1617 .await
1618 .unwrap();
1619 cx.run_until_parked();
1620 assert!(!fs.is_file(path!("/dir/new_file").as_ref()).await);
1621 assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
1622 }
1623
    #[gpui::test(iterations = 100)]
    async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
        init_test(cx);

        // Number of random operations per test iteration; override with the
        // `OPERATIONS` environment variable.
        let operations = env::var("OPERATIONS")
            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
            .unwrap_or(20);

        let text = RandomCharIter::new(&mut rng).take(50).collect::<String>();
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({"file": text})).await;
        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let action_log = cx.new(|_| ActionLog::new(project.clone()));
        let file_path = project
            .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
            .unwrap();
        let buffer = project
            .update(cx, |project, cx| project.open_buffer(file_path, cx))
            .await
            .unwrap();

        action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));

        for _ in 0..operations {
            match rng.gen_range(0..100) {
                // ~25%: keep (accept) edits in a random byte range.
                0..25 => {
                    action_log.update(cx, |log, cx| {
                        let range = buffer.read(cx).random_byte_range(0, &mut rng);
                        log::info!("keeping edits in range {:?}", range);
                        log.keep_edits_in_range(buffer.clone(), range, cx)
                    });
                }
                // ~25%: reject edits in a random byte range.
                25..50 => {
                    action_log
                        .update(cx, |log, cx| {
                            let range = buffer.read(cx).random_byte_range(0, &mut rng);
                            log::info!("rejecting edits in range {:?}", range);
                            log.reject_edits_in_ranges(buffer.clone(), vec![range], cx)
                        })
                        .await
                        .unwrap();
                }
                // ~50%: a random edit, attributed to the agent half the time
                // (only agent edits are reported to the action log).
                _ => {
                    let is_agent_change = rng.gen_bool(0.5);
                    if is_agent_change {
                        log::info!("agent edit");
                    } else {
                        log::info!("user edit");
                    }
                    cx.update(|cx| {
                        buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
                        if is_agent_change {
                            action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
                        }
                    });
                }
            }

            // Occasionally settle and verify mid-run, not just at the end.
            if rng.gen_bool(0.2) {
                quiesce(&action_log, &buffer, cx);
            }
        }

        quiesce(&action_log, &buffer, cx);

        // Settles all pending work, then checks the invariant that replaying
        // the tracked unreviewed edits on top of the diff base reproduces the
        // buffer's current text exactly.
        fn quiesce(
            action_log: &Entity<ActionLog>,
            buffer: &Entity<Buffer>,
            cx: &mut TestAppContext,
        ) {
            log::info!("quiescing...");
            cx.run_until_parked();
            action_log.update(cx, |log, cx| {
                let tracked_buffer = log.tracked_buffers.get(&buffer).unwrap();
                let mut old_text = tracked_buffer.diff_base.clone();
                let new_text = buffer.read(cx).as_rope();
                // Apply each unreviewed edit to the diff base, row by row,
                // clamping the old range to the diff base's end.
                for edit in tracked_buffer.unreviewed_changes.edits() {
                    let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
                    let old_end = old_text.point_to_offset(cmp::min(
                        Point::new(edit.new.start + edit.old_len(), 0),
                        old_text.max_point(),
                    ));
                    old_text.replace(
                        old_start..old_end,
                        &new_text.slice_rows(edit.new.clone()).to_string(),
                    );
                }
                pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());
            })
        }
    }
1715
    /// A test-friendly snapshot of a single diff hunk, compared against the
    /// output of `unreviewed_hunks`.
    #[derive(Debug, Clone, PartialEq, Eq)]
    struct HunkStatus {
        /// The hunk's range in the current buffer, in points (row, column).
        range: Range<Point>,
        /// Whether the hunk represents added, modified, or deleted text.
        diff_status: DiffHunkStatusKind,
        /// The diff-base text this hunk replaced (empty for added hunks).
        old_text: String,
    }
1722
1723 fn unreviewed_hunks(
1724 action_log: &Entity<ActionLog>,
1725 cx: &TestAppContext,
1726 ) -> Vec<(Entity<Buffer>, Vec<HunkStatus>)> {
1727 cx.read(|cx| {
1728 action_log
1729 .read(cx)
1730 .changed_buffers(cx)
1731 .into_iter()
1732 .map(|(buffer, diff)| {
1733 let snapshot = buffer.read(cx).snapshot();
1734 (
1735 buffer,
1736 diff.read(cx)
1737 .hunks(&snapshot, cx)
1738 .map(|hunk| HunkStatus {
1739 diff_status: hunk.status().kind,
1740 range: hunk.range,
1741 old_text: diff
1742 .read(cx)
1743 .base_text()
1744 .text_for_range(hunk.diff_base_byte_range)
1745 .collect(),
1746 })
1747 .collect(),
1748 )
1749 })
1750 .collect()
1751 })
1752 }
1753}