project_diff.rs
use std::{
    any::{Any, TypeId},
    cmp::Ordering,
    collections::HashSet,
    ops::Range,
    time::Duration,
};

use anyhow::{anyhow, Context as _};
use collections::{BTreeMap, HashMap};
use feature_flags::FeatureFlagAppExt;
use git::{
    diff::{BufferDiff, DiffHunk},
    repository::GitFileStatus,
};
use gpui::{
    actions, AnyElement, AnyView, AppContext, EventEmitter, FocusHandle, FocusableView,
    InteractiveElement, Model, Render, Subscription, Task, View, WeakView,
};
use language::{Buffer, BufferRow};
use multi_buffer::{ExcerptId, ExcerptRange, ExpandExcerptDirection, MultiBuffer};
use project::{Project, ProjectEntryId, ProjectPath, WorktreeId};
use text::{OffsetRangeExt, ToPoint};
use theme::ActiveTheme;
use ui::prelude::*;
use util::{paths::compare_paths, ResultExt};
use workspace::{
    item::{BreadcrumbText, Item, ItemEvent, ItemHandle, TabContentParams},
    ItemNavHistory, ToolbarItemLocation, Workspace,
};

use crate::{Editor, EditorEvent, DEFAULT_MULTIBUFFER_CONTEXT};

actions!(project_diff, [Deploy]);

pub fn init(cx: &mut AppContext) {
    cx.observe_new_views(ProjectDiffEditor::register).detach();
}

const UPDATE_DEBOUNCE: Duration = Duration::from_millis(50);

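// The project-wide diff view: a single editor over a multi-buffer that holds one excerpt per
// changed region, grouped by worktree. `buffer_changes` and `entry_order` cache the result of the
// previous scan so a rescan can be reconciled against the existing excerpts instead of rebuilding
// the multi-buffer from scratch.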
struct ProjectDiffEditor {
    buffer_changes: BTreeMap<WorktreeId, HashMap<ProjectEntryId, Changes>>,
    entry_order: HashMap<WorktreeId, Vec<(ProjectPath, ProjectEntryId)>>,
    excerpts: Model<MultiBuffer>,
    editor: View<Editor>,

    project: Model<Project>,
    workspace: WeakView<Workspace>,
    focus_handle: FocusHandle,
    worktree_rescans: HashMap<WorktreeId, Task<()>>,
    _subscriptions: Vec<Subscription>,
}

#[derive(Debug)]
struct Changes {
    _status: GitFileStatus,
    buffer: Model<Buffer>,
    hunks: Vec<DiffHunk>,
}

impl ProjectDiffEditor {
    fn register(workspace: &mut Workspace, _: &mut ViewContext<Workspace>) {
        workspace.register_action(Self::deploy);
    }

    fn deploy(workspace: &mut Workspace, _: &Deploy, cx: &mut ViewContext<Workspace>) {
        if !cx.is_staff() {
            return;
        }

        if let Some(existing) = workspace.item_of_type::<Self>(cx) {
            workspace.activate_item(&existing, true, true, cx);
        } else {
            let workspace_handle = cx.view().downgrade();
            let project_diff =
                cx.new_view(|cx| Self::new(workspace.project().clone(), workspace_handle, cx));
            workspace.add_item_to_active_pane(Box::new(project_diff), None, true, cx);
        }
    }

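    // Builds the diff editor: subscribes to project events that can invalidate git state, creates
    // the backing multi-buffer and editor, and schedules an initial scan of every worktree.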
    fn new(
        project: Model<Project>,
        workspace: WeakView<Workspace>,
        cx: &mut ViewContext<Self>,
    ) -> Self {
        // TODO diff change subscriptions. For that, needed:
        // * `-20/+50` stats retrieval: some background process that reacts on file changes
        let focus_handle = cx.focus_handle();
        let changed_entries_subscription =
            cx.subscribe(&project, |project_diff_editor, _, e, cx| {
                let mut worktree_to_rescan = None;
                match e {
                    project::Event::WorktreeAdded(id) => {
                        worktree_to_rescan = Some(*id);
                        // project_diff_editor
                        //     .buffer_changes
                        //     .insert(*id, HashMap::default());
                    }
                    project::Event::WorktreeRemoved(id) => {
                        project_diff_editor.buffer_changes.remove(id);
                    }
                    project::Event::WorktreeUpdatedEntries(id, _updated_entries) => {
                        // TODO cannot invalidate buffer entries without invalidating the corresponding excerpts and order entries.
                        worktree_to_rescan = Some(*id);
                        // let entry_changes =
                        //     project_diff_editor.buffer_changes.entry(*id).or_default();
                        // for (_, entry_id, change) in updated_entries.iter() {
                        //     let changes = entry_changes.entry(*entry_id);
                        //     match change {
                        //         project::PathChange::Removed => {
                        //             if let hash_map::Entry::Occupied(entry) = changes {
                        //                 entry.remove();
                        //             }
                        //         }
                        //         // TODO understand the invalidation case better: now, we do that but still rescan the entire worktree
                        //         // What if we already have the buffer loaded inside the diff multi buffer and it was edited there? We should not do anything.
                        //         _ => match changes {
                        //             hash_map::Entry::Occupied(mut o) => o.get_mut().invalidate(),
                        //             hash_map::Entry::Vacant(v) => {
                        //                 v.insert(None);
                        //             }
                        //         },
                        //     }
                        // }
                    }
                    project::Event::WorktreeUpdatedGitRepositories(id) => {
                        worktree_to_rescan = Some(*id);
                        // project_diff_editor.buffer_changes.clear();
                    }
                    project::Event::DeletedEntry(id, _entry_id) => {
                        worktree_to_rescan = Some(*id);
                        // if let Some(entries) = project_diff_editor.buffer_changes.get_mut(id) {
                        //     entries.remove(entry_id);
                        // }
                    }
                    project::Event::Closed => {
                        project_diff_editor.buffer_changes.clear();
                    }
                    _ => {}
                }

                if let Some(worktree_to_rescan) = worktree_to_rescan {
                    project_diff_editor.schedule_worktree_rescan(worktree_to_rescan, cx);
                }
            });

        let excerpts = cx.new_model(|cx| MultiBuffer::new(project.read(cx).capability()));

        let editor = cx.new_view(|cx| {
            let mut diff_display_editor =
                Editor::for_multibuffer(excerpts.clone(), Some(project.clone()), true, cx);
            diff_display_editor.set_expand_all_diff_hunks();
            diff_display_editor
        });

        let mut new_self = Self {
            project,
            workspace,
            buffer_changes: BTreeMap::default(),
            entry_order: HashMap::default(),
            worktree_rescans: HashMap::default(),
            focus_handle,
            editor,
            excerpts,
            _subscriptions: vec![changed_entries_subscription],
        };
        new_self.schedule_rescan_all(cx);
        new_self
    }

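    // Reschedules a scan for every worktree currently in the project and drops cached state for
    // worktrees that no longer exist.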
    fn schedule_rescan_all(&mut self, cx: &mut ViewContext<Self>) {
        let mut current_worktrees = HashSet::<WorktreeId>::default();
        for worktree in self.project.read(cx).worktrees(cx).collect::<Vec<_>>() {
            let worktree_id = worktree.read(cx).id();
            current_worktrees.insert(worktree_id);
            self.schedule_worktree_rescan(worktree_id, cx);
        }

        self.worktree_rescans
            .retain(|worktree_id, _| current_worktrees.contains(worktree_id));
        self.buffer_changes
            .retain(|worktree_id, _| current_worktrees.contains(worktree_id));
        self.entry_order
            .retain(|worktree_id, _| current_worktrees.contains(worktree_id));
    }

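    // Debounced, per-worktree rescan: collect the git statuses reported for the worktree's
    // repositories, open the corresponding buffers and their unstaged change sets, compute the
    // diff hunks on the background executor, and finally reconcile the multi-buffer excerpts on
    // the main thread. Inserting into `worktree_rescans` replaces (and thereby drops) any rescan
    // task already in flight for that worktree.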
    fn schedule_worktree_rescan(&mut self, id: WorktreeId, cx: &mut ViewContext<Self>) {
        let project = self.project.clone();
        self.worktree_rescans.insert(
            id,
            cx.spawn(|project_diff_editor, mut cx| async move {
                cx.background_executor().timer(UPDATE_DEBOUNCE).await;
                let open_tasks = project
                    .update(&mut cx, |project, cx| {
                        let worktree = project.worktree_for_id(id, cx)?;
                        let snapshot = worktree.read(cx).snapshot();
                        let applicable_entries = snapshot
                            .repositories()
                            .iter()
                            .flat_map(|entry| {
                                entry.status().map(|git_entry| {
                                    (git_entry.combined_status(), entry.join(git_entry.repo_path))
                                })
                            })
                            .filter_map(|(status, path)| {
                                let id = snapshot.entry_for_path(&path)?.id;
                                Some((
                                    status,
                                    id,
                                    ProjectPath {
                                        worktree_id: snapshot.id(),
                                        path: path.into(),
                                    },
                                ))
                            })
                            .collect::<Vec<_>>();
                        Some(
                            applicable_entries
                                .into_iter()
                                .map(|(status, entry_id, entry_path)| {
                                    let open_task = project.open_path(entry_path.clone(), cx);
                                    (status, entry_id, entry_path, open_task)
                                })
                                .collect::<Vec<_>>(),
                        )
                    })
                    .ok()
                    .flatten()
                    .unwrap_or_default();

                let Some((buffers, mut new_entries, change_sets)) = cx
                    .spawn(|mut cx| async move {
                        let mut new_entries = Vec::new();
                        let mut buffers = HashMap::<
                            ProjectEntryId,
                            (
                                GitFileStatus,
                                text::BufferSnapshot,
                                Model<Buffer>,
                                BufferDiff,
                            ),
                        >::default();
                        let mut change_sets = Vec::new();
                        for (status, entry_id, entry_path, open_task) in open_tasks {
                            let Some(buffer) = open_task
                                .await
                                .and_then(|(_, opened_model)| {
                                    opened_model
                                        .downcast::<Buffer>()
                                        .map_err(|_| anyhow!("Unexpected non-buffer"))
                                })
                                .with_context(|| {
                                    format!("loading {:?} for git diff", entry_path.path)
                                })
                                .log_err()
                            else {
                                continue;
                            };

                            let Some(change_set) = project
                                .update(&mut cx, |project, cx| {
                                    project.open_unstaged_changes(buffer.clone(), cx)
                                })?
                                .await
                                .log_err()
                            else {
                                continue;
                            };

                            cx.update(|cx| {
                                buffers.insert(
                                    entry_id,
                                    (
                                        status,
                                        buffer.read(cx).text_snapshot(),
                                        buffer,
                                        change_set.read(cx).diff_to_buffer.clone(),
                                    ),
                                );
                            })?;
                            change_sets.push(change_set);
                            new_entries.push((entry_path, entry_id));
                        }

                        anyhow::Ok((buffers, new_entries, change_sets))
                    })
                    .await
                    .log_err()
                else {
                    return;
                };

                let (new_changes, new_entry_order) = cx
                    .background_executor()
                    .spawn(async move {
                        let mut new_changes = HashMap::<ProjectEntryId, Changes>::default();
                        for (entry_id, (status, buffer_snapshot, buffer, buffer_diff)) in buffers {
                            new_changes.insert(
                                entry_id,
                                Changes {
                                    _status: status,
                                    buffer,
                                    hunks: buffer_diff
                                        .hunks_in_row_range(0..BufferRow::MAX, &buffer_snapshot)
                                        .collect::<Vec<_>>(),
                                },
                            );
                        }

                        new_entries.sort_by(|(project_path_a, _), (project_path_b, _)| {
                            compare_paths(
                                (project_path_a.path.as_ref(), true),
                                (project_path_b.path.as_ref(), true),
                            )
                        });
                        (new_changes, new_entries)
                    })
                    .await;

                project_diff_editor
                    .update(&mut cx, |project_diff_editor, cx| {
                        project_diff_editor.update_excerpts(id, new_changes, new_entry_order, cx);
                        project_diff_editor.editor.update(cx, |editor, cx| {
                            for change_set in change_sets {
                                editor.diff_map.add_change_set(change_set, cx)
                            }
                        });
                    })
                    .ok();
            }),
        );
    }

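    // Reconciles the multi-buffer with a freshly scanned worktree. When previous state exists for
    // the worktree, old and new entries are walked together in path order so existing excerpts can
    // be kept, expanded, or removed and new ones inserted in the right position; otherwise all
    // hunks are pushed as fresh excerpts. The cached `buffer_changes` and `entry_order` are swapped
    // to the new state at the end.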
    fn update_excerpts(
        &mut self,
        worktree_id: WorktreeId,
        new_changes: HashMap<ProjectEntryId, Changes>,
        new_entry_order: Vec<(ProjectPath, ProjectEntryId)>,
        cx: &mut ViewContext<ProjectDiffEditor>,
    ) {
        if let Some(current_order) = self.entry_order.get(&worktree_id) {
            let current_entries = self.buffer_changes.entry(worktree_id).or_default();
            let mut new_order_entries = new_entry_order.iter().fuse().peekable();
            let mut excerpts_to_remove = Vec::new();
            let mut new_excerpt_hunks = BTreeMap::<
                ExcerptId,
                Vec<(ProjectPath, Model<Buffer>, Vec<Range<text::Anchor>>)>,
            >::new();
            let mut excerpt_to_expand =
                HashMap::<(u32, ExpandExcerptDirection), Vec<ExcerptId>>::default();
            let mut latest_excerpt_id = ExcerptId::min();

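            // Walk the previously known entries in path order alongside the newly scanned ones so
            // that each existing excerpt is either reused, expanded, or scheduled for removal, and
            // new hunks are recorded after the last excerpt that precedes them.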
            for (current_path, current_entry_id) in current_order {
                let current_changes = match current_entries.get(current_entry_id) {
                    Some(current_changes) => {
                        if current_changes.hunks.is_empty() {
                            continue;
                        }
                        current_changes
                    }
                    None => continue,
                };
                let buffer_excerpts = self
                    .excerpts
                    .read(cx)
                    .excerpts_for_buffer(&current_changes.buffer, cx);
                let last_current_excerpt_id =
                    buffer_excerpts.last().map(|(excerpt_id, _)| *excerpt_id);
                let mut current_excerpts = buffer_excerpts.into_iter().fuse().peekable();
                loop {
                    match new_order_entries.peek() {
                        Some((new_path, new_entry)) => {
                            match compare_paths(
                                (current_path.path.as_ref(), true),
                                (new_path.path.as_ref(), true),
                            ) {
                                Ordering::Less => {
                                    excerpts_to_remove
                                        .extend(current_excerpts.map(|(excerpt_id, _)| excerpt_id));
                                    break;
                                }
                                Ordering::Greater => {
                                    if let Some(new_changes) = new_changes.get(new_entry) {
                                        if !new_changes.hunks.is_empty() {
                                            let hunks = new_excerpt_hunks
                                                .entry(latest_excerpt_id)
                                                .or_default();
                                            match hunks.binary_search_by(|(probe, ..)| {
                                                compare_paths(
                                                    (new_path.path.as_ref(), true),
                                                    (probe.path.as_ref(), true),
                                                )
                                            }) {
                                                Ok(i) => hunks[i].2.extend(
                                                    new_changes
                                                        .hunks
                                                        .iter()
                                                        .map(|hunk| hunk.buffer_range.clone()),
                                                ),
                                                Err(i) => hunks.insert(
                                                    i,
                                                    (
                                                        new_path.clone(),
                                                        new_changes.buffer.clone(),
                                                        new_changes
                                                            .hunks
                                                            .iter()
                                                            .map(|hunk| hunk.buffer_range.clone())
                                                            .collect(),
                                                    ),
                                                ),
                                            }
                                        }
                                    };
                                    let _ = new_order_entries.next();
                                }
                                Ordering::Equal => {
                                    match new_changes.get(new_entry) {
                                        Some(new_changes) => {
                                            let buffer_snapshot = new_changes.buffer.read(cx).snapshot();
                                            let mut current_hunks =
                                                current_changes.hunks.iter().fuse().peekable();
                                            let mut new_hunks_unchanged =
                                                Vec::with_capacity(new_changes.hunks.len());
                                            let mut new_hunks_with_updates =
                                                Vec::with_capacity(new_changes.hunks.len());
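                                            // Partition the new hunks: ones whose anchors still
                                            // match an existing hunk exactly can keep their
                                            // excerpts; the rest moved, grew, or are brand new.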
                                            'new_changes: for new_hunk in &new_changes.hunks {
                                                loop {
                                                    match current_hunks.peek() {
                                                        Some(current_hunk) => {
                                                            match (
                                                                current_hunk.buffer_range.start.cmp(
                                                                    &new_hunk.buffer_range.start,
                                                                    &buffer_snapshot,
                                                                ),
                                                                current_hunk.buffer_range.end.cmp(
                                                                    &new_hunk.buffer_range.end,
                                                                    &buffer_snapshot,
                                                                ),
                                                            ) {
                                                                (Ordering::Equal, Ordering::Equal) => {
                                                                    new_hunks_unchanged.push(new_hunk);
                                                                    let _ = current_hunks.next();
                                                                    continue 'new_changes;
                                                                }
                                                                (Ordering::Equal, _) | (_, Ordering::Equal) => {
                                                                    new_hunks_with_updates.push(new_hunk);
                                                                    continue 'new_changes;
                                                                }
                                                                (Ordering::Less, Ordering::Greater)
                                                                | (Ordering::Greater, Ordering::Less) => {
                                                                    new_hunks_with_updates.push(new_hunk);
                                                                    continue 'new_changes;
                                                                }
                                                                (Ordering::Less, Ordering::Less) => {
                                                                    if current_hunk
                                                                        .buffer_range
                                                                        .start
                                                                        .cmp(&new_hunk.buffer_range.end, &buffer_snapshot)
                                                                        .is_le()
                                                                    {
                                                                        new_hunks_with_updates.push(new_hunk);
                                                                        continue 'new_changes;
                                                                    } else {
                                                                        let _ = current_hunks.next();
                                                                    }
                                                                }
                                                                (Ordering::Greater, Ordering::Greater) => {
                                                                    if current_hunk
                                                                        .buffer_range
                                                                        .end
                                                                        .cmp(&new_hunk.buffer_range.start, &buffer_snapshot)
                                                                        .is_ge()
                                                                    {
                                                                        new_hunks_with_updates.push(new_hunk);
                                                                        continue 'new_changes;
                                                                    } else {
                                                                        let _ = current_hunks.next();
                                                                    }
                                                                }
                                                            }
                                                        }
                                                        None => {
                                                            new_hunks_with_updates.push(new_hunk);
                                                            continue 'new_changes;
                                                        }
                                                    }
                                                }
                                            }

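                                            // Match each updated hunk against the existing
                                            // excerpts for this buffer: overlapping excerpts are
                                            // kept (and expanded if the hunk outgrew them), while
                                            // hunks with no covering excerpt are queued for
                                            // insertion after `latest_excerpt_id`.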
                                            let mut excerpts_with_new_changes =
                                                HashSet::<ExcerptId>::default();
                                            'new_hunks: for new_hunk in new_hunks_with_updates {
                                                loop {
                                                    match current_excerpts.peek() {
                                                        Some((current_excerpt_id, current_excerpt_range)) => {
                                                            match (
                                                                current_excerpt_range.context.start.cmp(
                                                                    &new_hunk.buffer_range.start,
                                                                    &buffer_snapshot,
                                                                ),
                                                                current_excerpt_range.context.end.cmp(
                                                                    &new_hunk.buffer_range.end,
                                                                    &buffer_snapshot,
                                                                ),
                                                            ) {
                                                                (
                                                                    Ordering::Less | Ordering::Equal,
                                                                    Ordering::Greater | Ordering::Equal,
                                                                ) => {
                                                                    excerpts_with_new_changes.insert(*current_excerpt_id);
                                                                    continue 'new_hunks;
                                                                }
                                                                (
                                                                    Ordering::Greater | Ordering::Equal,
                                                                    Ordering::Less | Ordering::Equal,
                                                                ) => {
                                                                    let expand_up = current_excerpt_range
                                                                        .context
                                                                        .start
                                                                        .to_point(&buffer_snapshot)
                                                                        .row
                                                                        .saturating_sub(
                                                                            new_hunk.buffer_range.start.to_point(&buffer_snapshot).row,
                                                                        );
                                                                    let expand_down = new_hunk
                                                                        .buffer_range
                                                                        .end
                                                                        .to_point(&buffer_snapshot)
                                                                        .row
                                                                        .saturating_sub(
                                                                            current_excerpt_range.context.end.to_point(&buffer_snapshot).row,
                                                                        );
                                                                    excerpt_to_expand
                                                                        .entry((
                                                                            expand_up.max(expand_down).max(DEFAULT_MULTIBUFFER_CONTEXT),
                                                                            ExpandExcerptDirection::UpAndDown,
                                                                        ))
                                                                        .or_default()
                                                                        .push(*current_excerpt_id);
                                                                    excerpts_with_new_changes.insert(*current_excerpt_id);
                                                                    continue 'new_hunks;
                                                                }
                                                                (Ordering::Less, Ordering::Less) => {
                                                                    if current_excerpt_range
                                                                        .context
                                                                        .start
                                                                        .cmp(&new_hunk.buffer_range.end, &buffer_snapshot)
                                                                        .is_le()
                                                                    {
                                                                        let expand_up = current_excerpt_range
                                                                            .context
                                                                            .start
                                                                            .to_point(&buffer_snapshot)
                                                                            .row
                                                                            .saturating_sub(
                                                                                new_hunk.buffer_range.start.to_point(&buffer_snapshot).row,
                                                                            );
                                                                        excerpt_to_expand
                                                                            .entry((
                                                                                expand_up.max(DEFAULT_MULTIBUFFER_CONTEXT),
                                                                                ExpandExcerptDirection::Up,
                                                                            ))
                                                                            .or_default()
                                                                            .push(*current_excerpt_id);
                                                                        excerpts_with_new_changes.insert(*current_excerpt_id);
                                                                        continue 'new_hunks;
                                                                    } else {
                                                                        if !new_changes.hunks.is_empty() {
                                                                            let hunks = new_excerpt_hunks
                                                                                .entry(latest_excerpt_id)
                                                                                .or_default();
                                                                            match hunks.binary_search_by(|(probe, ..)| {
                                                                                compare_paths(
                                                                                    (new_path.path.as_ref(), true),
                                                                                    (probe.path.as_ref(), true),
                                                                                )
                                                                            }) {
                                                                                Ok(i) => hunks[i].2.extend(
                                                                                    new_changes
                                                                                        .hunks
                                                                                        .iter()
                                                                                        .map(|hunk| hunk.buffer_range.clone()),
                                                                                ),
                                                                                Err(i) => hunks.insert(
                                                                                    i,
                                                                                    (
                                                                                        new_path.clone(),
                                                                                        new_changes.buffer.clone(),
                                                                                        new_changes
                                                                                            .hunks
                                                                                            .iter()
                                                                                            .map(|hunk| hunk.buffer_range.clone())
                                                                                            .collect(),
                                                                                    ),
                                                                                ),
                                                                            }
                                                                        }
                                                                        continue 'new_hunks;
                                                                    }
                                                                }
                                                                /* TODO remove or leave?
                                                                             [ ><<<<<<<<new_e
                                                                   ----[---->--]----<--
                                                                   cur_s > cur_e <
                                                                         >        <
                                                                    new_s>>>>>>>><
                                                                */
                                                                (Ordering::Greater, Ordering::Greater) => {
                                                                    if current_excerpt_range
                                                                        .context
                                                                        .end
                                                                        .cmp(&new_hunk.buffer_range.start, &buffer_snapshot)
                                                                        .is_ge()
                                                                    {
                                                                        let expand_down = new_hunk
                                                                            .buffer_range
                                                                            .end
                                                                            .to_point(&buffer_snapshot)
                                                                            .row
                                                                            .saturating_sub(
                                                                                current_excerpt_range.context.end.to_point(&buffer_snapshot).row,
                                                                            );
                                                                        excerpt_to_expand
                                                                            .entry((
                                                                                expand_down.max(DEFAULT_MULTIBUFFER_CONTEXT),
                                                                                ExpandExcerptDirection::Down,
                                                                            ))
                                                                            .or_default()
                                                                            .push(*current_excerpt_id);
                                                                        excerpts_with_new_changes.insert(*current_excerpt_id);
                                                                        continue 'new_hunks;
                                                                    } else {
                                                                        latest_excerpt_id = *current_excerpt_id;
                                                                        let _ = current_excerpts.next();
                                                                    }
                                                                }
                                                            }
                                                        }
                                                        None => {
                                                            let hunks = new_excerpt_hunks
                                                                .entry(latest_excerpt_id)
                                                                .or_default();
                                                            match hunks.binary_search_by(|(probe, ..)| {
                                                                compare_paths(
                                                                    (new_path.path.as_ref(), true),
                                                                    (probe.path.as_ref(), true),
                                                                )
                                                            }) {
                                                                Ok(i) => hunks[i].2.extend(
                                                                    new_changes
                                                                        .hunks
                                                                        .iter()
                                                                        .map(|hunk| hunk.buffer_range.clone()),
                                                                ),
                                                                Err(i) => hunks.insert(
                                                                    i,
                                                                    (
                                                                        new_path.clone(),
                                                                        new_changes.buffer.clone(),
                                                                        new_changes
                                                                            .hunks
                                                                            .iter()
                                                                            .map(|hunk| hunk.buffer_range.clone())
                                                                            .collect(),
                                                                    ),
                                                                ),
                                                            }
                                                            continue 'new_hunks;
                                                        }
                                                    }
                                                }
                                            }

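                                            // Whatever is left in `current_excerpts` neither got
                                            // updates nor covers an unchanged hunk, so it can be
                                            // removed from the multi-buffer.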
                                            for (excerpt_id, excerpt_range) in current_excerpts {
                                                if !excerpts_with_new_changes.contains(&excerpt_id)
                                                    && !new_hunks_unchanged.iter().any(|hunk| {
                                                        excerpt_range
                                                            .context
                                                            .start
                                                            .cmp(&hunk.buffer_range.end, &buffer_snapshot)
                                                            .is_le()
                                                            && excerpt_range
                                                                .context
                                                                .end
                                                                .cmp(&hunk.buffer_range.start, &buffer_snapshot)
                                                                .is_ge()
                                                    })
                                                {
                                                    excerpts_to_remove.push(excerpt_id);
                                                }
                                                latest_excerpt_id = excerpt_id;
                                            }
                                        }
                                        None => excerpts_to_remove.extend(
                                            current_excerpts.map(|(excerpt_id, _)| excerpt_id),
                                        ),
                                    }
                                    let _ = new_order_entries.next();
                                    break;
                                }
                            }
                        }
                        None => {
                            excerpts_to_remove
                                .extend(current_excerpts.map(|(excerpt_id, _)| excerpt_id));
                            break;
                        }
                    }
                }
                latest_excerpt_id = last_current_excerpt_id.unwrap_or(latest_excerpt_id);
            }

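            // Entries that sort after every previously known path have no excerpt to anchor
            // against yet, so their hunks are queued after the last excerpt seen above.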
            for (path, project_entry_id) in new_order_entries {
                if let Some(changes) = new_changes.get(project_entry_id) {
                    if !changes.hunks.is_empty() {
                        let hunks = new_excerpt_hunks.entry(latest_excerpt_id).or_default();
                        match hunks.binary_search_by(|(probe, ..)| {
                            compare_paths((path.path.as_ref(), true), (probe.path.as_ref(), true))
                        }) {
                            Ok(i) => hunks[i]
                                .2
                                .extend(changes.hunks.iter().map(|hunk| hunk.buffer_range.clone())),
                            Err(i) => hunks.insert(
                                i,
                                (
                                    path.clone(),
                                    changes.buffer.clone(),
                                    changes
                                        .hunks
                                        .iter()
                                        .map(|hunk| hunk.buffer_range.clone())
                                        .collect(),
                                ),
                            ),
                        }
                    }
                }
            }

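            // Apply the accumulated edits to the multi-buffer: insert the newly collected hunks
            // after their anchor excerpts (extending each range by the default context), drop the
            // stale excerpts, and expand the ones whose hunks outgrew their context.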
            self.excerpts.update(cx, |multi_buffer, cx| {
                for (mut after_excerpt_id, excerpts_to_add) in new_excerpt_hunks {
                    for (_, buffer, hunk_ranges) in excerpts_to_add {
                        let buffer_snapshot = buffer.read(cx).snapshot();
                        let max_point = buffer_snapshot.max_point();
                        let new_excerpts = multi_buffer.insert_excerpts_after(
                            after_excerpt_id,
                            buffer,
                            hunk_ranges.into_iter().map(|range| {
                                let mut extended_point_range = range.to_point(&buffer_snapshot);
                                extended_point_range.start.row = extended_point_range
                                    .start
                                    .row
                                    .saturating_sub(DEFAULT_MULTIBUFFER_CONTEXT);
                                extended_point_range.end.row = (extended_point_range.end.row
                                    + DEFAULT_MULTIBUFFER_CONTEXT)
                                    .min(max_point.row);
                                ExcerptRange {
                                    context: extended_point_range,
                                    primary: None,
                                }
                            }),
                            cx,
                        );
                        after_excerpt_id = new_excerpts.last().copied().unwrap_or(after_excerpt_id);
                    }
                }
                multi_buffer.remove_excerpts(excerpts_to_remove, cx);
                for ((line_count, direction), excerpts) in excerpt_to_expand {
                    multi_buffer.expand_excerpts(excerpts, line_count, direction, cx);
                }
            });
        } else {
            self.excerpts.update(cx, |multi_buffer, cx| {
                for new_changes in new_entry_order
                    .iter()
                    .filter_map(|(_, entry_id)| new_changes.get(entry_id))
                {
                    multi_buffer.push_excerpts_with_context_lines(
                        new_changes.buffer.clone(),
                        new_changes
                            .hunks
                            .iter()
                            .map(|hunk| hunk.buffer_range.clone())
                            .collect(),
                        DEFAULT_MULTIBUFFER_CONTEXT,
                        cx,
                    );
                }
            });
        };

        let mut new_changes = new_changes;
        let mut new_entry_order = new_entry_order;
        std::mem::swap(
            self.buffer_changes.entry(worktree_id).or_default(),
            &mut new_changes,
        );
        std::mem::swap(
            self.entry_order.entry(worktree_id).or_default(),
            &mut new_entry_order,
        );
    }
}

impl EventEmitter<EditorEvent> for ProjectDiffEditor {}

impl FocusableView for ProjectDiffEditor {
    fn focus_handle(&self, _: &AppContext) -> FocusHandle {
        self.focus_handle.clone()
    }
}

impl Item for ProjectDiffEditor {
    type Event = EditorEvent;

    fn to_item_events(event: &EditorEvent, f: impl FnMut(ItemEvent)) {
        Editor::to_item_events(event, f)
    }

    fn deactivated(&mut self, cx: &mut ViewContext<Self>) {
        self.editor.update(cx, |editor, cx| editor.deactivated(cx));
    }

    fn navigate(&mut self, data: Box<dyn Any>, cx: &mut ViewContext<Self>) -> bool {
        self.editor
            .update(cx, |editor, cx| editor.navigate(data, cx))
    }

    fn tab_tooltip_text(&self, _: &AppContext) -> Option<SharedString> {
        Some("Project Diff".into())
    }

    fn tab_content(&self, params: TabContentParams, _: &WindowContext) -> AnyElement {
        if self.buffer_changes.is_empty() {
            Label::new("No changes")
                .color(if params.selected {
                    Color::Default
                } else {
                    Color::Muted
                })
                .into_any_element()
        } else {
            h_flex()
                .gap_1()
                .when(true, |then| {
                    then.child(
                        h_flex()
                            .gap_1()
                            .child(Icon::new(IconName::XCircle).color(Color::Error))
                            .child(Label::new(self.buffer_changes.len().to_string()).color(
                                if params.selected {
                                    Color::Default
                                } else {
                                    Color::Muted
                                },
                            )),
                    )
                })
                .when(true, |then| {
                    then.child(
                        h_flex()
                            .gap_1()
                            .child(Icon::new(IconName::Indicator).color(Color::Warning))
                            .child(Label::new(self.buffer_changes.len().to_string()).color(
                                if params.selected {
                                    Color::Default
                                } else {
                                    Color::Muted
                                },
                            )),
                    )
                })
                .into_any_element()
        }
    }

    fn telemetry_event_text(&self) -> Option<&'static str> {
        Some("project diff")
    }

    fn for_each_project_item(
        &self,
        cx: &AppContext,
        f: &mut dyn FnMut(gpui::EntityId, &dyn project::ProjectItem),
    ) {
        self.editor.for_each_project_item(cx, f)
    }

    fn is_singleton(&self, _: &AppContext) -> bool {
        false
    }

    fn set_nav_history(&mut self, nav_history: ItemNavHistory, cx: &mut ViewContext<Self>) {
        self.editor.update(cx, |editor, _| {
            editor.set_nav_history(Some(nav_history));
        });
    }

    fn clone_on_split(
        &self,
        _workspace_id: Option<workspace::WorkspaceId>,
        cx: &mut ViewContext<Self>,
    ) -> Option<View<Self>>
    where
        Self: Sized,
    {
        Some(cx.new_view(|cx| {
            ProjectDiffEditor::new(self.project.clone(), self.workspace.clone(), cx)
        }))
    }

    fn is_dirty(&self, cx: &AppContext) -> bool {
        self.excerpts.read(cx).is_dirty(cx)
    }

    fn has_conflict(&self, cx: &AppContext) -> bool {
        self.excerpts.read(cx).has_conflict(cx)
    }

    fn can_save(&self, _: &AppContext) -> bool {
        true
    }

    fn save(
        &mut self,
        format: bool,
        project: Model<Project>,
        cx: &mut ViewContext<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.editor.save(format, project, cx)
    }

    fn save_as(
        &mut self,
        _: Model<Project>,
        _: ProjectPath,
        _: &mut ViewContext<Self>,
    ) -> Task<anyhow::Result<()>> {
        unreachable!()
    }

    fn reload(
        &mut self,
        project: Model<Project>,
        cx: &mut ViewContext<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.editor.reload(project, cx)
    }

    fn act_as_type<'a>(
        &'a self,
        type_id: TypeId,
        self_handle: &'a View<Self>,
        _: &'a AppContext,
    ) -> Option<AnyView> {
        if type_id == TypeId::of::<Self>() {
            Some(self_handle.to_any())
        } else if type_id == TypeId::of::<Editor>() {
            Some(self.editor.to_any())
        } else {
            None
        }
    }

    fn breadcrumb_location(&self, _: &AppContext) -> ToolbarItemLocation {
        ToolbarItemLocation::PrimaryLeft
    }

    fn breadcrumbs(&self, theme: &theme::Theme, cx: &AppContext) -> Option<Vec<BreadcrumbText>> {
        self.editor.breadcrumbs(theme, cx)
    }

    fn added_to_workspace(&mut self, workspace: &mut Workspace, cx: &mut ViewContext<Self>) {
        self.editor
            .update(cx, |editor, cx| editor.added_to_workspace(workspace, cx));
    }
}

impl Render for ProjectDiffEditor {
    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        let child = if self.buffer_changes.is_empty() {
            div()
                .bg(cx.theme().colors().editor_background)
                .flex()
                .items_center()
                .justify_center()
                .size_full()
                .child(Label::new("No changes in the workspace"))
        } else {
            div().size_full().child(self.editor.clone())
        };

        div()
            .track_focus(&self.focus_handle)
            .size_full()
            .child(child)
    }
}

#[cfg(test)]
mod tests {
    use gpui::{SemanticVersion, TestAppContext, VisualTestContext};
    use project::buffer_store::BufferChangeSet;
    use serde_json::json;
    use settings::SettingsStore;
    use std::{
        ops::Deref as _,
        path::{Path, PathBuf},
    };

    use super::*;

    // TODO finish
    // #[gpui::test]
    // async fn randomized_tests(cx: &mut TestAppContext) {
    //     // Create a new project (how?? temp fs?),
    //     let fs = FakeFs::new(cx.executor());
    //     let project = Project::test(fs, [], cx).await;

    //     // create random files with random content

    //     // Commit it into git somehow (technically can do with "real" fs in a temp dir)
    //     //
    //     // Apply randomized changes to the project: select a random file, random change and apply to buffers
    // }

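    // End-to-end check of the diff view: deploy it with no git changes (it should be empty), then
    // edit and save a file, register an unstaged change set for it, mark the file as modified in
    // the fake repo, and assert the editor shows the inserted text followed by the old text.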
    #[gpui::test(iterations = 30)]
    async fn simple_edit_test(cx: &mut TestAppContext) {
        cx.executor().allow_parking();
        init_test(cx);

        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.insert_tree(
            "/root",
            json!({
                ".git": {},
                "file_a": "This is file_a",
                "file_b": "This is file_b",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), [Path::new("/root")], cx).await;
        let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
        let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx);

        let file_a_editor = workspace
            .update(cx, |workspace, cx| {
                let file_a_editor =
                    workspace.open_abs_path(PathBuf::from("/root/file_a"), true, cx);
                ProjectDiffEditor::deploy(workspace, &Deploy, cx);
                file_a_editor
            })
            .unwrap()
            .await
            .expect("did not open an item at all")
            .downcast::<Editor>()
            .expect("did not open an editor for file_a");
        let project_diff_editor = workspace
            .update(cx, |workspace, cx| {
                workspace
                    .active_pane()
                    .read(cx)
                    .items()
                    .find_map(|item| item.downcast::<ProjectDiffEditor>())
            })
            .unwrap()
            .expect("did not find a ProjectDiffEditor");
        project_diff_editor.update(cx, |project_diff_editor, cx| {
            assert!(
                project_diff_editor.editor.read(cx).text(cx).is_empty(),
                "Should have no changes after opening the diff on no git changes"
            );
        });

        let old_text = file_a_editor.update(cx, |editor, cx| editor.text(cx));
        let change = "an edit after git add";
        file_a_editor
            .update(cx, |file_a_editor, cx| {
                file_a_editor.insert(change, cx);
                file_a_editor.save(false, project.clone(), cx)
            })
            .await
            .expect("failed to save a file");
        file_a_editor.update(cx, |file_a_editor, cx| {
            let change_set = cx.new_model(|cx| {
                BufferChangeSet::new_with_base_text(
                    old_text.clone(),
                    file_a_editor
                        .buffer()
                        .read(cx)
                        .as_singleton()
                        .unwrap()
                        .read(cx)
                        .text_snapshot(),
                    cx,
                )
            });
            file_a_editor
                .diff_map
                .add_change_set(change_set.clone(), cx);
            project.update(cx, |project, cx| {
                project.buffer_store().update(cx, |buffer_store, cx| {
                    buffer_store.set_change_set(
                        file_a_editor
                            .buffer()
                            .read(cx)
                            .as_singleton()
                            .unwrap()
                            .read(cx)
                            .remote_id(),
                        change_set,
                    );
                });
            });
        });
        fs.set_status_for_repo_via_git_operation(
            Path::new("/root/.git"),
            &[(Path::new("file_a"), GitFileStatus::Modified)],
        );
        cx.executor()
            .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100));
        cx.run_until_parked();

        project_diff_editor.update(cx, |project_diff_editor, cx| {
            assert_eq!(
                // TODO assert it better: extract added text (based on the background changes) and deleted text (based on the deleted blocks added)
                project_diff_editor.editor.read(cx).text(cx),
                format!("{change}{old_text}"),
                "Should have a new change shown in the beginning, and the old text shown as deleted text afterwards"
            );
        });
    }

    fn init_test(cx: &mut gpui::TestAppContext) {
        if std::env::var("RUST_LOG").is_ok() {
            env_logger::try_init().ok();
        }

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            theme::init(theme::LoadThemes::JustBase, cx);
            release_channel::init(SemanticVersion::default(), cx);
            client::init_settings(cx);
            language::init(cx);
            Project::init_settings(cx);
            workspace::init_settings(cx);
            crate::init(cx);
            cx.set_staff(true);
        });
    }
}