project_diff.rs
use std::{
    any::{Any, TypeId},
    cmp::Ordering,
    collections::HashSet,
    ops::Range,
    time::Duration,
};

use anyhow::{anyhow, Context as _};
use collections::{BTreeMap, HashMap};
use feature_flags::FeatureFlagAppExt;
use git::{
    diff::{BufferDiff, DiffHunk},
    repository::GitFileStatus,
};
use gpui::{
    actions, AnyElement, AnyView, AppContext, EventEmitter, FocusHandle, FocusableView,
    InteractiveElement, Model, Render, Subscription, Task, View, WeakView,
};
use language::{Buffer, BufferRow};
use multi_buffer::{ExcerptId, ExcerptRange, ExpandExcerptDirection, MultiBuffer};
use project::{Project, ProjectEntryId, ProjectPath, WorktreeId};
use text::{OffsetRangeExt, ToPoint};
use theme::ActiveTheme;
use ui::{
    div, h_flex, Color, Context, FluentBuilder, Icon, IconName, IntoElement, Label, LabelCommon,
    ParentElement, SharedString, Styled, ViewContext, VisualContext, WindowContext,
};
use util::{paths::compare_paths, ResultExt};
use workspace::{
    item::{BreadcrumbText, Item, ItemEvent, ItemHandle, TabContentParams},
    ItemNavHistory, ToolbarItemLocation, Workspace,
};

use crate::{Editor, EditorEvent, DEFAULT_MULTIBUFFER_CONTEXT};

actions!(project_diff, [Deploy]);

pub fn init(cx: &mut AppContext) {
    cx.observe_new_views(ProjectDiffEditor::register).detach();
}

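/// Debounce between a project event and the corresponding worktree rescan, so that a burst of
/// file system or git updates collapses into a single scan.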
const UPDATE_DEBOUNCE: Duration = Duration::from_millis(50);

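/// A workspace item that displays every uncommitted change in the project as one multi-buffer:
/// an excerpt per changed region, grouped per worktree and ordered by path.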
struct ProjectDiffEditor {
    buffer_changes: BTreeMap<WorktreeId, HashMap<ProjectEntryId, Changes>>,
    entry_order: HashMap<WorktreeId, Vec<(ProjectPath, ProjectEntryId)>>,
    excerpts: Model<MultiBuffer>,
    editor: View<Editor>,

    project: Model<Project>,
    workspace: WeakView<Workspace>,
    focus_handle: FocusHandle,
    worktree_rescans: HashMap<WorktreeId, Task<()>>,
    _subscriptions: Vec<Subscription>,
}

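/// Per-entry diff state: the open buffer plus the hunks of its unstaged changes.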
#[derive(Debug)]
struct Changes {
    _status: GitFileStatus,
    buffer: Model<Buffer>,
    hunks: Vec<DiffHunk>,
}

impl ProjectDiffEditor {
    fn register(workspace: &mut Workspace, _: &mut ViewContext<Workspace>) {
        workspace.register_action(Self::deploy);
    }

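    /// Opens the project diff view, reusing an existing tab when one is already open.
    /// The action is currently limited to staff accounts.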
    fn deploy(workspace: &mut Workspace, _: &Deploy, cx: &mut ViewContext<Workspace>) {
        if !cx.is_staff() {
            return;
        }

        if let Some(existing) = workspace.item_of_type::<Self>(cx) {
            workspace.activate_item(&existing, true, true, cx);
        } else {
            let workspace_handle = cx.view().downgrade();
            let project_diff =
                cx.new_view(|cx| Self::new(workspace.project().clone(), workspace_handle, cx));
            workspace.add_item_to_active_pane(Box::new(project_diff), None, true, cx);
        }
    }

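    /// Creates the diff editor over a fresh multi-buffer and subscribes to project events, so
    /// that worktree additions and removals, entry updates, and git repository updates schedule
    /// a rescan of the affected worktree.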
    fn new(
        project: Model<Project>,
        workspace: WeakView<Workspace>,
        cx: &mut ViewContext<Self>,
    ) -> Self {
        // TODO diff change subscriptions. For that, needed:
        // * `-20/+50` stats retrieval: some background process that reacts on file changes
        let focus_handle = cx.focus_handle();
        let changed_entries_subscription =
            cx.subscribe(&project, |project_diff_editor, _, e, cx| {
                let mut worktree_to_rescan = None;
                match e {
                    project::Event::WorktreeAdded(id) => {
                        worktree_to_rescan = Some(*id);
                        // project_diff_editor
                        //     .buffer_changes
                        //     .insert(*id, HashMap::default());
                    }
                    project::Event::WorktreeRemoved(id) => {
                        project_diff_editor.buffer_changes.remove(id);
                    }
                    project::Event::WorktreeUpdatedEntries(id, _updated_entries) => {
                        // TODO cannot invalidate buffer entries without invalidating the corresponding excerpts and order entries.
                        worktree_to_rescan = Some(*id);
                        // let entry_changes =
                        //     project_diff_editor.buffer_changes.entry(*id).or_default();
                        // for (_, entry_id, change) in updated_entries.iter() {
                        //     let changes = entry_changes.entry(*entry_id);
                        //     match change {
                        //         project::PathChange::Removed => {
                        //             if let hash_map::Entry::Occupied(entry) = changes {
                        //                 entry.remove();
                        //             }
                        //         }
                        //         // TODO understand the invalidation case better: now, we do that but still rescan the entire worktree
                        //         // What if we already have the buffer loaded inside the diff multi buffer and it was edited there? We should not do anything.
                        //         _ => match changes {
                        //             hash_map::Entry::Occupied(mut o) => o.get_mut().invalidate(),
                        //             hash_map::Entry::Vacant(v) => {
                        //                 v.insert(None);
                        //             }
                        //         },
                        //     }
                        // }
                    }
                    project::Event::WorktreeUpdatedGitRepositories(id) => {
                        worktree_to_rescan = Some(*id);
                        // project_diff_editor.buffer_changes.clear();
                    }
                    project::Event::DeletedEntry(id, _entry_id) => {
                        worktree_to_rescan = Some(*id);
                        // if let Some(entries) = project_diff_editor.buffer_changes.get_mut(id) {
                        //     entries.remove(entry_id);
                        // }
                    }
                    project::Event::Closed => {
                        project_diff_editor.buffer_changes.clear();
                    }
                    _ => {}
                }

                if let Some(worktree_to_rescan) = worktree_to_rescan {
                    project_diff_editor.schedule_worktree_rescan(worktree_to_rescan, cx);
                }
            });

        let excerpts = cx.new_model(|cx| MultiBuffer::new(project.read(cx).capability()));

        let editor = cx.new_view(|cx| {
            let mut diff_display_editor =
                Editor::for_multibuffer(excerpts.clone(), Some(project.clone()), true, cx);
            diff_display_editor.set_expand_all_diff_hunks();
            diff_display_editor
        });

        let mut new_self = Self {
            project,
            workspace,
            buffer_changes: BTreeMap::default(),
            entry_order: HashMap::default(),
            worktree_rescans: HashMap::default(),
            focus_handle,
            editor,
            excerpts,
            _subscriptions: vec![changed_entries_subscription],
        };
        new_self.schedule_rescan_all(cx);
        new_self
    }

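    /// Schedules a rescan of every current worktree and drops cached state for worktrees that
    /// no longer exist.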
    fn schedule_rescan_all(&mut self, cx: &mut ViewContext<Self>) {
        let mut current_worktrees = HashSet::<WorktreeId>::default();
        for worktree in self.project.read(cx).worktrees(cx).collect::<Vec<_>>() {
            let worktree_id = worktree.read(cx).id();
            current_worktrees.insert(worktree_id);
            self.schedule_worktree_rescan(worktree_id, cx);
        }

        self.worktree_rescans
            .retain(|worktree_id, _| current_worktrees.contains(worktree_id));
        self.buffer_changes
            .retain(|worktree_id, _| current_worktrees.contains(worktree_id));
        self.entry_order
            .retain(|worktree_id, _| current_worktrees.contains(worktree_id));
    }

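    /// Debounced background rescan of a single worktree: opens every file with a git status,
    /// loads its unstaged change set, collects the diff hunks off the main thread, and hands
    /// the result to `update_excerpts`. Overwriting the entry in `worktree_rescans` drops any
    /// rescan already in flight for the same worktree.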
    fn schedule_worktree_rescan(&mut self, id: WorktreeId, cx: &mut ViewContext<Self>) {
        let project = self.project.clone();
        self.worktree_rescans.insert(
            id,
            cx.spawn(|project_diff_editor, mut cx| async move {
                cx.background_executor().timer(UPDATE_DEBOUNCE).await;
                let open_tasks = project
                    .update(&mut cx, |project, cx| {
                        let worktree = project.worktree_for_id(id, cx)?;
                        let applicable_entries = worktree
                            .read(cx)
                            .entries(false, 0)
                            .filter(|entry| !entry.is_external)
                            .filter(|entry| entry.is_file())
                            .filter_map(|entry| Some((entry.git_status?, entry)))
                            .filter_map(|(git_status, entry)| {
                                Some((git_status, entry.id, project.path_for_entry(entry.id, cx)?))
                            })
                            .collect::<Vec<_>>();
                        Some(
                            applicable_entries
                                .into_iter()
                                .map(|(status, entry_id, entry_path)| {
                                    let open_task = project.open_path(entry_path.clone(), cx);
                                    (status, entry_id, entry_path, open_task)
                                })
                                .collect::<Vec<_>>(),
                        )
                    })
                    .ok()
                    .flatten()
                    .unwrap_or_default();

                let Some((buffers, mut new_entries, change_sets)) = cx
                    .spawn(|mut cx| async move {
                        let mut new_entries = Vec::new();
                        let mut buffers = HashMap::<
                            ProjectEntryId,
                            (
                                GitFileStatus,
                                text::BufferSnapshot,
                                Model<Buffer>,
                                BufferDiff,
                            ),
                        >::default();
                        let mut change_sets = Vec::new();
                        for (status, entry_id, entry_path, open_task) in open_tasks {
                            let Some(buffer) = open_task
                                .await
                                .and_then(|(_, opened_model)| {
                                    opened_model
                                        .downcast::<Buffer>()
                                        .map_err(|_| anyhow!("Unexpected non-buffer"))
                                })
                                .with_context(|| {
                                    format!("loading {} for git diff", entry_path.path.display())
                                })
                                .log_err()
                            else {
                                continue;
                            };

                            let Some(change_set) = project
                                .update(&mut cx, |project, cx| {
                                    project.open_unstaged_changes(buffer.clone(), cx)
                                })?
                                .await
                                .log_err()
                            else {
                                continue;
                            };

                            cx.update(|cx| {
                                buffers.insert(
                                    entry_id,
                                    (
                                        status,
                                        buffer.read(cx).text_snapshot(),
                                        buffer,
                                        change_set.read(cx).diff_to_buffer.clone(),
                                    ),
                                );
                            })?;
                            change_sets.push(change_set);
                            new_entries.push((entry_path, entry_id));
                        }

                        anyhow::Ok((buffers, new_entries, change_sets))
                    })
                    .await
                    .log_err()
                else {
                    return;
                };

                let (new_changes, new_entry_order) = cx
                    .background_executor()
                    .spawn(async move {
                        let mut new_changes = HashMap::<ProjectEntryId, Changes>::default();
                        for (entry_id, (status, buffer_snapshot, buffer, buffer_diff)) in buffers {
                            new_changes.insert(
                                entry_id,
                                Changes {
                                    _status: status,
                                    buffer,
                                    hunks: buffer_diff
                                        .hunks_in_row_range(0..BufferRow::MAX, &buffer_snapshot)
                                        .collect::<Vec<_>>(),
                                },
                            );
                        }

                        new_entries.sort_by(|(project_path_a, _), (project_path_b, _)| {
                            compare_paths(
                                (project_path_a.path.as_ref(), true),
                                (project_path_b.path.as_ref(), true),
                            )
                        });
                        (new_changes, new_entries)
                    })
                    .await;

                project_diff_editor
                    .update(&mut cx, |project_diff_editor, cx| {
                        project_diff_editor.update_excerpts(id, new_changes, new_entry_order, cx);
                        for change_set in change_sets {
                            project_diff_editor.editor.update(cx, |editor, cx| {
                                editor.diff_map.add_change_set(change_set, cx)
                            });
                        }
                    })
                    .ok();
            }),
        );
    }

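    /// Reconciles the multi-buffer with freshly scanned changes for one worktree.
    ///
    /// When the worktree was scanned before, the existing excerpts are walked in path order
    /// against the new entries: stale excerpts are queued for removal, excerpts that only
    /// partially cover a hunk are queued for expansion, and hunks without an excerpt are
    /// inserted after the last excerpt that precedes them. On the first scan, excerpts are
    /// simply pushed in path order with the default context.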
    fn update_excerpts(
        &mut self,
        worktree_id: WorktreeId,
        new_changes: HashMap<ProjectEntryId, Changes>,
        new_entry_order: Vec<(ProjectPath, ProjectEntryId)>,
        cx: &mut ViewContext<ProjectDiffEditor>,
    ) {
        if let Some(current_order) = self.entry_order.get(&worktree_id) {
            let current_entries = self.buffer_changes.entry(worktree_id).or_default();
            let mut new_order_entries = new_entry_order.iter().fuse().peekable();
            let mut excerpts_to_remove = Vec::new();
            let mut new_excerpt_hunks = BTreeMap::<
                ExcerptId,
                Vec<(ProjectPath, Model<Buffer>, Vec<Range<text::Anchor>>)>,
            >::new();
            let mut excerpt_to_expand =
                HashMap::<(u32, ExpandExcerptDirection), Vec<ExcerptId>>::default();
            let mut latest_excerpt_id = ExcerptId::min();

            for (current_path, current_entry_id) in current_order {
                let current_changes = match current_entries.get(current_entry_id) {
                    Some(current_changes) => {
                        if current_changes.hunks.is_empty() {
                            continue;
                        }
                        current_changes
                    }
                    None => continue,
                };
                let buffer_excerpts = self
                    .excerpts
                    .read(cx)
                    .excerpts_for_buffer(&current_changes.buffer, cx);
                let last_current_excerpt_id =
                    buffer_excerpts.last().map(|(excerpt_id, _)| *excerpt_id);
                let mut current_excerpts = buffer_excerpts.into_iter().fuse().peekable();
                loop {
                    match new_order_entries.peek() {
                        Some((new_path, new_entry)) => {
                            match compare_paths(
                                (current_path.path.as_ref(), true),
                                (new_path.path.as_ref(), true),
                            ) {
                                Ordering::Less => {
                                    excerpts_to_remove
                                        .extend(current_excerpts.map(|(excerpt_id, _)| excerpt_id));
                                    break;
                                }
                                Ordering::Greater => {
                                    if let Some(new_changes) = new_changes.get(new_entry) {
                                        if !new_changes.hunks.is_empty() {
                                            let hunks = new_excerpt_hunks
                                                .entry(latest_excerpt_id)
                                                .or_default();
                                            match hunks.binary_search_by(|(probe, ..)| {
                                                compare_paths(
                                                    (new_path.path.as_ref(), true),
                                                    (probe.path.as_ref(), true),
                                                )
                                            }) {
                                                Ok(i) => hunks[i].2.extend(
                                                    new_changes
                                                        .hunks
                                                        .iter()
                                                        .map(|hunk| hunk.buffer_range.clone()),
                                                ),
                                                Err(i) => hunks.insert(
                                                    i,
                                                    (
                                                        new_path.clone(),
                                                        new_changes.buffer.clone(),
                                                        new_changes
                                                            .hunks
                                                            .iter()
                                                            .map(|hunk| hunk.buffer_range.clone())
                                                            .collect(),
                                                    ),
                                                ),
                                            }
                                        }
                                    };
                                    let _ = new_order_entries.next();
                                }
                                Ordering::Equal => {
                                    match new_changes.get(new_entry) {
                                        Some(new_changes) => {
                                            let buffer_snapshot =
                                                new_changes.buffer.read(cx).snapshot();
                                            let mut current_hunks =
                                                current_changes.hunks.iter().fuse().peekable();
                                            let mut new_hunks_unchanged =
                                                Vec::with_capacity(new_changes.hunks.len());
                                            let mut new_hunks_with_updates =
                                                Vec::with_capacity(new_changes.hunks.len());
                                            'new_changes: for new_hunk in &new_changes.hunks {
                                                loop {
                                                    match current_hunks.peek() {
                                                        Some(current_hunk) => {
                                                            match (
                                                                current_hunk.buffer_range.start.cmp(
                                                                    &new_hunk.buffer_range.start,
                                                                    &buffer_snapshot,
                                                                ),
                                                                current_hunk.buffer_range.end.cmp(
                                                                    &new_hunk.buffer_range.end,
                                                                    &buffer_snapshot,
                                                                ),
                                                            ) {
                                                                (Ordering::Equal, Ordering::Equal) => {
                                                                    new_hunks_unchanged.push(new_hunk);
                                                                    let _ = current_hunks.next();
                                                                    continue 'new_changes;
                                                                }
                                                                (Ordering::Equal, _)
                                                                | (_, Ordering::Equal) => {
                                                                    new_hunks_with_updates.push(new_hunk);
                                                                    continue 'new_changes;
                                                                }
                                                                (Ordering::Less, Ordering::Greater)
                                                                | (Ordering::Greater, Ordering::Less) => {
                                                                    new_hunks_with_updates.push(new_hunk);
                                                                    continue 'new_changes;
                                                                }
                                                                (Ordering::Less, Ordering::Less) => {
                                                                    if current_hunk
                                                                        .buffer_range
                                                                        .start
                                                                        .cmp(
                                                                            &new_hunk.buffer_range.end,
                                                                            &buffer_snapshot,
                                                                        )
                                                                        .is_le()
                                                                    {
                                                                        new_hunks_with_updates.push(new_hunk);
                                                                        continue 'new_changes;
                                                                    } else {
                                                                        let _ = current_hunks.next();
                                                                    }
                                                                }
                                                                (Ordering::Greater, Ordering::Greater) => {
                                                                    if current_hunk
                                                                        .buffer_range
                                                                        .end
                                                                        .cmp(
                                                                            &new_hunk.buffer_range.start,
                                                                            &buffer_snapshot,
                                                                        )
                                                                        .is_ge()
                                                                    {
                                                                        new_hunks_with_updates.push(new_hunk);
                                                                        continue 'new_changes;
                                                                    } else {
                                                                        let _ = current_hunks.next();
                                                                    }
                                                                }
                                                            }
                                                        }
                                                        None => {
                                                            new_hunks_with_updates.push(new_hunk);
                                                            continue 'new_changes;
                                                        }
                                                    }
                                                }
                                            }

                                            let mut excerpts_with_new_changes =
                                                HashSet::<ExcerptId>::default();
                                            'new_hunks: for new_hunk in new_hunks_with_updates {
                                                loop {
                                                    match current_excerpts.peek() {
                                                        Some((
                                                            current_excerpt_id,
                                                            current_excerpt_range,
                                                        )) => {
                                                            match (
                                                                current_excerpt_range.context.start.cmp(
                                                                    &new_hunk.buffer_range.start,
                                                                    &buffer_snapshot,
                                                                ),
                                                                current_excerpt_range.context.end.cmp(
                                                                    &new_hunk.buffer_range.end,
                                                                    &buffer_snapshot,
                                                                ),
                                                            ) {
                                                                (
                                                                    Ordering::Less | Ordering::Equal,
                                                                    Ordering::Greater | Ordering::Equal,
                                                                ) => {
                                                                    excerpts_with_new_changes
                                                                        .insert(*current_excerpt_id);
                                                                    continue 'new_hunks;
                                                                }
                                                                (
                                                                    Ordering::Greater | Ordering::Equal,
                                                                    Ordering::Less | Ordering::Equal,
                                                                ) => {
                                                                    let expand_up = current_excerpt_range
                                                                        .context
                                                                        .start
                                                                        .to_point(&buffer_snapshot)
                                                                        .row
                                                                        .saturating_sub(
                                                                            new_hunk
                                                                                .buffer_range
                                                                                .start
                                                                                .to_point(&buffer_snapshot)
                                                                                .row,
                                                                        );
                                                                    let expand_down = new_hunk
                                                                        .buffer_range
                                                                        .end
                                                                        .to_point(&buffer_snapshot)
                                                                        .row
                                                                        .saturating_sub(
                                                                            current_excerpt_range
                                                                                .context
                                                                                .end
                                                                                .to_point(&buffer_snapshot)
                                                                                .row,
                                                                        );
                                                                    excerpt_to_expand
                                                                        .entry((
                                                                            expand_up
                                                                                .max(expand_down)
                                                                                .max(DEFAULT_MULTIBUFFER_CONTEXT),
                                                                            ExpandExcerptDirection::UpAndDown,
                                                                        ))
                                                                        .or_default()
                                                                        .push(*current_excerpt_id);
                                                                    excerpts_with_new_changes
                                                                        .insert(*current_excerpt_id);
                                                                    continue 'new_hunks;
                                                                }
                                                                (Ordering::Less, Ordering::Less) => {
                                                                    if current_excerpt_range
                                                                        .context
                                                                        .start
                                                                        .cmp(
                                                                            &new_hunk.buffer_range.end,
                                                                            &buffer_snapshot,
                                                                        )
                                                                        .is_le()
                                                                    {
                                                                        let expand_up = current_excerpt_range
                                                                            .context
                                                                            .start
                                                                            .to_point(&buffer_snapshot)
                                                                            .row
                                                                            .saturating_sub(
                                                                                new_hunk
                                                                                    .buffer_range
                                                                                    .start
                                                                                    .to_point(&buffer_snapshot)
                                                                                    .row,
                                                                            );
                                                                        excerpt_to_expand
                                                                            .entry((
                                                                                expand_up.max(DEFAULT_MULTIBUFFER_CONTEXT),
                                                                                ExpandExcerptDirection::Up,
                                                                            ))
                                                                            .or_default()
                                                                            .push(*current_excerpt_id);
                                                                        excerpts_with_new_changes
                                                                            .insert(*current_excerpt_id);
                                                                        continue 'new_hunks;
                                                                    } else {
                                                                        if !new_changes.hunks.is_empty() {
                                                                            let hunks = new_excerpt_hunks
                                                                                .entry(latest_excerpt_id)
                                                                                .or_default();
                                                                            match hunks.binary_search_by(|(probe, ..)| {
                                                                                compare_paths(
                                                                                    (new_path.path.as_ref(), true),
                                                                                    (probe.path.as_ref(), true),
                                                                                )
                                                                            }) {
                                                                                Ok(i) => hunks[i].2.extend(
                                                                                    new_changes
                                                                                        .hunks
                                                                                        .iter()
                                                                                        .map(|hunk| hunk.buffer_range.clone()),
                                                                                ),
                                                                                Err(i) => hunks.insert(
                                                                                    i,
                                                                                    (
                                                                                        new_path.clone(),
                                                                                        new_changes.buffer.clone(),
                                                                                        new_changes
                                                                                            .hunks
                                                                                            .iter()
                                                                                            .map(|hunk| hunk.buffer_range.clone())
                                                                                            .collect(),
                                                                                    ),
                                                                                ),
                                                                            }
                                                                        }
                                                                        continue 'new_hunks;
                                                                    }
                                                                }
                                                                /* TODO remove or leave?
                                                                      [       ><<<<<<<<new_e
                                                                   ----[---->--]----<--
                                                                     cur_s > cur_e    <
                                                                           >          <
                                                                      new_s>>>>>>>><
                                                                */
                                                                (Ordering::Greater, Ordering::Greater) => {
                                                                    if current_excerpt_range
                                                                        .context
                                                                        .end
                                                                        .cmp(
                                                                            &new_hunk.buffer_range.start,
                                                                            &buffer_snapshot,
                                                                        )
                                                                        .is_ge()
                                                                    {
                                                                        let expand_down = new_hunk
                                                                            .buffer_range
                                                                            .end
                                                                            .to_point(&buffer_snapshot)
                                                                            .row
                                                                            .saturating_sub(
                                                                                current_excerpt_range
                                                                                    .context
                                                                                    .end
                                                                                    .to_point(&buffer_snapshot)
                                                                                    .row,
                                                                            );
                                                                        excerpt_to_expand
                                                                            .entry((
                                                                                expand_down.max(DEFAULT_MULTIBUFFER_CONTEXT),
                                                                                ExpandExcerptDirection::Down,
                                                                            ))
                                                                            .or_default()
                                                                            .push(*current_excerpt_id);
                                                                        excerpts_with_new_changes
                                                                            .insert(*current_excerpt_id);
                                                                        continue 'new_hunks;
                                                                    } else {
                                                                        latest_excerpt_id = *current_excerpt_id;
                                                                        let _ = current_excerpts.next();
                                                                    }
                                                                }
                                                            }
                                                        }
                                                        None => {
                                                            let hunks = new_excerpt_hunks
                                                                .entry(latest_excerpt_id)
                                                                .or_default();
                                                            match hunks.binary_search_by(|(probe, ..)| {
                                                                compare_paths(
                                                                    (new_path.path.as_ref(), true),
                                                                    (probe.path.as_ref(), true),
                                                                )
                                                            }) {
                                                                Ok(i) => hunks[i].2.extend(
                                                                    new_changes
                                                                        .hunks
                                                                        .iter()
                                                                        .map(|hunk| hunk.buffer_range.clone()),
                                                                ),
                                                                Err(i) => hunks.insert(
                                                                    i,
                                                                    (
                                                                        new_path.clone(),
                                                                        new_changes.buffer.clone(),
                                                                        new_changes
                                                                            .hunks
                                                                            .iter()
                                                                            .map(|hunk| hunk.buffer_range.clone())
                                                                            .collect(),
                                                                    ),
                                                                ),
                                                            }
                                                            continue 'new_hunks;
                                                        }
                                                    }
                                                }
                                            }

                                            for (excerpt_id, excerpt_range) in current_excerpts {
                                                if !excerpts_with_new_changes.contains(&excerpt_id)
                                                    && !new_hunks_unchanged.iter().any(|hunk| {
                                                        excerpt_range
                                                            .context
                                                            .start
                                                            .cmp(&hunk.buffer_range.end, &buffer_snapshot)
                                                            .is_le()
                                                            && excerpt_range
                                                                .context
                                                                .end
                                                                .cmp(&hunk.buffer_range.start, &buffer_snapshot)
                                                                .is_ge()
                                                    })
                                                {
                                                    excerpts_to_remove.push(excerpt_id);
                                                }
                                                latest_excerpt_id = excerpt_id;
                                            }
                                        }
                                        None => excerpts_to_remove.extend(
                                            current_excerpts.map(|(excerpt_id, _)| excerpt_id),
                                        ),
                                    }
                                    let _ = new_order_entries.next();
                                    break;
                                }
                            }
                        }
                        None => {
                            excerpts_to_remove
                                .extend(current_excerpts.map(|(excerpt_id, _)| excerpt_id));
                            break;
                        }
                    }
                }
                latest_excerpt_id = last_current_excerpt_id.unwrap_or(latest_excerpt_id);
            }

            for (path, project_entry_id) in new_order_entries {
                if let Some(changes) = new_changes.get(project_entry_id) {
                    if !changes.hunks.is_empty() {
                        let hunks = new_excerpt_hunks.entry(latest_excerpt_id).or_default();
                        match hunks.binary_search_by(|(probe, ..)| {
                            compare_paths((path.path.as_ref(), true), (probe.path.as_ref(), true))
                        }) {
                            Ok(i) => hunks[i]
                                .2
                                .extend(changes.hunks.iter().map(|hunk| hunk.buffer_range.clone())),
                            Err(i) => hunks.insert(
                                i,
                                (
                                    path.clone(),
                                    changes.buffer.clone(),
                                    changes
                                        .hunks
                                        .iter()
                                        .map(|hunk| hunk.buffer_range.clone())
                                        .collect(),
                                ),
                            ),
                        }
                    }
                }
            }

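            // Apply the collected edits to the multi-buffer in a single update: insert new
            // excerpts after their anchor excerpts, remove the stale ones, and expand those
            // that need more context around grown hunks.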
            self.excerpts.update(cx, |multi_buffer, cx| {
                for (mut after_excerpt_id, excerpts_to_add) in new_excerpt_hunks {
                    for (_, buffer, hunk_ranges) in excerpts_to_add {
                        let buffer_snapshot = buffer.read(cx).snapshot();
                        let max_point = buffer_snapshot.max_point();
                        let new_excerpts = multi_buffer.insert_excerpts_after(
                            after_excerpt_id,
                            buffer,
                            hunk_ranges.into_iter().map(|range| {
                                let mut extended_point_range = range.to_point(&buffer_snapshot);
                                extended_point_range.start.row = extended_point_range
                                    .start
                                    .row
                                    .saturating_sub(DEFAULT_MULTIBUFFER_CONTEXT);
                                extended_point_range.end.row = (extended_point_range.end.row
                                    + DEFAULT_MULTIBUFFER_CONTEXT)
                                    .min(max_point.row);
                                ExcerptRange {
                                    context: extended_point_range,
                                    primary: None,
                                }
                            }),
                            cx,
                        );
                        after_excerpt_id =
                            new_excerpts.last().copied().unwrap_or(after_excerpt_id);
                    }
                }
                multi_buffer.remove_excerpts(excerpts_to_remove, cx);
                for ((line_count, direction), excerpts) in excerpt_to_expand {
                    multi_buffer.expand_excerpts(excerpts, line_count, direction, cx);
                }
            });
        } else {
            self.excerpts.update(cx, |multi_buffer, cx| {
                for new_changes in new_entry_order
                    .iter()
                    .filter_map(|(_, entry_id)| new_changes.get(entry_id))
                {
                    multi_buffer.push_excerpts_with_context_lines(
                        new_changes.buffer.clone(),
                        new_changes
                            .hunks
                            .iter()
                            .map(|hunk| hunk.buffer_range.clone())
                            .collect(),
                        DEFAULT_MULTIBUFFER_CONTEXT,
                        cx,
                    );
                }
            });
        };

        let mut new_changes = new_changes;
        let mut new_entry_order = new_entry_order;
        std::mem::swap(
            self.buffer_changes.entry(worktree_id).or_default(),
            &mut new_changes,
        );
        std::mem::swap(
            self.entry_order.entry(worktree_id).or_default(),
            &mut new_entry_order,
        );
    }
}

impl EventEmitter<EditorEvent> for ProjectDiffEditor {}

impl FocusableView for ProjectDiffEditor {
    fn focus_handle(&self, _: &AppContext) -> FocusHandle {
        self.focus_handle.clone()
    }
}

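// The `Item` implementation mostly delegates to the inner `Editor`, so the diff view takes part
// in navigation history, saving, and breadcrumbs like a regular editor tab.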
impl Item for ProjectDiffEditor {
    type Event = EditorEvent;

    fn to_item_events(event: &EditorEvent, f: impl FnMut(ItemEvent)) {
        Editor::to_item_events(event, f)
    }

    fn deactivated(&mut self, cx: &mut ViewContext<Self>) {
        self.editor.update(cx, |editor, cx| editor.deactivated(cx));
    }

    fn navigate(&mut self, data: Box<dyn Any>, cx: &mut ViewContext<Self>) -> bool {
        self.editor
            .update(cx, |editor, cx| editor.navigate(data, cx))
    }

    fn tab_tooltip_text(&self, _: &AppContext) -> Option<SharedString> {
        Some("Project Diff".into())
    }

    fn tab_content(&self, params: TabContentParams, _: &WindowContext) -> AnyElement {
        if self.buffer_changes.is_empty() {
            Label::new("No changes")
                .color(if params.selected {
                    Color::Default
                } else {
                    Color::Muted
                })
                .into_any_element()
        } else {
            h_flex()
                .gap_1()
                .when(true, |then| {
                    then.child(
                        h_flex()
                            .gap_1()
                            .child(Icon::new(IconName::XCircle).color(Color::Error))
                            .child(Label::new(self.buffer_changes.len().to_string()).color(
                                if params.selected {
                                    Color::Default
                                } else {
                                    Color::Muted
                                },
                            )),
                    )
                })
                .when(true, |then| {
                    then.child(
                        h_flex()
                            .gap_1()
                            .child(Icon::new(IconName::Indicator).color(Color::Warning))
                            .child(Label::new(self.buffer_changes.len().to_string()).color(
                                if params.selected {
                                    Color::Default
                                } else {
                                    Color::Muted
                                },
                            )),
                    )
                })
                .into_any_element()
        }
    }

    fn telemetry_event_text(&self) -> Option<&'static str> {
        Some("project diff")
    }

    fn for_each_project_item(
        &self,
        cx: &AppContext,
        f: &mut dyn FnMut(gpui::EntityId, &dyn project::ProjectItem),
    ) {
        self.editor.for_each_project_item(cx, f)
    }

    fn is_singleton(&self, _: &AppContext) -> bool {
        false
    }

    fn set_nav_history(&mut self, nav_history: ItemNavHistory, cx: &mut ViewContext<Self>) {
        self.editor.update(cx, |editor, _| {
            editor.set_nav_history(Some(nav_history));
        });
    }

    fn clone_on_split(
        &self,
        _workspace_id: Option<workspace::WorkspaceId>,
        cx: &mut ViewContext<Self>,
    ) -> Option<View<Self>>
    where
        Self: Sized,
    {
        Some(cx.new_view(|cx| {
            ProjectDiffEditor::new(self.project.clone(), self.workspace.clone(), cx)
        }))
    }

    fn is_dirty(&self, cx: &AppContext) -> bool {
        self.excerpts.read(cx).is_dirty(cx)
    }

    fn has_conflict(&self, cx: &AppContext) -> bool {
        self.excerpts.read(cx).has_conflict(cx)
    }

    fn can_save(&self, _: &AppContext) -> bool {
        true
    }

    fn save(
        &mut self,
        format: bool,
        project: Model<Project>,
        cx: &mut ViewContext<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.editor.save(format, project, cx)
    }

    fn save_as(
        &mut self,
        _: Model<Project>,
        _: ProjectPath,
        _: &mut ViewContext<Self>,
    ) -> Task<anyhow::Result<()>> {
        unreachable!()
    }

    fn reload(
        &mut self,
        project: Model<Project>,
        cx: &mut ViewContext<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.editor.reload(project, cx)
    }

    fn act_as_type<'a>(
        &'a self,
        type_id: TypeId,
        self_handle: &'a View<Self>,
        _: &'a AppContext,
    ) -> Option<AnyView> {
        if type_id == TypeId::of::<Self>() {
            Some(self_handle.to_any())
        } else if type_id == TypeId::of::<Editor>() {
            Some(self.editor.to_any())
        } else {
            None
        }
    }

    fn breadcrumb_location(&self, _: &AppContext) -> ToolbarItemLocation {
        ToolbarItemLocation::PrimaryLeft
    }

    fn breadcrumbs(&self, theme: &theme::Theme, cx: &AppContext) -> Option<Vec<BreadcrumbText>> {
        self.editor.breadcrumbs(theme, cx)
    }

    fn added_to_workspace(&mut self, workspace: &mut Workspace, cx: &mut ViewContext<Self>) {
        self.editor
            .update(cx, |editor, cx| editor.added_to_workspace(workspace, cx));
    }
}

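// Renders the inner editor when there is anything to show, or a placeholder message otherwise.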
impl Render for ProjectDiffEditor {
    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        let child = if self.buffer_changes.is_empty() {
            div()
                .bg(cx.theme().colors().editor_background)
                .flex()
                .items_center()
                .justify_center()
                .size_full()
                .child(Label::new("No changes in the workspace"))
        } else {
            div().size_full().child(self.editor.clone())
        };

        div()
            .track_focus(&self.focus_handle)
            .size_full()
            .child(child)
    }
}

#[cfg(test)]
mod tests {
    use gpui::{SemanticVersion, TestAppContext, VisualTestContext};
    use project::buffer_store::BufferChangeSet;
    use serde_json::json;
    use settings::SettingsStore;
    use std::{
        ops::Deref as _,
        path::{Path, PathBuf},
    };

    use super::*;

    // TODO finish
    // #[gpui::test]
    // async fn randomized_tests(cx: &mut TestAppContext) {
    //     // Create a new project (how?? temp fs?),
    //     let fs = FakeFs::new(cx.executor());
    //     let project = Project::test(fs, [], cx).await;

    //     // create random files with random content

    //     // Commit it into git somehow (technically can do with "real" fs in a temp dir)
    //     //
    //     // Apply randomized changes to the project: select a random file, random change and apply to buffers
    // }

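    // End-to-end check against a fake file system: deploying on a clean repository yields an
    // empty diff editor; after file_a is edited, saved, and given a change set plus a modified
    // git status, the diff editor picks the change up once the rescan debounce elapses.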
    #[gpui::test(iterations = 30)]
    async fn simple_edit_test(cx: &mut TestAppContext) {
        cx.executor().allow_parking();
        init_test(cx);

        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.insert_tree(
            "/root",
            json!({
                ".git": {},
                "file_a": "This is file_a",
                "file_b": "This is file_b",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), [Path::new("/root")], cx).await;
        let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
        let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx);

        let file_a_editor = workspace
            .update(cx, |workspace, cx| {
                let file_a_editor =
                    workspace.open_abs_path(PathBuf::from("/root/file_a"), true, cx);
                ProjectDiffEditor::deploy(workspace, &Deploy, cx);
                file_a_editor
            })
            .unwrap()
            .await
            .expect("did not open an item at all")
            .downcast::<Editor>()
            .expect("did not open an editor for file_a");
        let project_diff_editor = workspace
            .update(cx, |workspace, cx| {
                workspace
                    .active_pane()
                    .read(cx)
                    .items()
                    .find_map(|item| item.downcast::<ProjectDiffEditor>())
            })
            .unwrap()
            .expect("did not find a ProjectDiffEditor");
        project_diff_editor.update(cx, |project_diff_editor, cx| {
            assert!(
                project_diff_editor.editor.read(cx).text(cx).is_empty(),
                "Should have no changes after opening the diff on no git changes"
            );
        });

        let old_text = file_a_editor.update(cx, |editor, cx| editor.text(cx));
        let change = "an edit after git add";
        file_a_editor
            .update(cx, |file_a_editor, cx| {
                file_a_editor.insert(change, cx);
                file_a_editor.save(false, project.clone(), cx)
            })
            .await
            .expect("failed to save a file");
        file_a_editor.update(cx, |file_a_editor, cx| {
            let change_set = cx.new_model(|cx| {
                BufferChangeSet::new_with_base_text(
                    old_text.clone(),
                    file_a_editor
                        .buffer()
                        .read(cx)
                        .as_singleton()
                        .unwrap()
                        .read(cx)
                        .text_snapshot(),
                    cx,
                )
            });
            file_a_editor
                .diff_map
                .add_change_set(change_set.clone(), cx);
            project.update(cx, |project, cx| {
                project.buffer_store().update(cx, |buffer_store, cx| {
                    buffer_store.set_change_set(
                        file_a_editor
                            .buffer()
                            .read(cx)
                            .as_singleton()
                            .unwrap()
                            .read(cx)
                            .remote_id(),
                        change_set,
                    );
                });
            });
        });
        fs.set_status_for_repo_via_git_operation(
            Path::new("/root/.git"),
            &[(Path::new("file_a"), GitFileStatus::Modified)],
        );
        cx.executor()
            .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100));
        cx.run_until_parked();

        project_diff_editor.update(cx, |project_diff_editor, cx| {
            assert_eq!(
                // TODO assert it better: extract added text (based on the background changes) and deleted text (based on the deleted blocks added)
                project_diff_editor.editor.read(cx).text(cx),
                format!("{change}{old_text}"),
                "Should have a new change shown in the beginning, and the old text shown as deleted text afterwards"
            );
        });
    }

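    // Minimal app state for these tests. `cx.set_staff(true)` is required because `deploy`
    // bails out for non-staff users.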
    fn init_test(cx: &mut gpui::TestAppContext) {
        if std::env::var("RUST_LOG").is_ok() {
            env_logger::try_init().ok();
        }

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            theme::init(theme::LoadThemes::JustBase, cx);
            release_channel::init(SemanticVersion::default(), cx);
            client::init_settings(cx);
            language::init(cx);
            Project::init_settings(cx);
            workspace::init_settings(cx);
            crate::init(cx);
            cx.set_staff(true);
        });
    }
}
1254}