Detailed changes
@@ -2029,11 +2029,15 @@ name = "buffer_diff"
version = "0.1.0"
dependencies = [
"anyhow",
+ "ctor",
+ "env_logger 0.11.6",
"futures 0.3.31",
"git2",
"gpui",
"language",
+ "log",
"pretty_assertions",
+ "rand 0.8.5",
"rope",
"serde_json",
"sum_tree",
@@ -5367,6 +5371,8 @@ dependencies = [
"serde_json",
"settings",
"theme",
+ "time",
+ "time_format",
"ui",
"util",
"windows 0.58.0",
@@ -14410,6 +14416,7 @@ dependencies = [
"strum",
"theme",
"ui_macros",
+ "util",
"windows 0.58.0",
]
@@ -16845,9 +16852,9 @@ dependencies = [
[[package]]
name = "zed_llm_client"
-version = "0.4.0"
+version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "614669bead4741b2fc352ae1967318be16949cf46f59013e548c6dbfdfc01252"
+checksum = "1bf21350eced858d129840589158a8f6895c4fa4327ae56dd8c7d6a98495bed4"
dependencies = [
"serde",
"serde_json",
@@ -17069,6 +17076,7 @@ dependencies = [
"postage",
"project",
"regex",
+ "release_channel",
"reqwest_client",
"rpc",
"serde",
@@ -17078,6 +17086,7 @@ dependencies = [
"telemetry",
"telemetry_events",
"theme",
+ "thiserror 1.0.69",
"tree-sitter-go",
"tree-sitter-rust",
"ui",
@@ -18,6 +18,7 @@
"bash_logout": "terminal",
"bash_profile": "terminal",
"bashrc": "terminal",
+ "bicep": "bicep",
"bmp": "image",
"c": "c",
"c++": "cpp",
@@ -631,7 +631,7 @@
}
},
{
- "context": "GitPanel || EmptyPane || SharedScreen || MarkdownPreview || KeyContextView || Welcome",
+ "context": "ChangesList || EmptyPane || SharedScreen || MarkdownPreview || KeyContextView || Welcome",
"bindings": {
":": "command_palette::Toggle",
"g /": "pane::DeploySearch"
@@ -20,14 +20,18 @@ futures.workspace = true
git2.workspace = true
gpui.workspace = true
language.workspace = true
+log.workspace = true
rope.workspace = true
sum_tree.workspace = true
text.workspace = true
util.workspace = true
[dev-dependencies]
+ctor.workspace = true
+env_logger.workspace = true
+gpui = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
+rand.workspace = true
serde_json.workspace = true
text = { workspace = true, features = ["test-support"] }
-gpui = { workspace = true, features = ["test-support"] }
unindent.workspace = true
@@ -5,6 +5,7 @@ use language::{Language, LanguageRegistry};
use rope::Rope;
use std::{cmp, future::Future, iter, ops::Range, sync::Arc};
use sum_tree::SumTree;
+use text::ToOffset as _;
use text::{Anchor, Bias, BufferId, OffsetRangeExt, Point};
use util::ResultExt;
@@ -14,10 +15,11 @@ pub struct BufferDiff {
secondary_diff: Option<Entity<BufferDiff>>,
}
-#[derive(Clone)]
+#[derive(Clone, Debug)]
pub struct BufferDiffSnapshot {
inner: BufferDiffInner,
secondary_diff: Option<Box<BufferDiffSnapshot>>,
+ pub is_single_insertion: bool,
}
#[derive(Clone)]
@@ -40,21 +42,6 @@ pub enum DiffHunkSecondaryStatus {
None,
}
-// to stage a hunk:
-// - assume hunk starts out as not staged
-// - hunk exists with the same buffer range in the unstaged diff and the uncommitted diff
-// - we want to construct a "version" of the file that
-// - starts from the index base text
-// - has the single hunk applied to it
-// - the hunk is the one from the UNSTAGED diff, so that the diff base offset range is correct to apply to that diff base
-// - write that new version of the file into the index
-
-// to unstage a hunk
-// - no hunk in the unstaged diff intersects this hunk from the uncommitted diff
-// - we want to compute the hunk that
-// - we can apply to the index text
-// - at the end of applying it,
-
/// A diff hunk resolved to rows in the buffer.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct DiffHunk {
@@ -65,6 +52,7 @@ pub struct DiffHunk {
/// The range in the buffer's diff base text to which this hunk corresponds.
pub diff_base_byte_range: Range<usize>,
pub secondary_status: DiffHunkSecondaryStatus,
+ pub secondary_diff_base_byte_range: Option<Range<usize>>,
}
/// We store [`InternalDiffHunk`]s internally so we don't need to store the additional row range.
@@ -166,6 +154,99 @@ impl BufferDiffSnapshot {
}
}
}
+
+ fn buffer_range_to_unchanged_diff_base_range(
+ &self,
+ buffer_range: Range<Anchor>,
+ buffer: &text::BufferSnapshot,
+ ) -> Option<Range<usize>> {
+ let mut hunks = self.inner.hunks.iter();
+ let mut start = 0;
+ let mut pos = buffer.anchor_before(0);
+ while let Some(hunk) = hunks.next() {
+ assert!(buffer_range.start.cmp(&pos, buffer).is_ge());
+ assert!(hunk.buffer_range.start.cmp(&pos, buffer).is_ge());
+ if hunk
+ .buffer_range
+ .start
+ .cmp(&buffer_range.end, buffer)
+ .is_ge()
+ {
+ // target buffer range is contained in the unchanged stretch leading up to this next hunk,
+ // so do a final adjustment based on that
+ break;
+ }
+
+ // if the target buffer range intersects this hunk at all, no dice
+ if buffer_range
+ .start
+ .cmp(&hunk.buffer_range.end, buffer)
+ .is_lt()
+ {
+ return None;
+ }
+
+ start += hunk.buffer_range.start.to_offset(buffer) - pos.to_offset(buffer);
+ start += hunk.diff_base_byte_range.end - hunk.diff_base_byte_range.start;
+ pos = hunk.buffer_range.end;
+ }
+ start += buffer_range.start.to_offset(buffer) - pos.to_offset(buffer);
+ let end = start + buffer_range.end.to_offset(buffer) - buffer_range.start.to_offset(buffer);
+ Some(start..end)
+ }
+
+ pub fn secondary_edits_for_stage_or_unstage(
+ &self,
+ stage: bool,
+ hunks: impl Iterator<Item = (Range<usize>, Option<Range<usize>>, Range<Anchor>)>,
+ buffer: &text::BufferSnapshot,
+ ) -> Vec<(Range<usize>, String)> {
+ let Some(secondary_diff) = self.secondary_diff() else {
+ log::debug!("no secondary diff");
+ return Vec::new();
+ };
+ let index_base = secondary_diff.base_text().map_or_else(
+ || Rope::from(""),
+ |snapshot| snapshot.text.as_rope().clone(),
+ );
+ let head_base = self.base_text().map_or_else(
+ || Rope::from(""),
+ |snapshot| snapshot.text.as_rope().clone(),
+ );
+ log::debug!("original: {:?}", index_base.to_string());
+ let mut edits = Vec::new();
+ for (diff_base_byte_range, secondary_diff_base_byte_range, buffer_range) in hunks {
+ let (index_byte_range, replacement_text) = if stage {
+ log::debug!("staging");
+ let mut replacement_text = String::new();
+ let Some(index_byte_range) = secondary_diff_base_byte_range.clone() else {
+ log::debug!("not a stageable hunk");
+ continue;
+ };
+ log::debug!("using {:?}", index_byte_range);
+ for chunk in buffer.text_for_range(buffer_range.clone()) {
+ replacement_text.push_str(chunk);
+ }
+ (index_byte_range, replacement_text)
+ } else {
+ log::debug!("unstaging");
+ let mut replacement_text = String::new();
+ let Some(index_byte_range) = secondary_diff
+ .buffer_range_to_unchanged_diff_base_range(buffer_range.clone(), &buffer)
+ else {
+ log::debug!("not an unstageable hunk");
+ continue;
+ };
+ for chunk in head_base.chunks_in_range(diff_base_byte_range.clone()) {
+ replacement_text.push_str(chunk);
+ }
+ (index_byte_range, replacement_text)
+ };
+ edits.push((index_byte_range, replacement_text));
+ }
+ log::debug!("edits: {edits:?}");
+ edits
+ }
}
impl BufferDiffInner {
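
For context, the calling pattern for the new secondary_edits_for_stage_or_unstage API is to seed a buffer with the current index text, apply the returned (byte range, replacement) edits to it, and write the result back to the index. A condensed sketch of that flow, assuming diff, hunks, buffer, repo, path, and an app context cx are in scope (the editor hunks further down do exactly this):

    let stage = true; // or false to unstage
    let edits = diff.secondary_edits_for_stage_or_unstage(
        stage,
        hunks.map(|hunk| {
            (
                hunk.diff_base_byte_range.clone(),
                hunk.secondary_diff_base_byte_range.clone(),
                hunk.buffer_range.clone(),
            )
        }),
        &buffer,
    );
    // Seed a scratch buffer with the index contents, apply the edits, and
    // write the resulting text back to the index.
    let index_base = diff.secondary_diff().unwrap().base_text().map_or_else(
        || Rope::from(""),
        |snapshot| snapshot.text.as_rope().clone(),
    );
    let index_buffer =
        cx.new(|cx| Buffer::local_normalized(index_base, text::LineEnding::default(), cx));
    let new_index_text = index_buffer.update(cx, |index_buffer, cx| {
        index_buffer.edit(edits, None, cx);
        index_buffer.snapshot().as_rope().to_string()
    });
    let _ = repo.read(cx).set_index_text(&path, Some(new_index_text));
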
@@ -225,6 +306,7 @@ impl BufferDiffInner {
}
let mut secondary_status = DiffHunkSecondaryStatus::None;
+ let mut secondary_diff_base_byte_range = None;
if let Some(secondary_cursor) = secondary_cursor.as_mut() {
if start_anchor
.cmp(&secondary_cursor.start().buffer_range.start, buffer)
@@ -234,9 +316,15 @@ impl BufferDiffInner {
}
if let Some(secondary_hunk) = secondary_cursor.item() {
- let secondary_range = secondary_hunk.buffer_range.to_point(buffer);
+ let mut secondary_range = secondary_hunk.buffer_range.to_point(buffer);
+ if secondary_range.end.column > 0 {
+ secondary_range.end.row += 1;
+ secondary_range.end.column = 0;
+ }
if secondary_range == (start_point..end_point) {
secondary_status = DiffHunkSecondaryStatus::HasSecondaryHunk;
+ secondary_diff_base_byte_range =
+ Some(secondary_hunk.diff_base_byte_range.clone());
} else if secondary_range.start <= end_point {
secondary_status = DiffHunkSecondaryStatus::OverlapsWithSecondaryHunk;
}
@@ -248,6 +336,7 @@ impl BufferDiffInner {
diff_base_byte_range: start_base..end_base,
buffer_range: start_anchor..end_anchor,
secondary_status,
+ secondary_diff_base_byte_range,
});
})
}
@@ -282,6 +371,7 @@ impl BufferDiffInner {
buffer_range: hunk.buffer_range.clone(),
// The secondary status is not used by callers of this method.
secondary_status: DiffHunkSecondaryStatus::None,
+ secondary_diff_base_byte_range: None,
})
})
}
@@ -351,12 +441,12 @@ impl BufferDiffInner {
}
fn compute_hunks(
- diff_base: Option<Arc<String>>,
+ diff_base: Option<(Arc<String>, Rope)>,
buffer: text::BufferSnapshot,
) -> SumTree<InternalDiffHunk> {
let mut tree = SumTree::new(&buffer);
- if let Some(diff_base) = diff_base {
+ if let Some((diff_base, diff_base_rope)) = diff_base {
let buffer_text = buffer.as_rope().to_string();
let mut options = GitOptions::default();
@@ -387,7 +477,13 @@ fn compute_hunks(
if let Some(patch) = patch {
let mut divergence = 0;
for hunk_index in 0..patch.num_hunks() {
- let hunk = process_patch_hunk(&patch, hunk_index, &buffer, &mut divergence);
+ let hunk = process_patch_hunk(
+ &patch,
+ hunk_index,
+ &diff_base_rope,
+ &buffer,
+ &mut divergence,
+ );
tree.push(hunk, &buffer);
}
}
@@ -399,6 +495,7 @@ fn compute_hunks(
fn process_patch_hunk(
patch: &GitPatch<'_>,
hunk_index: usize,
+ diff_base: &Rope,
buffer: &text::BufferSnapshot,
buffer_row_divergence: &mut i64,
) -> InternalDiffHunk {
@@ -408,50 +505,59 @@ fn process_patch_hunk(
let mut first_deletion_buffer_row: Option<u32> = None;
let mut buffer_row_range: Option<Range<u32>> = None;
let mut diff_base_byte_range: Option<Range<usize>> = None;
+ let mut first_addition_old_row: Option<u32> = None;
for line_index in 0..line_item_count {
let line = patch.line_in_hunk(hunk_index, line_index).unwrap();
let kind = line.origin_value();
let content_offset = line.content_offset() as isize;
let content_len = line.content().len() as isize;
+ match kind {
+ GitDiffLineType::Addition => {
+ if first_addition_old_row.is_none() {
+ first_addition_old_row = Some(
+ (line.new_lineno().unwrap() as i64 - *buffer_row_divergence - 1) as u32,
+ );
+ }
+ *buffer_row_divergence += 1;
+ let row = line.new_lineno().unwrap().saturating_sub(1);
- if kind == GitDiffLineType::Addition {
- *buffer_row_divergence += 1;
- let row = line.new_lineno().unwrap().saturating_sub(1);
-
- match &mut buffer_row_range {
- Some(buffer_row_range) => buffer_row_range.end = row + 1,
- None => buffer_row_range = Some(row..row + 1),
+ match &mut buffer_row_range {
+ Some(Range { end, .. }) => *end = row + 1,
+ None => buffer_row_range = Some(row..row + 1),
+ }
}
- }
+ GitDiffLineType::Deletion => {
+ let end = content_offset + content_len;
- if kind == GitDiffLineType::Deletion {
- let end = content_offset + content_len;
+ match &mut diff_base_byte_range {
+ Some(head_byte_range) => head_byte_range.end = end as usize,
+ None => diff_base_byte_range = Some(content_offset as usize..end as usize),
+ }
- match &mut diff_base_byte_range {
- Some(head_byte_range) => head_byte_range.end = end as usize,
- None => diff_base_byte_range = Some(content_offset as usize..end as usize),
- }
+ if first_deletion_buffer_row.is_none() {
+ let old_row = line.old_lineno().unwrap().saturating_sub(1);
+ let row = old_row as i64 + *buffer_row_divergence;
+ first_deletion_buffer_row = Some(row as u32);
+ }
- if first_deletion_buffer_row.is_none() {
- let old_row = line.old_lineno().unwrap().saturating_sub(1);
- let row = old_row as i64 + *buffer_row_divergence;
- first_deletion_buffer_row = Some(row as u32);
+ *buffer_row_divergence -= 1;
}
-
- *buffer_row_divergence -= 1;
+ _ => {}
}
}
- //unwrap_or deletion without addition
let buffer_row_range = buffer_row_range.unwrap_or_else(|| {
- //we cannot have an addition-less hunk without deletion(s) or else there would be no hunk
+ // Pure deletion hunk without addition.
let row = first_deletion_buffer_row.unwrap();
row..row
});
-
- //unwrap_or addition without deletion
- let diff_base_byte_range = diff_base_byte_range.unwrap_or(0..0);
+ let diff_base_byte_range = diff_base_byte_range.unwrap_or_else(|| {
+ // Pure addition hunk without deletion.
+ let row = first_addition_old_row.unwrap();
+ let offset = diff_base.point_to_offset(Point::new(row, 0));
+ offset..offset
+ });
let start = Point::new(buffer_row_range.start, 0);
let end = Point::new(buffer_row_range.end, 0);
@@ -499,9 +605,11 @@ impl BufferDiff {
language_registry: Option<Arc<LanguageRegistry>>,
cx: &mut App,
) -> impl Future<Output = BufferDiffInner> {
- let base_text_snapshot = diff_base.as_ref().map(|base_text| {
+ let diff_base =
+ diff_base.map(|diff_base| (diff_base.clone(), Rope::from(diff_base.as_str())));
+ let base_text_snapshot = diff_base.as_ref().map(|(_, diff_base)| {
language::Buffer::build_snapshot(
- Rope::from(base_text.as_str()),
+ diff_base.clone(),
language.clone(),
language_registry.clone(),
cx,
@@ -528,6 +636,11 @@ impl BufferDiff {
diff_base_buffer: Option<language::BufferSnapshot>,
cx: &App,
) -> impl Future<Output = BufferDiffInner> {
+ let diff_base = diff_base.clone().zip(
+ diff_base_buffer
+ .clone()
+ .map(|buffer| buffer.as_rope().clone()),
+ );
cx.background_executor().spawn(async move {
BufferDiffInner {
hunks: compute_hunks(diff_base, buffer),
@@ -545,6 +658,7 @@ impl BufferDiff {
pub fn build_with_single_insertion(
insertion_present_in_secondary_diff: bool,
+ buffer: language::BufferSnapshot,
cx: &mut App,
) -> BufferDiffSnapshot {
let base_text = language::Buffer::build_empty_snapshot(cx);
@@ -560,17 +674,23 @@ impl BufferDiff {
hunks: hunks.clone(),
base_text: Some(base_text.clone()),
},
- secondary_diff: if insertion_present_in_secondary_diff {
- Some(Box::new(BufferDiffSnapshot {
- inner: BufferDiffInner {
- hunks,
- base_text: Some(base_text),
+ secondary_diff: Some(Box::new(BufferDiffSnapshot {
+ inner: BufferDiffInner {
+ hunks: if insertion_present_in_secondary_diff {
+ hunks
+ } else {
+ SumTree::new(&buffer.text)
},
- secondary_diff: None,
- }))
- } else {
- None
- },
+ base_text: Some(if insertion_present_in_secondary_diff {
+ base_text
+ } else {
+ buffer
+ }),
+ },
+ secondary_diff: None,
+ is_single_insertion: true,
+ })),
+ is_single_insertion: true,
}
}
@@ -675,6 +795,7 @@ impl BufferDiff {
.secondary_diff
.as_ref()
.map(|diff| Box::new(diff.read(cx).snapshot(cx))),
+ is_single_insertion: false,
}
}
@@ -875,13 +996,21 @@ pub fn assert_hunks<Iter>(
#[cfg(test)]
mod tests {
- use std::assert_eq;
+ use std::fmt::Write as _;
use super::*;
- use gpui::TestAppContext;
- use text::{Buffer, BufferId};
+ use gpui::{AppContext as _, TestAppContext};
+ use rand::{rngs::StdRng, Rng as _};
+ use text::{Buffer, BufferId, Rope};
use unindent::Unindent as _;
+ #[ctor::ctor]
+ fn init_logger() {
+ if std::env::var("RUST_LOG").is_ok() {
+ env_logger::init();
+ }
+ }
+
#[gpui::test]
async fn test_buffer_diff_simple(cx: &mut gpui::TestAppContext) {
let diff_base = "
@@ -1200,4 +1329,192 @@ mod tests {
let range = diff_6.compare(&diff_5, &buffer).unwrap();
assert_eq!(range.to_point(&buffer), Point::new(7, 0)..Point::new(8, 0));
}
+
+ #[gpui::test(iterations = 100)]
+ async fn test_secondary_edits_for_stage_unstage(cx: &mut TestAppContext, mut rng: StdRng) {
+ fn gen_line(rng: &mut StdRng) -> String {
+ if rng.gen_bool(0.2) {
+ "\n".to_owned()
+ } else {
+ let c = rng.gen_range('A'..='Z');
+ format!("{c}{c}{c}\n")
+ }
+ }
+
+ fn gen_working_copy(rng: &mut StdRng, head: &str) -> String {
+ let mut old_lines = {
+ let mut old_lines = Vec::new();
+ let mut old_lines_iter = head.lines();
+ while let Some(line) = old_lines_iter.next() {
+ assert!(!line.ends_with("\n"));
+ old_lines.push(line.to_owned());
+ }
+ if old_lines.last().is_some_and(|line| line.is_empty()) {
+ old_lines.pop();
+ }
+ old_lines.into_iter()
+ };
+ let mut result = String::new();
+ let unchanged_count = rng.gen_range(0..=old_lines.len());
+ result +=
+ &old_lines
+ .by_ref()
+ .take(unchanged_count)
+ .fold(String::new(), |mut s, line| {
+ writeln!(&mut s, "{line}").unwrap();
+ s
+ });
+ while old_lines.len() > 0 {
+ let deleted_count = rng.gen_range(0..=old_lines.len());
+ let _advance = old_lines
+ .by_ref()
+ .take(deleted_count)
+ .map(|line| line.len() + 1)
+ .sum::<usize>();
+ let minimum_added = if deleted_count == 0 { 1 } else { 0 };
+ let added_count = rng.gen_range(minimum_added..=5);
+ let addition = (0..added_count).map(|_| gen_line(rng)).collect::<String>();
+ result += &addition;
+
+ if old_lines.len() > 0 {
+ let blank_lines = old_lines.clone().take_while(|line| line.is_empty()).count();
+ if blank_lines == old_lines.len() {
+ break;
+ };
+ let unchanged_count = rng.gen_range((blank_lines + 1).max(1)..=old_lines.len());
+ result += &old_lines.by_ref().take(unchanged_count).fold(
+ String::new(),
+ |mut s, line| {
+ writeln!(&mut s, "{line}").unwrap();
+ s
+ },
+ );
+ }
+ }
+ result
+ }
+
+ fn uncommitted_diff(
+ working_copy: &language::BufferSnapshot,
+ index_text: &Entity<language::Buffer>,
+ head_text: String,
+ cx: &mut TestAppContext,
+ ) -> BufferDiff {
+ let inner = BufferDiff::build_sync(working_copy.text.clone(), head_text, cx);
+ let secondary = BufferDiff {
+ buffer_id: working_copy.remote_id(),
+ inner: BufferDiff::build_sync(
+ working_copy.text.clone(),
+ index_text.read_with(cx, |index_text, _| index_text.text()),
+ cx,
+ ),
+ secondary_diff: None,
+ };
+ let secondary = cx.new(|_| secondary);
+ BufferDiff {
+ buffer_id: working_copy.remote_id(),
+ inner,
+ secondary_diff: Some(secondary),
+ }
+ }
+
+ let operations = std::env::var("OPERATIONS")
+ .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
+ .unwrap_or(10);
+
+ let rng = &mut rng;
+ let head_text = ('a'..='z').fold(String::new(), |mut s, c| {
+ writeln!(&mut s, "{c}{c}{c}").unwrap();
+ s
+ });
+ let working_copy = gen_working_copy(rng, &head_text);
+ let working_copy = cx.new(|cx| {
+ language::Buffer::local_normalized(
+ Rope::from(working_copy.as_str()),
+ text::LineEnding::default(),
+ cx,
+ )
+ });
+ let working_copy = working_copy.read_with(cx, |working_copy, _| working_copy.snapshot());
+ let index_text = cx.new(|cx| {
+ language::Buffer::local_normalized(
+ if rng.gen() {
+ Rope::from(head_text.as_str())
+ } else {
+ working_copy.as_rope().clone()
+ },
+ text::LineEnding::default(),
+ cx,
+ )
+ });
+
+ let mut diff = uncommitted_diff(&working_copy, &index_text, head_text.clone(), cx);
+ let mut hunks = cx.update(|cx| {
+ diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &working_copy, cx)
+ .collect::<Vec<_>>()
+ });
+ if hunks.len() == 0 {
+ return;
+ }
+
+ for _ in 0..operations {
+ let i = rng.gen_range(0..hunks.len());
+ let hunk = &mut hunks[i];
+ let hunk_fields = (
+ hunk.diff_base_byte_range.clone(),
+ hunk.secondary_diff_base_byte_range.clone(),
+ hunk.buffer_range.clone(),
+ );
+ let stage = match (
+ hunk.secondary_status,
+ hunk.secondary_diff_base_byte_range.clone(),
+ ) {
+ (DiffHunkSecondaryStatus::HasSecondaryHunk, Some(_)) => {
+ hunk.secondary_status = DiffHunkSecondaryStatus::None;
+ hunk.secondary_diff_base_byte_range = None;
+ true
+ }
+ (DiffHunkSecondaryStatus::None, None) => {
+ hunk.secondary_status = DiffHunkSecondaryStatus::HasSecondaryHunk;
+ // We don't look at this, just notice whether it's Some or not.
+ hunk.secondary_diff_base_byte_range = Some(17..17);
+ false
+ }
+ _ => unreachable!(),
+ };
+
+ let snapshot = cx.update(|cx| diff.snapshot(cx));
+ let edits = snapshot.secondary_edits_for_stage_or_unstage(
+ stage,
+ [hunk_fields].into_iter(),
+ &working_copy,
+ );
+ index_text.update(cx, |index_text, cx| {
+ index_text.edit(edits, None, cx);
+ });
+
+ diff = uncommitted_diff(&working_copy, &index_text, head_text.clone(), cx);
+ let found_hunks = cx.update(|cx| {
+ diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &working_copy, cx)
+ .collect::<Vec<_>>()
+ });
+ assert_eq!(hunks.len(), found_hunks.len());
+ for (expected_hunk, found_hunk) in hunks.iter().zip(&found_hunks) {
+ assert_eq!(
+ expected_hunk.buffer_range.to_point(&working_copy),
+ found_hunk.buffer_range.to_point(&working_copy)
+ );
+ assert_eq!(
+ expected_hunk.diff_base_byte_range,
+ found_hunk.diff_base_byte_range
+ );
+ assert_eq!(expected_hunk.secondary_status, found_hunk.secondary_status);
+ assert_eq!(
+ expected_hunk.secondary_diff_base_byte_range.is_some(),
+ found_hunk.secondary_diff_base_byte_range.is_some()
+ )
+ }
+ hunks = found_hunks;
+ }
+ }
}
@@ -101,6 +101,7 @@ CREATE TABLE "worktree_repositories" (
"scan_id" INTEGER NOT NULL,
"is_deleted" BOOL NOT NULL,
"current_merge_conflicts" VARCHAR,
+ "branch_summary" VARCHAR,
PRIMARY KEY(project_id, worktree_id, work_directory_id),
FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE,
FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE
@@ -0,0 +1,2 @@
+ALTER TABLE worktree_repositories
+ADD COLUMN branch_summary VARCHAR NULL;
@@ -0,0 +1 @@
+ALTER TABLE worktree_repositories ADD COLUMN branch_summary TEXT NULL;
@@ -326,16 +326,26 @@ impl Database {
if !update.updated_repositories.is_empty() {
worktree_repository::Entity::insert_many(update.updated_repositories.iter().map(
- |repository| worktree_repository::ActiveModel {
- project_id: ActiveValue::set(project_id),
- worktree_id: ActiveValue::set(worktree_id),
- work_directory_id: ActiveValue::set(repository.work_directory_id as i64),
- scan_id: ActiveValue::set(update.scan_id as i64),
- branch: ActiveValue::set(repository.branch.clone()),
- is_deleted: ActiveValue::set(false),
- current_merge_conflicts: ActiveValue::Set(Some(
- serde_json::to_string(&repository.current_merge_conflicts).unwrap(),
- )),
+ |repository| {
+ worktree_repository::ActiveModel {
+ project_id: ActiveValue::set(project_id),
+ worktree_id: ActiveValue::set(worktree_id),
+ work_directory_id: ActiveValue::set(
+ repository.work_directory_id as i64,
+ ),
+ scan_id: ActiveValue::set(update.scan_id as i64),
+ branch: ActiveValue::set(repository.branch.clone()),
+ is_deleted: ActiveValue::set(false),
+ branch_summary: ActiveValue::Set(
+ repository
+ .branch_summary
+ .as_ref()
+ .map(|summary| serde_json::to_string(summary).unwrap()),
+ ),
+ current_merge_conflicts: ActiveValue::Set(Some(
+ serde_json::to_string(&repository.current_merge_conflicts).unwrap(),
+ )),
+ }
},
))
.on_conflict(
@@ -347,6 +357,8 @@ impl Database {
.update_columns([
worktree_repository::Column::ScanId,
worktree_repository::Column::Branch,
+ worktree_repository::Column::BranchSummary,
+ worktree_repository::Column::CurrentMergeConflicts,
])
.to_owned(),
)
@@ -779,6 +791,13 @@ impl Database {
.transpose()?
.unwrap_or_default();
+ let branch_summary = db_repository_entry
+ .branch_summary
+ .as_ref()
+ .map(|branch_summary| serde_json::from_str(&branch_summary))
+ .transpose()?
+ .unwrap_or_default();
+
worktree.repository_entries.insert(
db_repository_entry.work_directory_id as u64,
proto::RepositoryEntry {
@@ -787,6 +806,7 @@ impl Database {
updated_statuses,
removed_statuses: Vec::new(),
current_merge_conflicts,
+ branch_summary,
},
);
}
@@ -743,12 +743,20 @@ impl Database {
.transpose()?
.unwrap_or_default();
+ let branch_summary = db_repository
+ .branch_summary
+ .as_ref()
+ .map(|branch_summary| serde_json::from_str(&branch_summary))
+ .transpose()?
+ .unwrap_or_default();
+
worktree.updated_repositories.push(proto::RepositoryEntry {
work_directory_id: db_repository.work_directory_id as u64,
branch: db_repository.branch,
updated_statuses,
removed_statuses,
current_merge_conflicts,
+ branch_summary,
});
}
}
@@ -15,6 +15,8 @@ pub struct Model {
pub is_deleted: bool,
// JSON array typed string
pub current_merge_conflicts: Option<String>,
+ // A JSON object representing the current Branch values
+ pub branch_summary: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -395,6 +395,9 @@ impl Server {
.add_request_handler(forward_mutating_project_request::<proto::Stage>)
.add_request_handler(forward_mutating_project_request::<proto::Unstage>)
.add_request_handler(forward_mutating_project_request::<proto::Commit>)
+ .add_request_handler(forward_read_only_project_request::<proto::GitShow>)
+ .add_request_handler(forward_read_only_project_request::<proto::GitReset>)
+ .add_request_handler(forward_mutating_project_request::<proto::SetIndexText>)
.add_request_handler(forward_mutating_project_request::<proto::OpenCommitMessageBuffer>)
.add_message_handler(broadcast_project_message_from_host::<proto::AdvertiseContexts>)
.add_message_handler(update_context)
@@ -2895,7 +2895,10 @@ async fn test_git_branch_name(
assert_eq!(worktrees.len(), 1);
let worktree = worktrees[0].clone();
let root_entry = worktree.read(cx).snapshot().root_git_entry().unwrap();
- assert_eq!(root_entry.branch(), branch_name.map(Into::into));
+ assert_eq!(
+ root_entry.branch().map(|branch| branch.name.to_string()),
+ branch_name
+ );
}
// Smoke test branch reading
@@ -6783,7 +6786,7 @@ async fn test_remote_git_branches(
})
});
- assert_eq!(host_branch.as_ref(), branches[2]);
+ assert_eq!(host_branch.name, branches[2]);
// Also try creating a new branch
cx_b.update(|cx| {
@@ -6804,5 +6807,5 @@ async fn test_remote_git_branches(
})
});
- assert_eq!(host_branch.as_ref(), "totally-new-branch");
+ assert_eq!(host_branch.name, "totally-new-branch");
}
@@ -314,7 +314,7 @@ async fn test_ssh_collaboration_git_branches(
})
});
- assert_eq!(server_branch.as_ref(), branches[2]);
+ assert_eq!(server_branch.name, branches[2]);
// Also try creating a new branch
cx_b.update(|cx| {
@@ -337,7 +337,7 @@ async fn test_ssh_collaboration_git_branches(
})
});
- assert_eq!(server_branch.as_ref(), "totally-new-branch");
+ assert_eq!(server_branch.name, "totally-new-branch");
}
#[gpui::test]
@@ -402,6 +402,7 @@ gpui::actions!(
ToggleInlayHints,
ToggleEditPrediction,
ToggleLineNumbers,
+ ToggleStagedSelectedDiffHunks,
SwapSelectionEnds,
SetMark,
ToggleRelativeLineNumbers,
@@ -1,28 +1,48 @@
use futures::Future;
use git::blame::BlameEntry;
-use git::Oid;
+use git::PullRequest;
use gpui::{
App, Asset, ClipboardItem, Element, ParentElement, Render, ScrollHandle,
StatefulInteractiveElement, WeakEntity,
};
+use language::ParsedMarkdown;
use settings::Settings;
use std::hash::Hash;
use theme::ThemeSettings;
-use time::UtcOffset;
+use time::{OffsetDateTime, UtcOffset};
+use time_format::format_local_timestamp;
use ui::{prelude::*, tooltip_container, Avatar, Divider, IconButtonShape};
+use url::Url;
use workspace::Workspace;
-use crate::git::blame::{CommitDetails, GitRemote};
+use crate::git::blame::GitRemote;
use crate::EditorStyle;
+#[derive(Clone, Debug)]
+pub struct CommitDetails {
+ pub sha: SharedString,
+ pub committer_name: SharedString,
+ pub committer_email: SharedString,
+ pub commit_time: OffsetDateTime,
+ pub message: Option<ParsedCommitMessage>,
+}
+
+#[derive(Clone, Debug, Default)]
+pub struct ParsedCommitMessage {
+ pub message: SharedString,
+ pub parsed_message: ParsedMarkdown,
+ pub permalink: Option<Url>,
+ pub pull_request: Option<PullRequest>,
+ pub remote: Option<GitRemote>,
+}
+
struct CommitAvatar<'a> {
- details: Option<&'a CommitDetails>,
- sha: Oid,
+ commit: &'a CommitDetails,
}
impl<'a> CommitAvatar<'a> {
- fn new(details: Option<&'a CommitDetails>, sha: Oid) -> Self {
- Self { details, sha }
+ fn new(details: &'a CommitDetails) -> Self {
+ Self { commit: details }
}
}
@@ -30,14 +50,16 @@ impl<'a> CommitAvatar<'a> {
fn render(
&'a self,
window: &mut Window,
- cx: &mut Context<BlameEntryTooltip>,
+ cx: &mut Context<CommitTooltip>,
) -> Option<impl IntoElement> {
let remote = self
- .details
+ .commit
+ .message
+ .as_ref()
.and_then(|details| details.remote.as_ref())
.filter(|remote| remote.host_supports_avatars())?;
- let avatar_url = CommitAvatarAsset::new(remote.clone(), self.sha);
+ let avatar_url = CommitAvatarAsset::new(remote.clone(), self.commit.sha.clone());
let element = match window.use_asset::<CommitAvatarAsset>(&avatar_url, cx) {
// Loading or no avatar found
@@ -54,7 +76,7 @@ impl<'a> CommitAvatar<'a> {
#[derive(Clone, Debug)]
struct CommitAvatarAsset {
- sha: Oid,
+ sha: SharedString,
remote: GitRemote,
}
@@ -66,7 +88,7 @@ impl Hash for CommitAvatarAsset {
}
impl CommitAvatarAsset {
- fn new(remote: GitRemote, sha: Oid) -> Self {
+ fn new(remote: GitRemote, sha: SharedString) -> Self {
Self { remote, sha }
}
}
@@ -91,50 +113,78 @@ impl Asset for CommitAvatarAsset {
}
}
-pub(crate) struct BlameEntryTooltip {
- blame_entry: BlameEntry,
- details: Option<CommitDetails>,
+pub struct CommitTooltip {
+ commit: CommitDetails,
editor_style: EditorStyle,
workspace: Option<WeakEntity<Workspace>>,
scroll_handle: ScrollHandle,
}
-impl BlameEntryTooltip {
- pub(crate) fn new(
- blame_entry: BlameEntry,
- details: Option<CommitDetails>,
- style: &EditorStyle,
+impl CommitTooltip {
+ pub fn blame_entry(
+ blame: BlameEntry,
+ details: Option<ParsedCommitMessage>,
+ style: EditorStyle,
+ workspace: Option<WeakEntity<Workspace>>,
+ ) -> Self {
+ let commit_time = blame
+ .committer_time
+ .and_then(|t| OffsetDateTime::from_unix_timestamp(t).ok())
+ .unwrap_or(OffsetDateTime::now_utc());
+ Self::new(
+ CommitDetails {
+ sha: blame.sha.to_string().into(),
+ commit_time,
+ committer_name: blame
+ .committer_name
+ .unwrap_or("<no name>".to_string())
+ .into(),
+ committer_email: blame.committer_email.unwrap_or("".to_string()).into(),
+ message: details,
+ },
+ style,
+ workspace,
+ )
+ }
+
+ pub fn new(
+ commit: CommitDetails,
+ editor_style: EditorStyle,
workspace: Option<WeakEntity<Workspace>>,
) -> Self {
Self {
- editor_style: style.clone(),
- blame_entry,
- details,
+ editor_style,
+ commit,
workspace,
scroll_handle: ScrollHandle::new(),
}
}
}
-impl Render for BlameEntryTooltip {
+impl Render for CommitTooltip {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
- let avatar =
- CommitAvatar::new(self.details.as_ref(), self.blame_entry.sha).render(window, cx);
+ let avatar = CommitAvatar::new(&self.commit).render(window, cx);
- let author = self
- .blame_entry
- .author
- .clone()
- .unwrap_or("<no name>".to_string());
+ let author = self.commit.committer_name.clone();
- let author_email = self.blame_entry.author_mail.clone();
+ let author_email = self.commit.committer_email.clone();
- let short_commit_id = self.blame_entry.sha.display_short();
- let full_sha = self.blame_entry.sha.to_string().clone();
- let absolute_timestamp = blame_entry_absolute_timestamp(&self.blame_entry);
+ let short_commit_id = self
+ .commit
+ .sha
+ .get(0..8)
+ .map(|sha| sha.to_string().into())
+ .unwrap_or_else(|| self.commit.sha.clone());
+ let full_sha = self.commit.sha.to_string().clone();
+ let absolute_timestamp = format_local_timestamp(
+ self.commit.commit_time,
+ OffsetDateTime::now_utc(),
+ time_format::TimestampFormat::MediumAbsolute,
+ );
let message = self
- .details
+ .commit
+ .message
.as_ref()
.map(|details| {
crate::render_parsed_markdown(
@@ -149,7 +199,8 @@ impl Render for BlameEntryTooltip {
.unwrap_or("<no commit message>".into_any());
let pull_request = self
- .details
+ .commit
+ .message
.as_ref()
.and_then(|details| details.pull_request.clone());
@@ -171,7 +222,7 @@ impl Render for BlameEntryTooltip {
.flex_wrap()
.children(avatar)
.child(author)
- .when_some(author_email, |this, author_email| {
+ .when(!author_email.is_empty(), |this| {
this.child(
div()
.text_color(cx.theme().colors().text_muted)
@@ -231,12 +282,16 @@ impl Render for BlameEntryTooltip {
.icon_color(Color::Muted)
.icon_position(IconPosition::Start)
.disabled(
- self.details.as_ref().map_or(true, |details| {
- details.permalink.is_none()
- }),
+ self.commit
+ .message
+ .as_ref()
+ .map_or(true, |details| {
+ details.permalink.is_none()
+ }),
)
.when_some(
- self.details
+ self.commit
+ .message
.as_ref()
.and_then(|details| details.permalink.clone()),
|this, url| {
@@ -284,7 +339,3 @@ fn blame_entry_timestamp(blame_entry: &BlameEntry, format: time_format::Timestam
pub fn blame_entry_relative_timestamp(blame_entry: &BlameEntry) -> String {
blame_entry_timestamp(blame_entry, time_format::TimestampFormat::Relative)
}
-
-fn blame_entry_absolute_timestamp(blame_entry: &BlameEntry) -> String {
- blame_entry_timestamp(blame_entry, time_format::TimestampFormat::MediumAbsolute)
-}
@@ -13,10 +13,10 @@
//!
//! If you're looking to improve Vim mode, you should check out Vim crate that wraps Editor and overrides its behavior.
pub mod actions;
-mod blame_entry_tooltip;
mod blink_manager;
mod clangd_ext;
mod code_context_menus;
+pub mod commit_tooltip;
pub mod display_map;
mod editor_settings;
mod editor_settings_controls;
@@ -52,6 +52,7 @@ pub use actions::{AcceptEditPrediction, OpenExcerpts, OpenExcerptsSplit};
use aho_corasick::AhoCorasick;
use anyhow::{anyhow, Context as _, Result};
use blink_manager::BlinkManager;
+use buffer_diff::DiffHunkSecondaryStatus;
use client::{Collaborator, ParticipantIndex};
use clock::ReplicaId;
use collections::{BTreeMap, HashMap, HashSet, VecDeque};
@@ -95,7 +96,7 @@ use itertools::Itertools;
use language::{
language_settings::{self, all_language_settings, language_settings, InlayHintSettings},
markdown, point_from_lsp, AutoindentMode, BracketPair, Buffer, Capability, CharKind, CodeLabel,
- CompletionDocumentation, CursorShape, Diagnostic, EditPredictionsMode, EditPreview,
+ CompletionDocumentation, CursorShape, Diagnostic, DiskState, EditPredictionsMode, EditPreview,
HighlightedText, IndentKind, IndentSize, Language, OffsetRangeExt, Point, Selection,
SelectionGoal, TextObject, TransactionId, TreeSitterOptions,
};
@@ -160,7 +161,7 @@ use sum_tree::TreeMap;
use text::{BufferId, OffsetUtf16, Rope};
use theme::{ActiveTheme, PlayerColor, StatusColors, SyntaxTheme, ThemeColors, ThemeSettings};
use ui::{
- h_flex, prelude::*, ButtonSize, ButtonStyle, Disclosure, IconButton, IconName, IconSize,
+ h_flex, prelude::*, ButtonSize, ButtonStyle, Disclosure, IconButton, IconName, IconSize, Key,
Tooltip,
};
use util::{defer, maybe, post_inc, RangeExt, ResultExt, TakeUntilExt, TryFutureExt};
@@ -5656,29 +5657,39 @@ impl Editor {
fn render_edit_prediction_accept_keybind(&self, window: &mut Window, cx: &App) -> Option<Div> {
let accept_binding = self.accept_edit_prediction_keybind(window, cx);
let accept_keystroke = accept_binding.keystroke()?;
- let colors = cx.theme().colors();
- let accent_color = colors.text_accent;
- let editor_bg_color = colors.editor_background;
- let bg_color = editor_bg_color.blend(accent_color.opacity(0.1));
+
+ let is_platform_style_mac = PlatformStyle::platform() == PlatformStyle::Mac;
+
+ let modifiers_color = if accept_keystroke.modifiers == window.modifiers() {
+ Color::Accent
+ } else {
+ Color::Muted
+ };
h_flex()
.px_0p5()
- .gap_1()
- .bg(bg_color)
+ .when(is_platform_style_mac, |parent| parent.gap_0p5())
.font(theme::ThemeSettings::get_global(cx).buffer_font.clone())
.text_size(TextSize::XSmall.rems(cx))
- .children(ui::render_modifiers(
+ .child(h_flex().children(ui::render_modifiers(
&accept_keystroke.modifiers,
PlatformStyle::platform(),
- Some(if accept_keystroke.modifiers == window.modifiers() {
- Color::Accent
- } else {
- Color::Muted
- }),
+ Some(modifiers_color),
Some(IconSize::XSmall.rems().into()),
- false,
- ))
- .child(accept_keystroke.key.clone())
+ true,
+ )))
+ .when(is_platform_style_mac, |parent| {
+ parent.child(accept_keystroke.key.clone())
+ })
+ .when(!is_platform_style_mac, |parent| {
+ parent.child(
+ Key::new(
+ util::capitalize(&accept_keystroke.key),
+ Some(Color::Default),
+ )
+ .size(Some(IconSize::XSmall.rems().into())),
+ )
+ })
.into()
}
@@ -5807,13 +5818,13 @@ impl Editor {
},
)
.child(Label::new("Hold").size(LabelSize::Small))
- .children(ui::render_modifiers(
+ .child(h_flex().children(ui::render_modifiers(
&accept_keystroke.modifiers,
PlatformStyle::platform(),
Some(Color::Default),
Some(IconSize::Small.rems().into()),
- true,
- ))
+ false,
+ )))
.into_any(),
);
}
@@ -5857,6 +5868,7 @@ impl Editor {
let has_completion = self.active_inline_completion.is_some();
+ let is_platform_style_mac = PlatformStyle::platform() == PlatformStyle::Mac;
Some(
h_flex()
.min_w(min_width)
@@ -5885,8 +5897,8 @@ impl Editor {
.child(
h_flex()
.font(theme::ThemeSettings::get_global(cx).buffer_font.clone())
- .gap_1()
- .children(ui::render_modifiers(
+ .when(is_platform_style_mac, |parent| parent.gap_1())
+ .child(h_flex().children(ui::render_modifiers(
&accept_keystroke.modifiers,
PlatformStyle::platform(),
Some(if !has_completion {
@@ -5895,8 +5907,8 @@ impl Editor {
Color::Default
}),
None,
- true,
- )),
+ false,
+ ))),
)
.child(Label::new("Preview").into_any_element())
.opacity(if has_completion { 1.0 } else { 0.4 }),
@@ -12431,6 +12443,121 @@ impl Editor {
self.toggle_diff_hunks_in_ranges(ranges, cx);
}
+ fn diff_hunks_in_ranges<'a>(
+ &'a self,
+ ranges: &'a [Range<Anchor>],
+ buffer: &'a MultiBufferSnapshot,
+ ) -> impl 'a + Iterator<Item = MultiBufferDiffHunk> {
+ ranges.iter().flat_map(move |range| {
+ let end_excerpt_id = range.end.excerpt_id;
+ let range = range.to_point(buffer);
+ let mut peek_end = range.end;
+ if range.end.row < buffer.max_row().0 {
+ peek_end = Point::new(range.end.row + 1, 0);
+ }
+ buffer
+ .diff_hunks_in_range(range.start..peek_end)
+ .filter(move |hunk| hunk.excerpt_id.cmp(&end_excerpt_id, buffer).is_le())
+ })
+ }
+
+ pub fn has_stageable_diff_hunks_in_ranges(
+ &self,
+ ranges: &[Range<Anchor>],
+ snapshot: &MultiBufferSnapshot,
+ ) -> bool {
+ let mut hunks = self.diff_hunks_in_ranges(ranges, &snapshot);
+ hunks.any(|hunk| {
+ log::debug!("considering {hunk:?}");
+ hunk.secondary_status == DiffHunkSecondaryStatus::HasSecondaryHunk
+ })
+ }
+
+ pub fn toggle_staged_selected_diff_hunks(
+ &mut self,
+ _: &ToggleStagedSelectedDiffHunks,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let ranges: Vec<_> = self.selections.disjoint.iter().map(|s| s.range()).collect();
+ self.stage_or_unstage_diff_hunks(&ranges, cx);
+ }
+
+ pub fn stage_or_unstage_diff_hunks(
+ &mut self,
+ ranges: &[Range<Anchor>],
+ cx: &mut Context<Self>,
+ ) {
+ let Some(project) = &self.project else {
+ return;
+ };
+ let snapshot = self.buffer.read(cx).snapshot(cx);
+ let stage = self.has_stageable_diff_hunks_in_ranges(ranges, &snapshot);
+
+ let chunk_by = self
+ .diff_hunks_in_ranges(&ranges, &snapshot)
+ .chunk_by(|hunk| hunk.buffer_id);
+ for (buffer_id, hunks) in &chunk_by {
+ let Some(buffer) = project.read(cx).buffer_for_id(buffer_id, cx) else {
+ log::debug!("no buffer for id");
+ continue;
+ };
+ let buffer = buffer.read(cx).snapshot();
+ let Some((repo, path)) = project
+ .read(cx)
+ .repository_and_path_for_buffer_id(buffer_id, cx)
+ else {
+ log::debug!("no git repo for buffer id");
+ continue;
+ };
+ let Some(diff) = snapshot.diff_for_buffer_id(buffer_id) else {
+ log::debug!("no diff for buffer id");
+ continue;
+ };
+ let Some(secondary_diff) = diff.secondary_diff() else {
+ log::debug!("no secondary diff for buffer id");
+ continue;
+ };
+
+ let edits = diff.secondary_edits_for_stage_or_unstage(
+ stage,
+ hunks.map(|hunk| {
+ (
+ hunk.diff_base_byte_range.clone(),
+ hunk.secondary_diff_base_byte_range.clone(),
+ hunk.buffer_range.clone(),
+ )
+ }),
+ &buffer,
+ );
+
+ let index_base = secondary_diff.base_text().map_or_else(
+ || Rope::from(""),
+ |snapshot| snapshot.text.as_rope().clone(),
+ );
+ let index_buffer = cx.new(|cx| {
+ Buffer::local_normalized(index_base.clone(), text::LineEnding::default(), cx)
+ });
+ let new_index_text = index_buffer.update(cx, |index_buffer, cx| {
+ index_buffer.edit(edits, None, cx);
+ index_buffer.snapshot().as_rope().to_string()
+ });
+ let new_index_text = if new_index_text.is_empty()
+ && (diff.is_single_insertion
+ || buffer
+ .file()
+ .map_or(false, |file| file.disk_state() == DiskState::New))
+ {
+ log::debug!("removing from index");
+ None
+ } else {
+ Some(new_index_text)
+ };
+
+ let _ = repo.read(cx).set_index_text(&path, new_index_text);
+ }
+ }
+
pub fn expand_selected_diff_hunks(&mut self, cx: &mut Context<Self>) {
let ranges: Vec<_> = self.selections.disjoint.iter().map(|s| s.range()).collect();
self.buffer
@@ -14047,6 +14047,59 @@ async fn test_edit_after_expanded_modification_hunk(
);
}
+#[gpui::test]
+async fn test_stage_and_unstage_added_file_hunk(
+ executor: BackgroundExecutor,
+ cx: &mut gpui::TestAppContext,
+) {
+ init_test(cx, |_| {});
+
+ let mut cx = EditorTestContext::new(cx).await;
+ cx.update_editor(|editor, _, cx| {
+ editor.set_expand_all_diff_hunks(cx);
+ });
+
+ let working_copy = r#"
+ ˇfn main() {
+ println!("hello, world!");
+ }
+ "#
+ .unindent();
+
+ cx.set_state(&working_copy);
+ executor.run_until_parked();
+
+ cx.assert_state_with_diff(
+ r#"
+ + ˇfn main() {
+ + println!("hello, world!");
+ + }
+ "#
+ .unindent(),
+ );
+ cx.assert_index_text(None);
+
+ cx.update_editor(|editor, window, cx| {
+ editor.toggle_staged_selected_diff_hunks(&ToggleStagedSelectedDiffHunks, window, cx);
+ });
+ executor.run_until_parked();
+ cx.assert_index_text(Some(&working_copy.replace("ˇ", "")));
+ cx.assert_state_with_diff(
+ r#"
+ + ˇfn main() {
+ + println!("hello, world!");
+ + }
+ "#
+ .unindent(),
+ );
+
+ cx.update_editor(|editor, window, cx| {
+ editor.toggle_staged_selected_diff_hunks(&ToggleStagedSelectedDiffHunks, window, cx);
+ });
+ executor.run_until_parked();
+ cx.assert_index_text(None);
+}
+
async fn setup_indent_guides_editor(
text: &str,
cx: &mut gpui::TestAppContext,
@@ -1,6 +1,6 @@
use crate::{
- blame_entry_tooltip::{blame_entry_relative_timestamp, BlameEntryTooltip},
code_context_menus::{CodeActionsMenu, MENU_ASIDE_MAX_WIDTH, MENU_ASIDE_MIN_WIDTH, MENU_GAP},
+ commit_tooltip::{blame_entry_relative_timestamp, CommitTooltip, ParsedCommitMessage},
display_map::{
Block, BlockContext, BlockStyle, DisplaySnapshot, HighlightedChunk, ToDisplayPoint,
},
@@ -8,7 +8,7 @@ use crate::{
CurrentLineHighlight, DoubleClickInMultibuffer, MultiCursorModifier, ScrollBeyondLastLine,
ScrollbarDiagnostics, ShowScrollbar,
},
- git::blame::{CommitDetails, GitBlame},
+ git::blame::GitBlame,
hover_popover::{
self, hover_at, HOVER_POPOVER_GAP, MIN_POPOVER_CHARACTER_WIDTH, MIN_POPOVER_LINE_HEIGHT,
},
@@ -417,7 +417,9 @@ impl EditorElement {
register_action(editor, window, Editor::toggle_git_blame);
register_action(editor, window, Editor::toggle_git_blame_inline);
register_action(editor, window, Editor::toggle_selected_diff_hunks);
+ register_action(editor, window, Editor::toggle_staged_selected_diff_hunks);
register_action(editor, window, Editor::expand_all_diff_hunks);
+
register_action(editor, window, |editor, action, window, cx| {
if let Some(task) = editor.format(action, window, cx) {
task.detach_and_notify_err(window, cx);
@@ -3806,39 +3808,44 @@ impl EditorElement {
);
let styled_text = highlighted_edits.to_styled_text(&style.text);
+ let line_count = highlighted_edits.text.lines().count();
- const ACCEPT_INDICATOR_HEIGHT: Pixels = px(24.);
+ const BORDER_WIDTH: Pixels = px(1.);
- let mut element = v_flex()
- .items_end()
+ let mut element = h_flex()
+ .items_start()
.child(
h_flex()
- .h(ACCEPT_INDICATOR_HEIGHT)
- .mb(px(-1.))
- .px_1p5()
- .gap_1()
+ .bg(cx.theme().colors().editor_background)
+ .border(BORDER_WIDTH)
.shadow_sm()
- .bg(Editor::edit_prediction_line_popover_bg_color(cx))
- .border_1()
- .border_b_0()
.border_color(cx.theme().colors().border)
- .rounded_t_lg()
- .children(editor.render_edit_prediction_accept_keybind(window, cx)),
+ .rounded_l_lg()
+ .when(line_count > 1, |el| el.rounded_br_lg())
+ .pr_1()
+ .child(styled_text),
)
.child(
- div()
- .bg(cx.theme().colors().editor_background)
- .border_1()
- .shadow_sm()
+ h_flex()
+ .h(line_height + BORDER_WIDTH * px(2.))
+ .px_1p5()
+ .gap_1()
+ // Workaround: For some reason, there's a gap if we don't do this
+ .ml(-BORDER_WIDTH)
+ .shadow(smallvec![gpui::BoxShadow {
+ color: gpui::black().opacity(0.05),
+ offset: point(px(1.), px(1.)),
+ blur_radius: px(2.),
+ spread_radius: px(0.),
+ }])
+ .bg(Editor::edit_prediction_line_popover_bg_color(cx))
+ .border(BORDER_WIDTH)
.border_color(cx.theme().colors().border)
- .rounded_lg()
- .rounded_tr(Pixels::ZERO)
- .child(styled_text),
+ .rounded_r_lg()
+ .children(editor.render_edit_prediction_accept_keybind(window, cx)),
)
.into_any();
- let line_count = highlighted_edits.text.lines().count();
-
let longest_row =
editor_snapshot.longest_row_in_range(edit_start.row()..edit_end.row() + 1);
let longest_line_width = if visible_row_range.contains(&longest_row) {
@@ -3869,55 +3876,50 @@ impl EditorElement {
// Fully visible if it can be displayed within the window (allow overlapping other
// panes). However, this is only allowed if the popover starts within text_bounds.
- let is_fully_visible = x_after_longest < text_bounds.right()
+ let can_position_to_the_right = x_after_longest < text_bounds.right()
&& x_after_longest + element_bounds.width < viewport_bounds.right();
- let mut origin = if is_fully_visible {
+ let mut origin = if can_position_to_the_right {
point(
x_after_longest,
text_bounds.origin.y + edit_start.row().as_f32() * line_height
- scroll_pixel_position.y,
)
} else {
- // Avoid overlapping both the edited rows and the user's cursor.
- let target_above = DisplayRow(
- edit_start
- .row()
- .0
- .min(
- newest_selection_head
- .map_or(u32::MAX, |cursor_row| cursor_row.row().0),
- )
- .saturating_sub(line_count as u32),
- );
- let mut row_target;
- if visible_row_range.contains(&DisplayRow(target_above.0.saturating_sub(1))) {
- row_target = target_above;
- } else {
- row_target = DisplayRow(
- edit_end.row().0.max(
- newest_selection_head.map_or(0, |cursor_row| cursor_row.row().0),
- ) + 1,
- );
- if !visible_row_range.contains(&row_target) {
- // Not visible, so fallback on displaying immediately below the cursor.
- if let Some(cursor) = newest_selection_head {
- row_target = DisplayRow(cursor.row().0 + 1);
- } else {
- // Not visible and no cursor visible, so fallback on displaying at the top of the editor.
- row_target = DisplayRow(0);
- }
- }
- };
+ let cursor_row = newest_selection_head.map(|head| head.row());
+ let above_edit = edit_start
+ .row()
+ .0
+ .checked_sub(line_count as u32)
+ .map(DisplayRow);
+ let below_edit = Some(edit_end.row() + 1);
+ let above_cursor = cursor_row
+ .and_then(|row| row.0.checked_sub(line_count as u32).map(DisplayRow));
+ let below_cursor = cursor_row.map(|cursor_row| cursor_row + 1);
+
+ // Place the edit popover adjacent to the edit if there is a location
+ // available that is onscreen and does not obscure the cursor. Otherwise,
+ // place it adjacent to the cursor.
+ let row_target = [above_edit, below_edit, above_cursor, below_cursor]
+ .into_iter()
+ .flatten()
+ .find(|&start_row| {
+ let end_row = start_row + line_count as u32;
+ visible_row_range.contains(&start_row)
+ && visible_row_range.contains(&end_row)
+ && cursor_row.map_or(true, |cursor_row| {
+ !((start_row..end_row).contains(&cursor_row))
+ })
+ })?;
- text_bounds.origin
+ content_origin
+ point(
-scroll_pixel_position.x,
row_target.as_f32() * line_height - scroll_pixel_position.y,
)
};
- origin.y -= ACCEPT_INDICATOR_HEIGHT;
+ origin.x -= BORDER_WIDTH;
window.defer_draw(element, origin, 1);
@@ -5937,7 +5939,8 @@ fn render_inline_blame_entry(
let details = blame.read(cx).details_for_entry(&blame_entry);
- let tooltip = cx.new(|_| BlameEntryTooltip::new(blame_entry, details, style, workspace));
+ let tooltip =
+ cx.new(|_| CommitTooltip::blame_entry(blame_entry, details, style.clone(), workspace));
h_flex()
.id("inline-blame")
@@ -5987,8 +5990,14 @@ fn render_blame_entry(
let workspace = editor.read(cx).workspace.as_ref().map(|(w, _)| w.clone());
- let tooltip =
- cx.new(|_| BlameEntryTooltip::new(blame_entry.clone(), details.clone(), style, workspace));
+ let tooltip = cx.new(|_| {
+ CommitTooltip::blame_entry(
+ blame_entry.clone(),
+ details.clone(),
+ style.clone(),
+ workspace,
+ )
+ });
h_flex()
.w_full()
@@ -6038,7 +6047,7 @@ fn render_blame_entry(
fn deploy_blame_entry_context_menu(
blame_entry: &BlameEntry,
- details: Option<&CommitDetails>,
+ details: Option<&ParsedCommitMessage>,
editor: Entity<Editor>,
position: gpui::Point<Pixels>,
window: &mut Window,
@@ -7186,7 +7195,7 @@ impl Element for EditorElement {
let autoscrolled = if autoscroll_horizontally {
editor.autoscroll_horizontally(
start_row,
- editor_width - (letter_size.width / 2.0),
+ editor_width - (letter_size.width / 2.0) + style.scrollbar_width,
scroll_width,
em_width,
&line_layouts,
@@ -7277,7 +7286,7 @@ impl Element for EditorElement {
let autoscrolled = if autoscroll_horizontally {
editor.autoscroll_horizontally(
start_row,
- editor_width - (letter_size.width / 2.0),
+ editor_width - (letter_size.width / 2.0) + style.scrollbar_width,
scroll_width,
em_width,
&line_layouts,
@@ -2,7 +2,7 @@ use anyhow::Result;
use collections::HashMap;
use git::{
blame::{Blame, BlameEntry},
- parse_git_remote_url, GitHostingProvider, GitHostingProviderRegistry, Oid, PullRequest,
+ parse_git_remote_url, GitHostingProvider, GitHostingProviderRegistry, Oid,
};
use gpui::{App, Context, Entity, Subscription, Task};
use http_client::HttpClient;
@@ -12,8 +12,11 @@ use project::{Project, ProjectItem};
use smallvec::SmallVec;
use std::{sync::Arc, time::Duration};
use sum_tree::SumTree;
+use ui::SharedString;
use url::Url;
+use crate::commit_tooltip::ParsedCommitMessage;
+
#[derive(Clone, Debug, Default)]
pub struct GitBlameEntry {
pub rows: u32,
@@ -77,7 +80,11 @@ impl GitRemote {
self.host.supports_avatars()
}
- pub async fn avatar_url(&self, commit: Oid, client: Arc<dyn HttpClient>) -> Option<Url> {
+ pub async fn avatar_url(
+ &self,
+ commit: SharedString,
+ client: Arc<dyn HttpClient>,
+ ) -> Option<Url> {
self.host
.commit_author_avatar_url(&self.owner, &self.repo, commit, client)
.await
@@ -85,21 +92,11 @@ impl GitRemote {
.flatten()
}
}
-
-#[derive(Clone, Debug)]
-pub struct CommitDetails {
- pub message: String,
- pub parsed_message: ParsedMarkdown,
- pub permalink: Option<Url>,
- pub pull_request: Option<PullRequest>,
- pub remote: Option<GitRemote>,
-}
-
pub struct GitBlame {
project: Entity<Project>,
buffer: Entity<Buffer>,
entries: SumTree<GitBlameEntry>,
- commit_details: HashMap<Oid, CommitDetails>,
+ commit_details: HashMap<Oid, crate::commit_tooltip::ParsedCommitMessage>,
buffer_snapshot: BufferSnapshot,
buffer_edits: text::Subscription,
task: Task<Result<()>>,
@@ -187,7 +184,7 @@ impl GitBlame {
self.generated
}
- pub fn details_for_entry(&self, entry: &BlameEntry) -> Option<CommitDetails> {
+ pub fn details_for_entry(&self, entry: &BlameEntry) -> Option<ParsedCommitMessage> {
self.commit_details.get(&entry.sha).cloned()
}
@@ -480,7 +477,7 @@ async fn parse_commit_messages(
deprecated_permalinks: &HashMap<Oid, Url>,
provider_registry: Arc<GitHostingProviderRegistry>,
languages: &Arc<LanguageRegistry>,
-) -> HashMap<Oid, CommitDetails> {
+) -> HashMap<Oid, ParsedCommitMessage> {
let mut commit_details = HashMap::default();
let parsed_remote_url = remote_url
@@ -519,8 +516,8 @@ async fn parse_commit_messages(
commit_details.insert(
oid,
- CommitDetails {
- message,
+ ParsedCommitMessage {
+ message: message.into(),
parsed_message,
permalink,
remote,
@@ -298,6 +298,18 @@ impl EditorTestContext {
self.cx.run_until_parked();
}
+ pub fn assert_index_text(&mut self, expected: Option<&str>) {
+ let fs = self.update_editor(|editor, _, cx| {
+ editor.project.as_ref().unwrap().read(cx).fs().as_fake()
+ });
+ let path = self.update_buffer(|buffer, _| buffer.file().unwrap().path().clone());
+ let mut found = None;
+ fs.with_git_state(&Self::root_path().join(".git"), false, |git_state| {
+ found = git_state.index_contents.get(path.as_ref()).cloned();
+ });
+ assert_eq!(expected, found.as_deref());
+ }
+
/// Change the editor's text and selections using a string containing
/// embedded range markers that represent the ranges and directions of
/// each selection.
@@ -64,6 +64,12 @@ impl FeatureFlag for PredictEditsFeatureFlag {
const NAME: &'static str = "predict-edits";
}
+/// A feature flag that controls things that shouldn't go live until the predictive edits launch.
+pub struct PredictEditsLaunchFeatureFlag;
+impl FeatureFlag for PredictEditsLaunchFeatureFlag {
+ const NAME: &'static str = "predict-edits-launch";
+}
+
pub struct PredictEditsRateCompletionsFeatureFlag;
impl FeatureFlag for PredictEditsRateCompletionsFeatureFlag {
const NAME: &'static str = "predict-edits-rate-completions";
@@ -132,8 +132,8 @@ pub struct BlameEntry {
pub author_time: Option<i64>,
pub author_tz: Option<String>,
- pub committer: Option<String>,
- pub committer_mail: Option<String>,
+ pub committer_name: Option<String>,
+ pub committer_email: Option<String>,
pub committer_time: Option<i64>,
pub committer_tz: Option<String>,
@@ -255,10 +255,12 @@ fn parse_git_blame(output: &str) -> Result<Vec<BlameEntry>> {
.clone_from(&existing_entry.author_mail);
new_entry.author_time = existing_entry.author_time;
new_entry.author_tz.clone_from(&existing_entry.author_tz);
- new_entry.committer.clone_from(&existing_entry.committer);
new_entry
- .committer_mail
- .clone_from(&existing_entry.committer_mail);
+ .committer_name
+ .clone_from(&existing_entry.committer_name);
+ new_entry
+ .committer_email
+ .clone_from(&existing_entry.committer_email);
new_entry.committer_time = existing_entry.committer_time;
new_entry
.committer_tz
@@ -288,8 +290,8 @@ fn parse_git_blame(output: &str) -> Result<Vec<BlameEntry>> {
}
"author-tz" if is_committed => entry.author_tz = Some(value.into()),
- "committer" if is_committed => entry.committer = Some(value.into()),
- "committer-mail" if is_committed => entry.committer_mail = Some(value.into()),
+ "committer" if is_committed => entry.committer_name = Some(value.into()),
+ "committer-mail" if is_committed => entry.committer_email = Some(value.into()),
"committer-time" if is_committed => {
entry.committer_time = Some(value.parse::<i64>()?)
}
@@ -38,6 +38,7 @@ actions!(
StageAll,
UnstageAll,
RevertAll,
+ Uncommit,
Commit,
ClearCommitMessage
]
@@ -4,13 +4,11 @@ use anyhow::Result;
use async_trait::async_trait;
use collections::BTreeMap;
use derive_more::{Deref, DerefMut};
-use gpui::{App, Global};
+use gpui::{App, Global, SharedString};
use http_client::HttpClient;
use parking_lot::RwLock;
use url::Url;
-use crate::Oid;
-
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct PullRequest {
pub number: u32,
@@ -83,7 +81,7 @@ pub trait GitHostingProvider {
&self,
_repo_owner: &str,
_repo: &str,
- _commit: Oid,
+ _commit: SharedString,
_http_client: Arc<dyn HttpClient>,
) -> Result<Option<Url>> {
Ok(None)
@@ -1,13 +1,15 @@
use crate::status::FileStatus;
use crate::GitHostingProviderRegistry;
use crate::{blame::Blame, status::GitStatus};
-use anyhow::{anyhow, Context as _, Result};
+use anyhow::{anyhow, Context, Result};
use collections::{HashMap, HashSet};
use git2::BranchType;
use gpui::SharedString;
use parking_lot::Mutex;
use rope::Rope;
use std::borrow::Borrow;
+use std::io::Write as _;
+use std::process::Stdio;
use std::sync::LazyLock;
use std::{
cmp::Ordering,
@@ -18,12 +20,63 @@ use sum_tree::MapSeekTarget;
use util::command::new_std_command;
use util::ResultExt;
-#[derive(Clone, Debug, Hash, PartialEq)]
+#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub struct Branch {
pub is_head: bool,
pub name: SharedString,
- /// Timestamp of most recent commit, normalized to Unix Epoch format.
- pub unix_timestamp: Option<i64>,
+ pub upstream: Option<Upstream>,
+ pub most_recent_commit: Option<CommitSummary>,
+}
+
+impl Branch {
+ pub fn priority_key(&self) -> (bool, Option<i64>) {
+ (
+ self.is_head,
+ self.most_recent_commit
+ .as_ref()
+ .map(|commit| commit.commit_timestamp),
+ )
+ }
+}
+
+#[derive(Clone, Debug, Hash, PartialEq, Eq)]
+pub struct Upstream {
+ pub ref_name: SharedString,
+ pub tracking: Option<UpstreamTracking>,
+}
+
+#[derive(Clone, Debug, Hash, PartialEq, Eq)]
+pub struct UpstreamTracking {
+ pub ahead: u32,
+ pub behind: u32,
+}
+
+#[derive(Clone, Debug, Hash, PartialEq, Eq)]
+pub struct CommitSummary {
+ pub sha: SharedString,
+ pub subject: SharedString,
+ /// This is a unix timestamp
+ pub commit_timestamp: i64,
+}
+
+#[derive(Clone, Debug, Hash, PartialEq, Eq)]
+pub struct CommitDetails {
+ pub sha: SharedString,
+ pub message: SharedString,
+ pub commit_timestamp: i64,
+ pub committer_email: SharedString,
+ pub committer_name: SharedString,
+}
+
+pub enum ResetMode {
+ // reset the branch pointer, leave index and worktree unchanged
+ // (this will make it look like things that were committed are now
+ // staged)
+ Soft,
+ // reset the branch pointer and index, leave worktree unchanged
+ // (this makes it look as though things that were committed are now
+ // unstaged)
+ Mixed,
}
pub trait GitRepository: Send + Sync {
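
The two ResetMode variants correspond to git reset --soft and git reset --mixed: both move the branch pointer back, but only Mixed also resets the index. A minimal sketch of the uncommit-style operation this enables, assuming repo is a &dyn GitRepository:

    // Undo the last commit while keeping its changes staged.
    repo.reset("HEAD^", ResetMode::Soft)?;
    // Or undo it and leave those changes unstaged in the worktree.
    repo.reset("HEAD^", ResetMode::Mixed)?;
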
@@ -39,9 +92,10 @@ pub trait GitRepository: Send + Sync {
/// Note that for symlink entries, this will return the contents of the symlink, not the target.
fn load_committed_text(&self, path: &RepoPath) -> Option<String>;
+ fn set_index_text(&self, path: &RepoPath, content: Option<String>) -> anyhow::Result<()>;
+
/// Returns the URL of the remote with the given name.
fn remote_url(&self, name: &str) -> Option<String>;
- fn branch_name(&self) -> Option<String>;
/// Returns the SHA of the current HEAD.
fn head_sha(&self) -> Option<String>;
@@ -56,6 +110,10 @@ pub trait GitRepository: Send + Sync {
fn create_branch(&self, _: &str) -> Result<()>;
fn branch_exits(&self, _: &str) -> Result<bool>;
+ fn reset(&self, commit: &str, mode: ResetMode) -> Result<()>;
+
+ fn show(&self, commit: &str) -> Result<CommitDetails>;
+
fn blame(&self, path: &Path, content: Rope) -> Result<crate::blame::Blame>;
/// Returns the absolute path to the repository. For worktrees, this will be the path to the
@@ -128,6 +186,53 @@ impl GitRepository for RealGitRepository {
repo.commondir().into()
}
+ fn show(&self, commit: &str) -> Result<CommitDetails> {
+ let repo = self.repository.lock();
+ let Ok(commit) = repo.revparse_single(commit)?.into_commit() else {
+ anyhow::bail!("{} is not a commit", commit);
+ };
+ let details = CommitDetails {
+ sha: commit.id().to_string().into(),
+ message: String::from_utf8_lossy(commit.message_raw_bytes())
+ .to_string()
+ .into(),
+ commit_timestamp: commit.time().seconds(),
+ committer_email: String::from_utf8_lossy(commit.committer().email_bytes())
+ .to_string()
+ .into(),
+ committer_name: String::from_utf8_lossy(commit.committer().name_bytes())
+ .to_string()
+ .into(),
+ };
+ Ok(details)
+ }
+
+ fn reset(&self, commit: &str, mode: ResetMode) -> Result<()> {
+ let working_directory = self
+ .repository
+ .lock()
+ .workdir()
+ .context("failed to read git work directory")?
+ .to_path_buf();
+
+ let mode_flag = match mode {
+ ResetMode::Mixed => "--mixed",
+ ResetMode::Soft => "--soft",
+ };
+
+ let output = new_std_command(&self.git_binary_path)
+ .current_dir(&working_directory)
+ .args(["reset", mode_flag, commit])
+ .output()?;
+ if !output.status.success() {
+ return Err(anyhow!(
+ "Failed to reset:\n{}",
+ String::from_utf8_lossy(&output.stderr)
+ ));
+ }
+ Ok(())
+ }
+
fn load_index_text(&self, path: &RepoPath) -> Option<String> {
fn logic(repo: &git2::Repository, path: &RepoPath) -> Result<Option<String>> {
const STAGE_NORMAL: i32 = 0;
@@ -161,19 +266,56 @@ impl GitRepository for RealGitRepository {
Some(content)
}
+ fn set_index_text(&self, path: &RepoPath, content: Option<String>) -> anyhow::Result<()> {
+ let working_directory = self
+ .repository
+ .lock()
+ .workdir()
+ .context("failed to read git work directory")?
+ .to_path_buf();
+ if let Some(content) = content {
+ let mut child = new_std_command(&self.git_binary_path)
+ .current_dir(&working_directory)
+ .args(["hash-object", "-w", "--stdin"])
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .spawn()?;
+ child.stdin.take().unwrap().write_all(content.as_bytes())?;
+ let output = child.wait_with_output()?.stdout;
+ let sha = String::from_utf8(output)?;
+
+ log::debug!("indexing SHA: {sha}, path {path:?}");
+
+ let status = new_std_command(&self.git_binary_path)
+ .current_dir(&working_directory)
+ .args(["update-index", "--add", "--cacheinfo", "100644", &sha])
+ .arg(path.as_ref())
+ .status()?;
+
+ if !status.success() {
+ return Err(anyhow!("Failed to add to index: {status:?}"));
+ }
+ } else {
+ let status = new_std_command(&self.git_binary_path)
+ .current_dir(&working_directory)
+ .args(["update-index", "--force-remove"])
+ .arg(path.as_ref())
+ .status()?;
+
+ if !status.success() {
+ return Err(anyhow!("Failed to remove from index: {status:?}"));
+ }
+ }
+
+ Ok(())
+ }
+
fn remote_url(&self, name: &str) -> Option<String> {
let repo = self.repository.lock();
let remote = repo.find_remote(name).ok()?;
remote.url().map(|url| url.to_string())
}
- fn branch_name(&self) -> Option<String> {
- let repo = self.repository.lock();
- let head = repo.head().log_err()?;
- let branch = String::from_utf8_lossy(head.shorthand_bytes());
- Some(branch.to_string())
- }
-
fn head_sha(&self) -> Option<String> {
Some(self.repository.lock().head().ok()?.target()?.to_string())
}
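For reference, a minimal sketch of how the new `set_index_text` method is meant to be called, assuming `repo_path: RepoPath` and `new_contents: String` are already in hand; as shown in the hunk above, the real implementation shells out to `git hash-object -w --stdin` followed by `git update-index --cacheinfo`, or `git update-index --force-remove` when the content is `None`:

    // Stage exactly this content for the file, independent of the worktree copy.
    repo.set_index_text(&repo_path, Some(new_contents))?;

    // Drop the file's entry from the index entirely.
    repo.set_index_text(&repo_path, None)?;
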
@@ -213,33 +355,62 @@ impl GitRepository for RealGitRepository {
}
fn branches(&self) -> Result<Vec<Branch>> {
- let repo = self.repository.lock();
- let local_branches = repo.branches(Some(BranchType::Local))?;
- let valid_branches = local_branches
- .filter_map(|branch| {
- branch.ok().and_then(|(branch, _)| {
- let is_head = branch.is_head();
- let name = branch
- .name()
- .ok()
- .flatten()
- .map(|name| name.to_string().into())?;
- let timestamp = branch.get().peel_to_commit().ok()?.time();
- let unix_timestamp = timestamp.seconds();
- let timezone_offset = timestamp.offset_minutes();
- let utc_offset =
- time::UtcOffset::from_whole_seconds(timezone_offset * 60).ok()?;
- let unix_timestamp =
- time::OffsetDateTime::from_unix_timestamp(unix_timestamp).ok()?;
- Some(Branch {
- is_head,
- name,
- unix_timestamp: Some(unix_timestamp.to_offset(utc_offset).unix_timestamp()),
- })
- })
- })
- .collect();
- Ok(valid_branches)
+ let working_directory = self
+ .repository
+ .lock()
+ .workdir()
+ .context("failed to read git work directory")?
+ .to_path_buf();
+ let fields = [
+ "%(HEAD)",
+ "%(objectname)",
+ "%(refname)",
+ "%(upstream)",
+ "%(upstream:track)",
+ "%(committerdate:unix)",
+ "%(contents:subject)",
+ ]
+ .join("%00");
+ let args = vec!["for-each-ref", "refs/heads/*", "--format", &fields];
+
+ let output = new_std_command(&self.git_binary_path)
+ .current_dir(&working_directory)
+ .args(args)
+ .output()?;
+
+ if !output.status.success() {
+ return Err(anyhow!(
+ "Failed to git git branches:\n{}",
+ String::from_utf8_lossy(&output.stderr)
+ ));
+ }
+
+ let input = String::from_utf8_lossy(&output.stdout);
+
+ let mut branches = parse_branch_input(&input)?;
+ if branches.is_empty() {
+ let args = vec!["symbolic-ref", "--quiet", "--short", "HEAD"];
+
+ let output = new_std_command(&self.git_binary_path)
+ .current_dir(&working_directory)
+ .args(args)
+ .output()?;
+
+ // git symbolic-ref returns a non-0 exit code if HEAD points
+ // to something other than a branch
+ if output.status.success() {
+ let name = String::from_utf8_lossy(&output.stdout).trim().to_string();
+
+ branches.push(Branch {
+ name: name.into(),
+ is_head: true,
+ upstream: None,
+ most_recent_commit: None,
+ });
+ }
+ }
+
+ Ok(branches)
}
fn change_branch(&self, name: &str) -> Result<()> {
@@ -412,13 +583,22 @@ impl GitRepository for FakeGitRepository {
state.head_contents.get(path.as_ref()).cloned()
}
- fn remote_url(&self, _name: &str) -> Option<String> {
- None
+ fn set_index_text(&self, path: &RepoPath, content: Option<String>) -> anyhow::Result<()> {
+ let mut state = self.state.lock();
+ if let Some(content) = content {
+ state.index_contents.insert(path.clone(), content);
+ } else {
+ state.index_contents.remove(path);
+ }
+ state
+ .event_emitter
+ .try_send(state.path.clone())
+ .expect("Dropped repo change event");
+ Ok(())
}
- fn branch_name(&self) -> Option<String> {
- let state = self.state.lock();
- state.current_branch_name.clone()
+ fn remote_url(&self, _name: &str) -> Option<String> {
+ None
}
fn head_sha(&self) -> Option<String> {
@@ -429,6 +609,14 @@ impl GitRepository for FakeGitRepository {
vec![]
}
+ fn show(&self, _: &str) -> Result<CommitDetails> {
+ unimplemented!()
+ }
+
+ fn reset(&self, _: &str, _: ResetMode) -> Result<()> {
+ unimplemented!()
+ }
+
fn path(&self) -> PathBuf {
let state = self.state.lock();
state.path.clone()
@@ -471,7 +659,8 @@ impl GitRepository for FakeGitRepository {
.map(|branch_name| Branch {
is_head: Some(branch_name) == current_branch.as_ref(),
name: branch_name.into(),
- unix_timestamp: None,
+ most_recent_commit: None,
+ upstream: None,
})
.collect())
}
@@ -641,3 +830,106 @@ impl<'a> MapSeekTarget<RepoPath> for RepoPathDescendants<'a> {
}
}
}
+
+fn parse_branch_input(input: &str) -> Result<Vec<Branch>> {
+ let mut branches = Vec::new();
+ for line in input.split('\n') {
+ if line.is_empty() {
+ continue;
+ }
+ let mut fields = line.split('\x00');
+ let is_current_branch = fields.next().context("no HEAD")? == "*";
+ let head_sha: SharedString = fields.next().context("no objectname")?.to_string().into();
+ let ref_name: SharedString = fields
+ .next()
+ .context("no refname")?
+ .strip_prefix("refs/heads/")
+ .context("unexpected format for refname")?
+ .to_string()
+ .into();
+ let upstream_name = fields.next().context("no upstream")?.to_string();
+ let upstream_tracking = parse_upstream_track(fields.next().context("no upstream:track")?)?;
+ let committer_date = fields.next().context("no committerdate")?.parse::<i64>()?;
+ let subject: SharedString = fields
+ .next()
+ .context("no contents:subject")?
+ .to_string()
+ .into();
+
+ branches.push(Branch {
+ is_head: is_current_branch,
+ name: ref_name,
+ most_recent_commit: Some(CommitSummary {
+ sha: head_sha,
+ subject,
+ commit_timestamp: committer_date,
+ }),
+ upstream: if upstream_name.is_empty() {
+ None
+ } else {
+ Some(Upstream {
+ ref_name: upstream_name.into(),
+ tracking: upstream_tracking,
+ })
+ },
+ })
+ }
+
+ Ok(branches)
+}
+
+fn parse_upstream_track(upstream_track: &str) -> Result<Option<UpstreamTracking>> {
+ if upstream_track == "" {
+ return Ok(Some(UpstreamTracking {
+ ahead: 0,
+ behind: 0,
+ }));
+ }
+
+ let upstream_track = upstream_track
+ .strip_prefix("[")
+ .ok_or_else(|| anyhow!("missing ["))?;
+ let upstream_track = upstream_track
+ .strip_suffix("]")
+ .ok_or_else(|| anyhow!("missing ["))?;
+ let mut ahead: u32 = 0;
+ let mut behind: u32 = 0;
+ for component in upstream_track.split(", ") {
+ if component == "gone" {
+ return Ok(None);
+ }
+ if let Some(ahead_num) = component.strip_prefix("ahead ") {
+ ahead = ahead_num.parse::<u32>()?;
+ }
+ if let Some(behind_num) = component.strip_prefix("behind ") {
+ behind = behind_num.parse::<u32>()?;
+ }
+ }
+ Ok(Some(UpstreamTracking { ahead, behind }))
+}
+
+#[test]
+fn test_branches_parsing() {
+ // suppress "help: octal escapes are not supported, `\0` is always null"
+ #[allow(clippy::octal_escapes)]
+ let input = "*\0060964da10574cd9bf06463a53bf6e0769c5c45e\0refs/heads/zed-patches\0refs/remotes/origin/zed-patches\0\01733187470\0generated protobuf\n";
+ assert_eq!(
+ parse_branch_input(&input).unwrap(),
+ vec![Branch {
+ is_head: true,
+ name: "zed-patches".into(),
+ upstream: Some(Upstream {
+ ref_name: "refs/remotes/origin/zed-patches".into(),
+ tracking: Some(UpstreamTracking {
+ ahead: 0,
+ behind: 0
+ })
+ }),
+ most_recent_commit: Some(CommitSummary {
+ sha: "060964da10574cd9bf06463a53bf6e0769c5c45e".into(),
+ subject: "generated protobuf".into(),
+ commit_timestamp: 1733187470,
+ })
+ }]
+ )
+}
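A few extra assertions, not part of the change itself, illustrating the tracking strings `parse_upstream_track` accepts (mirroring the parser above):

    #[test]
    fn test_upstream_track_parsing() {
        // An empty tracking string is treated as fully in sync.
        assert_eq!(
            parse_upstream_track("").unwrap(),
            Some(UpstreamTracking { ahead: 0, behind: 0 })
        );
        // Ahead/behind counts come from the bracketed components.
        assert_eq!(
            parse_upstream_track("[ahead 2, behind 1]").unwrap(),
            Some(UpstreamTracking { ahead: 2, behind: 1 })
        );
        // A deleted upstream ("gone") is reported as `None`.
        assert_eq!(parse_upstream_track("[gone]").unwrap(), None);
    }
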
@@ -10,8 +10,8 @@
"author_mail": "<64036912+mmkaram@users.noreply.github.com>",
"author_time": 1708621949,
"author_tz": "-0800",
- "committer": "GitHub",
- "committer_mail": "<noreply@github.com>",
+ "committer_name": "GitHub",
+ "committer_email": "<noreply@github.com>",
"committer_time": 1708621949,
"committer_tz": "-0700",
"summary": "Add option to either use system clipboard or vim clipboard (#7936)",
@@ -29,8 +29,8 @@
"author_mail": "<64036912+mmkaram@users.noreply.github.com>",
"author_time": 1708621949,
"author_tz": "-0800",
- "committer": "GitHub",
- "committer_mail": "<noreply@github.com>",
+ "committer_name": "GitHub",
+ "committer_email": "<noreply@github.com>",
"committer_time": 1708621949,
"committer_tz": "-0700",
"summary": "Add option to either use system clipboard or vim clipboard (#7936)",
@@ -48,8 +48,8 @@
"author_mail": "<64036912+mmkaram@users.noreply.github.com>",
"author_time": 1708621949,
"author_tz": "-0800",
- "committer": "GitHub",
- "committer_mail": "<noreply@github.com>",
+ "committer_name": "GitHub",
+ "committer_email": "<noreply@github.com>",
"committer_time": 1708621949,
"committer_tz": "-0700",
"summary": "Add option to either use system clipboard or vim clipboard (#7936)",
@@ -67,8 +67,8 @@
"author_mail": "<64036912+mmkaram@users.noreply.github.com>",
"author_time": 1708621949,
"author_tz": "-0800",
- "committer": "GitHub",
- "committer_mail": "<noreply@github.com>",
+ "committer_name": "GitHub",
+ "committer_email": "<noreply@github.com>",
"committer_time": 1708621949,
"committer_tz": "-0700",
"summary": "Add option to either use system clipboard or vim clipboard (#7936)",
@@ -86,8 +86,8 @@
"author_mail": "<64036912+mmkaram@users.noreply.github.com>",
"author_time": 1708621949,
"author_tz": "-0800",
- "committer": "GitHub",
- "committer_mail": "<noreply@github.com>",
+ "committer_name": "GitHub",
+ "committer_email": "<noreply@github.com>",
"committer_time": 1708621949,
"committer_tz": "-0700",
"summary": "Add option to either use system clipboard or vim clipboard (#7936)",
@@ -105,8 +105,8 @@
"author_mail": "<64036912+mmkaram@users.noreply.github.com>",
"author_time": 1708621949,
"author_tz": "-0800",
- "committer": "GitHub",
- "committer_mail": "<noreply@github.com>",
+ "committer_name": "GitHub",
+ "committer_email": "<noreply@github.com>",
"committer_time": 1708621949,
"committer_tz": "-0700",
"summary": "Add option to either use system clipboard or vim clipboard (#7936)",
@@ -124,8 +124,8 @@
"author_mail": "<64036912+mmkaram@users.noreply.github.com>",
"author_time": 1708621949,
"author_tz": "-0800",
- "committer": "GitHub",
- "committer_mail": "<noreply@github.com>",
+ "committer_name": "GitHub",
+ "committer_email": "<noreply@github.com>",
"committer_time": 1708621949,
"committer_tz": "-0700",
"summary": "Add option to either use system clipboard or vim clipboard (#7936)",
@@ -143,8 +143,8 @@
"author_mail": "<conrad@zed.dev>",
"author_time": 1707520689,
"author_tz": "-0700",
- "committer": "GitHub",
- "committer_mail": "<noreply@github.com>",
+ "committer_name": "GitHub",
+ "committer_email": "<noreply@github.com>",
"committer_time": 1707520689,
"committer_tz": "-0700",
"summary": "Highlight selections on vim yank (#7638)",
@@ -162,8 +162,8 @@
"author_mail": "<conrad@zed.dev>",
"author_time": 1707520689,
"author_tz": "-0700",
- "committer": "GitHub",
- "committer_mail": "<noreply@github.com>",
+ "committer_name": "GitHub",
+ "committer_email": "<noreply@github.com>",
"committer_time": 1707520689,
"committer_tz": "-0700",
"summary": "Highlight selections on vim yank (#7638)",
@@ -181,8 +181,8 @@
"author_mail": "<conrad@zed.dev>",
"author_time": 1707520689,
"author_tz": "-0700",
- "committer": "GitHub",
- "committer_mail": "<noreply@github.com>",
+ "committer_name": "GitHub",
+ "committer_email": "<noreply@github.com>",
"committer_time": 1707520689,
"committer_tz": "-0700",
"summary": "Highlight selections on vim yank (#7638)",
@@ -200,8 +200,8 @@
"author_mail": "<conrad@zed.dev>",
"author_time": 1707520689,
"author_tz": "-0700",
- "committer": "GitHub",
- "committer_mail": "<noreply@github.com>",
+ "committer_name": "GitHub",
+ "committer_email": "<noreply@github.com>",
"committer_time": 1707520689,
"committer_tz": "-0700",
"summary": "Highlight selections on vim yank (#7638)",
@@ -219,8 +219,8 @@
"author_mail": "<conrad@zed.dev>",
"author_time": 1707520689,
"author_tz": "-0700",
- "committer": "GitHub",
- "committer_mail": "<noreply@github.com>",
+ "committer_name": "GitHub",
+ "committer_email": "<noreply@github.com>",
"committer_time": 1707520689,
"committer_tz": "-0700",
"summary": "Highlight selections on vim yank (#7638)",
@@ -238,8 +238,8 @@
"author_mail": "<conrad@zed.dev>",
"author_time": 1707520689,
"author_tz": "-0700",
- "committer": "GitHub",
- "committer_mail": "<noreply@github.com>",
+ "committer_name": "GitHub",
+ "committer_email": "<noreply@github.com>",
"committer_time": 1707520689,
"committer_tz": "-0700",
"summary": "Highlight selections on vim yank (#7638)",
@@ -257,8 +257,8 @@
"author_mail": "<conrad@zed.dev>",
"author_time": 1707520689,
"author_tz": "-0700",
- "committer": "GitHub",
- "committer_mail": "<noreply@github.com>",
+ "committer_name": "GitHub",
+ "committer_email": "<noreply@github.com>",
"committer_time": 1707520689,
"committer_tz": "-0700",
"summary": "Highlight selections on vim yank (#7638)",
@@ -276,8 +276,8 @@
"author_mail": "<conrad@zed.dev>",
"author_time": 1707520689,
"author_tz": "-0700",
- "committer": "GitHub",
- "committer_mail": "<noreply@github.com>",
+ "committer_name": "GitHub",
+ "committer_email": "<noreply@github.com>",
"committer_time": 1707520689,
"committer_tz": "-0700",
"summary": "Highlight selections on vim yank (#7638)",
@@ -295,8 +295,8 @@
"author_mail": "<conrad@zed.dev>",
"author_time": 1707520689,
"author_tz": "-0700",
- "committer": "GitHub",
- "committer_mail": "<noreply@github.com>",
+ "committer_name": "GitHub",
+ "committer_email": "<noreply@github.com>",
"committer_time": 1707520689,
"committer_tz": "-0700",
"summary": "Highlight selections on vim yank (#7638)",
@@ -314,8 +314,8 @@
"author_mail": "<conrad@zed.dev>",
"author_time": 1707520689,
"author_tz": "-0700",
- "committer": "GitHub",
- "committer_mail": "<noreply@github.com>",
+ "committer_name": "GitHub",
+ "committer_email": "<noreply@github.com>",
"committer_time": 1707520689,
"committer_tz": "-0700",
"summary": "Highlight selections on vim yank (#7638)",
@@ -333,8 +333,8 @@
"author_mail": "<conrad@zed.dev>",
"author_time": 1707520689,
"author_tz": "-0700",
- "committer": "GitHub",
- "committer_mail": "<noreply@github.com>",
+ "committer_name": "GitHub",
+ "committer_email": "<noreply@github.com>",
"committer_time": 1707520689,
"committer_tz": "-0700",
"summary": "Highlight selections on vim yank (#7638)",
@@ -352,8 +352,8 @@
"author_mail": "<maxbrunsfeld@gmail.com>",
"author_time": 1705619094,
"author_tz": "-0800",
- "committer": "Max Brunsfeld",
- "committer_mail": "<maxbrunsfeld@gmail.com>",
+ "committer_name": "Max Brunsfeld",
+ "committer_email": "<maxbrunsfeld@gmail.com>",
"committer_time": 1705619205,
"committer_tz": "-0800",
"summary": "Merge branch 'main' into language-api-docs",
@@ -371,8 +371,8 @@
"author_mail": "<maxbrunsfeld@gmail.com>",
"author_time": 1705619094,
"author_tz": "-0800",
- "committer": "Max Brunsfeld",
- "committer_mail": "<maxbrunsfeld@gmail.com>",
+ "committer_name": "Max Brunsfeld",
+ "committer_email": "<maxbrunsfeld@gmail.com>",
"committer_time": 1705619205,
"committer_tz": "-0800",
"summary": "Merge branch 'main' into language-api-docs",
@@ -390,8 +390,8 @@
"author_mail": "<conrad@zed.dev>",
"author_time": 1694798044,
"author_tz": "-0600",
- "committer": "Conrad Irwin",
- "committer_mail": "<conrad@zed.dev>",
+ "committer_name": "Conrad Irwin",
+ "committer_email": "<conrad@zed.dev>",
"committer_time": 1694798044,
"committer_tz": "-0600",
"summary": "Fix Y on last line with no trailing new line",
@@ -409,8 +409,8 @@
"author_mail": "<conrad@zed.dev>",
"author_time": 1694798044,
"author_tz": "-0600",
- "committer": "Conrad Irwin",
- "committer_mail": "<conrad@zed.dev>",
+ "committer_name": "Conrad Irwin",
+ "committer_email": "<conrad@zed.dev>",
"committer_time": 1694798044,
"committer_tz": "-0600",
"summary": "Fix Y on last line with no trailing new line",
@@ -428,8 +428,8 @@
"author_mail": "<conrad@zed.dev>",
"author_time": 1692855942,
"author_tz": "-0600",
- "committer": "Conrad Irwin",
- "committer_mail": "<conrad@zed.dev>",
+ "committer_name": "Conrad Irwin",
+ "committer_email": "<conrad@zed.dev>",
"committer_time": 1692856812,
"committer_tz": "-0600",
"summary": "vim: Fix linewise copy of last line with no trailing newline",
@@ -447,8 +447,8 @@
"author_mail": "<conrad@zed.dev>",
"author_time": 1692855942,
"author_tz": "-0600",
- "committer": "Conrad Irwin",
- "committer_mail": "<conrad@zed.dev>",
+ "committer_name": "Conrad Irwin",
+ "committer_email": "<conrad@zed.dev>",
"committer_time": 1692856812,
"committer_tz": "-0600",
"summary": "vim: Fix linewise copy of last line with no trailing newline",
@@ -466,8 +466,8 @@
"author_mail": "<conrad@zed.dev>",
"author_time": 1692855942,
"author_tz": "-0600",
- "committer": "Conrad Irwin",
- "committer_mail": "<conrad@zed.dev>",
+ "committer_name": "Conrad Irwin",
+ "committer_email": "<conrad@zed.dev>",
"committer_time": 1692856812,
"committer_tz": "-0600",
"summary": "vim: Fix linewise copy of last line with no trailing newline",
@@ -485,8 +485,8 @@
"author_mail": "<conrad@zed.dev>",
"author_time": 1692855942,
"author_tz": "-0600",
- "committer": "Conrad Irwin",
- "committer_mail": "<conrad@zed.dev>",
+ "committer_name": "Conrad Irwin",
+ "committer_email": "<conrad@zed.dev>",
"committer_time": 1692856812,
"committer_tz": "-0600",
"summary": "vim: Fix linewise copy of last line with no trailing newline",
@@ -504,8 +504,8 @@
"author_mail": "<conrad@zed.dev>",
"author_time": 1692855942,
"author_tz": "-0600",
- "committer": "Conrad Irwin",
- "committer_mail": "<conrad@zed.dev>",
+ "committer_name": "Conrad Irwin",
+ "committer_email": "<conrad@zed.dev>",
"committer_time": 1692856812,
"committer_tz": "-0600",
"summary": "vim: Fix linewise copy of last line with no trailing newline",
@@ -523,8 +523,8 @@
"author_mail": "<conrad@zed.dev>",
"author_time": 1692644159,
"author_tz": "-0600",
- "committer": "Conrad Irwin",
- "committer_mail": "<conrad@zed.dev>",
+ "committer_name": "Conrad Irwin",
+ "committer_email": "<conrad@zed.dev>",
"committer_time": 1692732477,
"committer_tz": "-0600",
"summary": "Rewrite paste",
@@ -542,8 +542,8 @@
"author_mail": "<conrad@zed.dev>",
"author_time": 1692644159,
"author_tz": "-0600",
- "committer": "Conrad Irwin",
- "committer_mail": "<conrad@zed.dev>",
+ "committer_name": "Conrad Irwin",
+ "committer_email": "<conrad@zed.dev>",
"committer_time": 1692732477,
"committer_tz": "-0600",
"summary": "Rewrite paste",
@@ -561,8 +561,8 @@
"author_mail": "<maxbrunsfeld@gmail.com>",
"author_time": 1659072896,
"author_tz": "-0700",
- "committer": "Max Brunsfeld",
- "committer_mail": "<maxbrunsfeld@gmail.com>",
+ "committer_name": "Max Brunsfeld",
+ "committer_email": "<maxbrunsfeld@gmail.com>",
"committer_time": 1659073230,
"committer_tz": "-0700",
"summary": ":art: Rename and simplify some autoindent stuff",
@@ -580,8 +580,8 @@
"author_mail": "<kay@the-simmons.net>",
"author_time": 1653424557,
"author_tz": "-0700",
- "committer": "Kaylee Simmons",
- "committer_mail": "<kay@the-simmons.net>",
+ "committer_name": "Kaylee Simmons",
+ "committer_email": "<kay@the-simmons.net>",
"committer_time": 1653609725,
"committer_tz": "-0700",
"summary": "Unify visual line_mode and non line_mode operators",
@@ -599,8 +599,8 @@
"author_mail": "<kay@the-simmons.net>",
"author_time": 1653007350,
"author_tz": "-0700",
- "committer": "Kaylee Simmons",
- "committer_mail": "<kay@the-simmons.net>",
+ "committer_name": "Kaylee Simmons",
+ "committer_email": "<kay@the-simmons.net>",
"committer_time": 1653609725,
"committer_tz": "-0700",
"summary": "Enable copy and paste in vim mode",
@@ -618,8 +618,8 @@
"author_mail": "<kay@the-simmons.net>",
"author_time": 1653007350,
"author_tz": "-0700",
- "committer": "Kaylee Simmons",
- "committer_mail": "<kay@the-simmons.net>",
+ "committer_name": "Kaylee Simmons",
+ "committer_email": "<kay@the-simmons.net>",
"committer_time": 1653609725,
"committer_tz": "-0700",
"summary": "Enable copy and paste in vim mode",
@@ -637,8 +637,8 @@
"author_mail": "<kay@the-simmons.net>",
"author_time": 1653007350,
"author_tz": "-0700",
- "committer": "Kaylee Simmons",
- "committer_mail": "<kay@the-simmons.net>",
+ "committer_name": "Kaylee Simmons",
+ "committer_email": "<kay@the-simmons.net>",
"committer_time": 1653609725,
"committer_tz": "-0700",
"summary": "Enable copy and paste in vim mode",
@@ -656,8 +656,8 @@
"author_mail": "<kay@the-simmons.net>",
"author_time": 1653007350,
"author_tz": "-0700",
- "committer": "Kaylee Simmons",
- "committer_mail": "<kay@the-simmons.net>",
+ "committer_name": "Kaylee Simmons",
+ "committer_email": "<kay@the-simmons.net>",
"committer_time": 1653609725,
"committer_tz": "-0700",
"summary": "Enable copy and paste in vim mode",
@@ -675,8 +675,8 @@
"author_mail": "<kay@the-simmons.net>",
"author_time": 1653007350,
"author_tz": "-0700",
- "committer": "Kaylee Simmons",
- "committer_mail": "<kay@the-simmons.net>",
+ "committer_name": "Kaylee Simmons",
+ "committer_email": "<kay@the-simmons.net>",
"committer_time": 1653609725,
"committer_tz": "-0700",
"summary": "Enable copy and paste in vim mode",
@@ -694,8 +694,8 @@
"author_mail": "<kay@the-simmons.net>",
"author_time": 1653007350,
"author_tz": "-0700",
- "committer": "Kaylee Simmons",
- "committer_mail": "<kay@the-simmons.net>",
+ "committer_name": "Kaylee Simmons",
+ "committer_email": "<kay@the-simmons.net>",
"committer_time": 1653609725,
"committer_tz": "-0700",
"summary": "Enable copy and paste in vim mode",
@@ -713,8 +713,8 @@
"author_mail": "<kay@the-simmons.net>",
"author_time": 1653007350,
"author_tz": "-0700",
- "committer": "Kaylee Simmons",
- "committer_mail": "<kay@the-simmons.net>",
+ "committer_name": "Kaylee Simmons",
+ "committer_email": "<kay@the-simmons.net>",
"committer_time": 1653609725,
"committer_tz": "-0700",
"summary": "Enable copy and paste in vim mode",
@@ -732,8 +732,8 @@
"author_mail": "<kay@the-simmons.net>",
"author_time": 1653007350,
"author_tz": "-0700",
- "committer": "Kaylee Simmons",
- "committer_mail": "<kay@the-simmons.net>",
+ "committer_name": "Kaylee Simmons",
+ "committer_email": "<kay@the-simmons.net>",
"committer_time": 1653609725,
"committer_tz": "-0700",
"summary": "Enable copy and paste in vim mode",
@@ -751,8 +751,8 @@
"author_mail": "<kay@the-simmons.net>",
"author_time": 1653007350,
"author_tz": "-0700",
- "committer": "Kaylee Simmons",
- "committer_mail": "<kay@the-simmons.net>",
+ "committer_name": "Kaylee Simmons",
+ "committer_email": "<kay@the-simmons.net>",
"committer_time": 1653609725,
"committer_tz": "-0700",
"summary": "Enable copy and paste in vim mode",
@@ -770,8 +770,8 @@
"author_mail": "<kay@the-simmons.net>",
"author_time": 1653007350,
"author_tz": "-0700",
- "committer": "Kaylee Simmons",
- "committer_mail": "<kay@the-simmons.net>",
+ "committer_name": "Kaylee Simmons",
+ "committer_email": "<kay@the-simmons.net>",
"committer_time": 1653609725,
"committer_tz": "-0700",
"summary": "Enable copy and paste in vim mode",
@@ -10,8 +10,8 @@
"author_mail": "<mrnugget@gmail.com>",
"author_time": 1710764113,
"author_tz": "+0100",
- "committer": "Thorsten Ball",
- "committer_mail": "<mrnugget@gmail.com>",
+ "committer_name": "Thorsten Ball",
+ "committer_email": "<mrnugget@gmail.com>",
"committer_time": 1710764113,
"committer_tz": "+0100",
"summary": "Another commit",
@@ -29,8 +29,8 @@
"author_mail": "<mrnugget@gmail.com>",
"author_time": 1710764113,
"author_tz": "+0100",
- "committer": "Thorsten Ball",
- "committer_mail": "<mrnugget@gmail.com>",
+ "committer_name": "Thorsten Ball",
+ "committer_email": "<mrnugget@gmail.com>",
"committer_time": 1710764113,
"committer_tz": "+0100",
"summary": "Another commit",
@@ -48,8 +48,8 @@
"author_mail": "<mrnugget@gmail.com>",
"author_time": 1710764087,
"author_tz": "+0100",
- "committer": "Thorsten Ball",
- "committer_mail": "<mrnugget@gmail.com>",
+ "committer_name": "Thorsten Ball",
+ "committer_email": "<mrnugget@gmail.com>",
"committer_time": 1710764087,
"committer_tz": "+0100",
"summary": "Another commit",
@@ -67,8 +67,8 @@
"author_mail": "<mrnugget@gmail.com>",
"author_time": 1710764087,
"author_tz": "+0100",
- "committer": "Thorsten Ball",
- "committer_mail": "<mrnugget@gmail.com>",
+ "committer_name": "Thorsten Ball",
+ "committer_email": "<mrnugget@gmail.com>",
"committer_time": 1710764087,
"committer_tz": "+0100",
"summary": "Another commit",
@@ -86,8 +86,8 @@
"author_mail": "<mrnugget@gmail.com>",
"author_time": 1709299737,
"author_tz": "+0100",
- "committer": "Thorsten Ball",
- "committer_mail": "<mrnugget@gmail.com>",
+ "committer_name": "Thorsten Ball",
+ "committer_email": "<mrnugget@gmail.com>",
"committer_time": 1709299737,
"committer_tz": "+0100",
"summary": "Initial",
@@ -105,8 +105,8 @@
"author_mail": "<mrnugget@gmail.com>",
"author_time": 1709299737,
"author_tz": "+0100",
- "committer": "Thorsten Ball",
- "committer_mail": "<mrnugget@gmail.com>",
+ "committer_name": "Thorsten Ball",
+ "committer_email": "<mrnugget@gmail.com>",
"committer_time": 1709299737,
"committer_tz": "+0100",
"summary": "Initial",
@@ -124,8 +124,8 @@
"author_mail": "<mrnugget@gmail.com>",
"author_time": 1709299737,
"author_tz": "+0100",
- "committer": "Thorsten Ball",
- "committer_mail": "<mrnugget@gmail.com>",
+ "committer_name": "Thorsten Ball",
+ "committer_email": "<mrnugget@gmail.com>",
"committer_time": 1709299737,
"committer_tz": "+0100",
"summary": "Initial",
@@ -10,8 +10,8 @@
"author_mail": "<mrnugget@example.com>",
"author_time": 1709808710,
"author_tz": "+0100",
- "committer": "Thorsten Ball",
- "committer_mail": "<mrnugget@example.com>",
+ "committer_name": "Thorsten Ball",
+ "committer_email": "<mrnugget@example.com>",
"committer_time": 1709808710,
"committer_tz": "+0100",
"summary": "Make a commit",
@@ -29,8 +29,8 @@
"author_mail": "<joe.schmoe@example.com>",
"author_time": 1709741400,
"author_tz": "+0100",
- "committer": "Joe Schmoe",
- "committer_mail": "<joe.schmoe@example.com>",
+ "committer_name": "Joe Schmoe",
+ "committer_email": "<joe.schmoe@example.com>",
"committer_time": 1709741400,
"committer_tz": "+0100",
"summary": "Joe's cool commit",
@@ -48,8 +48,8 @@
"author_mail": "<joe.schmoe@example.com>",
"author_time": 1709741400,
"author_tz": "+0100",
- "committer": "Joe Schmoe",
- "committer_mail": "<joe.schmoe@example.com>",
+ "committer_name": "Joe Schmoe",
+ "committer_email": "<joe.schmoe@example.com>",
"committer_time": 1709741400,
"committer_tz": "+0100",
"summary": "Joe's cool commit",
@@ -67,8 +67,8 @@
"author_mail": "<joe.schmoe@example.com>",
"author_time": 1709741400,
"author_tz": "+0100",
- "committer": "Joe Schmoe",
- "committer_mail": "<joe.schmoe@example.com>",
+ "committer_name": "Joe Schmoe",
+ "committer_email": "<joe.schmoe@example.com>",
"committer_time": 1709741400,
"committer_tz": "+0100",
"summary": "Joe's cool commit",
@@ -86,8 +86,8 @@
"author_mail": "<mrnugget@example.com>",
"author_time": 1709129122,
"author_tz": "+0100",
- "committer": "Thorsten Ball",
- "committer_mail": "<mrnugget@example.com>",
+ "committer_name": "Thorsten Ball",
+ "committer_email": "<mrnugget@example.com>",
"committer_time": 1709129122,
"committer_tz": "+0100",
"summary": "Get to a state where eslint would change code and imports",
@@ -105,8 +105,8 @@
"author_mail": "<mrnugget@example.com>",
"author_time": 1709128963,
"author_tz": "+0100",
- "committer": "Thorsten Ball",
- "committer_mail": "<mrnugget@example.com>",
+ "committer_name": "Thorsten Ball",
+ "committer_email": "<mrnugget@example.com>",
"committer_time": 1709128963,
"committer_tz": "+0100",
"summary": "Add some stuff",
@@ -124,8 +124,8 @@
"author_mail": "<mrnugget@example.com>",
"author_time": 1709128963,
"author_tz": "+0100",
- "committer": "Thorsten Ball",
- "committer_mail": "<mrnugget@example.com>",
+ "committer_name": "Thorsten Ball",
+ "committer_email": "<mrnugget@example.com>",
"committer_time": 1709128963,
"committer_tz": "+0100",
"summary": "Add some stuff",
@@ -4,12 +4,13 @@ use std::sync::Arc;
use anyhow::{bail, Context, Result};
use async_trait::async_trait;
use futures::AsyncReadExt;
+use gpui::SharedString;
use http_client::{AsyncBody, HttpClient, HttpRequestExt, Request};
use serde::Deserialize;
use url::Url;
use git::{
- BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, Oid, ParsedGitRemote,
+ BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote,
RemoteUrl,
};
@@ -160,7 +161,7 @@ impl GitHostingProvider for Codeberg {
&self,
repo_owner: &str,
repo: &str,
- commit: Oid,
+ commit: SharedString,
http_client: Arc<dyn HttpClient>,
) -> Result<Option<Url>> {
let commit = commit.to_string();
@@ -4,13 +4,14 @@ use std::sync::{Arc, LazyLock};
use anyhow::{bail, Context, Result};
use async_trait::async_trait;
use futures::AsyncReadExt;
+use gpui::SharedString;
use http_client::{AsyncBody, HttpClient, HttpRequestExt, Request};
use regex::Regex;
use serde::Deserialize;
use url::Url;
use git::{
- BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, Oid, ParsedGitRemote,
+ BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote,
PullRequest, RemoteUrl,
};
@@ -178,7 +179,7 @@ impl GitHostingProvider for Github {
&self,
repo_owner: &str,
repo: &str,
- commit: Oid,
+ commit: SharedString,
http_client: Arc<dyn HttpClient>,
) -> Result<Option<Url>> {
let commit = commit.to_string();
@@ -36,6 +36,8 @@ serde_derive.workspace = true
serde_json.workspace = true
settings.workspace = true
theme.workspace = true
+time.workspace = true
+time_format.workspace = true
ui.workspace = true
util.workspace = true
workspace.workspace = true
@@ -190,9 +190,7 @@ impl PickerDelegate for BranchListDelegate {
// Truncate list of recent branches
// Do a partial sort to show recent-ish branches first.
branches.select_nth_unstable_by(RECENT_BRANCHES_COUNT - 1, |lhs, rhs| {
- rhs.is_head
- .cmp(&lhs.is_head)
- .then(rhs.unix_timestamp.cmp(&lhs.unix_timestamp))
+ rhs.priority_key().cmp(&lhs.priority_key())
});
branches.truncate(RECENT_BRANCHES_COUNT);
}
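The partial sort above leans on the new `Branch::priority_key` helper; a small sketch of the full ordering it produces, assuming a `Vec<Branch>` named `branches`:

    // Checked-out branch first, then branches by most recent commit timestamp,
    // descending (priority_key is `(is_head, Option<commit_timestamp>)`).
    branches.sort_by(|lhs, rhs| rhs.priority_key().cmp(&lhs.priority_key()));
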
@@ -255,6 +253,25 @@ impl PickerDelegate for BranchListDelegate {
let Some(branch) = self.matches.get(self.selected_index()) else {
return;
};
+
+ let current_branch = self
+ .workspace
+ .update(cx, |workspace, cx| {
+ workspace
+ .project()
+ .read(cx)
+ .active_repository(cx)
+ .and_then(|repo| repo.read(cx).branch())
+ .map(|branch| branch.name.to_string())
+ })
+ .ok()
+ .flatten();
+
+ if current_branch == Some(branch.name().to_string()) {
+ cx.emit(DismissEvent);
+ return;
+ }
+
cx.spawn_in(window, {
let branch = branch.clone();
|picker, mut cx| async move {
@@ -6,13 +6,15 @@ use crate::{
};
use collections::HashMap;
use db::kvp::KEY_VALUE_STORE;
+use editor::commit_tooltip::CommitTooltip;
use editor::{
actions::MoveToEnd, scroll::ScrollbarAutoHide, Editor, EditorElement, EditorMode,
EditorSettings, MultiBuffer, ShowScrollbar,
};
+use git::repository::{CommitDetails, ResetMode};
use git::{repository::RepoPath, status::FileStatus, Commit, ToggleStaged};
use gpui::*;
-use language::{Buffer, File};
+use language::{markdown, Buffer, File, ParsedMarkdown};
use menu::{SelectFirst, SelectLast, SelectNext, SelectPrev};
use multi_buffer::ExcerptInfo;
use panel::{panel_editor_container, panel_editor_style, panel_filled_button, PanelHeader};
@@ -23,6 +25,7 @@ use project::{
use serde::{Deserialize, Serialize};
use settings::Settings as _;
use std::{collections::HashSet, path::PathBuf, sync::Arc, time::Duration, usize};
+use time::OffsetDateTime;
use ui::{
prelude::*, ButtonLike, Checkbox, CheckboxWithLabel, Divider, DividerColor, ElevationIndex,
IndentGuideColors, ListItem, ListItemSpacing, Scrollbar, ScrollbarState, Tooltip,
@@ -207,7 +210,7 @@ impl GitPanel {
) -> Entity<Self> {
let fs = workspace.app_state().fs.clone();
let project = workspace.project().clone();
- let git_state = project.read(cx).git_state().clone();
+ let git_store = project.read(cx).git_store().clone();
let active_repository = project.read(cx).active_repository(cx);
let workspace = cx.entity().downgrade();
@@ -231,14 +234,14 @@ impl GitPanel {
let scroll_handle = UniformListScrollHandle::new();
cx.subscribe_in(
- &git_state,
+ &git_store,
window,
- move |this, git_state, event, window, cx| match event {
+ move |this, git_store, event, window, cx| match event {
GitEvent::FileSystemUpdated => {
this.schedule_update(false, window, cx);
}
GitEvent::ActiveRepositoryChanged | GitEvent::GitStateUpdated => {
- this.active_repository = git_state.read(cx).active_repository();
+ this.active_repository = git_store.read(cx).active_repository();
this.schedule_update(true, window, cx);
}
},
@@ -744,6 +747,40 @@ impl GitPanel {
self.pending_commit = Some(task);
}
+ fn uncommit(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ let Some(repo) = self.active_repository.clone() else {
+ return;
+ };
+ let prior_head = self.load_commit_details("HEAD", cx);
+
+ let task = cx.spawn(|_, mut cx| async move {
+ let prior_head = prior_head.await?;
+
+ repo.update(&mut cx, |repo, _| repo.reset("HEAD^", ResetMode::Soft))?
+ .await??;
+
+ Ok(prior_head)
+ });
+
+ let task = cx.spawn_in(window, |this, mut cx| async move {
+ let result = task.await;
+ this.update_in(&mut cx, |this, window, cx| {
+ this.pending_commit.take();
+ match result {
+ Ok(prior_commit) => {
+ this.commit_editor.update(cx, |editor, cx| {
+ editor.set_text(prior_commit.message, window, cx)
+ });
+ }
+ Err(e) => this.show_err_toast(e, cx),
+ }
+ })
+ .ok();
+ });
+
+ self.pending_commit = Some(task);
+ }
+
fn fill_co_authors(&mut self, _: &FillCoAuthors, window: &mut Window, cx: &mut Context<Self>) {
const CO_AUTHOR_PREFIX: &str = "Co-authored-by: ";
@@ -1131,16 +1168,10 @@ impl GitPanel {
let all_repositories = self
.project
.read(cx)
- .git_state()
+ .git_store()
.read(cx)
.all_repositories();
- let branch = self
- .active_repository
- .as_ref()
- .and_then(|repository| repository.read(cx).branch())
- .unwrap_or_else(|| "(no current branch)".into());
-
let has_repo_above = all_repositories.iter().any(|repo| {
repo.read(cx)
.repository_entry
@@ -1148,26 +1179,7 @@ impl GitPanel {
.is_above_project()
});
- let icon_button = Button::new("branch-selector", branch)
- .color(Color::Muted)
- .style(ButtonStyle::Subtle)
- .icon(IconName::GitBranch)
- .icon_size(IconSize::Small)
- .icon_color(Color::Muted)
- .size(ButtonSize::Compact)
- .icon_position(IconPosition::Start)
- .tooltip(Tooltip::for_action_title(
- "Switch Branch",
- &zed_actions::git::Branch,
- ))
- .on_click(cx.listener(|_, _, window, cx| {
- window.dispatch_action(zed_actions::git::Branch.boxed_clone(), cx);
- }))
- .style(ButtonStyle::Transparent);
-
self.panel_header_container(window, cx)
- .child(h_flex().pl_1().child(icon_button))
- .child(div().flex_grow())
.when(all_repositories.len() > 1 || has_repo_above, |el| {
el.child(self.render_repository_selector(cx))
})
@@ -1200,6 +1212,7 @@ impl GitPanel {
&& !editor.read(cx).is_empty(cx)
&& !self.has_unstaged_conflicts()
&& self.has_write_access(cx);
+
// let can_commit_all =
// !self.commit_pending && self.can_commit_all && !editor.read(cx).is_empty(cx);
let panel_editor_style = panel_editor_style(true, window, cx);
@@ -1274,10 +1287,108 @@ impl GitPanel {
)
}
+ fn render_previous_commit(&self, cx: &mut Context<Self>) -> Option<impl IntoElement> {
+ let active_repository = self.active_repository.as_ref()?;
+ let branch = active_repository.read(cx).branch()?;
+ let commit = branch.most_recent_commit.as_ref()?.clone();
+
+ if branch.upstream.as_ref().is_some_and(|upstream| {
+ if let Some(tracking) = &upstream.tracking {
+ tracking.ahead == 0
+ } else {
+ true
+ }
+ }) {
+ return None;
+ }
+
+ let _branch_selector = Button::new("branch-selector", branch.name.clone())
+ .color(Color::Muted)
+ .style(ButtonStyle::Subtle)
+ .icon(IconName::GitBranch)
+ .icon_size(IconSize::Small)
+ .icon_color(Color::Muted)
+ .size(ButtonSize::Compact)
+ .icon_position(IconPosition::Start)
+ .tooltip(Tooltip::for_action_title(
+ "Switch Branch",
+ &zed_actions::git::Branch,
+ ))
+ .on_click(cx.listener(|_, _, window, cx| {
+ window.dispatch_action(zed_actions::git::Branch.boxed_clone(), cx);
+ }))
+ .style(ButtonStyle::Transparent);
+
+ let _timestamp = Label::new(time_format::format_local_timestamp(
+ OffsetDateTime::from_unix_timestamp(commit.commit_timestamp).log_err()?,
+ OffsetDateTime::now_utc(),
+ time_format::TimestampFormat::Relative,
+ ))
+ .size(LabelSize::Small)
+ .color(Color::Muted);
+
+ let tooltip = if self.has_staged_changes() {
+ "git reset HEAD^ --soft"
+ } else {
+ "git reset HEAD^"
+ };
+
+ let this = cx.entity();
+ Some(
+ h_flex()
+ .items_center()
+ .py_1p5()
+ .px(px(8.))
+ .bg(cx.theme().colors().background)
+ .border_t_1()
+ .border_color(cx.theme().colors().border)
+ .gap_1p5()
+ .child(
+ div()
+ .flex_grow()
+ .overflow_hidden()
+ .max_w(relative(0.6))
+ .h_full()
+ .child(
+ Label::new(commit.subject.clone())
+ .size(LabelSize::Small)
+ .text_ellipsis(),
+ )
+ .id("commit-msg-hover")
+ .hoverable_tooltip(move |window, cx| {
+ GitPanelMessageTooltip::new(
+ this.clone(),
+ commit.sha.clone(),
+ window,
+ cx,
+ )
+ .into()
+ }),
+ )
+ .child(div().flex_1())
+ .child(
+ panel_filled_button("Uncommit")
+ .icon(IconName::Undo)
+ .icon_size(IconSize::Small)
+ .icon_color(Color::Muted)
+ .icon_position(IconPosition::Start)
+ .tooltip(Tooltip::for_action_title(tooltip, &git::Uncommit))
+ .on_click(cx.listener(|this, _, window, cx| this.uncommit(window, cx))),
+ // .child(
+ // panel_filled_button("Push")
+ // .icon(IconName::ArrowUp)
+ // .icon_size(IconSize::Small)
+ // .icon_color(Color::Muted)
+ // .icon_position(IconPosition::Start), // .disabled(true),
+ // ),
+ ),
+ )
+ }
+
fn render_empty_state(&self, cx: &mut Context<Self>) -> impl IntoElement {
h_flex()
.h_full()
- .flex_1()
+ .flex_grow()
.justify_center()
.items_center()
.child(
@@ -1563,6 +1674,17 @@ impl GitPanel {
.into_any_element()
}
+ fn load_commit_details(
+ &self,
+ sha: &str,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<CommitDetails>> {
+ let Some(repo) = self.active_repository.clone() else {
+ return Task::ready(Err(anyhow::anyhow!("no active repo")));
+ };
+ repo.update(cx, |repo, cx| repo.show(sha, cx))
+ }
+
fn render_entry(
&self,
ix: usize,
@@ -1757,6 +1879,7 @@ impl Render for GitPanel {
} else {
self.render_empty_state(cx).into_any_element()
})
+ .children(self.render_previous_commit(cx))
.child(self.render_commit_editor(window, cx))
}
}
@@ -1843,3 +1966,81 @@ impl Panel for GitPanel {
}
impl PanelHeader for GitPanel {}
+
+struct GitPanelMessageTooltip {
+ commit_tooltip: Option<Entity<CommitTooltip>>,
+}
+
+impl GitPanelMessageTooltip {
+ fn new(
+ git_panel: Entity<GitPanel>,
+ sha: SharedString,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> Entity<Self> {
+ let workspace = git_panel.read(cx).workspace.clone();
+ cx.new(|cx| {
+ cx.spawn_in(window, |this, mut cx| async move {
+ let language_registry = workspace.update(&mut cx, |workspace, _cx| {
+ workspace.app_state().languages.clone()
+ })?;
+
+ let details = git_panel
+ .update(&mut cx, |git_panel, cx| {
+ git_panel.load_commit_details(&sha, cx)
+ })?
+ .await?;
+
+ let mut parsed_message = ParsedMarkdown::default();
+ markdown::parse_markdown_block(
+ &details.message,
+ Some(&language_registry),
+ None,
+ &mut parsed_message.text,
+ &mut parsed_message.highlights,
+ &mut parsed_message.region_ranges,
+ &mut parsed_message.regions,
+ )
+ .await;
+
+ let commit_details = editor::commit_tooltip::CommitDetails {
+ sha: details.sha.clone(),
+ committer_name: details.committer_name.clone(),
+ committer_email: details.committer_email.clone(),
+ commit_time: OffsetDateTime::from_unix_timestamp(details.commit_timestamp)?,
+ message: Some(editor::commit_tooltip::ParsedCommitMessage {
+ message: details.message.clone(),
+ parsed_message,
+ ..Default::default()
+ }),
+ };
+
+ this.update_in(&mut cx, |this: &mut GitPanelMessageTooltip, window, cx| {
+ this.commit_tooltip = Some(cx.new(move |cx| {
+ CommitTooltip::new(
+ commit_details,
+ panel_editor_style(true, window, cx),
+ Some(workspace),
+ )
+ }));
+ cx.notify();
+ })
+ })
+ .detach();
+
+ Self {
+ commit_tooltip: None,
+ }
+ })
+ }
+}
+
+impl Render for GitPanelMessageTooltip {
+ fn render(&mut self, _window: &mut Window, _cx: &mut Context<'_, Self>) -> impl IntoElement {
+ if let Some(commit_tooltip) = &self.commit_tooltip {
+ commit_tooltip.clone().into_any_element()
+ } else {
+ gpui::Empty.into_any_element()
+ }
+ }
+}
@@ -12,7 +12,7 @@ use gpui::{
};
use language::{Anchor, Buffer, Capability, OffsetRangeExt, Point};
use multi_buffer::{MultiBuffer, PathKey};
-use project::{git::GitState, Project, ProjectPath};
+use project::{git::GitStore, Project, ProjectPath};
use theme::ActiveTheme;
use ui::prelude::*;
use util::ResultExt as _;
@@ -31,7 +31,7 @@ pub(crate) struct ProjectDiff {
editor: Entity<Editor>,
project: Entity<Project>,
git_panel: Entity<GitPanel>,
- git_state: Entity<GitState>,
+ git_store: Entity<GitStore>,
workspace: WeakEntity<Workspace>,
focus_handle: FocusHandle,
update_needed: postage::watch::Sender<()>,
@@ -69,6 +69,7 @@ impl ProjectDiff {
window: &mut Window,
cx: &mut Context<Workspace>,
) {
+ workspace.open_panel::<GitPanel>(window, cx);
Self::deploy_at(workspace, None, window, cx)
}
@@ -136,11 +137,11 @@ impl ProjectDiff {
cx.subscribe_in(&editor, window, Self::handle_editor_event)
.detach();
- let git_state = project.read(cx).git_state().clone();
- let git_state_subscription = cx.subscribe_in(
- &git_state,
+ let git_store = project.read(cx).git_store().clone();
+ let git_store_subscription = cx.subscribe_in(
+ &git_store,
window,
- move |this, _git_state, _event, _window, _cx| {
+ move |this, _git_store, _event, _window, _cx| {
*this.update_needed.borrow_mut() = ();
},
);
@@ -155,7 +156,7 @@ impl ProjectDiff {
Self {
project,
- git_state: git_state.clone(),
+ git_store: git_store.clone(),
git_panel: git_panel.clone(),
workspace: workspace.downgrade(),
focus_handle,
@@ -164,7 +165,7 @@ impl ProjectDiff {
pending_scroll: None,
update_needed: send,
_task: worker,
- _subscription: git_state_subscription,
+ _subscription: git_store_subscription,
}
}
@@ -174,7 +175,7 @@ impl ProjectDiff {
window: &mut Window,
cx: &mut Context<Self>,
) {
- let Some(git_repo) = self.git_state.read(cx).active_repository() else {
+ let Some(git_repo) = self.git_store.read(cx).active_repository() else {
return;
};
let repo = git_repo.read(cx);
@@ -247,7 +248,7 @@ impl ProjectDiff {
}
fn load_buffers(&mut self, cx: &mut Context<Self>) -> Vec<Task<Result<DiffBuffer>>> {
- let Some(repo) = self.git_state.read(cx).active_repository() else {
+ let Some(repo) = self.git_store.read(cx).active_repository() else {
self.multibuffer.update(cx, |multibuffer, cx| {
multibuffer.clear(cx);
});
@@ -98,7 +98,7 @@ impl QuickCommitModal {
commit_message_buffer: Option<Entity<Buffer>>,
cx: &mut Context<Self>,
) -> Self {
- let git_state = project.read(cx).git_state().clone();
+ let git_store = project.read(cx).git_store().clone();
let active_repository = project.read(cx).active_repository(cx);
let focus_handle = cx.focus_handle();
@@ -130,7 +130,7 @@ impl QuickCommitModal {
let all_repositories = self
.project
.read(cx)
- .git_state()
+ .git_store()
.read(cx)
.all_repositories();
let entry_count = self
@@ -4,7 +4,7 @@ use gpui::{
};
use picker::{Picker, PickerDelegate};
use project::{
- git::{GitState, Repository},
+ git::{GitStore, Repository},
Project,
};
use std::sync::Arc;
@@ -20,8 +20,8 @@ pub struct RepositorySelector {
impl RepositorySelector {
pub fn new(project: Entity<Project>, window: &mut Window, cx: &mut Context<Self>) -> Self {
- let git_state = project.read(cx).git_state().clone();
- let all_repositories = git_state.read(cx).all_repositories();
+ let git_store = project.read(cx).git_store().clone();
+ let all_repositories = git_store.read(cx).all_repositories();
let filtered_repositories = all_repositories.clone();
let delegate = RepositorySelectorDelegate {
project: project.downgrade(),
@@ -38,7 +38,7 @@ impl RepositorySelector {
});
let _subscriptions =
- vec![cx.subscribe_in(&git_state, window, Self::handle_project_git_event)];
+ vec![cx.subscribe_in(&git_store, window, Self::handle_project_git_event)];
RepositorySelector {
picker,
@@ -49,7 +49,7 @@ impl RepositorySelector {
fn handle_project_git_event(
&mut self,
- git_state: &Entity<GitState>,
+ git_store: &Entity<GitStore>,
_event: &project::git::GitEvent,
window: &mut Window,
cx: &mut Context<Self>,
@@ -57,7 +57,7 @@ impl RepositorySelector {
// TODO handle events individually
let task = self.picker.update(cx, |this, cx| {
let query = this.query(cx);
- this.delegate.repository_entries = git_state.read(cx).all_repositories();
+ this.delegate.repository_entries = git_store.read(cx).all_repositories();
this.delegate.update_matches(query, window, cx)
});
self.update_matches_task = Some(task);
@@ -133,6 +133,7 @@ pub struct MultiBufferDiffHunk {
pub diff_base_byte_range: Range<usize>,
/// Whether or not this hunk also appears in the 'secondary diff'.
pub secondary_status: DiffHunkSecondaryStatus,
+ pub secondary_diff_base_byte_range: Option<Range<usize>>,
}
impl MultiBufferDiffHunk {
@@ -2191,7 +2192,11 @@ impl MultiBuffer {
let secondary_diff_insertion = new_diff
.secondary_diff()
.map_or(true, |secondary_diff| secondary_diff.base_text().is_none());
- new_diff = BufferDiff::build_with_single_insertion(secondary_diff_insertion, cx);
+ new_diff = BufferDiff::build_with_single_insertion(
+ secondary_diff_insertion,
+ buffer.snapshot(),
+ cx,
+ );
}
let mut snapshot = self.snapshot.borrow_mut();
@@ -3477,6 +3482,7 @@ impl MultiBufferSnapshot {
buffer_range: hunk.buffer_range.clone(),
diff_base_byte_range: hunk.diff_base_byte_range.clone(),
secondary_status: hunk.secondary_status,
+ secondary_diff_base_byte_range: hunk.secondary_diff_base_byte_range,
})
})
}
@@ -3846,6 +3852,7 @@ impl MultiBufferSnapshot {
buffer_range: hunk.buffer_range.clone(),
diff_base_byte_range: hunk.diff_base_byte_range.clone(),
secondary_status: hunk.secondary_status,
+ secondary_diff_base_byte_range: hunk.secondary_diff_base_byte_range,
});
}
}
@@ -5937,6 +5944,10 @@ impl MultiBufferSnapshot {
pub fn show_headers(&self) -> bool {
self.show_headers
}
+
+ pub fn diff_for_buffer_id(&self, buffer_id: BufferId) -> Option<&BufferDiffSnapshot> {
+ self.diffs.get(&buffer_id)
+ }
}
#[cfg(any(test, feature = "test-support"))]
@@ -189,6 +189,7 @@ impl BufferDiffState {
buffer: text::BufferSnapshot,
cx: &mut Context<Self>,
) -> oneshot::Receiver<()> {
+ log::debug!("recalculate diffs");
let (tx, rx) = oneshot::channel();
self.diff_updated_futures.push(tx);
@@ -2721,8 +2722,8 @@ fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::B
author_mail: entry.author_mail.clone(),
author_time: entry.author_time,
author_tz: entry.author_tz.clone(),
- committer: entry.committer.clone(),
- committer_mail: entry.committer_mail.clone(),
+ committer: entry.committer_name.clone(),
+ committer_mail: entry.committer_email.clone(),
committer_time: entry.committer_time,
committer_tz: entry.committer_tz.clone(),
summary: entry.summary.clone(),
@@ -2771,10 +2772,10 @@ fn deserialize_blame_buffer_response(
sha: git::Oid::from_bytes(&entry.sha).ok()?,
range: entry.start_line..entry.end_line,
original_line_number: entry.original_line_number,
- committer: entry.committer,
+ committer_name: entry.committer,
committer_time: entry.committer_time,
committer_tz: entry.committer_tz,
- committer_mail: entry.committer_mail,
+ committer_email: entry.committer_mail,
author: entry.author,
author_mail: entry.author_mail,
author_time: entry.author_time,
@@ -1,20 +1,22 @@
use crate::buffer_store::BufferStore;
use crate::worktree_store::{WorktreeStore, WorktreeStoreEvent};
use crate::{Project, ProjectPath};
-use anyhow::Context as _;
+use anyhow::{Context as _, Result};
use client::ProjectId;
use futures::channel::{mpsc, oneshot};
use futures::StreamExt as _;
+use git::repository::{Branch, CommitDetails, ResetMode};
use git::{
repository::{GitRepository, RepoPath},
status::{GitSummary, TrackedSummary},
};
use gpui::{
- App, AppContext, Context, Entity, EventEmitter, SharedString, Subscription, Task, WeakEntity,
+ App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
+ WeakEntity,
};
use language::{Buffer, LanguageRegistry};
-use rpc::proto::ToProto;
-use rpc::{proto, AnyProtoClient};
+use rpc::proto::{git_reset, ToProto};
+use rpc::{proto, AnyProtoClient, TypedEnvelope};
use settings::WorktreeId;
use std::path::{Path, PathBuf};
use std::sync::Arc;
@@ -22,22 +24,23 @@ use text::BufferId;
use util::{maybe, ResultExt};
use worktree::{ProjectEntryId, RepositoryEntry, StatusEntry};
-pub struct GitState {
- project_id: Option<ProjectId>,
- client: Option<AnyProtoClient>,
+pub struct GitStore {
+ pub(super) project_id: Option<ProjectId>,
+ pub(super) client: Option<AnyProtoClient>,
+ buffer_store: Entity<BufferStore>,
repositories: Vec<Entity<Repository>>,
active_index: Option<usize>,
- update_sender: mpsc::UnboundedSender<(Message, oneshot::Sender<anyhow::Result<()>>)>,
+ update_sender: mpsc::UnboundedSender<(Message, oneshot::Sender<Result<()>>)>,
_subscription: Subscription,
}
pub struct Repository {
commit_message_buffer: Option<Entity<Buffer>>,
- git_state: WeakEntity<GitState>,
+ git_store: WeakEntity<GitStore>,
pub worktree_id: WorktreeId,
pub repository_entry: RepositoryEntry,
pub git_repo: GitRepo,
- update_sender: mpsc::UnboundedSender<(Message, oneshot::Sender<anyhow::Result<()>>)>,
+ update_sender: mpsc::UnboundedSender<(Message, oneshot::Sender<Result<()>>)>,
}
#[derive(Clone)]
@@ -51,14 +54,20 @@ pub enum GitRepo {
},
}
-enum Message {
+pub enum Message {
Commit {
git_repo: GitRepo,
message: SharedString,
name_and_email: Option<(SharedString, SharedString)>,
},
+ Reset {
+ repo: GitRepo,
+ commit: SharedString,
+ reset_mode: ResetMode,
+ },
Stage(GitRepo, Vec<RepoPath>),
Unstage(GitRepo, Vec<RepoPath>),
+ SetIndexText(GitRepo, RepoPath, Option<String>),
}
pub enum GitEvent {
@@ -67,11 +76,12 @@ pub enum GitEvent {
GitStateUpdated,
}
-impl EventEmitter<GitEvent> for GitState {}
+impl EventEmitter<GitEvent> for GitStore {}
-impl GitState {
+impl GitStore {
pub fn new(
worktree_store: &Entity<WorktreeStore>,
+ buffer_store: Entity<BufferStore>,
client: Option<AnyProtoClient>,
project_id: Option<ProjectId>,
cx: &mut Context<'_, Self>,
@@ -79,9 +89,10 @@ impl GitState {
let update_sender = Self::spawn_git_worker(cx);
let _subscription = cx.subscribe(worktree_store, Self::on_worktree_store_event);
- GitState {
+ GitStore {
project_id,
client,
+ buffer_store,
repositories: Vec::new(),
active_index: None,
update_sender,
@@ -89,6 +100,16 @@ impl GitState {
}
}
+ pub fn init(client: &AnyProtoClient) {
+ client.add_entity_request_handler(Self::handle_stage);
+ client.add_entity_request_handler(Self::handle_unstage);
+ client.add_entity_request_handler(Self::handle_commit);
+ client.add_entity_request_handler(Self::handle_reset);
+ client.add_entity_request_handler(Self::handle_show);
+ client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
+ client.add_entity_request_handler(Self::handle_set_index_text);
+ }
+
pub fn active_repository(&self) -> Option<Entity<Repository>> {
self.active_index
.map(|index| self.repositories[index].clone())
@@ -152,7 +173,7 @@ impl GitState {
existing_handle
} else {
cx.new(|_| Repository {
- git_state: this.clone(),
+ git_store: this.clone(),
worktree_id,
repository_entry: repo.clone(),
git_repo,
@@ -188,10 +209,10 @@ impl GitState {
}
fn spawn_git_worker(
- cx: &mut Context<'_, GitState>,
- ) -> mpsc::UnboundedSender<(Message, oneshot::Sender<anyhow::Result<()>>)> {
+ cx: &mut Context<'_, GitStore>,
+ ) -> mpsc::UnboundedSender<(Message, oneshot::Sender<Result<()>>)> {
let (update_sender, mut update_receiver) =
- mpsc::unbounded::<(Message, oneshot::Sender<anyhow::Result<()>>)>();
+ mpsc::unbounded::<(Message, oneshot::Sender<Result<()>>)>();
cx.spawn(|_, cx| async move {
while let Some((msg, respond)) = update_receiver.next().await {
let result = cx
@@ -205,7 +226,7 @@ impl GitState {
update_sender
}
- async fn process_git_msg(msg: Message) -> Result<(), anyhow::Error> {
+ async fn process_git_msg(msg: Message) -> Result<()> {
match msg {
Message::Stage(repo, paths) => {
match repo {
@@ -232,6 +253,35 @@ impl GitState {
}
Ok(())
}
+ Message::Reset {
+ repo,
+ commit,
+ reset_mode,
+ } => {
+ match repo {
+ GitRepo::Local(repo) => repo.reset(&commit, reset_mode)?,
+ GitRepo::Remote {
+ project_id,
+ client,
+ worktree_id,
+ work_directory_id,
+ } => {
+ client
+ .request(proto::GitReset {
+ project_id: project_id.0,
+ worktree_id: worktree_id.to_proto(),
+ work_directory_id: work_directory_id.to_proto(),
+ commit: commit.into(),
+ mode: match reset_mode {
+ ResetMode::Soft => git_reset::ResetMode::Soft.into(),
+ ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
+ },
+ })
+ .await?;
+ }
+ }
+ Ok(())
+ }
Message::Unstage(repo, paths) => {
match repo {
GitRepo::Local(repo) => repo.unstage_paths(&paths)?,
@@ -291,16 +341,236 @@ impl GitState {
}
Ok(())
}
+ Message::SetIndexText(git_repo, path, text) => match git_repo {
+ GitRepo::Local(repo) => repo.set_index_text(&path, text),
+ GitRepo::Remote {
+ project_id,
+ client,
+ worktree_id,
+ work_directory_id,
+ } => client.send(proto::SetIndexText {
+ project_id: project_id.0,
+ worktree_id: worktree_id.to_proto(),
+ work_directory_id: work_directory_id.to_proto(),
+ path: path.as_ref().to_proto(),
+ text,
+ }),
+ },
}
}
+
+ async fn handle_stage(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::Stage>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::Ack> {
+ let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+ let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
+ let repository_handle =
+ Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+
+ let entries = envelope
+ .payload
+ .paths
+ .into_iter()
+ .map(PathBuf::from)
+ .map(RepoPath::new)
+ .collect();
+
+ repository_handle
+ .update(&mut cx, |repository_handle, _| {
+ repository_handle.stage_entries(entries)
+ })?
+ .await??;
+ Ok(proto::Ack {})
+ }
+
+ async fn handle_unstage(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::Unstage>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::Ack> {
+ let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+ let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
+ let repository_handle =
+ Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+
+ let entries = envelope
+ .payload
+ .paths
+ .into_iter()
+ .map(PathBuf::from)
+ .map(RepoPath::new)
+ .collect();
+
+ repository_handle
+ .update(&mut cx, |repository_handle, _| {
+ repository_handle.unstage_entries(entries)
+ })?
+ .await??;
+
+ Ok(proto::Ack {})
+ }
+
+ async fn handle_set_index_text(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::SetIndexText>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::Ack> {
+ let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+ let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
+ let repository_handle =
+ Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+
+ repository_handle
+ .update(&mut cx, |repository_handle, _| {
+ repository_handle.set_index_text(
+ &RepoPath::from_str(&envelope.payload.path),
+ envelope.payload.text,
+ )
+ })?
+ .await??;
+ Ok(proto::Ack {})
+ }
+
+ async fn handle_commit(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::Commit>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::Ack> {
+ let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+ let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
+ let repository_handle =
+ Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+
+ let message = SharedString::from(envelope.payload.message);
+ let name = envelope.payload.name.map(SharedString::from);
+ let email = envelope.payload.email.map(SharedString::from);
+
+ repository_handle
+ .update(&mut cx, |repository_handle, _| {
+ repository_handle.commit(message, name.zip(email))
+ })?
+ .await??;
+ Ok(proto::Ack {})
+ }
+
+ async fn handle_show(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::GitShow>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::GitCommitDetails> {
+ let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+ let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
+ let repository_handle =
+ Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+
+ let commit = repository_handle
+ .update(&mut cx, |repository_handle, cx| {
+ repository_handle.show(&envelope.payload.commit, cx)
+ })?
+ .await?;
+ Ok(proto::GitCommitDetails {
+ sha: commit.sha.into(),
+ message: commit.message.into(),
+ commit_timestamp: commit.commit_timestamp,
+ committer_email: commit.committer_email.into(),
+ committer_name: commit.committer_name.into(),
+ })
+ }
+
+ async fn handle_reset(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::GitReset>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::Ack> {
+ let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+ let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
+ let repository_handle =
+ Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+
+ let mode = match envelope.payload.mode() {
+ git_reset::ResetMode::Soft => ResetMode::Soft,
+ git_reset::ResetMode::Mixed => ResetMode::Mixed,
+ };
+
+ repository_handle
+ .update(&mut cx, |repository_handle, _| {
+ repository_handle.reset(&envelope.payload.commit, mode)
+ })?
+ .await??;
+ Ok(proto::Ack {})
+ }
+
+ async fn handle_open_commit_message_buffer(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::OpenBufferResponse> {
+ let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+ let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
+ let repository =
+ Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
+ let buffer = repository
+ .update(&mut cx, |repository, cx| {
+ repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
+ })?
+ .await?;
+
+ let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
+ this.update(&mut cx, |this, cx| {
+ this.buffer_store.update(cx, |buffer_store, cx| {
+ buffer_store
+ .create_buffer_for_peer(
+ &buffer,
+ envelope.original_sender_id.unwrap_or(envelope.sender_id),
+ cx,
+ )
+ .detach_and_log_err(cx);
+ })
+ })?;
+
+ Ok(proto::OpenBufferResponse {
+ buffer_id: buffer_id.to_proto(),
+ })
+ }
+
+ fn repository_for_request(
+ this: &Entity<Self>,
+ worktree_id: WorktreeId,
+ work_directory_id: ProjectEntryId,
+ cx: &mut AsyncApp,
+ ) -> Result<Entity<Repository>> {
+ this.update(cx, |this, cx| {
+ let repository_handle = this
+ .all_repositories()
+ .into_iter()
+ .find(|repository_handle| {
+ repository_handle.read(cx).worktree_id == worktree_id
+ && repository_handle
+ .read(cx)
+ .repository_entry
+ .work_directory_id()
+ == work_directory_id
+ })
+ .context("missing repository handle")?;
+ anyhow::Ok(repository_handle)
+ })?
+ }
}
+impl GitRepo {}
+
impl Repository {
+ pub fn git_store(&self) -> Option<Entity<GitStore>> {
+ self.git_store.upgrade()
+ }
+
fn id(&self) -> (WorktreeId, ProjectEntryId) {
(self.worktree_id, self.repository_entry.work_directory_id())
}
- pub fn branch(&self) -> Option<Arc<str>> {
+ pub fn branch(&self) -> Option<&Branch> {
self.repository_entry.branch()
}
@@ -322,19 +592,19 @@ impl Repository {
}
pub fn activate(&self, cx: &mut Context<Self>) {
- let Some(git_state) = self.git_state.upgrade() else {
+ let Some(git_store) = self.git_store.upgrade() else {
return;
};
let entity = cx.entity();
- git_state.update(cx, |git_state, cx| {
- let Some(index) = git_state
+ git_store.update(cx, |git_store, cx| {
+ let Some(index) = git_store
.repositories
.iter()
.position(|handle| *handle == entity)
else {
return;
};
- git_state.active_index = Some(index);
+ git_store.active_index = Some(index);
cx.emit(GitEvent::ActiveRepositoryChanged);
});
}
@@ -374,7 +644,7 @@ impl Repository {
languages: Option<Arc<LanguageRegistry>>,
buffer_store: Entity<BufferStore>,
cx: &mut Context<Self>,
- ) -> Task<anyhow::Result<Entity<Buffer>>> {
+ ) -> Task<Result<Entity<Buffer>>> {
if let Some(buffer) = self.commit_message_buffer.clone() {
return Task::ready(Ok(buffer));
}
@@ -422,7 +692,7 @@ impl Repository {
language_registry: Option<Arc<LanguageRegistry>>,
buffer_store: Entity<BufferStore>,
cx: &mut Context<Self>,
- ) -> Task<anyhow::Result<Entity<Buffer>>> {
+ ) -> Task<Result<Entity<Buffer>>> {
cx.spawn(|repository, mut cx| async move {
let buffer = buffer_store
.update(&mut cx, |buffer_store, cx| buffer_store.create_buffer(cx))?
@@ -442,7 +712,57 @@ impl Repository {
})
}
- pub fn stage_entries(&self, entries: Vec<RepoPath>) -> oneshot::Receiver<anyhow::Result<()>> {
+ pub fn reset(&self, commit: &str, reset_mode: ResetMode) -> oneshot::Receiver<Result<()>> {
+ let (result_tx, result_rx) = futures::channel::oneshot::channel();
+ let commit = commit.to_string().into();
+ self.update_sender
+ .unbounded_send((
+ Message::Reset {
+ repo: self.git_repo.clone(),
+ commit,
+ reset_mode,
+ },
+ result_tx,
+ ))
+ .ok();
+ result_rx
+ }
+
+ pub fn show(&self, commit: &str, cx: &Context<Self>) -> Task<Result<CommitDetails>> {
+ let commit = commit.to_string();
+ match self.git_repo.clone() {
+ GitRepo::Local(git_repository) => {
+ let commit = commit.to_string();
+ cx.background_executor()
+ .spawn(async move { git_repository.show(&commit) })
+ }
+ GitRepo::Remote {
+ project_id,
+ client,
+ worktree_id,
+ work_directory_id,
+ } => cx.background_executor().spawn(async move {
+ let resp = client
+ .request(proto::GitShow {
+ project_id: project_id.0,
+ worktree_id: worktree_id.to_proto(),
+ work_directory_id: work_directory_id.to_proto(),
+ commit,
+ })
+ .await?;
+
+ Ok(CommitDetails {
+ sha: resp.sha.into(),
+ message: resp.message.into(),
+ commit_timestamp: resp.commit_timestamp,
+ committer_email: resp.committer_email.into(),
+ committer_name: resp.committer_name.into(),
+ })
+ }),
+ }
+ }
+
+ pub fn stage_entries(&self, entries: Vec<RepoPath>) -> oneshot::Receiver<Result<()>> {
let (result_tx, result_rx) = futures::channel::oneshot::channel();
if entries.is_empty() {
result_tx.send(Ok(())).ok();
@@ -454,7 +774,7 @@ impl Repository {
result_rx
}
- pub fn unstage_entries(&self, entries: Vec<RepoPath>) -> oneshot::Receiver<anyhow::Result<()>> {
+ pub fn unstage_entries(&self, entries: Vec<RepoPath>) -> oneshot::Receiver<Result<()>> {
let (result_tx, result_rx) = futures::channel::oneshot::channel();
if entries.is_empty() {
result_tx.send(Ok(())).ok();
@@ -466,7 +786,7 @@ impl Repository {
result_rx
}
- pub fn stage_all(&self) -> oneshot::Receiver<anyhow::Result<()>> {
+ pub fn stage_all(&self) -> oneshot::Receiver<Result<()>> {
let to_stage = self
.repository_entry
.status()
@@ -476,7 +796,7 @@ impl Repository {
self.stage_entries(to_stage)
}
- pub fn unstage_all(&self) -> oneshot::Receiver<anyhow::Result<()>> {
+ pub fn unstage_all(&self) -> oneshot::Receiver<Result<()>> {
let to_unstage = self
.repository_entry
.status()
@@ -508,7 +828,7 @@ impl Repository {
&self,
message: SharedString,
name_and_email: Option<(SharedString, SharedString)>,
- ) -> oneshot::Receiver<anyhow::Result<()>> {
+ ) -> oneshot::Receiver<Result<()>> {
let (result_tx, result_rx) = futures::channel::oneshot::channel();
self.update_sender
.unbounded_send((
@@ -522,4 +842,19 @@ impl Repository {
.ok();
result_rx
}
+
+ pub fn set_index_text(
+ &self,
+ path: &RepoPath,
+ content: Option<String>,
+ ) -> oneshot::Receiver<anyhow::Result<()>> {
+ let (result_tx, result_rx) = futures::channel::oneshot::channel();
+ self.update_sender
+ .unbounded_send((
+ Message::SetIndexText(self.git_repo.clone(), path.clone(), content),
+ result_tx,
+ ))
+ .ok();
+ result_rx
+ }
}
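
The new `reset`, `show`, and `set_index_text` methods follow the same pattern as the existing `stage_entries`: enqueue a `Message` together with a oneshot responder on `update_sender`, and let the worker spawned by `spawn_git_worker` resolve it. A minimal standalone sketch of that queue/responder pattern, using only the `futures` crate and a simplified stand-in `Message` type rather than Zed's actual enum:

```rust
// Standalone sketch of the git worker's queue/responder pattern. `Message`
// here is a stand-in for Zed's enum; only the `futures` crate is required.
use futures::channel::{mpsc, oneshot};
use futures::StreamExt;

enum Message {
    Reset { commit: String },
}

fn main() {
    futures::executor::block_on(async {
        let (tx, mut rx) =
            mpsc::unbounded::<(Message, oneshot::Sender<Result<(), String>>)>();

        // Worker loop: drain the queue and answer each request on its oneshot.
        let worker = async move {
            while let Some((msg, respond)) = rx.next().await {
                let result = match msg {
                    Message::Reset { commit } => {
                        println!("resetting to {commit}");
                        Ok(())
                    }
                };
                respond.send(result).ok();
            }
        };

        // Caller side, mirroring Repository::reset: enqueue the message and
        // hand back the oneshot receiver for the caller to await.
        let (result_tx, result_rx) = oneshot::channel();
        tx.unbounded_send((Message::Reset { commit: "HEAD~1".into() }, result_tx))
            .ok();
        drop(tx); // closing the sender lets the worker loop terminate

        let ((), caller) = futures::join!(worker, result_rx);
        caller
            .expect("worker dropped the responder")
            .expect("git operation failed");
    });
}
```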
@@ -27,7 +27,7 @@ use git::Repository;
pub mod search_history;
mod yarn;
-use crate::git::GitState;
+use crate::git::GitStore;
use anyhow::{anyhow, Context as _, Result};
use buffer_store::{BufferStore, BufferStoreEvent};
use client::{
@@ -95,7 +95,10 @@ use task_store::TaskStore;
use terminals::Terminals;
use text::{Anchor, BufferId};
use toolchain_store::EmptyToolchainStore;
-use util::{paths::compare_paths, ResultExt as _};
+use util::{
+ paths::{compare_paths, SanitizedPath},
+ ResultExt as _,
+};
use worktree::{CreatedEntry, Snapshot, Traversal};
use worktree_store::{WorktreeStore, WorktreeStoreEvent};
@@ -158,7 +161,7 @@ pub struct Project {
fs: Arc<dyn Fs>,
ssh_client: Option<Entity<SshRemoteClient>>,
client_state: ProjectClientState,
- git_state: Entity<GitState>,
+ git_store: Entity<GitStore>,
collaborators: HashMap<proto::PeerId, Collaborator>,
client_subscriptions: Vec<client::Subscription>,
worktree_store: Entity<WorktreeStore>,
@@ -607,14 +610,10 @@ impl Project {
client.add_entity_request_handler(Self::handle_open_new_buffer);
client.add_entity_message_handler(Self::handle_create_buffer_for_peer);
- client.add_entity_request_handler(Self::handle_stage);
- client.add_entity_request_handler(Self::handle_unstage);
- client.add_entity_request_handler(Self::handle_commit);
- client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
-
WorktreeStore::init(&client);
BufferStore::init(&client);
LspStore::init(&client);
+ GitStore::init(&client);
SettingsObserver::init(&client);
TaskStore::init(Some(&client));
ToolchainStore::init(&client);
@@ -701,7 +700,8 @@ impl Project {
)
});
- let git_state = cx.new(|cx| GitState::new(&worktree_store, None, None, cx));
+ let git_store =
+ cx.new(|cx| GitStore::new(&worktree_store, buffer_store.clone(), None, None, cx));
cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach();
@@ -714,7 +714,7 @@ impl Project {
lsp_store,
join_project_response_message_id: 0,
client_state: ProjectClientState::Local,
- git_state,
+ git_store,
client_subscriptions: Vec::new(),
_subscriptions: vec![cx.on_release(Self::release)],
active_entry: None,
@@ -821,9 +821,10 @@ impl Project {
});
cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach();
- let git_state = cx.new(|cx| {
- GitState::new(
+ let git_store = cx.new(|cx| {
+ GitStore::new(
&worktree_store,
+ buffer_store.clone(),
Some(ssh_proto.clone()),
Some(ProjectId(SSH_PROJECT_ID)),
cx,
@@ -842,7 +843,7 @@ impl Project {
lsp_store,
join_project_response_message_id: 0,
client_state: ProjectClientState::Local,
- git_state,
+ git_store,
client_subscriptions: Vec::new(),
_subscriptions: vec![
cx.on_release(Self::release),
@@ -892,6 +893,7 @@ impl Project {
ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.worktree_store);
ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.lsp_store);
ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.settings_observer);
+ ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.git_store);
ssh_proto.add_entity_message_handler(Self::handle_create_buffer_for_peer);
ssh_proto.add_entity_message_handler(Self::handle_update_worktree);
@@ -905,6 +907,7 @@ impl Project {
SettingsObserver::init(&ssh_proto);
TaskStore::init(Some(&ssh_proto));
ToolchainStore::init(&ssh_proto);
+ GitStore::init(&ssh_proto);
this
})
@@ -1026,9 +1029,10 @@ impl Project {
SettingsObserver::new_remote(worktree_store.clone(), task_store.clone(), cx)
})?;
- let git_state = cx.new(|cx| {
- GitState::new(
+ let git_store = cx.new(|cx| {
+ GitStore::new(
&worktree_store,
+ buffer_store.clone(),
Some(client.clone().into()),
Some(ProjectId(remote_id)),
cx,
@@ -1085,7 +1089,7 @@ impl Project {
remote_id,
replica_id,
},
- git_state,
+ git_store,
buffers_needing_diff: Default::default(),
git_diff_debouncer: DebouncedDelay::new(),
terminals: Terminals {
@@ -1483,22 +1487,37 @@ impl Project {
.and_then(|worktree| worktree.read(cx).status_for_file(&project_path.path))
}
- pub fn visibility_for_paths(&self, paths: &[PathBuf], cx: &App) -> Option<bool> {
+ pub fn visibility_for_paths(
+ &self,
+ paths: &[PathBuf],
+ metadatas: &[Metadata],
+ exclude_sub_dirs: bool,
+ cx: &App,
+ ) -> Option<bool> {
paths
.iter()
- .map(|path| self.visibility_for_path(path, cx))
+ .zip(metadatas)
+ .map(|(path, metadata)| self.visibility_for_path(path, metadata, exclude_sub_dirs, cx))
.max()
.flatten()
}
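
`visibility_for_paths` still reduces the per-path results with `.max()` on `Option<bool>`, which works because `None < Some(false) < Some(true)`: a visible-worktree match beats a hidden one, which beats no match at all. A standalone illustration:

```rust
// None < Some(false) < Some(true), so .max().flatten() picks the strongest
// visibility signal across all candidate paths.
fn main() {
    assert_eq!([None, Some(false), Some(true)].into_iter().max().flatten(), Some(true));
    assert_eq!([None, Some(false)].into_iter().max().flatten(), Some(false));
    assert_eq!([None::<bool>, None].into_iter().max().flatten(), None);
}
```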
- pub fn visibility_for_path(&self, path: &Path, cx: &App) -> Option<bool> {
+ pub fn visibility_for_path(
+ &self,
+ path: &Path,
+ metadata: &Metadata,
+ exclude_sub_dirs: bool,
+ cx: &App,
+ ) -> Option<bool> {
+ let sanitized_path = SanitizedPath::from(path);
+ let path = sanitized_path.as_path();
self.worktrees(cx)
.filter_map(|worktree| {
let worktree = worktree.read(cx);
- worktree
- .as_local()?
- .contains_abs_path(path)
- .then(|| worktree.is_visible())
+ let abs_path = worktree.as_local()?.abs_path();
+ let contains = path == abs_path
+ || (path.starts_with(abs_path) && (!exclude_sub_dirs || !metadata.is_dir));
+ contains.then(|| worktree.is_visible())
})
.max()
}
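
The containment test in `visibility_for_path` now treats a path as inside a worktree when it equals the worktree root, or when it is nested under the root and either sub-directories are allowed or the path is not itself a directory. A standalone sketch of that predicate (illustrative names, not the actual Zed types):

```rust
// Sketch of the new containment rule used by visibility_for_path.
use std::path::Path;

fn contains(worktree_root: &Path, path: &Path, is_dir: bool, exclude_sub_dirs: bool) -> bool {
    path == worktree_root
        || (path.starts_with(worktree_root) && (!exclude_sub_dirs || !is_dir))
}

fn main() {
    let root = Path::new("/repo");
    assert!(contains(root, Path::new("/repo"), true, true)); // the root itself always matches
    assert!(contains(root, Path::new("/repo/src/main.rs"), false, true)); // nested file
    assert!(!contains(root, Path::new("/repo/src"), true, true)); // nested dir excluded
    assert!(contains(root, Path::new("/repo/src"), true, false)); // ...unless sub-dirs allowed
}
```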
@@ -1656,6 +1675,9 @@ impl Project {
self.client
.subscribe_to_entity(project_id)?
.set_entity(&self.settings_observer, &mut cx.to_async()),
+ self.client
+ .subscribe_to_entity(project_id)?
+ .set_entity(&self.git_store, &mut cx.to_async()),
]);
self.buffer_store.update(cx, |buffer_store, cx| {
@@ -4019,121 +4041,6 @@ impl Project {
Project::respond_to_open_buffer_request(this, buffer, peer_id, &mut cx)
}
- async fn handle_stage(
- this: Entity<Self>,
- envelope: TypedEnvelope<proto::Stage>,
- mut cx: AsyncApp,
- ) -> Result<proto::Ack> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
- let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
-
- let entries = envelope
- .payload
- .paths
- .into_iter()
- .map(PathBuf::from)
- .map(RepoPath::new)
- .collect();
-
- repository_handle
- .update(&mut cx, |repository_handle, _| {
- repository_handle.stage_entries(entries)
- })?
- .await??;
- Ok(proto::Ack {})
- }
-
- async fn handle_unstage(
- this: Entity<Self>,
- envelope: TypedEnvelope<proto::Unstage>,
- mut cx: AsyncApp,
- ) -> Result<proto::Ack> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
- let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
-
- let entries = envelope
- .payload
- .paths
- .into_iter()
- .map(PathBuf::from)
- .map(RepoPath::new)
- .collect();
-
- repository_handle
- .update(&mut cx, |repository_handle, _| {
- repository_handle.unstage_entries(entries)
- })?
- .await??;
- Ok(proto::Ack {})
- }
-
- async fn handle_commit(
- this: Entity<Self>,
- envelope: TypedEnvelope<proto::Commit>,
- mut cx: AsyncApp,
- ) -> Result<proto::Ack> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
- let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
-
- let message = SharedString::from(envelope.payload.message);
- let name = envelope.payload.name.map(SharedString::from);
- let email = envelope.payload.email.map(SharedString::from);
- repository_handle
- .update(&mut cx, |repository_handle, _| {
- repository_handle.commit(message, name.zip(email))
- })?
- .await??;
- Ok(proto::Ack {})
- }
-
- async fn handle_open_commit_message_buffer(
- this: Entity<Self>,
- envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
- mut cx: AsyncApp,
- ) -> Result<proto::OpenBufferResponse> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
- let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
- let buffer = repository_handle
- .update(&mut cx, |repository_handle, cx| {
- repository_handle.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
- })?
- .await?;
-
- let peer_id = envelope.original_sender_id()?;
- Project::respond_to_open_buffer_request(this, buffer, peer_id, &mut cx)
- }
-
- fn repository_for_request(
- this: &Entity<Self>,
- worktree_id: WorktreeId,
- work_directory_id: ProjectEntryId,
- cx: &mut AsyncApp,
- ) -> Result<Entity<Repository>> {
- this.update(cx, |project, cx| {
- let repository_handle = project
- .git_state()
- .read(cx)
- .all_repositories()
- .into_iter()
- .find(|repository_handle| {
- let repository_handle = repository_handle.read(cx);
- repository_handle.worktree_id == worktree_id
- && repository_handle.repository_entry.work_directory_id()
- == work_directory_id
- })
- .context("missing repository handle")?;
- anyhow::Ok(repository_handle)
- })?
- }
-
fn respond_to_open_buffer_request(
this: Entity<Self>,
buffer: Entity<Buffer>,
@@ -4325,16 +4232,37 @@ impl Project {
&self.buffer_store
}
- pub fn git_state(&self) -> &Entity<GitState> {
- &self.git_state
+ pub fn git_store(&self) -> &Entity<GitStore> {
+ &self.git_store
}
pub fn active_repository(&self, cx: &App) -> Option<Entity<Repository>> {
- self.git_state.read(cx).active_repository()
+ self.git_store.read(cx).active_repository()
}
pub fn all_repositories(&self, cx: &App) -> Vec<Entity<Repository>> {
- self.git_state.read(cx).all_repositories()
+ self.git_store.read(cx).all_repositories()
+ }
+
+ pub fn repository_and_path_for_buffer_id(
+ &self,
+ buffer_id: BufferId,
+ cx: &App,
+ ) -> Option<(Entity<Repository>, RepoPath)> {
+ let path = self
+ .buffer_for_id(buffer_id, cx)?
+ .read(cx)
+ .project_path(cx)?;
+ self.git_store
+ .read(cx)
+ .all_repositories()
+ .into_iter()
+ .find_map(|repo| {
+ Some((
+ repo.clone(),
+ repo.read(cx).repository_entry.relativize(&path.path).ok()?,
+ ))
+ })
}
}
@@ -12,6 +12,7 @@ use futures::{
future::{BoxFuture, Shared},
FutureExt, SinkExt,
};
+use git::repository::Branch;
use gpui::{App, AsyncApp, Context, Entity, EntityId, EventEmitter, Task, WeakEntity};
use postage::oneshot;
use rpc::{
@@ -24,7 +25,10 @@ use smol::{
};
use text::ReplicaId;
use util::{paths::SanitizedPath, ResultExt};
-use worktree::{Entry, ProjectEntryId, UpdatedEntriesSet, Worktree, WorktreeId, WorktreeSettings};
+use worktree::{
+ branch_to_proto, Entry, ProjectEntryId, UpdatedEntriesSet, Worktree, WorktreeId,
+ WorktreeSettings,
+};
use crate::{search::SearchQuery, ProjectPath};
@@ -133,11 +137,12 @@ impl WorktreeStore {
.find(|worktree| worktree.read(cx).id() == id)
}
- pub fn current_branch(&self, repository: ProjectPath, cx: &App) -> Option<Arc<str>> {
+ pub fn current_branch(&self, repository: ProjectPath, cx: &App) -> Option<Branch> {
self.worktree_for_id(repository.worktree_id, cx)?
.read(cx)
.git_entry(repository.path)?
.branch()
+ .cloned()
}
pub fn worktree_for_entry(
@@ -938,9 +943,24 @@ impl WorktreeStore {
.map(|proto_branch| git::repository::Branch {
is_head: proto_branch.is_head,
name: proto_branch.name.into(),
- unix_timestamp: proto_branch
- .unix_timestamp
- .map(|timestamp| timestamp as i64),
+ upstream: proto_branch.upstream.map(|upstream| {
+ git::repository::Upstream {
+ ref_name: upstream.ref_name.into(),
+ tracking: upstream.tracking.map(|tracking| {
+ git::repository::UpstreamTracking {
+ ahead: tracking.ahead as u32,
+ behind: tracking.behind as u32,
+ }
+ }),
+ }
+ }),
+ most_recent_commit: proto_branch.most_recent_commit.map(|commit| {
+ git::repository::CommitSummary {
+ sha: commit.sha.into(),
+ subject: commit.subject.into(),
+ commit_timestamp: commit.commit_timestamp,
+ }
+ }),
})
.collect();
@@ -1126,14 +1146,7 @@ impl WorktreeStore {
.await?;
Ok(proto::GitBranchesResponse {
- branches: branches
- .into_iter()
- .map(|branch| proto::Branch {
- is_head: branch.is_head,
- name: branch.name.to_string(),
- unix_timestamp: branch.unix_timestamp.map(|timestamp| timestamp as u64),
- })
- .collect(),
+ branches: branches.iter().map(branch_to_proto).collect(),
})
}
@@ -315,7 +315,12 @@ message Envelope {
OpenCommitMessageBuffer open_commit_message_buffer = 296;
OpenUncommittedDiff open_uncommitted_diff = 297;
- OpenUncommittedDiffResponse open_uncommitted_diff_response = 298; // current max
+ OpenUncommittedDiffResponse open_uncommitted_diff_response = 298;
+ GitShow git_show = 300;
+ GitReset git_reset = 301;
+ GitCommitDetails git_commit_details = 302;
+
+ SetIndexText set_index_text = 299; // current max
}
reserved 87 to 88;
@@ -1798,12 +1803,14 @@ message Entry {
message RepositoryEntry {
uint64 work_directory_id = 1;
- optional string branch = 2;
+ optional string branch = 2; // deprecated
+ optional Branch branch_summary = 6;
repeated StatusEntry updated_statuses = 3;
repeated string removed_statuses = 4;
repeated string current_merge_conflicts = 5;
}
+
message StatusEntry {
string repo_path = 1;
// Can be removed once collab's min version is >=0.171.0.
@@ -2087,6 +2094,14 @@ message OpenUncommittedDiffResponse {
Mode mode = 3;
}
+message SetIndexText {
+ uint64 project_id = 1;
+ uint64 worktree_id = 2;
+ uint64 work_directory_id = 3;
+ string path = 4;
+ optional string text = 5;
+}
+
message GetNotifications {
optional uint64 before_id = 1;
}
@@ -2605,10 +2620,26 @@ message ActiveToolchainResponse {
optional Toolchain toolchain = 1;
}
+message CommitSummary {
+ string sha = 1;
+ string subject = 2;
+ int64 commit_timestamp = 3;
+}
+
message Branch {
bool is_head = 1;
string name = 2;
optional uint64 unix_timestamp = 3;
+ optional GitUpstream upstream = 4;
+ optional CommitSummary most_recent_commit = 5;
+}
+message GitUpstream {
+ string ref_name = 1;
+ optional UpstreamTracking tracking = 2;
+}
+message UpstreamTracking {
+ uint64 ahead = 1;
+ uint64 behind = 2;
}
message GitBranches {
@@ -2629,6 +2660,33 @@ message UpdateGitBranch {
message GetPanicFiles {
}
+message GitShow {
+ uint64 project_id = 1;
+ uint64 worktree_id = 2;
+ uint64 work_directory_id = 3;
+ string commit = 4;
+}
+
+message GitCommitDetails {
+ string sha = 1;
+ string message = 2;
+ int64 commit_timestamp = 3;
+ string committer_email = 4;
+ string committer_name = 5;
+}
+
+message GitReset {
+ uint64 project_id = 1;
+ uint64 worktree_id = 2;
+ uint64 work_directory_id = 3;
+ string commit = 4;
+ ResetMode mode = 5;
+ enum ResetMode {
+ SOFT = 0;
+ MIXED = 1;
+ }
+}
+
message GetPanicFilesResponse {
repeated string file_contents = 2;
}
@@ -440,6 +440,10 @@ messages!(
(SyncExtensionsResponse, Background),
(InstallExtension, Background),
(RegisterBufferWithLanguageServers, Background),
+ (GitReset, Background),
+ (GitShow, Background),
+ (GitCommitDetails, Background),
+ (SetIndexText, Background),
);
request_messages!(
@@ -573,6 +577,9 @@ request_messages!(
(SyncExtensions, SyncExtensionsResponse),
(InstallExtension, Ack),
(RegisterBufferWithLanguageServers, Ack),
+ (GitShow, GitCommitDetails),
+ (GitReset, Ack),
+ (SetIndexText, Ack),
);
entity_messages!(
@@ -665,6 +672,9 @@ entity_messages!(
GetPathMetadata,
CancelLanguageServerWork,
RegisterBufferWithLanguageServers,
+ GitShow,
+ GitReset,
+ SetIndexText,
);
entity_messages!(
@@ -1,22 +1,20 @@
use ::proto::{FromProto, ToProto};
-use anyhow::{anyhow, Context as _, Result};
+use anyhow::{anyhow, Result};
use extension::ExtensionHostProxy;
use extension_host::headless_host::HeadlessExtensionStore;
use fs::Fs;
-use git::repository::RepoPath;
-use gpui::{App, AppContext as _, AsyncApp, Context, Entity, PromptLevel, SharedString};
+use gpui::{App, AppContext as _, AsyncApp, Context, Entity, PromptLevel};
use http_client::HttpClient;
use language::{proto::serialize_operation, Buffer, BufferEvent, LanguageRegistry};
use node_runtime::NodeRuntime;
use project::{
buffer_store::{BufferStore, BufferStoreEvent},
- git::{GitState, Repository},
+ git::GitStore,
project_settings::SettingsObserver,
search::SearchQuery,
task_store::TaskStore,
worktree_store::WorktreeStore,
- LspStore, LspStoreEvent, PrettierStore, ProjectEntryId, ProjectPath, ToolchainStore,
- WorktreeId,
+ LspStore, LspStoreEvent, PrettierStore, ProjectPath, ToolchainStore, WorktreeId,
};
use remote::ssh_session::ChannelClient;
use rpc::{
@@ -44,7 +42,7 @@ pub struct HeadlessProject {
pub next_entry_id: Arc<AtomicUsize>,
pub languages: Arc<LanguageRegistry>,
pub extensions: Entity<HeadlessExtensionStore>,
- pub git_state: Entity<GitState>,
+ pub git_store: Entity<GitStore>,
}
pub struct HeadlessAppState {
@@ -83,13 +81,14 @@ impl HeadlessProject {
store
});
- let git_state = cx.new(|cx| GitState::new(&worktree_store, None, None, cx));
-
let buffer_store = cx.new(|cx| {
let mut buffer_store = BufferStore::local(worktree_store.clone(), cx);
buffer_store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
buffer_store
});
+
+ let git_store =
+ cx.new(|cx| GitStore::new(&worktree_store, buffer_store.clone(), None, None, cx));
let prettier_store = cx.new(|cx| {
PrettierStore::new(
node_runtime.clone(),
@@ -180,6 +179,7 @@ impl HeadlessProject {
session.subscribe_to_entity(SSH_PROJECT_ID, &task_store);
session.subscribe_to_entity(SSH_PROJECT_ID, &toolchain_store);
session.subscribe_to_entity(SSH_PROJECT_ID, &settings_observer);
+ session.subscribe_to_entity(SSH_PROJECT_ID, &git_store);
client.add_request_handler(cx.weak_entity(), Self::handle_list_remote_directory);
client.add_request_handler(cx.weak_entity(), Self::handle_get_path_metadata);
@@ -197,11 +197,6 @@ impl HeadlessProject {
client.add_entity_request_handler(BufferStore::handle_update_buffer);
client.add_entity_message_handler(BufferStore::handle_close_buffer);
- client.add_entity_request_handler(Self::handle_stage);
- client.add_entity_request_handler(Self::handle_unstage);
- client.add_entity_request_handler(Self::handle_commit);
- client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
-
client.add_request_handler(
extensions.clone().downgrade(),
HeadlessExtensionStore::handle_sync_extensions,
@@ -217,6 +212,7 @@ impl HeadlessProject {
LspStore::init(&client);
TaskStore::init(Some(&client));
ToolchainStore::init(&client);
+ GitStore::init(&client);
HeadlessProject {
session: client,
@@ -229,7 +225,7 @@ impl HeadlessProject {
next_entry_id: Default::default(),
languages,
extensions,
- git_state,
+ git_store,
}
}
@@ -615,137 +611,6 @@ impl HeadlessProject {
log::debug!("Received ping from client");
Ok(proto::Ack {})
}
-
- async fn handle_stage(
- this: Entity<Self>,
- envelope: TypedEnvelope<proto::Stage>,
- mut cx: AsyncApp,
- ) -> Result<proto::Ack> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
- let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
-
- let entries = envelope
- .payload
- .paths
- .into_iter()
- .map(PathBuf::from)
- .map(RepoPath::new)
- .collect();
-
- repository_handle
- .update(&mut cx, |repository_handle, _| {
- repository_handle.stage_entries(entries)
- })?
- .await??;
- Ok(proto::Ack {})
- }
-
- async fn handle_unstage(
- this: Entity<Self>,
- envelope: TypedEnvelope<proto::Unstage>,
- mut cx: AsyncApp,
- ) -> Result<proto::Ack> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
- let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
-
- let entries = envelope
- .payload
- .paths
- .into_iter()
- .map(PathBuf::from)
- .map(RepoPath::new)
- .collect();
-
- repository_handle
- .update(&mut cx, |repository_handle, _| {
- repository_handle.unstage_entries(entries)
- })?
- .await??;
-
- Ok(proto::Ack {})
- }
-
- async fn handle_commit(
- this: Entity<Self>,
- envelope: TypedEnvelope<proto::Commit>,
- mut cx: AsyncApp,
- ) -> Result<proto::Ack> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
- let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository_handle =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
-
- let message = SharedString::from(envelope.payload.message);
- let name = envelope.payload.name.map(SharedString::from);
- let email = envelope.payload.email.map(SharedString::from);
-
- repository_handle
- .update(&mut cx, |repository_handle, _| {
- repository_handle.commit(message, name.zip(email))
- })?
- .await??;
- Ok(proto::Ack {})
- }
-
- async fn handle_open_commit_message_buffer(
- this: Entity<Self>,
- envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
- mut cx: AsyncApp,
- ) -> Result<proto::OpenBufferResponse> {
- let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
- let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
- let repository =
- Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
- let buffer = repository
- .update(&mut cx, |repository, cx| {
- repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
- })?
- .await?;
-
- let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
- this.update(&mut cx, |headless_project, cx| {
- headless_project
- .buffer_store
- .update(cx, |buffer_store, cx| {
- buffer_store
- .create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
- .detach_and_log_err(cx);
- })
- })?;
-
- Ok(proto::OpenBufferResponse {
- buffer_id: buffer_id.to_proto(),
- })
- }
-
- fn repository_for_request(
- this: &Entity<Self>,
- worktree_id: WorktreeId,
- work_directory_id: ProjectEntryId,
- cx: &mut AsyncApp,
- ) -> Result<Entity<Repository>> {
- this.update(cx, |project, cx| {
- let repository_handle = project
- .git_state
- .read(cx)
- .all_repositories()
- .into_iter()
- .find(|repository_handle| {
- repository_handle.read(cx).worktree_id == worktree_id
- && repository_handle
- .read(cx)
- .repository_entry
- .work_directory_id()
- == work_directory_id
- })
- .context("missing repository handle")?;
- anyhow::Ok(repository_handle)
- })?
- }
}
fn prompt_to_proto(
@@ -1364,7 +1364,7 @@ async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestA
})
});
- assert_eq!(server_branch.as_ref(), branches[2]);
+ assert_eq!(server_branch.name, branches[2]);
// Also try creating a new branch
cx.update(|cx| {
@@ -1387,7 +1387,7 @@ async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestA
})
});
- assert_eq!(server_branch.as_ref(), "totally-new-branch");
+ assert_eq!(server_branch.name, "totally-new-branch");
}
pub async fn init_test(
@@ -447,7 +447,7 @@ where
summary.0
}
- /// Returns whether we found the item you where seeking for
+ /// Returns whether we found the item you were seeking for
#[track_caller]
fn seek_internal(
&mut self,
@@ -61,6 +61,7 @@ pub struct IconDefinition {
const FILE_ICONS: &[(&str, &str)] = &[
("astro", "icons/file_icons/astro.svg"),
("audio", "icons/file_icons/audio.svg"),
+ ("bicep", "icons/file_icons/file.svg"),
("bun", "icons/file_icons/bun.svg"),
("c", "icons/file_icons/c.svg"),
("code", "icons/file_icons/code.svg"),
@@ -24,19 +24,21 @@ pub fn format_localized_timestamp(
) -> String {
let timestamp_local = timestamp.to_offset(timezone);
let reference_local = reference.to_offset(timezone);
+ format_local_timestamp(timestamp_local, reference_local, format)
+}
+/// Formats a timestamp, which respects the user's date and time preferences/custom format.
+pub fn format_local_timestamp(
+ timestamp: OffsetDateTime,
+ reference: OffsetDateTime,
+ format: TimestampFormat,
+) -> String {
match format {
- TimestampFormat::Absolute => {
- format_absolute_timestamp(timestamp_local, reference_local, false)
- }
- TimestampFormat::EnhancedAbsolute => {
- format_absolute_timestamp(timestamp_local, reference_local, true)
- }
- TimestampFormat::MediumAbsolute => {
- format_absolute_timestamp_medium(timestamp_local, reference_local)
- }
- TimestampFormat::Relative => format_relative_time(timestamp_local, reference_local)
- .unwrap_or_else(|| format_relative_date(timestamp_local, reference_local)),
+ TimestampFormat::Absolute => format_absolute_timestamp(timestamp, reference, false),
+ TimestampFormat::EnhancedAbsolute => format_absolute_timestamp(timestamp, reference, true),
+ TimestampFormat::MediumAbsolute => format_absolute_timestamp_medium(timestamp, reference),
+ TimestampFormat::Relative => format_relative_time(timestamp, reference)
+ .unwrap_or_else(|| format_relative_date(timestamp, reference)),
}
}
@@ -521,6 +521,7 @@ impl TitleBar {
let branch_name = entry
.as_ref()
.and_then(|entry| entry.branch())
+ .map(|branch| branch.name.clone())
.map(|branch| util::truncate_and_trailoff(&branch, MAX_BRANCH_NAME_LENGTH))?;
Some(
Button::new("project_branch_trigger", branch_name)
@@ -16,7 +16,7 @@ path = "src/ui.rs"
chrono.workspace = true
component.workspace = true
gpui.workspace = true
-itertools = { workspace = true, optional = true }
+itertools.workspace = true
linkme.workspace = true
menu.workspace = true
serde.workspace = true
@@ -26,13 +26,14 @@ story = { workspace = true, optional = true }
strum = { workspace = true, features = ["derive"] }
theme.workspace = true
ui_macros.workspace = true
+util.workspace = true
[target.'cfg(windows)'.dependencies]
windows.workspace = true
[features]
default = []
-stories = ["dep:itertools", "dep:story"]
+stories = ["dep:story"]
# cargo-machete doesn't understand that linkme is used in the component macro
[package.metadata.cargo-machete]
@@ -92,7 +92,7 @@ impl RenderOnce for KeyBinding {
self.platform_style,
None,
self.size,
- false,
+ true,
))
.map(|el| {
el.child(render_key(&keystroke, self.platform_style, None, self.size))
@@ -110,7 +110,7 @@ pub fn render_key(
let key_icon = icon_for_key(keystroke, platform_style);
match key_icon {
Some(icon) => KeyIcon::new(icon, color).size(size).into_any_element(),
- None => Key::new(capitalize(&keystroke.key), color)
+ None => Key::new(util::capitalize(&keystroke.key), color)
.size(size)
.into_any_element(),
}
@@ -145,10 +145,12 @@ pub fn render_modifiers(
platform_style: PlatformStyle,
color: Option<Color>,
size: Option<AbsoluteLength>,
- standalone: bool,
+ trailing_separator: bool,
) -> impl Iterator<Item = AnyElement> {
+ #[derive(Clone)]
enum KeyOrIcon {
Key(&'static str),
+ Plus,
Icon(IconName),
}
@@ -200,23 +202,34 @@ pub fn render_modifiers(
.into_iter()
.filter(|modifier| modifier.enabled)
.collect::<Vec<_>>();
- let last_ix = filtered.len().saturating_sub(1);
- filtered
+ let platform_keys = filtered
.into_iter()
- .enumerate()
- .flat_map(move |(ix, modifier)| match platform_style {
- PlatformStyle::Mac => vec![modifier.mac],
- PlatformStyle::Linux if standalone && ix == last_ix => vec![modifier.linux],
- PlatformStyle::Linux => vec![modifier.linux, KeyOrIcon::Key("+")],
- PlatformStyle::Windows if standalone && ix == last_ix => {
- vec![modifier.windows]
- }
- PlatformStyle::Windows => vec![modifier.windows, KeyOrIcon::Key("+")],
+ .map(move |modifier| match platform_style {
+ PlatformStyle::Mac => Some(modifier.mac),
+ PlatformStyle::Linux => Some(modifier.linux),
+ PlatformStyle::Windows => Some(modifier.windows),
+ });
+
+ let separator = match platform_style {
+ PlatformStyle::Mac => None,
+ PlatformStyle::Linux => Some(KeyOrIcon::Plus),
+ PlatformStyle::Windows => Some(KeyOrIcon::Plus),
+ };
+
+ let platform_keys = itertools::intersperse(platform_keys, separator.clone());
+
+ platform_keys
+ .chain(if modifiers.modified() && trailing_separator {
+ Some(separator)
+ } else {
+ None
})
+ .flatten()
.map(move |key_or_icon| match key_or_icon {
KeyOrIcon::Key(key) => Key::new(key, color).size(size).into_any_element(),
KeyOrIcon::Icon(icon) => KeyIcon::new(icon, color).size(size).into_any_element(),
+ KeyOrIcon::Plus => "+".into_any_element(),
})
}
@@ -230,7 +243,9 @@ pub struct Key {
impl RenderOnce for Key {
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
let single_char = self.key.len() == 1;
- let size = self.size.unwrap_or(px(14.).into());
+ let size = self
+ .size
+ .unwrap_or_else(|| TextSize::default().rems(cx).into());
div()
.py_0()
@@ -387,7 +402,7 @@ pub fn text_for_keystroke(keystroke: &Keystroke, platform_style: PlatformStyle)
let key = match keystroke.key.as_str() {
"pageup" => "PageUp",
"pagedown" => "PageDown",
- key => &capitalize(key),
+ key => &util::capitalize(key),
};
text.push_str(key);
@@ -395,14 +410,6 @@ pub fn text_for_keystroke(keystroke: &Keystroke, platform_style: PlatformStyle)
text
}
-fn capitalize(str: &str) -> String {
- let mut chars = str.chars();
- match chars.next() {
- None => String::new(),
- Some(first_char) => first_char.to_uppercase().collect::<String>() + chars.as_str(),
- }
-}
-
#[cfg(test)]
mod tests {
use super::*;
@@ -787,6 +787,28 @@ impl<'a> PartialOrd for NumericPrefixWithSuffix<'a> {
}
}
+/// Capitalizes the first character of a string.
+///
+/// This function takes a string slice as input and returns a new `String` with the first character
+/// capitalized.
+///
+/// # Examples
+///
+/// ```
+/// use util::capitalize;
+///
+/// assert_eq!(capitalize("hello"), "Hello");
+/// assert_eq!(capitalize("WORLD"), "WORLD");
+/// assert_eq!(capitalize(""), "");
+/// ```
+pub fn capitalize(str: &str) -> String {
+ let mut chars = str.chars();
+ match chars.next() {
+ None => String::new(),
+ Some(first_char) => first_char.to_uppercase().collect::<String>() + chars.as_str(),
+ }
+}
+
fn emoji_regex() -> &'static Regex {
static EMOJI_REGEX: LazyLock<Regex> =
LazyLock::new(|| Regex::new("(\\p{Emoji}|\u{200D})").unwrap());
@@ -5958,7 +5958,6 @@ pub struct OpenOptions {
pub replace_window: Option<WindowHandle<Workspace>>,
pub env: Option<HashMap<String, String>>,
}
-
#[allow(clippy::type_complexity)]
pub fn open_paths(
abs_paths: &[PathBuf],
@@ -5976,58 +5975,65 @@ pub fn open_paths(
let mut best_match = None;
let mut open_visible = OpenVisible::All;
- if open_options.open_new_workspace != Some(true) {
- for window in local_workspace_windows(cx) {
- if let Ok(workspace) = window.read(cx) {
- let m = workspace
- .project
- .read(cx)
- .visibility_for_paths(&abs_paths, cx);
- if m > best_match {
- existing = Some(window);
- best_match = m;
- } else if best_match.is_none() && open_options.open_new_workspace == Some(false) {
- existing = Some(window)
- }
- }
- }
- }
-
cx.spawn(move |mut cx| async move {
- if open_options.open_new_workspace.is_none() && existing.is_none() {
- let all_files = abs_paths.iter().map(|path| app_state.fs.metadata(path));
- if futures::future::join_all(all_files)
+ if open_options.open_new_workspace != Some(true) {
+ let all_paths = abs_paths.iter().map(|path| app_state.fs.metadata(path));
+ let all_metadatas = futures::future::join_all(all_paths)
.await
.into_iter()
.filter_map(|result| result.ok().flatten())
- .all(|file| !file.is_dir)
- {
- cx.update(|cx| {
- if let Some(window) = cx
- .active_window()
- .and_then(|window| window.downcast::<Workspace>())
- {
- if let Ok(workspace) = window.read(cx) {
- let project = workspace.project().read(cx);
- if project.is_local() && !project.is_via_collab() {
- existing = Some(window);
- open_visible = OpenVisible::None;
- return;
- }
+ .collect::<Vec<_>>();
+
+ cx.update(|cx| {
+ for window in local_workspace_windows(&cx) {
+ if let Ok(workspace) = window.read(&cx) {
+ let m = workspace.project.read(&cx).visibility_for_paths(
+ &abs_paths,
+ &all_metadatas,
+ open_options.open_new_workspace == None,
+ cx,
+ );
+ if m > best_match {
+ existing = Some(window);
+ best_match = m;
+ } else if best_match.is_none()
+ && open_options.open_new_workspace == Some(false)
+ {
+ existing = Some(window)
}
}
- for window in local_workspace_windows(cx) {
- if let Ok(workspace) = window.read(cx) {
- let project = workspace.project().read(cx);
- if project.is_via_collab() {
- continue;
+ }
+ })?;
+
+ if open_options.open_new_workspace.is_none() && existing.is_none() {
+ if all_metadatas.iter().all(|file| !file.is_dir) {
+ cx.update(|cx| {
+ if let Some(window) = cx
+ .active_window()
+ .and_then(|window| window.downcast::<Workspace>())
+ {
+ if let Ok(workspace) = window.read(cx) {
+ let project = workspace.project().read(cx);
+ if project.is_local() && !project.is_via_collab() {
+ existing = Some(window);
+ open_visible = OpenVisible::None;
+ return;
+ }
}
- existing = Some(window);
- open_visible = OpenVisible::None;
- break;
}
- }
- })?;
+ for window in local_workspace_windows(cx) {
+ if let Ok(workspace) = window.read(cx) {
+ let project = workspace.project().read(cx);
+ if project.is_via_collab() {
+ continue;
+ }
+ existing = Some(window);
+ open_visible = OpenVisible::None;
+ break;
+ }
+ }
+ })?;
+ }
}
}
@@ -19,7 +19,7 @@ use futures::{
};
use fuzzy::CharBag;
use git::{
- repository::{GitRepository, RepoPath},
+ repository::{Branch, GitRepository, RepoPath},
status::{
FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
},
@@ -201,7 +201,7 @@ pub struct RepositoryEntry {
pub(crate) statuses_by_path: SumTree<StatusEntry>,
work_directory_id: ProjectEntryId,
pub work_directory: WorkDirectory,
- pub(crate) branch: Option<Arc<str>>,
+ pub(crate) branch: Option<Branch>,
pub current_merge_conflicts: TreeSet<RepoPath>,
}
@@ -214,8 +214,8 @@ impl Deref for RepositoryEntry {
}
impl RepositoryEntry {
- pub fn branch(&self) -> Option<Arc<str>> {
- self.branch.clone()
+ pub fn branch(&self) -> Option<&Branch> {
+ self.branch.as_ref()
}
pub fn work_directory_id(&self) -> ProjectEntryId {
@@ -243,7 +243,8 @@ impl RepositoryEntry {
pub fn initial_update(&self) -> proto::RepositoryEntry {
proto::RepositoryEntry {
work_directory_id: self.work_directory_id.to_proto(),
- branch: self.branch.as_ref().map(|branch| branch.to_string()),
+ branch: self.branch.as_ref().map(|branch| branch.name.to_string()),
+ branch_summary: self.branch.as_ref().map(branch_to_proto),
updated_statuses: self
.statuses_by_path
.iter()
@@ -302,7 +303,8 @@ impl RepositoryEntry {
proto::RepositoryEntry {
work_directory_id: self.work_directory_id.to_proto(),
- branch: self.branch.as_ref().map(|branch| branch.to_string()),
+ branch: self.branch.as_ref().map(|branch| branch.name.to_string()),
+ branch_summary: self.branch.as_ref().map(branch_to_proto),
updated_statuses,
removed_statuses,
current_merge_conflicts: self
@@ -314,6 +316,61 @@ impl RepositoryEntry {
}
}
+pub fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
+ proto::Branch {
+ is_head: branch.is_head,
+ name: branch.name.to_string(),
+ unix_timestamp: branch
+ .most_recent_commit
+ .as_ref()
+ .map(|commit| commit.commit_timestamp as u64),
+ upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
+ ref_name: upstream.ref_name.to_string(),
+ tracking: upstream
+ .tracking
+ .as_ref()
+ .map(|upstream| proto::UpstreamTracking {
+ ahead: upstream.ahead as u64,
+ behind: upstream.behind as u64,
+ }),
+ }),
+ most_recent_commit: branch
+ .most_recent_commit
+ .as_ref()
+ .map(|commit| proto::CommitSummary {
+ sha: commit.sha.to_string(),
+ subject: commit.subject.to_string(),
+ commit_timestamp: commit.commit_timestamp,
+ }),
+ }
+}
+
+pub fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
+ git::repository::Branch {
+ is_head: proto.is_head,
+ name: proto.name.clone().into(),
+ upstream: proto
+ .upstream
+ .as_ref()
+ .map(|upstream| git::repository::Upstream {
+ ref_name: upstream.ref_name.to_string().into(),
+ tracking: upstream.tracking.as_ref().map(|tracking| {
+ git::repository::UpstreamTracking {
+ ahead: tracking.ahead as u32,
+ behind: tracking.behind as u32,
+ }
+ }),
+ }),
+ most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
+ git::repository::CommitSummary {
+ sha: commit.sha.to_string().into(),
+ subject: commit.subject.to_string().into(),
+ commit_timestamp: commit.commit_timestamp,
+ }
+ }),
+ }
+}
+
/// This path corresponds to the 'content path' of a repository in relation
/// to Zed's project root.
/// In the majority of the cases, this is the folder that contains the .git folder.
@@ -1335,11 +1392,6 @@ impl LocalWorktree {
&self.fs
}
- pub fn contains_abs_path(&self, path: &Path) -> bool {
- let path = SanitizedPath::from(path);
- path.starts_with(&self.abs_path)
- }
-
pub fn is_path_private(&self, path: &Path) -> bool {
!self.share_private_files && self.settings.is_path_private(path)
}
@@ -2630,7 +2682,7 @@ impl Snapshot {
self.repositories
.update(&PathKey(work_dir_entry.path.clone()), &(), |repo| {
- repo.branch = repository.branch.map(Into::into);
+ repo.branch = repository.branch_summary.as_ref().map(proto_to_branch);
repo.statuses_by_path.edit(edits, &());
repo.current_merge_conflicts = conflicted_paths
});
@@ -2652,7 +2704,7 @@ impl Snapshot {
work_directory: WorkDirectory::InProject {
relative_path: work_dir_entry.path.clone(),
},
- branch: repository.branch.map(Into::into),
+ branch: repository.branch_summary.as_ref().map(proto_to_branch),
statuses_by_path: statuses,
current_merge_conflicts: conflicted_paths,
},
@@ -3454,7 +3506,7 @@ impl BackgroundScannerState {
RepositoryEntry {
work_directory_id: work_dir_id,
work_directory: work_directory.clone(),
- branch: repository.branch_name().map(Into::into),
+ branch: None,
statuses_by_path: Default::default(),
current_merge_conflicts: Default::default(),
},
@@ -4203,6 +4255,7 @@ impl BackgroundScanner {
// the git repository in an ancestor directory. Find any gitignore files
// in ancestor directories.
let root_abs_path = self.state.lock().snapshot.abs_path.clone();
+ let mut containing_git_repository = None;
for (index, ancestor) in root_abs_path.as_path().ancestors().enumerate() {
if index != 0 {
if let Ok(ignore) =
@@ -4232,7 +4285,7 @@ impl BackgroundScanner {
{
// We associate the external git repo with our root folder and
// also mark where in the git repo the root folder is located.
- self.state.lock().insert_git_repository_for_path(
+ let local_repository = self.state.lock().insert_git_repository_for_path(
WorkDirectory::AboveProject {
absolute_path: ancestor.into(),
location_in_repo: root_abs_path
@@ -4241,10 +4294,14 @@ impl BackgroundScanner {
.unwrap()
.into(),
},
- ancestor_dot_git.into(),
+ ancestor_dot_git.clone().into(),
self.fs.as_ref(),
self.watcher.as_ref(),
);
+
+ if local_repository.is_some() {
+ containing_git_repository = Some(ancestor_dot_git)
+ }
};
}
@@ -4290,6 +4347,9 @@ impl BackgroundScanner {
self.process_events(paths.into_iter().map(Into::into).collect())
.await;
}
+ if let Some(abs_path) = containing_git_repository {
+ self.process_events(vec![abs_path]).await;
+ }
// Continue processing events until the worktree is dropped.
self.phase = BackgroundScannerPhase::Events;
@@ -4708,7 +4768,7 @@ impl BackgroundScanner {
);
if let Some(local_repo) = repo {
- self.update_git_statuses(UpdateGitStatusesJob {
+ self.update_git_repository(UpdateGitRepoJob {
local_repository: local_repo,
});
}
@@ -5260,15 +5320,6 @@ impl BackgroundScanner {
if local_repository.git_dir_scan_id == scan_id {
continue;
}
- let Some(work_dir) = state
- .snapshot
- .entry_for_id(local_repository.work_directory_id)
- .map(|entry| entry.path.clone())
- else {
- continue;
- };
-
- let branch = local_repository.repo_ptr.branch_name();
local_repository.repo_ptr.reload_index();
state.snapshot.git_repositories.update(
@@ -5278,17 +5329,12 @@ impl BackgroundScanner {
entry.status_scan_id = scan_id;
},
);
- state.snapshot.snapshot.repositories.update(
- &PathKey(work_dir.clone()),
- &(),
- |entry| entry.branch = branch.map(Into::into),
- );
local_repository
}
};
- repo_updates.push(UpdateGitStatusesJob { local_repository });
+ repo_updates.push(UpdateGitRepoJob { local_repository });
}
// Remove any git repositories whose .git entry no longer exists.
@@ -5324,7 +5370,7 @@ impl BackgroundScanner {
.scoped(|scope| {
scope.spawn(async {
for repo_update in repo_updates {
- self.update_git_statuses(repo_update);
+ self.update_git_repository(repo_update);
}
updates_done_tx.blocking_send(()).ok();
});
@@ -5348,22 +5394,37 @@ impl BackgroundScanner {
.await;
}
- /// Update the git statuses for a given batch of entries.
- fn update_git_statuses(&self, job: UpdateGitStatusesJob) {
+ fn update_branches(&self, job: &UpdateGitRepoJob) -> Result<()> {
+ let branches = job.local_repository.repo().branches()?;
+ let snapshot = self.state.lock().snapshot.snapshot.clone();
+
+ let mut repository = snapshot
+ .repository(job.local_repository.work_directory.path_key())
+ .context("Missing repository")?;
+
+ repository.branch = branches.into_iter().find(|branch| branch.is_head);
+
+ let mut state = self.state.lock();
+ state
+ .snapshot
+ .repositories
+ .insert_or_replace(repository, &());
+
+ Ok(())
+ }
+
+ fn update_statuses(&self, job: &UpdateGitRepoJob) -> Result<()> {
log::trace!(
"updating git statuses for repo {:?}",
job.local_repository.work_directory.display_name()
);
let t0 = Instant::now();
- let Some(statuses) = job
+ let statuses = job
.local_repository
.repo()
- .status(&[git::WORK_DIRECTORY_REPO_PATH.clone()])
- .log_err()
- else {
- return;
- };
+ .status(&[git::WORK_DIRECTORY_REPO_PATH.clone()])?;
+
log::trace!(
"computed git statuses for repo {:?} in {:?}",
job.local_repository.work_directory.display_name(),
@@ -5374,13 +5435,9 @@ impl BackgroundScanner {
let mut changed_paths = Vec::new();
let snapshot = self.state.lock().snapshot.snapshot.clone();
- let Some(mut repository) =
- snapshot.repository(job.local_repository.work_directory.path_key())
- else {
- // happens when a folder is deleted
- log::debug!("Got an UpdateGitStatusesJob for a repository that isn't in the snapshot");
- return;
- };
+ let mut repository = snapshot
+ .repository(job.local_repository.work_directory.path_key())
+ .context("Got an UpdateGitStatusesJob for a repository that isn't in the snapshot")?;
let merge_head_shas = job.local_repository.repo().merge_head_shas();
if merge_head_shas != job.local_repository.current_merge_head_shas {
@@ -5408,6 +5465,7 @@ impl BackgroundScanner {
}
repository.statuses_by_path = new_entries_by_path;
+
let mut state = self.state.lock();
state
.snapshot
@@ -5433,6 +5491,13 @@ impl BackgroundScanner {
job.local_repository.work_directory.display_name(),
t0.elapsed(),
);
+ Ok(())
+ }
+
+ /// Update the git statuses for a given batch of entries.
+ fn update_git_repository(&self, job: UpdateGitRepoJob) {
+ self.update_branches(&job).log_err();
+ self.update_statuses(&job).log_err();
}
fn build_change_set(
@@ -5642,7 +5707,7 @@ struct UpdateIgnoreStatusJob {
scan_queue: Sender<ScanJob>,
}
-struct UpdateGitStatusesJob {
+struct UpdateGitRepoJob {
local_repository: LocalRepositoryEntry,
}
@@ -264,7 +264,13 @@ fn assign_edit_prediction_provider(
}
}
- let zeta = zeta::Zeta::register(worktree, client.clone(), user_store, cx);
+ let zeta = zeta::Zeta::register(
+ Some(cx.entity()),
+ worktree,
+ client.clone(),
+ user_store,
+ cx,
+ );
if let Some(buffer) = &singleton_buffer {
if buffer.read(cx).file().is_some() {
@@ -39,6 +39,7 @@ menu.workspace = true
postage.workspace = true
project.workspace = true
regex.workspace = true
+release_channel.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
@@ -46,6 +47,7 @@ similar.workspace = true
telemetry.workspace = true
telemetry_events.workspace = true
theme.workspace = true
+thiserror.workspace = true
ui.workspace = true
util.workspace = true
uuid.workspace = true
@@ -66,7 +66,7 @@ impl ZedPredictModal {
}
fn view_blog(&mut self, _: &ClickEvent, _: &mut Window, cx: &mut Context<Self>) {
- cx.open_url("https://zed.dev/blog/"); // TODO Add the link when live
+ cx.open_url("https://zed.dev/blog/edit-predictions");
cx.notify();
onboarding_event!("Blog Link clicked");
@@ -272,19 +272,16 @@ impl Render for ZedPredictModal {
)),
));
- let blog_post_button = if cx.is_staff() {
- Some(
+ let blog_post_button = cx
+ .has_flag::<feature_flags::PredictEditsLaunchFeatureFlag>()
+ .then(|| {
Button::new("view-blog", "Read the Blog Post")
.full_width()
.icon(IconName::ArrowUpRight)
.icon_size(IconSize::Indicator)
.icon_color(Color::Muted)
- .on_click(cx.listener(Self::view_blog)),
- )
- } else {
- // TODO: put back when blog post is published
- None
- };
+ .on_click(cx.listener(Self::view_blog))
+ });
if self.user_store.read(cx).current_user().is_some() {
let copy = match self.sign_in_status {
@@ -9,6 +9,7 @@ mod rate_completion_modal;
pub(crate) use completion_diff_element::*;
use db::kvp::KEY_VALUE_STORE;
+use editor::Editor;
pub use init::*;
use inline_completion::DataCollectionState;
pub use license_detection::is_license_eligible_for_data_collection;
@@ -20,10 +21,10 @@ use anyhow::{anyhow, Context as _, Result};
use arrayvec::ArrayVec;
use client::{Client, UserStore};
use collections::{HashMap, HashSet, VecDeque};
-use feature_flags::FeatureFlagAppExt as _;
use futures::AsyncReadExt;
use gpui::{
- actions, App, AppContext as _, AsyncApp, Context, Entity, EntityId, Global, Subscription, Task,
+ actions, App, AppContext as _, AsyncApp, Context, Entity, EntityId, Global, SemanticVersion,
+ Subscription, Task,
};
use http_client::{HttpClient, Method};
use input_excerpt::excerpt_for_cursor_position;
@@ -34,7 +35,9 @@ use language::{
use language_models::LlmApiToken;
use postage::watch;
use project::Project;
+use release_channel::AppVersion;
use settings::WorktreeId;
+use std::str::FromStr;
use std::{
borrow::Cow,
cmp,
@@ -48,10 +51,16 @@ use std::{
time::{Duration, Instant},
};
use telemetry_events::InlineCompletionRating;
+use thiserror::Error;
use util::ResultExt;
use uuid::Uuid;
+use workspace::notifications::{ErrorMessagePrompt, NotificationId};
+use workspace::Workspace;
use worktree::Worktree;
-use zed_llm_client::{PredictEditsBody, PredictEditsResponse, EXPIRED_LLM_TOKEN_HEADER_NAME};
+use zed_llm_client::{
+ PredictEditsBody, PredictEditsResponse, EXPIRED_LLM_TOKEN_HEADER_NAME,
+ MINIMUM_REQUIRED_VERSION_HEADER_NAME,
+};
const CURSOR_MARKER: &'static str = "<|user_cursor_is_here|>";
const START_OF_FILE_MARKER: &'static str = "<|start_of_file|>";
@@ -178,6 +187,7 @@ impl std::fmt::Debug for InlineCompletion {
}
pub struct Zeta {
+ editor: Option<Entity<Editor>>,
client: Arc<Client>,
events: VecDeque<Event>,
registered_buffers: HashMap<gpui::EntityId, RegisteredBuffer>,
@@ -188,6 +198,8 @@ pub struct Zeta {
_llm_token_subscription: Subscription,
/// Whether the terms of service have been accepted.
tos_accepted: bool,
+ /// Whether an update to a newer version of Zed is required to continue using Zeta.
+ update_required: bool,
_user_store_subscription: Subscription,
license_detection_watchers: HashMap<WorktreeId, Rc<LicenseDetectionWatcher>>,
}
@@ -198,13 +210,14 @@ impl Zeta {
}

pub fn register(
+ editor: Option<Entity<Editor>>,
worktree: Option<Entity<Worktree>>,
client: Arc<Client>,
user_store: Entity<UserStore>,
cx: &mut App,
) -> Entity<Self> {
let this = Self::global(cx).unwrap_or_else(|| {
- let entity = cx.new(|cx| Self::new(client, user_store, cx));
+ let entity = cx.new(|cx| Self::new(editor, client, user_store, cx));
cx.set_global(ZetaGlobal(entity.clone()));
entity
});
@@ -226,13 +239,19 @@ impl Zeta {
self.events.clear();
}

- fn new(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut Context<Self>) -> Self {
+ fn new(
+ editor: Option<Entity<Editor>>,
+ client: Arc<Client>,
+ user_store: Entity<UserStore>,
+ cx: &mut Context<Self>,
+ ) -> Self {
let refresh_llm_token_listener = language_models::RefreshLlmTokenListener::global(cx);

let data_collection_choice = Self::load_data_collection_choices();
let data_collection_choice = cx.new(|_| data_collection_choice);

Self {
+ editor,
client,
events: VecDeque::new(),
shown_completions: VecDeque::new(),
@@ -256,6 +275,7 @@ impl Zeta {
.read(cx)
.current_user_has_accepted_terms()
.unwrap_or(false),
+ update_required: false,
_user_store_subscription: cx.subscribe(&user_store, |this, user_store, event, cx| {
match event {
client::user::Event::PrivateUserInfoUpdated => {
@@ -335,8 +355,10 @@ impl Zeta {
}
}

- pub fn request_completion_impl<F, R>(
+ #[allow(clippy::too_many_arguments)]
+ fn request_completion_impl<F, R>(
&mut self,
+ workspace: Option<Entity<Workspace>>,
project: Option<&Entity<Project>>,
buffer: &Entity<Buffer>,
cursor: language::Anchor,
@@ -345,7 +367,7 @@ impl Zeta {
perform_predict_edits: F,
) -> Task<Result<Option<InlineCompletion>>>
where
- F: FnOnce(Arc<Client>, LlmApiToken, bool, PredictEditsBody) -> R + 'static,
+ F: FnOnce(PerformPredictEditsParams) -> R + 'static,
R: Future<Output = Result<PredictEditsResponse>> + Send + 'static,
{
let snapshot = self.report_changes_for_buffer(&buffer, cx);
@@ -358,9 +380,10 @@ impl Zeta {
.map(|f| Arc::from(f.full_path(cx).as_path()))
.unwrap_or_else(|| Arc::from(Path::new("untitled")));

+ let zeta = cx.entity();
let client = self.client.clone();
let llm_token = self.llm_token.clone();
- let is_staff = cx.is_staff();
+ let app_version = AppVersion::global(cx);

let buffer = buffer.clone();

@@ -447,7 +470,46 @@
}),
};

- let response = perform_predict_edits(client, llm_token, is_staff, body).await?;
+ let response = perform_predict_edits(PerformPredictEditsParams {
+ client,
+ llm_token,
+ app_version,
+ body,
+ })
+ .await;
+ let response = match response {
+ Ok(response) => response,
+ Err(err) => {
+ if err.is::<ZedUpdateRequiredError>() {
+ cx.update(|cx| {
+ zeta.update(cx, |zeta, _cx| {
+ zeta.update_required = true;
+ });
+
+ if let Some(workspace) = workspace {
+ workspace.update(cx, |workspace, cx| {
+ workspace.show_notification(
+ NotificationId::unique::<ZedUpdateRequiredError>(),
+ cx,
+ |cx| {
+ cx.new(|_| {
+ ErrorMessagePrompt::new(err.to_string())
+ .with_link_button(
+ "Update Zed",
+ "https://zed.dev/releases",
+ )
+ })
+ },
+ );
+ });
+ }
+ })
+ .ok();
+ }
+
+ return Err(err);
+ }
+ };

log::debug!("completion response: {}", &response.output_excerpt);

@@ -632,7 +694,7 @@ and then another
) -> Task<Result<Option<InlineCompletion>>> {
use std::future::ready;

- self.request_completion_impl(project, buffer, position, false, cx, |_, _, _, _| {
+ self.request_completion_impl(None, project, buffer, position, false, cx, |_params| {
ready(Ok(response))
})
}
@@ -645,7 +707,12 @@ and then another
can_collect_data: bool,
cx: &mut Context<Self>,
) -> Task<Result<Option<InlineCompletion>>> {
+ let workspace = self
+ .editor
+ .as_ref()
+ .and_then(|editor| editor.read(cx).workspace());
self.request_completion_impl(
+ workspace,
project,
buffer,
position,
@@ -656,12 +723,17 @@ and then another
}

fn perform_predict_edits(
- client: Arc<Client>,
- llm_token: LlmApiToken,
- _is_staff: bool,
- body: PredictEditsBody,
+ params: PerformPredictEditsParams,
) -> impl Future<Output = Result<PredictEditsResponse>> {
async move {
+ let PerformPredictEditsParams {
+ client,
+ llm_token,
+ app_version,
+ body,
+ ..
+ } = params;
+
let http_client = client.http_client();
let mut token = llm_token.acquire(&client).await?;
let mut did_retry = false;
@@ -685,6 +757,18 @@

let mut response = http_client.send(request).await?;

+ if let Some(minimum_required_version) = response
+ .headers()
+ .get(MINIMUM_REQUIRED_VERSION_HEADER_NAME)
+ .and_then(|version| SemanticVersion::from_str(version.to_str().ok()?).ok())
+ {
+ if app_version < minimum_required_version {
+ return Err(anyhow!(ZedUpdateRequiredError {
+ minimum_version: minimum_required_version
+ }));
+ }
+ }
+
if response.status().is_success() {
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
@@ -1011,6 +1095,21 @@ and then another
}
}

+struct PerformPredictEditsParams {
+ pub client: Arc<Client>,
+ pub llm_token: LlmApiToken,
+ pub app_version: SemanticVersion,
+ pub body: PredictEditsBody,
+}
+
+#[derive(Error, Debug)]
+#[error(
+ "You must update to Zed version {minimum_version} or higher to continue using edit predictions."
+)]
+pub struct ZedUpdateRequiredError {
+ minimum_version: SemanticVersion,
+}
+
struct LicenseDetectionWatcher {
is_open_source_rx: watch::Receiver<bool>,
_is_open_source_task: Task<()>,
@@ -1406,6 +1505,10 @@ impl inline_completion::EditPredictionProvider for ZetaInlineCompletionProvider
return;
}

+ if self.zeta.read(cx).update_required {
+ return;
+ }
+
if let Some(current_completion) = self.current_completion.as_ref() {
let snapshot = buffer.read(cx).snapshot();
if current_completion
@@ -1837,7 +1940,7 @@ mod tests {
});
let server = FakeServer::for_client(42, &client, cx).await;
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
- let zeta = cx.new(|cx| Zeta::new(client, user_store, cx));
+ let zeta = cx.new(|cx| Zeta::new(None, client, user_store, cx));

let buffer = cx.new(|cx| Buffer::local(buffer_content, cx));
let cursor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(1, 0)));
@@ -1890,7 +1993,7 @@ mod tests {
});
let server = FakeServer::for_client(42, &client, cx).await;
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
- let zeta = cx.new(|cx| Zeta::new(client, user_store, cx));
+ let zeta = cx.new(|cx| Zeta::new(None, client, user_store, cx));

let buffer = cx.new(|cx| Buffer::local(buffer_content, cx));
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
@@ -7,7 +7,7 @@
"danger": "danger"
},
"devDependencies": {
- "danger": "12.3.3",
+ "danger": "12.3.4",
"danger-plugin-pr-hygiene": "0.5.0"
}
}
@@ -9,8 +9,8 @@ importers:
.:
devDependencies:
danger:
- specifier: 12.3.3
- version: 12.3.3
+ specifier: 12.3.4
+ version: 12.3.4
danger-plugin-pr-hygiene:
specifier: 0.5.0
version: 0.5.0
@@ -122,8 +122,8 @@ packages:
danger-plugin-pr-hygiene@0.5.0:
resolution: {integrity: sha512-5z8vImexNVLG0V3LpGMp4RbMoU5Unjn9Na0Dv79gozYqgKJgIlaVRfxGTWxdJP0/TXO8NwDAQYdlwy+vqvfTsg==}

- danger@12.3.3:
- resolution: {integrity: sha512-nZKzpgXN21rr4dwa6bFhM7G2JEa79dZRJiT3RVRSyi4yk1/hgZ2f8HDGoa7tMladTmu8WjJFyE3LpBIihh+aDw==}
+ danger@12.3.4:
+ resolution: {integrity: sha512-esr6iowAryWjWkMzOKyOmMRkamPkDRhC6OAj2tO48i0oobObdP0d8I/YE+qSj9m+/RRcrhaKnysvPL51eW1m3w==}
engines: {node: '>=18'}
hasBin: true

@@ -601,7 +601,7 @@ snapshots:
fp-ts: 2.12.2
io-ts: 2.2.17(fp-ts@2.12.2)

- danger@12.3.3:
+ danger@12.3.4:
dependencies:
'@gitbeaker/rest': 38.12.1
'@octokit/rest': 18.12.0