Merge remote-tracking branch 'origin/main' into new-thread

Created by Danilo Leal

Change summary

.github/workflows/cherry_pick.yml                          |   6 
.github/workflows/compliance_check.yml                     |   5 
.github/workflows/release.yml                              |  12 
assets/settings/default.json                               |   3 
crates/cloud_api_types/src/cloud_api_types.rs              |   1 
crates/collab_ui/src/collab_panel.rs                       |  22 
crates/dev_container/src/docker.rs                         |  38 +
crates/edit_prediction/src/edit_prediction.rs              |   8 
crates/edit_prediction/src/metrics/kept_rate.rs            | 245 +++++--
crates/edit_prediction/src/zed_edit_prediction_delegate.rs |  42 +
crates/edit_prediction_cli/src/example.rs                  |   8 
crates/edit_prediction_cli/src/score.rs                    | 124 +++
crates/edit_prediction_types/src/edit_prediction_types.rs  |   9 
crates/edit_prediction_ui/src/edit_prediction_button.rs    |   2 
crates/editor/src/editor_settings.rs                       |   2 
crates/editor/src/element.rs                               |   5 
crates/git/src/repository.rs                               |   2 
crates/git_ui/src/branch_picker.rs                         |   4 
crates/git_ui/src/git_picker.rs                            |   2 
crates/git_ui/src/git_ui.rs                                |  27 
crates/git_ui/src/project_diff.rs                          |  19 
crates/gpui/src/window.rs                                  |   7 
crates/markdown/src/markdown.rs                            | 167 ++++
crates/markdown/src/parser.rs                              |  80 ++
crates/project/src/git_store.rs                            |  43 +
crates/recent_projects/src/recent_projects.rs              |  27 
crates/remote/src/transport/docker.rs                      |   3 
crates/settings/src/vscode_import.rs                       |   1 
crates/settings_content/src/editor.rs                      |   5 
crates/settings_ui/src/page_data.rs                        |  15 
crates/title_bar/src/title_bar.rs                          |  56 -
crates/workspace/src/workspace.rs                          |  23 
crates/zed/src/zed.rs                                      |  60 +
docs/src/reference/all-settings.md                         |  10 
tooling/xtask/src/tasks/workflows/cherry_pick.rs           |  12 
tooling/xtask/src/tasks/workflows/compliance_check.rs      |  43 
tooling/xtask/src/tasks/workflows/release.rs               | 139 +++-
tooling/xtask/src/tasks/workflows/vars.rs                  |   2 
38 files changed, 939 insertions(+), 340 deletions(-)

Detailed changes

.github/workflows/cherry_pick.yml 🔗

@@ -44,8 +44,10 @@ jobs:
         BRANCH: ${{ inputs.branch }}
         COMMIT: ${{ inputs.commit }}
         CHANNEL: ${{ inputs.channel }}
-        GIT_COMMITTER_NAME: Zed Zippy
-        GIT_COMMITTER_EMAIL: hi@zed.dev
+        GIT_AUTHOR_NAME: zed-zippy[bot]
+        GIT_AUTHOR_EMAIL: <234243425+zed-zippy[bot]@users.noreply.github.com>
+        GIT_COMMITTER_NAME: zed-zippy[bot]
+        GIT_COMMITTER_EMAIL: <234243425+zed-zippy[bot]@users.noreply.github.com>
         GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
 defaults:
   run:

.github/workflows/compliance_check.yml 🔗

@@ -34,13 +34,14 @@ jobs:
         echo "Checking compliance for $TAG"
         echo "tag=$TAG" >> "$GITHUB_OUTPUT"
     - id: run-compliance-check
-      name: compliance_check::scheduled_compliance_check::run_compliance_check
+      name: release::add_compliance_steps::run_compliance_check
       run: |
         cargo xtask compliance "$LATEST_TAG" --branch main --report-path "compliance-report-${GITHUB_REF_NAME}.md"
       env:
-        LATEST_TAG: ${{ steps.determine-version.outputs.tag }}
         GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }}
         GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
+        LATEST_TAG: ${{ steps.determine-version.outputs.tag }}
+      continue-on-error: true
     - name: '@actions/upload-artifact compliance-report-${GITHUB_REF_NAME}.md'
       if: always()
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4

.github/workflows/release.yml 🔗

@@ -307,7 +307,7 @@ jobs:
         cache: rust
         path: ~/.rustup
     - id: run-compliance-check
-      name: release::run_compliance_check
+      name: release::add_compliance_steps::run_compliance_check
       run: |
         cargo xtask compliance "$GITHUB_REF_NAME" --report-path "compliance-report-${GITHUB_REF_NAME}.md"
       env:
@@ -328,7 +328,7 @@ jobs:
             STATUS="✅ Compliance check passed for $COMPLIANCE_TAG"
             MESSAGE=$(printf "%s\n\nReport: %s" "$STATUS" "$ARTIFACT_URL")
         else
-            STATUS="❌ Compliance check failed for $COMPLIANCE_TAG"
+            STATUS="❌ Preliminary compliance check failed (but this can still be fixed while the builds are running!) for $COMPLIANCE_TAG"
             MESSAGE=$(printf "%s\n\nReport: %s\nPRs needing review: %s" "$STATUS" "$ARTIFACT_URL" "https://github.com/zed-industries/zed/pulls?q=is%3Apr+is%3Aclosed+label%3A%22PR+state%3Aneeds+review%22")
         fi
 
@@ -340,6 +340,8 @@ jobs:
         COMPLIANCE_OUTCOME: ${{ steps.run-compliance-check.outcome }}
         COMPLIANCE_TAG: ${{ github.ref_name }}
         ARTIFACT_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}#artifacts
+    outputs:
+      outcome: ${{ steps.run-compliance-check.outputs.outcome }}
     timeout-minutes: 60
   bundle_linux_aarch64:
     needs:
@@ -641,6 +643,7 @@ jobs:
   validate_release_assets:
     needs:
     - upload_release_assets
+    - compliance_check
     runs-on: namespace-profile-2x4-ubuntu-2404
     steps:
     - name: release::validate_release_assets
@@ -673,13 +676,12 @@ jobs:
         cache: rust
         path: ~/.rustup
     - id: run-compliance-check
-      name: release::run_compliance_check
+      name: release::add_compliance_steps::run_compliance_check
       run: |
         cargo xtask compliance "$GITHUB_REF_NAME" --report-path "compliance-report-${GITHUB_REF_NAME}.md"
       env:
         GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }}
         GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
-      continue-on-error: true
     - name: '@actions/upload-artifact compliance-report-${GITHUB_REF_NAME}.md'
       if: always()
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
@@ -689,7 +691,7 @@ jobs:
         if-no-files-found: error
         overwrite: true
     - name: send_compliance_slack_notification
-      if: always()
+      if: failure() || needs.compliance_check.outputs.outcome != 'success'
       run: |
         if [ "$COMPLIANCE_OUTCOME" == "success" ]; then
             STATUS="✅ Compliance check passed for $COMPLIANCE_TAG"

assets/settings/default.json 🔗

@@ -636,6 +636,9 @@
   // Scroll sensitivity multiplier. This multiplier is applied
   // to both the horizontal and vertical delta values while scrolling.
   "scroll_sensitivity": 1.0,
+  // Whether to zoom the editor font size with the mouse wheel
+  // while holding the primary modifier key (Cmd on macOS, Ctrl on other platforms).
+  "mouse_wheel_zoom": false,
   // Scroll sensitivity multiplier for fast scrolling. This multiplier is applied
   // to both the horizontal and vertical delta values while scrolling. Fast scrolling
   // happens when a user holds the alt or option key while scrolling.

crates/cloud_api_types/src/cloud_api_types.rs 🔗

@@ -56,6 +56,7 @@ pub struct Organization {
 pub struct OrganizationConfiguration {
     pub is_zed_model_provider_enabled: bool,
     pub is_agent_thread_feedback_enabled: bool,
+    pub is_collaboration_enabled: bool,
     pub edit_prediction: OrganizationEditPredictionConfiguration,
 }
 

crates/collab_ui/src/collab_panel.rs 🔗

@@ -2620,6 +2620,18 @@ impl CollabPanel {
         cx.write_to_clipboard(item)
     }
 
+    fn render_disabled_by_organization(&mut self, _cx: &mut Context<Self>) -> Div {
+        v_flex()
+            .p_4()
+            .gap_4()
+            .size_full()
+            .text_center()
+            .justify_center()
+            .child(Label::new(
+                "Collaboration is disabled for this organization.",
+            ))
+    }
+
     fn render_signed_out(&mut self, cx: &mut Context<Self>) -> Div {
         let collab_blurb = "Work with your team in realtime with collaborative editing, voice, shared notes and more.";
 
@@ -3645,6 +3657,12 @@ impl Render for CollabPanel {
     fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
         let status = *self.client.status().borrow();
 
+        let is_collaboration_disabled = self
+            .user_store
+            .read(cx)
+            .current_organization_configuration()
+            .is_some_and(|config| !config.is_collaboration_enabled);
+
         v_flex()
             .key_context(self.dispatch_context(window, cx))
             .on_action(cx.listener(CollabPanel::cancel))
@@ -3664,7 +3682,9 @@ impl Render for CollabPanel {
             .on_action(cx.listener(CollabPanel::move_channel_down))
             .track_focus(&self.focus_handle)
             .size_full()
-            .child(if !status.is_or_was_connected() || status.is_signing_in() {
+            .child(if is_collaboration_disabled {
+                self.render_disabled_by_organization(cx)
+            } else if !status.is_or_was_connected() || status.is_signing_in() {
                 self.render_signed_out(cx)
             } else {
                 self.render_signed_in(window, cx)

crates/dev_container/src/docker.rs 🔗

@@ -487,10 +487,18 @@ where
     let s: Option<String> = Option::deserialize(deserializer)?;
     match s {
         Some(json_string) => {
+            // The devcontainer metadata label can be either a JSON array (e.g. from
+            // image-based devcontainers) or a single JSON object (e.g. from
+            // docker-compose-based devcontainers created by the devcontainer CLI).
+            // Handle both formats.
             let parsed: Vec<HashMap<String, serde_json_lenient::Value>> =
-                serde_json_lenient::from_str(&json_string).map_err(|e| {
-                    log::error!("Error deserializing metadata: {e}");
-                    serde::de::Error::custom(e)
+                serde_json_lenient::from_str(&json_string).or_else(|_| {
+                    let single: HashMap<String, serde_json_lenient::Value> =
+                        serde_json_lenient::from_str(&json_string).map_err(|e| {
+                            log::error!("Error deserializing metadata: {e}");
+                            serde::de::Error::custom(e)
+                        })?;
+                    Ok(vec![single])
                 })?;
             Ok(Some(parsed))
         }
@@ -936,6 +944,30 @@ mod test {
         assert_eq!(target_dir.unwrap(), "/workspaces/cli/".to_string());
     }
 
+    #[test]
+    fn should_deserialize_object_metadata_from_docker_compose_container() {
+        // The devcontainer CLI writes metadata as a bare JSON object (not an array)
+        // when there is only one metadata entry (e.g. docker-compose with no features).
+        // See https://github.com/devcontainers/cli/issues/1054
+        let given_config = r#"
+    {
+      "Id": "dc4e7b8ff4bf",
+      "Config": {
+        "Labels": {
+          "devcontainer.metadata": "{\"remoteUser\":\"ubuntu\"}"
+        }
+      }
+    }
+                "#;
+        let config = serde_json_lenient::from_str::<DockerInspect>(given_config).unwrap();
+
+        assert!(config.config.labels.metadata.is_some());
+        let metadata = config.config.labels.metadata.unwrap();
+        assert_eq!(metadata.len(), 1);
+        assert!(metadata[0].contains_key("remoteUser"));
+        assert_eq!(metadata[0]["remoteUser"], "ubuntu");
+    }
+
     #[test]
     fn should_deserialize_docker_compose_config() {
         let given_config = r#"

crates/edit_prediction/src/edit_prediction.rs 🔗

@@ -1690,12 +1690,16 @@ impl EditPredictionStore {
                     settled_editable_region,
                     ts_error_count_before_prediction,
                     ts_error_count_after_prediction,
-                    edit_bytes_predicted_new = kept_rate_result.predicted_new_chars,
-                    edit_bytes_final_new = kept_rate_result.final_new_chars,
+                    edit_bytes_candidate_new = kept_rate_result.candidate_new_chars,
+                    edit_bytes_reference_new = kept_rate_result.reference_new_chars,
+                    edit_bytes_candidate_deleted = kept_rate_result.candidate_deleted_chars,
+                    edit_bytes_reference_deleted = kept_rate_result.reference_deleted_chars,
                     edit_bytes_kept = kept_rate_result.kept_chars,
+                    edit_bytes_correctly_deleted = kept_rate_result.correctly_deleted_chars,
                     edit_bytes_discarded = kept_rate_result.discarded_chars,
                     edit_bytes_context = kept_rate_result.context_chars,
                     edit_bytes_kept_rate = kept_rate_result.kept_rate,
+                    edit_bytes_recall_rate = kept_rate_result.recall_rate,
                     example,
                     e2e_latency = e2e_latency.as_millis(),
                 );

crates/edit_prediction/src/metrics/kept_rate.rs 🔗

@@ -13,12 +13,33 @@ pub enum TokenAnnotation {
 #[allow(dead_code)]
 #[derive(Debug, Clone)]
 pub struct KeptRateResult {
-    pub predicted_new_chars: usize,
-    pub final_new_chars: usize,
+    /// Characters newly introduced by the candidate
+    pub candidate_new_chars: usize,
+    /// Characters newly introduced by the reference
+    pub reference_new_chars: usize,
+    /// Characters from `base` that are deleted by the candidate.
+    pub candidate_deleted_chars: usize,
+    /// Characters from `base` that are deleted by the reference.
+    pub reference_deleted_chars: usize,
+    /// Candidate new characters that are also present in the reference.
     pub kept_chars: usize,
+    /// Base characters deleted by both the candidate and the reference.
+    pub correctly_deleted_chars: usize,
+    /// Candidate new characters that are not kept in the reference.
     pub discarded_chars: usize,
+    /// Candidate characters treated as unchanged context
     pub context_chars: usize,
+    /// Fraction of candidate edit characters that match the reference edit.
+    ///
+    /// This includes both kept newly introduced characters and correctly
+    /// deleted base characters.
     pub kept_rate: f64,
+    /// Fraction of reference edit characters covered by the candidate edit.
+    ///
+    /// This includes both kept newly introduced characters and correctly
+    /// deleted base characters.
+    pub recall_rate: f64,
+    /// Per-token classification for candidate tokens used by tests.
     #[cfg(test)]
     pub token_annotations: Vec<TokenAnnotation>,
 }
@@ -188,89 +209,127 @@ fn analyze_masked_tokens<'a>(tokens: &[&'a str], mask: &[bool]) -> (Vec<&'a str>
     (unmasked_tokens, unmasked_chars, masked_chars)
 }
 
-fn should_bail_for_dirty_final(base: &str, predicted: &str, final_text: &str) -> bool {
-    let predicted_delta_chars = predicted.len().abs_diff(base.len());
-    let final_delta_chars = final_text.len().abs_diff(base.len());
-    predicted_delta_chars.abs_diff(final_delta_chars) > MAX_DIRTY_LENGTH_DELTA_CHARS
+fn count_unmasked_chars(tokens: &[&str], mask: &[bool]) -> usize {
+    tokens
+        .iter()
+        .zip(mask.iter())
+        .filter_map(|(&token, &is_masked)| (!is_masked).then_some(token.len()))
+        .sum()
+}
+
+fn should_bail_for_dirty_final(base: &str, candidate: &str, reference: &str) -> bool {
+    let candidate_delta_chars = candidate.len().abs_diff(base.len());
+    let reference_delta_chars = reference.len().abs_diff(base.len());
+    candidate_delta_chars.abs_diff(reference_delta_chars) > MAX_DIRTY_LENGTH_DELTA_CHARS
 }
 
-pub fn compute_kept_rate(base: &str, predicted: &str, final_text: &str) -> KeptRateResult {
-    if base == predicted && predicted == final_text {
-        let predicted_tokens = tokenize(predicted);
-        let context_chars = predicted_tokens.iter().map(|token| token.len()).sum();
+pub fn compute_kept_rate(base: &str, candidate: &str, reference: &str) -> KeptRateResult {
+    if base == candidate && candidate == reference {
+        let candidate_tokens = tokenize(candidate);
+        let context_chars = candidate_tokens.iter().map(|token| token.len()).sum();
         return KeptRateResult {
-            predicted_new_chars: 0,
-            final_new_chars: 0,
+            candidate_new_chars: 0,
+            reference_new_chars: 0,
+            candidate_deleted_chars: 0,
+            reference_deleted_chars: 0,
             kept_chars: 0,
+            correctly_deleted_chars: 0,
             discarded_chars: 0,
             context_chars,
             kept_rate: 1.0,
+            recall_rate: 1.0,
             #[cfg(test)]
-            token_annotations: vec![TokenAnnotation::Context; predicted_tokens.len()],
+            token_annotations: vec![TokenAnnotation::Context; candidate_tokens.len()],
         };
     }
 
-    if should_bail_for_dirty_final(base, predicted, final_text) {
-        let predicted_new_chars = predicted.len().abs_diff(base.len());
-        let final_new_chars = final_text.len().abs_diff(base.len());
+    if should_bail_for_dirty_final(base, candidate, reference) {
+        let candidate_new_chars = candidate.len().abs_diff(base.len());
+        let reference_new_chars = reference.len().abs_diff(base.len());
         return KeptRateResult {
-            predicted_new_chars,
-            final_new_chars,
+            candidate_new_chars,
+            reference_new_chars,
+            candidate_deleted_chars: 0,
+            reference_deleted_chars: 0,
             kept_chars: 0,
-            discarded_chars: predicted_new_chars,
+            correctly_deleted_chars: 0,
+            discarded_chars: candidate_new_chars,
             context_chars: 0,
             kept_rate: 0.0,
+            recall_rate: 0.0,
             #[cfg(test)]
-            token_annotations: vec![TokenAnnotation::Discarded; tokenize(predicted).len()],
+            token_annotations: vec![TokenAnnotation::Discarded; tokenize(candidate).len()],
         };
     }
 
     let base_tokens = tokenize(base);
-    let predicted_tokens = tokenize(predicted);
-    let final_tokens = tokenize(final_text);
-
-    let pred_base_mask = lcs_keep_mask(&predicted_tokens, &base_tokens);
-    let (pred_final_mask, final_pred_mask) = lcs_keep_masks(&predicted_tokens, &final_tokens);
-    let context_mask: Vec<bool> = pred_base_mask
+    let candidate_tokens = tokenize(candidate);
+    let reference_tokens = tokenize(reference);
+
+    let (candidate_base_mask, base_candidate_mask) =
+        lcs_keep_masks(&candidate_tokens, &base_tokens);
+    let (candidate_reference_mask, reference_candidate_mask) =
+        lcs_keep_masks(&candidate_tokens, &reference_tokens);
+    let context_mask: Vec<bool> = candidate_base_mask
         .iter()
-        .zip(pred_final_mask.iter())
-        .map(|(&in_base, &in_final)| in_base && in_final)
+        .zip(candidate_reference_mask.iter())
+        .map(|(&in_base, &in_reference)| in_base && in_reference)
         .collect();
 
-    let (stripped_predicted, predicted_new_chars, context_chars) =
-        analyze_masked_tokens(&predicted_tokens, &context_mask);
+    let (stripped_candidate, candidate_new_chars, context_chars) =
+        analyze_masked_tokens(&candidate_tokens, &context_mask);
 
-    let final_base_mask = lcs_keep_mask(&final_tokens, &base_tokens);
-    let final_context_mask: Vec<bool> = final_base_mask
+    let (reference_base_mask, base_reference_mask) =
+        lcs_keep_masks(&reference_tokens, &base_tokens);
+    let reference_context_mask: Vec<bool> = reference_base_mask
         .iter()
-        .zip(final_pred_mask.iter())
-        .map(|(&in_base, &in_predicted)| in_base && in_predicted)
+        .zip(reference_candidate_mask.iter())
+        .map(|(&in_base, &in_candidate)| in_base && in_candidate)
         .collect();
 
-    let (stripped_final, final_new_chars, _) =
-        analyze_masked_tokens(&final_tokens, &final_context_mask);
+    let (stripped_reference, reference_new_chars, _) =
+        analyze_masked_tokens(&reference_tokens, &reference_context_mask);
 
-    let keep_mask = lcs_keep_mask(&stripped_predicted, &stripped_final);
+    let keep_mask = lcs_keep_mask(&stripped_candidate, &stripped_reference);
 
-    let kept_chars: usize = stripped_predicted
+    let kept_chars: usize = stripped_candidate
         .iter()
         .zip(keep_mask.iter())
         .filter_map(|(&token, &is_kept)| is_kept.then_some(token.len()))
         .sum();
 
-    let discarded_chars = predicted_new_chars - kept_chars;
+    let candidate_deleted_chars = count_unmasked_chars(&base_tokens, &base_candidate_mask);
+    let reference_deleted_chars = count_unmasked_chars(&base_tokens, &base_reference_mask);
+    let correctly_deleted_chars: usize = base_tokens
+        .iter()
+        .zip(base_candidate_mask.iter().zip(base_reference_mask.iter()))
+        .filter_map(|(&token, (&in_candidate, &in_reference))| {
+            (!in_candidate && !in_reference).then_some(token.len())
+        })
+        .sum();
+
+    let discarded_chars = candidate_new_chars - kept_chars;
+    let matched_edit_chars = kept_chars + correctly_deleted_chars;
+    let candidate_edit_chars = candidate_new_chars + candidate_deleted_chars;
+    let reference_edit_chars = reference_new_chars + reference_deleted_chars;
 
-    let kept_rate = if predicted_new_chars == 0 {
-        if final_new_chars == 0 { 1.0 } else { 0.0 }
+    let kept_rate = if candidate_edit_chars == 0 {
+        if reference_edit_chars == 0 { 1.0 } else { 0.0 }
     } else {
-        kept_chars as f64 / predicted_new_chars as f64
+        matched_edit_chars as f64 / candidate_edit_chars as f64
+    };
+
+    let recall_rate = if reference_edit_chars == 0 {
+        if candidate_edit_chars == 0 { 1.0 } else { 0.0 }
+    } else {
+        matched_edit_chars as f64 / reference_edit_chars as f64
     };
 
     #[cfg(test)]
     let token_annotations = {
-        let mut token_annotations = Vec::with_capacity(predicted_tokens.len());
+        let mut token_annotations = Vec::with_capacity(candidate_tokens.len());
         let mut new_index = 0;
-        for (token_index, _token) in predicted_tokens.iter().enumerate() {
+        for (token_index, _token) in candidate_tokens.iter().enumerate() {
             if context_mask[token_index] {
                 token_annotations.push(TokenAnnotation::Context);
             } else {
@@ -288,12 +347,16 @@ pub fn compute_kept_rate(base: &str, predicted: &str, final_text: &str) -> KeptR
     };
 
     KeptRateResult {
-        predicted_new_chars,
-        final_new_chars,
+        candidate_new_chars,
+        reference_new_chars,
+        candidate_deleted_chars,
+        reference_deleted_chars,
         kept_chars,
+        correctly_deleted_chars,
         discarded_chars,
         context_chars,
         kept_rate,
+        recall_rate,
         #[cfg(test)]
         token_annotations,
     }
@@ -327,7 +390,8 @@ mod test_kept_rate {
     fn test_rate_extremes() {
         let no_change = compute_kept_rate("foo bar", "foo bar", "foo bar");
         assert!((no_change.kept_rate - 1.0).abs() < 1e-6);
-        assert_eq!(no_change.predicted_new_chars, 0);
+        assert!((no_change.recall_rate - 1.0).abs() < 1e-6);
+        assert_eq!(no_change.candidate_new_chars, 0);
         assert!(
             no_change
                 .token_annotations
@@ -337,15 +401,17 @@ mod test_kept_rate {
 
         let accepted = compute_kept_rate("old", "new", "new");
         assert!((accepted.kept_rate - 1.0).abs() < 1e-6);
+        assert!((accepted.recall_rate - 1.0).abs() < 1e-6);
 
         let discarded = compute_kept_rate("old", "old", "new");
         assert!((discarded.kept_rate - 0.0).abs() < 1e-6);
+        assert!((discarded.recall_rate - 0.0).abs() < 1e-6);
     }
 
     #[test]
     fn test_pure_addition() {
         let kept = compute_kept_rate("", "brand new line\n", "brand new line\n");
-        assert_eq!(kept.kept_chars, kept.predicted_new_chars);
+        assert_eq!(kept.kept_chars, kept.candidate_new_chars);
         assert!(
             kept.token_annotations
                 .iter()
@@ -354,26 +420,28 @@ mod test_kept_rate {
 
         let discarded =
             compute_kept_rate("", "brand new line\n", "something completely different\n");
-        assert!(discarded.kept_chars < discarded.predicted_new_chars);
+        assert!(discarded.kept_chars < discarded.candidate_new_chars);
     }
 
     #[test]
     fn test_decoy_when_base_excluded() {
         let base = "    decoy.when(mock_sync_hardware_api.sp()).then_return(SpeedStatus.IDLE)\n";
-        let predicted = "    decoy.when(mock_sync_module_hardware.speed_status).then_return(SpeedStatus.IDLE)\n";
-        let final_text = "    decoy.when(mock_sync_module_hardware.speed_status).then_return(SpeedStatus.IDLE)\n";
-        let result = compute_kept_rate(base, predicted, final_text);
+        let candidate = "    decoy.when(mock_sync_module_hardware.speed_status).then_return(SpeedStatus.IDLE)\n";
+        let reference = "    decoy.when(mock_sync_module_hardware.speed_status).then_return(SpeedStatus.IDLE)\n";
+        let result = compute_kept_rate(base, candidate, reference);
         let expected_new = "mock_sync_module_hardware".len() + "speed_status".len();
-        assert_eq!(result.predicted_new_chars, expected_new);
+        assert_eq!(result.candidate_new_chars, expected_new);
+        assert!(result.correctly_deleted_chars > 0);
         assert!((result.kept_rate - 1.0).abs() < 1e-6);
+        assert!((result.recall_rate - 1.0).abs() < 1e-6);
     }
 
     #[test]
     fn test_missing_deletion() {
         let base = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        epr\n";
-        let predicted = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        epr\neprintln!(\"\");\n";
-        let final_text = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        eprintln!(\"\");\n";
-        let result = compute_kept_rate(base, predicted, final_text);
+        let candidate = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        epr\neprintln!(\"\");\n";
+        let reference = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        eprintln!(\"\");\n";
+        let result = compute_kept_rate(base, candidate, reference);
         assert!(
             result.kept_rate < 0.85,
             "expected kept_rate < 0.85, got {}",
@@ -385,7 +453,12 @@ mod test_kept_rate {
     #[test]
     fn test_empty_prediction() {
         let result = compute_kept_rate("old line\n", "", "new line\n");
-        assert!((result.kept_rate - 0.0).abs() < 1e-6);
+        assert_eq!(result.candidate_new_chars, 0);
+        assert!(result.candidate_deleted_chars > 0);
+        assert!(result.correctly_deleted_chars > 0);
+        assert!(result.correctly_deleted_chars < result.candidate_deleted_chars);
+        assert!(result.kept_rate > 0.0 && result.kept_rate < 1.0);
+        assert!(result.recall_rate > 0.0 && result.recall_rate < 1.0);
     }
 
     #[test]
@@ -399,24 +472,25 @@ mod test_kept_rate {
     #[test]
     fn test_bails_for_dirty_final() {
         let base = "fn example() {\n    work();\n}\n";
-        let predicted = "fn example() {\n    work();\n    predicted();\n}\n";
-        let final_text = format!(
+        let candidate = "fn example() {\n    work();\n    predicted();\n}\n";
+        let reference = format!(
             "fn example() {{\n    work();\n    {}\n}}\n",
             "settled();\n    ".repeat(MAX_DIRTY_LENGTH_DELTA_CHARS / 8 + 64)
         );
 
-        let result = compute_kept_rate(base, predicted, &final_text);
+        let result = compute_kept_rate(base, candidate, &reference);
         assert_eq!(result.kept_rate, 0.0);
+        assert_eq!(result.recall_rate, 0.0);
         assert_eq!(result.kept_chars, 0);
-        assert_eq!(result.discarded_chars, result.predicted_new_chars);
+        assert_eq!(result.discarded_chars, result.candidate_new_chars);
     }
 
     #[test]
     fn test_eprintln_token_alignment() {
         let base = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        epr\n";
-        let predicted = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        eprintln!(\"hello world!\");\n";
-        let final_text = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        eprintln!(\"\");\n";
-        let result = compute_kept_rate(base, predicted, final_text);
+        let candidate = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        eprintln!(\"hello world!\");\n";
+        let reference = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        eprintln!(\"\");\n";
+        let result = compute_kept_rate(base, candidate, reference);
         assert!(result.discarded_chars > 0);
         assert!(result.kept_chars > 0);
         assert!(result.kept_rate > 0.0 && result.kept_rate < 1.0);
@@ -427,14 +501,18 @@ mod test_kept_rate {
     #[test]
     fn test_annotations_rename() {
         let base = "    foo(old_name)\n";
-        let predicted = "    foo(new_name)\n";
-        let final_text = "    foo(new_name)\n";
-        let result = compute_kept_rate(base, predicted, final_text);
-
-        assert_eq!(result.predicted_new_chars, "new_name".len());
-        assert_eq!(result.token_annotations.len(), tokenize(predicted).len());
-
-        for (&token, &annotation) in tokenize(predicted).iter().zip(&result.token_annotations) {
+        let candidate = "    foo(new_name)\n";
+        let reference = "    foo(new_name)\n";
+        let result = compute_kept_rate(base, candidate, reference);
+
+        assert_eq!(result.candidate_new_chars, "new_name".len());
+        assert_eq!(result.candidate_deleted_chars, "old_name".len());
+        assert_eq!(result.reference_deleted_chars, "old_name".len());
+        assert_eq!(result.correctly_deleted_chars, "old_name".len());
+        assert!((result.recall_rate - 1.0).abs() < 1e-6);
+        assert_eq!(result.token_annotations.len(), tokenize(candidate).len());
+
+        for (&token, &annotation) in tokenize(candidate).iter().zip(&result.token_annotations) {
             if token == "new_name" {
                 assert_eq!(annotation, TokenAnnotation::Kept);
             } else {
@@ -446,12 +524,12 @@ mod test_kept_rate {
     #[test]
     fn test_annotations_eprintln_coloring() {
         let base = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        epr\n";
-        let predicted = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        eprintln!(\"hello world!\");\n";
-        let final_text = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        eprintln!(\"\");\n";
-        let result = compute_kept_rate(base, predicted, final_text);
-        let predicted_tokens = tokenize(predicted);
+        let candidate = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        eprintln!(\"hello world!\");\n";
+        let reference = "    fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n        eprintln!(\"\");\n";
+        let result = compute_kept_rate(base, candidate, reference);
+        let candidate_tokens = tokenize(candidate);
 
-        let eprintln_index = predicted_tokens
+        let eprintln_index = candidate_tokens
             .iter()
             .position(|&token| token == "eprintln")
             .expect("eprintln token not found");
@@ -485,12 +563,15 @@ mod test_kept_rate {
     #[test]
     fn test_repetitive_tokens_remain_discarded() {
         let base = "foo + foo + foo + foo + foo\n".repeat(16);
-        let predicted = "foo + foo + prediction_token + foo + foo\n".repeat(16);
-        let final_text = "foo + foo + kept_token + foo + foo\n".repeat(16);
-        let result = compute_kept_rate(&base, &predicted, &final_text);
+        let candidate = "foo + foo + prediction_token + foo + foo\n".repeat(16);
+        let reference = "foo + foo + kept_token + foo + foo\n".repeat(16);
+        let result = compute_kept_rate(&base, &candidate, &reference);
 
         assert_eq!(result.kept_chars, 0);
-        assert_eq!(result.discarded_chars, result.predicted_new_chars);
-        assert_eq!(result.predicted_new_chars, "prediction_token".len() * 16);
+        assert_eq!(result.correctly_deleted_chars, "foo".len() * 16);
+        assert_eq!(result.discarded_chars, result.candidate_new_chars);
+        assert_eq!(result.candidate_new_chars, "prediction_token".len() * 16);
+        assert!(result.kept_rate > 0.0);
+        assert!(result.recall_rate > 0.0);
     }
 }

crates/edit_prediction/src/zed_edit_prediction_delegate.rs 🔗

@@ -6,6 +6,7 @@ use edit_prediction_types::{
     DataCollectionState, EditPredictionDelegate, EditPredictionDiscardReason,
     EditPredictionIconSet, SuggestionDisplayType,
 };
+use feature_flags::FeatureFlagAppExt;
 use gpui::{App, Entity, prelude::*};
 use language::{Buffer, ToPoint as _};
 use project::Project;
@@ -73,6 +74,24 @@ impl EditPredictionDelegate for ZedEditPredictionDelegate {
                 self.store
                     .read(cx)
                     .is_file_open_source(&self.project, file, cx);
+
+            if let Some(organization_configuration) = self
+                .store
+                .read(cx)
+                .user_store
+                .read(cx)
+                .current_organization_configuration()
+            {
+                if !organization_configuration
+                    .edit_prediction
+                    .is_feedback_enabled
+                {
+                    return DataCollectionState::Disabled {
+                        is_project_open_source,
+                    };
+                }
+            }
+
             if self.store.read(cx).data_collection_choice.is_enabled(cx) {
                 DataCollectionState::Enabled {
                     is_project_open_source,
@@ -89,6 +108,29 @@ impl EditPredictionDelegate for ZedEditPredictionDelegate {
         }
     }
 
+    fn can_toggle_data_collection(&self, cx: &App) -> bool {
+        if cx.is_staff() {
+            return false;
+        }
+
+        if let Some(organization_configuration) = self
+            .store
+            .read(cx)
+            .user_store
+            .read(cx)
+            .current_organization_configuration()
+        {
+            if !organization_configuration
+                .edit_prediction
+                .is_feedback_enabled
+            {
+                return false;
+            }
+        }
+
+        true
+    }
+
     fn toggle_data_collection(&mut self, cx: &mut App) {
         self.store.update(cx, |store, cx| {
             store.toggle_data_collection_choice(cx);

crates/edit_prediction_cli/src/example.rs 🔗

@@ -187,6 +187,14 @@ pub struct ExampleScore {
     #[serde(default, skip_serializing_if = "Option::is_none")]
     pub kept_rate: Option<f64>,
     #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub recall_rate: Option<f64>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub kept_chars: Option<usize>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub correctly_deleted_chars: Option<usize>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub discarded_chars: Option<usize>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
     pub cumulative_logprob: Option<f64>,
     #[serde(default, skip_serializing_if = "Option::is_none")]
     pub avg_logprob: Option<f64>,

crates/edit_prediction_cli/src/score.rs 🔗

@@ -85,6 +85,10 @@ pub async fn run_scoring(
         inserted_tokens: 0,
         deleted_tokens: 0,
         kept_rate: None,
+        recall_rate: None,
+        kept_chars: None,
+        correctly_deleted_chars: None,
+        discarded_chars: None,
         cumulative_logprob: None,
         avg_logprob: None,
     };
@@ -187,9 +191,20 @@ pub async fn run_scoring(
             prediction.actual_cursor.as_ref(),
         );
 
-        let kept_rate = best_expected_text.map(|final_text| {
-            metrics::compute_kept_rate(original_text, &actual_text, final_text).kept_rate
-        });
+        let (kept_rate, recall_rate, kept_chars, correctly_deleted_chars, discarded_chars) =
+            best_expected_text
+                .map(|reference_text| {
+                    let result =
+                        metrics::compute_kept_rate(original_text, &actual_text, reference_text);
+                    (
+                        Some(result.kept_rate),
+                        Some(result.recall_rate),
+                        Some(result.kept_chars),
+                        Some(result.correctly_deleted_chars),
+                        Some(result.discarded_chars),
+                    )
+                })
+                .unwrap_or((None, None, None, None, None));
 
         scores.push(ExampleScore {
             delta_chr_f: best_delta_chr_f_metrics.score as f32,
@@ -211,6 +226,10 @@ pub async fn run_scoring(
             inserted_tokens: token_changes.inserted_tokens,
             deleted_tokens: token_changes.deleted_tokens,
             kept_rate,
+            recall_rate,
+            kept_chars,
+            correctly_deleted_chars,
+            discarded_chars,
             cumulative_logprob: prediction.cumulative_logprob,
             avg_logprob: prediction.avg_logprob,
         });
@@ -277,6 +296,11 @@ pub fn print_report(examples: &[Example], verbose: bool) {
     let mut isolated_whitespace_count: usize = 0;
     let mut kept_rate_sum: f64 = 0.0;
     let mut kept_rate_count: usize = 0;
+    let mut kept_chars_total: usize = 0;
+    let mut correctly_deleted_chars_total: usize = 0;
+    let mut discarded_chars_total: usize = 0;
+    let mut recall_rate_sum: f64 = 0.0;
+    let mut recall_rate_count: usize = 0;
     let mut patch_inserted_tokens: Vec<usize> = Vec::new();
     let mut patch_deleted_tokens: Vec<usize> = Vec::new();
     let mut predictions_with_patch: usize = 0;
@@ -369,11 +393,24 @@ pub fn print_report(examples: &[Example], verbose: bool) {
                 isolated_whitespace_count += 1;
             }
 
-            // Accumulate kept rate metrics
+            // Accumulate kept and recall rate metrics
             if let Some(kr) = score.kept_rate {
                 kept_rate_sum += kr;
                 kept_rate_count += 1;
             }
+            if let Some(kept_chars) = score.kept_chars {
+                kept_chars_total += kept_chars;
+            }
+            if let Some(correctly_deleted_chars) = score.correctly_deleted_chars {
+                correctly_deleted_chars_total += correctly_deleted_chars;
+            }
+            if let Some(discarded_chars) = score.discarded_chars {
+                discarded_chars_total += discarded_chars;
+            }
+            if let Some(rr) = score.recall_rate {
+                recall_rate_sum += rr;
+                recall_rate_count += 1;
+            }
 
             // Accumulate token change metrics (only for predictions that produced a patch)
             let has_patch = example
@@ -504,13 +541,24 @@ pub fn print_report(examples: &[Example], verbose: bool) {
             println!("Isolated whitespace changes: {}", isolated_ws_str);
         }
 
-        // Print kept rate metrics
+        // Print kept and recall rate metrics
         if kept_rate_count > 0 {
             let avg_kept_rate = kept_rate_sum / kept_rate_count as f64;
             println!(
-                "Kept rate: {:.1}% avg ({} evaluated)",
+                "Kept rate: {:.1}% avg ({} evaluated, kept chars: {}, correctly deleted chars: {}, discarded chars: {})",
                 avg_kept_rate * 100.0,
-                kept_rate_count
+                kept_rate_count,
+                kept_chars_total,
+                correctly_deleted_chars_total,
+                discarded_chars_total
+            );
+        }
+        if recall_rate_count > 0 {
+            let avg_recall_rate = recall_rate_sum / recall_rate_count as f64;
+            println!(
+                "Recall rate: {:.1}% avg ({} evaluated)",
+                avg_recall_rate * 100.0,
+                recall_rate_count
             );
         }
 
@@ -618,6 +666,14 @@ pub struct SummaryJson {
     pub isolated_whitespace_rate: Option<f32>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub avg_kept_rate: Option<f64>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub avg_recall_rate: Option<f64>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub total_kept_chars: Option<usize>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub total_correctly_deleted_chars: Option<usize>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub total_discarded_chars: Option<usize>,
 }
 
 pub fn compute_summary(examples: &[Example]) -> SummaryJson {
@@ -645,6 +701,14 @@ pub fn compute_summary(examples: &[Example]) -> SummaryJson {
     let mut isolated_whitespace_count: usize = 0;
     let mut kept_rate_sum: f64 = 0.0;
     let mut kept_rate_count: usize = 0;
+    let mut kept_chars_total: usize = 0;
+    let mut kept_chars_count: usize = 0;
+    let mut correctly_deleted_chars_total: usize = 0;
+    let mut correctly_deleted_chars_count: usize = 0;
+    let mut discarded_chars_total: usize = 0;
+    let mut discarded_chars_count: usize = 0;
+    let mut recall_rate_sum: f64 = 0.0;
+    let mut recall_rate_count: usize = 0;
 
     for example in examples {
         for (score_idx, score) in example.score.iter().enumerate() {
@@ -685,11 +749,27 @@ pub fn compute_summary(examples: &[Example]) -> SummaryJson {
                 isolated_whitespace_count += 1;
             }
 
-            // Accumulate kept rate metrics
+            // Accumulate kept and recall rate metrics
             if let Some(kr) = score.kept_rate {
                 kept_rate_sum += kr;
                 kept_rate_count += 1;
             }
+            if let Some(kept_chars) = score.kept_chars {
+                kept_chars_total += kept_chars;
+                kept_chars_count += 1;
+            }
+            if let Some(correctly_deleted_chars) = score.correctly_deleted_chars {
+                correctly_deleted_chars_total += correctly_deleted_chars;
+                correctly_deleted_chars_count += 1;
+            }
+            if let Some(discarded_chars) = score.discarded_chars {
+                discarded_chars_total += discarded_chars;
+                discarded_chars_count += 1;
+            }
+            if let Some(rr) = score.recall_rate {
+                recall_rate_sum += rr;
+                recall_rate_count += 1;
+            }
 
             // Accumulate cursor metrics
             if let Some(exact_match) = score.cursor_exact_match {
@@ -771,6 +851,30 @@ pub fn compute_summary(examples: &[Example]) -> SummaryJson {
         None
     };
 
+    let avg_recall_rate = if recall_rate_count > 0 {
+        Some(recall_rate_sum / recall_rate_count as f64)
+    } else {
+        None
+    };
+
+    let total_kept_chars = if kept_chars_count > 0 {
+        Some(kept_chars_total)
+    } else {
+        None
+    };
+
+    let total_correctly_deleted_chars = if correctly_deleted_chars_count > 0 {
+        Some(correctly_deleted_chars_total)
+    } else {
+        None
+    };
+
+    let total_discarded_chars = if discarded_chars_count > 0 {
+        Some(discarded_chars_total)
+    } else {
+        None
+    };
+
     SummaryJson {
         total_examples: total_scores,
         avg_delta_chr_f,
@@ -804,6 +908,10 @@ pub fn compute_summary(examples: &[Example]) -> SummaryJson {
         wrong_editable_region_rate,
         isolated_whitespace_rate,
         avg_kept_rate,
+        avg_recall_rate,
+        total_kept_chars,
+        total_correctly_deleted_chars,
+        total_discarded_chars,
     }
 }
 

crates/edit_prediction_types/src/edit_prediction_types.rs 🔗

@@ -168,6 +168,10 @@ pub trait EditPredictionDelegate: 'static + Sized {
         None
     }
 
+    fn can_toggle_data_collection(&self, _cx: &App) -> bool {
+        true
+    }
+
     fn toggle_data_collection(&mut self, _cx: &mut App) {}
     fn is_enabled(
         &self,
@@ -209,6 +213,7 @@ pub trait EditPredictionDelegateHandle {
     fn icons(&self, cx: &App) -> EditPredictionIconSet;
     fn data_collection_state(&self, cx: &App) -> DataCollectionState;
     fn usage(&self, cx: &App) -> Option<EditPredictionUsage>;
+    fn can_toggle_data_collection(&self, cx: &App) -> bool;
     fn toggle_data_collection(&self, cx: &mut App);
     fn is_refreshing(&self, cx: &App) -> bool;
     fn refresh(
@@ -265,6 +270,10 @@ where
         self.read(cx).usage(cx)
     }
 
+    fn can_toggle_data_collection(&self, cx: &App) -> bool {
+        self.read(cx).can_toggle_data_collection(cx)
+    }
+
     fn toggle_data_collection(&self, cx: &mut App) {
         self.update(cx, |this, cx| this.toggle_data_collection(cx))
     }

crates/edit_prediction_ui/src/edit_prediction_button.rs 🔗

@@ -790,7 +790,7 @@ impl EditPredictionButton {
                             .toggleable(IconPosition::Start, data_collection.is_enabled())
                             .icon(icon_name)
                             .icon_color(icon_color)
-                            .disabled(cx.is_staff())
+                            .disabled(!provider.can_toggle_data_collection(cx))
                             .documentation_aside(DocumentationSide::Left, move |cx| {
                                 let (msg, label_color, icon_name, icon_color) = match (is_open_source, is_collecting) {
                                     (true, true) => (

crates/editor/src/editor_settings.rs 🔗

@@ -33,6 +33,7 @@ pub struct EditorSettings {
     pub autoscroll_on_clicks: bool,
     pub horizontal_scroll_margin: f32,
     pub scroll_sensitivity: f32,
+    pub mouse_wheel_zoom: bool,
     pub fast_scroll_sensitivity: f32,
     pub sticky_scroll: StickyScroll,
     pub relative_line_numbers: RelativeLineNumbers,
@@ -251,6 +252,7 @@ impl Settings for EditorSettings {
             autoscroll_on_clicks: editor.autoscroll_on_clicks.unwrap(),
             horizontal_scroll_margin: editor.horizontal_scroll_margin.unwrap(),
             scroll_sensitivity: editor.scroll_sensitivity.unwrap(),
+            mouse_wheel_zoom: editor.mouse_wheel_zoom.unwrap(),
             fast_scroll_sensitivity: editor.fast_scroll_sensitivity.unwrap(),
             sticky_scroll: StickyScroll {
                 enabled: sticky_scroll.enabled.unwrap(),

crates/editor/src/element.rs 🔗

@@ -7675,7 +7675,10 @@ impl EditorElement {
                 if phase == DispatchPhase::Bubble && hitbox.should_handle_scroll(window) {
                     delta = delta.coalesce(event.delta);
 
-                    if event.modifiers.secondary() && editor.read(cx).enable_mouse_wheel_zoom {
+                    if event.modifiers.secondary()
+                        && editor.read(cx).enable_mouse_wheel_zoom
+                        && EditorSettings::get_global(cx).mouse_wheel_zoom
+                    {
                         let delta_y = match event.delta {
                             ScrollDelta::Pixels(pixels) => pixels.y.into(),
                             ScrollDelta::Lines(lines) => lines.y,

crates/git/src/repository.rs 🔗

@@ -1462,7 +1462,7 @@ impl GitRepository for RealGitRepository {
                     log::debug!("indexing SHA: {sha}, path {path:?}");
 
                     let output = git
-                        .build_command(&["update-index", "--add", "--cacheinfo", mode, sha, "--"])
+                        .build_command(&["update-index", "--add", "--cacheinfo", mode, sha])
                         .envs(env.iter())
                         .arg(path.as_unix_str())
                         .output()

crates/git_ui/src/branch_picker.rs 🔗

@@ -22,7 +22,7 @@ use util::ResultExt;
 use workspace::notifications::DetachAndPromptErr;
 use workspace::{ModalView, Workspace};
 
-use crate::{branch_picker, git_panel::show_error_toast, resolve_active_repository};
+use crate::{branch_picker, git_panel::show_error_toast};
 
 actions!(
     branch_picker,
@@ -59,7 +59,7 @@ pub fn open(
     cx: &mut Context<Workspace>,
 ) {
     let workspace_handle = workspace.weak_handle();
-    let repository = resolve_active_repository(workspace, cx);
+    let repository = workspace.project().read(cx).active_repository(cx);
 
     workspace.toggle_modal(window, cx, |window, cx| {
         BranchList::new(

crates/git_ui/src/git_picker.rs 🔗

@@ -582,7 +582,7 @@ fn open_with_tab(
     cx: &mut Context<Workspace>,
 ) {
     let workspace_handle = workspace.weak_handle();
-    let repository = crate::resolve_active_repository(workspace, cx);
+    let repository = workspace.project().read(cx).active_repository(cx);
 
     workspace.toggle_modal(window, cx, |window, cx| {
         GitPicker::new(workspace_handle, repository, tab, rems(34.), window, cx)

crates/git_ui/src/git_ui.rs 🔗

@@ -281,33 +281,6 @@ fn open_modified_files(
     }
 }
 
-/// Resolves the repository for git operations, respecting the workspace's
-/// active worktree override from the project dropdown.
-pub fn resolve_active_repository(workspace: &Workspace, cx: &App) -> Option<Entity<Repository>> {
-    let project = workspace.project().read(cx);
-    workspace
-        .active_worktree_override()
-        .and_then(|override_id| {
-            project
-                .worktree_for_id(override_id, cx)
-                .and_then(|worktree| {
-                    let worktree_abs_path = worktree.read(cx).abs_path();
-                    let git_store = project.git_store().read(cx);
-                    git_store
-                        .repositories()
-                        .values()
-                        .filter(|repo| {
-                            let repo_path = &repo.read(cx).work_directory_abs_path;
-                            *repo_path == worktree_abs_path
-                                || worktree_abs_path.starts_with(repo_path.as_ref())
-                        })
-                        .max_by_key(|repo| repo.read(cx).work_directory_abs_path.as_os_str().len())
-                        .cloned()
-                })
-        })
-        .or_else(|| project.active_repository(cx))
-}
-
 pub fn git_status_icon(status: FileStatus) -> impl IntoElement {
     GitStatusIcon::new(status)
 }

crates/git_ui/src/project_diff.rs 🔗

@@ -2,7 +2,6 @@ use crate::{
     conflict_view::ConflictAddon,
     git_panel::{GitPanel, GitPanelAddon, GitStatusEntry},
     git_panel_settings::GitPanelSettings,
-    resolve_active_repository,
 };
 use agent_settings::AgentSettings;
 use anyhow::{Context as _, Result, anyhow};
@@ -205,7 +204,7 @@ impl ProjectDiff {
                 "Action"
             }
         );
-        let intended_repo = resolve_active_repository(workspace, cx);
+        let intended_repo = workspace.project().read(cx).active_repository(cx);
 
         let existing = workspace
             .items_of_type::<Self>(cx)
@@ -2708,7 +2707,7 @@ mod tests {
     }
 
     #[gpui::test]
-    async fn test_deploy_at_respects_worktree_override(cx: &mut TestAppContext) {
+    async fn test_deploy_at_respects_active_repository_selection(cx: &mut TestAppContext) {
         init_test(cx);
 
         let fs = FakeFs::new(cx.executor());
@@ -2759,9 +2758,12 @@ mod tests {
         let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
         cx.run_until_parked();
 
-        // Select project A via the dropdown override and open the diff.
+        // Select project A explicitly and open the diff.
         workspace.update(cx, |workspace, cx| {
-            workspace.set_active_worktree_override(Some(worktree_a_id), cx);
+            let git_store = workspace.project().read(cx).git_store().clone();
+            git_store.update(cx, |git_store, cx| {
+                git_store.set_active_repo_for_worktree(worktree_a_id, cx);
+            });
         });
         cx.focus(&workspace);
         cx.update(|window, cx| {
@@ -2776,9 +2778,12 @@ mod tests {
         assert_eq!(paths_a.len(), 1);
         assert_eq!(*paths_a[0], *"a.txt");
 
-        // Switch the override to project B and re-run the diff action.
+        // Switch the explicit active repository to project B and re-run the diff action.
         workspace.update(cx, |workspace, cx| {
-            workspace.set_active_worktree_override(Some(worktree_b_id), cx);
+            let git_store = workspace.project().read(cx).git_store().clone();
+            git_store.update(cx, |git_store, cx| {
+                git_store.set_active_repo_for_worktree(worktree_b_id, cx);
+            });
         });
         cx.focus(&workspace);
         cx.update(|window, cx| {

crates/gpui/src/window.rs 🔗

@@ -1232,6 +1232,13 @@ impl Window {
                     if let Some(last_frame) = last_frame_time.get()
                         && now.duration_since(last_frame) < min_interval
                     {
+                        // Must still complete the frame on platforms that require it.
+                        // On Wayland, `surface.frame()` was already called to request the
+                        // next frame callback, so we must call `surface.commit()` (via
+                        // `complete_frame`) or the compositor won't send another callback.
+                        handle
+                            .update(&mut cx, |_, window, _| window.complete_frame())
+                            .log_err();
                         return;
                     }
                 }

crates/markdown/src/markdown.rs 🔗

@@ -249,6 +249,7 @@ pub struct Markdown {
     source: SharedString,
     selection: Selection,
     pressed_link: Option<RenderedLink>,
+    pressed_footnote_ref: Option<RenderedFootnoteRef>,
     autoscroll_request: Option<usize>,
     active_root_block: Option<usize>,
     parsed_markdown: ParsedMarkdown,
@@ -419,6 +420,7 @@ impl Markdown {
             source,
             selection: Selection::default(),
             pressed_link: None,
+            pressed_footnote_ref: None,
             autoscroll_request: None,
             active_root_block: None,
             should_reparse: false,
@@ -532,6 +534,13 @@ impl Markdown {
         cx.refresh_windows();
     }
 
+    fn footnote_definition_content_start(&self, label: &SharedString) -> Option<usize> {
+        self.parsed_markdown
+            .footnote_definitions
+            .get(label)
+            .copied()
+    }
+
     pub fn set_active_root_for_source_index(
         &mut self,
         source_index: Option<usize>,
@@ -696,6 +705,7 @@ impl Markdown {
                         html_blocks: BTreeMap::default(),
                         mermaid_diagrams: BTreeMap::default(),
                         heading_slugs: HashMap::default(),
+                        footnote_definitions: HashMap::default(),
                     },
                     Default::default(),
                 );
@@ -709,6 +719,7 @@ impl Markdown {
             let root_block_starts = parsed.root_block_starts;
             let html_blocks = parsed.html_blocks;
             let heading_slugs = parsed.heading_slugs;
+            let footnote_definitions = parsed.footnote_definitions;
             let mermaid_diagrams = if should_render_mermaid_diagrams {
                 extract_mermaid_diagrams(&source, &events)
             } else {
@@ -776,6 +787,7 @@ impl Markdown {
                     html_blocks,
                     mermaid_diagrams,
                     heading_slugs,
+                    footnote_definitions,
                 },
                 images_by_source_offset,
             )
@@ -900,6 +912,7 @@ pub struct ParsedMarkdown {
     pub(crate) html_blocks: BTreeMap<usize, html::html_parser::ParsedHtmlBlock>,
     pub(crate) mermaid_diagrams: BTreeMap<usize, ParsedMarkdownMermaidDiagram>,
     pub heading_slugs: HashMap<SharedString, usize>,
+    pub footnote_definitions: HashMap<SharedString, usize>,
 }
 
 impl ParsedMarkdown {
@@ -1300,18 +1313,22 @@ impl MarkdownElement {
             return;
         }
 
-        let is_hovering_link = hitbox.is_hovered(window)
+        let is_hovering_clickable = hitbox.is_hovered(window)
             && !self.markdown.read(cx).selection.pending
             && rendered_text
-                .link_for_position(window.mouse_position())
-                .is_some();
-
-        if !self.style.prevent_mouse_interaction {
-            if is_hovering_link {
-                window.set_cursor_style(CursorStyle::PointingHand, hitbox);
-            } else {
-                window.set_cursor_style(CursorStyle::IBeam, hitbox);
-            }
+                .source_index_for_position(window.mouse_position())
+                .ok()
+                .is_some_and(|source_index| {
+                    rendered_text.link_for_source_index(source_index).is_some()
+                        || rendered_text
+                            .footnote_ref_for_source_index(source_index)
+                            .is_some()
+                });
+
+        if is_hovering_clickable {
+            window.set_cursor_style(CursorStyle::PointingHand, hitbox);
+        } else {
+            window.set_cursor_style(CursorStyle::IBeam, hitbox);
         }
 
         let on_open_url = self.on_url_click.take();
@@ -1336,13 +1353,27 @@ impl MarkdownElement {
             move |markdown, event: &MouseDownEvent, phase, window, cx| {
                 if hitbox.is_hovered(window) {
                     if phase.bubble() {
-                        if let Some(link) = rendered_text.link_for_position(event.position) {
-                            markdown.pressed_link = Some(link.clone());
-                        } else {
-                            let source_index =
-                                match rendered_text.source_index_for_position(event.position) {
-                                    Ok(ix) | Err(ix) => ix,
-                                };
+                        let position_result =
+                            rendered_text.source_index_for_position(event.position);
+
+                        if let Ok(source_index) = position_result {
+                            if let Some(footnote_ref) =
+                                rendered_text.footnote_ref_for_source_index(source_index)
+                            {
+                                markdown.pressed_footnote_ref = Some(footnote_ref.clone());
+                            } else if let Some(link) =
+                                rendered_text.link_for_source_index(source_index)
+                            {
+                                markdown.pressed_link = Some(link.clone());
+                            }
+                        }
+
+                        if markdown.pressed_footnote_ref.is_none()
+                            && markdown.pressed_link.is_none()
+                        {
+                            let source_index = match position_result {
+                                Ok(ix) | Err(ix) => ix,
+                            };
                             if let Some(handler) = on_source_click.as_ref() {
                                 let blocked = handler(source_index, event.click_count, window, cx);
                                 if blocked {
@@ -1398,7 +1429,7 @@ impl MarkdownElement {
         self.on_mouse_event(window, cx, {
             let rendered_text = rendered_text.clone();
             let hitbox = hitbox.clone();
-            let was_hovering_link = is_hovering_link;
+            let was_hovering_clickable = is_hovering_clickable;
             move |markdown, event: &MouseMoveEvent, phase, window, cx| {
                 if phase.capture() {
                     return;
@@ -1414,9 +1445,17 @@ impl MarkdownElement {
                     markdown.autoscroll_request = Some(source_index);
                     cx.notify();
                 } else {
-                    let is_hovering_link = hitbox.is_hovered(window)
-                        && rendered_text.link_for_position(event.position).is_some();
-                    if is_hovering_link != was_hovering_link {
+                    let is_hovering_clickable = hitbox.is_hovered(window)
+                        && rendered_text
+                            .source_index_for_position(event.position)
+                            .ok()
+                            .is_some_and(|source_index| {
+                                rendered_text.link_for_source_index(source_index).is_some()
+                                    || rendered_text
+                                        .footnote_ref_for_source_index(source_index)
+                                        .is_some()
+                            });
+                    if is_hovering_clickable != was_hovering_clickable {
                         cx.notify();
                     }
                 }
@@ -1426,8 +1465,21 @@ impl MarkdownElement {
             let rendered_text = rendered_text.clone();
             move |markdown, event: &MouseUpEvent, phase, window, cx| {
                 if phase.bubble() {
-                    if let Some(pressed_link) = markdown.pressed_link.take()
-                        && Some(&pressed_link) == rendered_text.link_for_position(event.position)
+                    let source_index = rendered_text.source_index_for_position(event.position).ok();
+                    if let Some(pressed_footnote_ref) = markdown.pressed_footnote_ref.take()
+                        && source_index
+                            .and_then(|ix| rendered_text.footnote_ref_for_source_index(ix))
+                            == Some(&pressed_footnote_ref)
+                    {
+                        if let Some(source_index) =
+                            markdown.footnote_definition_content_start(&pressed_footnote_ref.label)
+                        {
+                            markdown.autoscroll_request = Some(source_index);
+                            cx.notify();
+                        }
+                    } else if let Some(pressed_link) = markdown.pressed_link.take()
+                        && source_index.and_then(|ix| rendered_text.link_for_source_index(ix))
+                            == Some(&pressed_link)
                     {
                         if let Some(open_url) = on_open_url.as_ref() {
                             open_url(pressed_link.destination_url, window, cx);
@@ -1818,6 +1870,36 @@ impl Element for MarkdownElement {
                                 builder.push_text_style(style)
                             }
                         }
+                        MarkdownTag::FootnoteDefinition(label) => {
+                            if !builder.rendered_footnote_separator {
+                                builder.rendered_footnote_separator = true;
+                                builder.push_div(
+                                    div()
+                                        .border_t_1()
+                                        .mt_2()
+                                        .border_color(self.style.rule_color),
+                                    range,
+                                    markdown_end,
+                                );
+                                builder.pop_div();
+                            }
+                            builder.push_div(
+                                div()
+                                    .pt_1()
+                                    .mb_1()
+                                    .line_height(rems(1.3))
+                                    .text_size(rems(0.85))
+                                    .h_flex()
+                                    .items_start()
+                                    .gap_2()
+                                    .child(
+                                        div().text_size(rems(0.85)).child(format!("{}.", label)),
+                                    ),
+                                range,
+                                markdown_end,
+                            );
+                            builder.push_div(div().flex_1().w_0(), range, markdown_end);
+                        }
                         MarkdownTag::MetadataBlock(_) => {}
                         MarkdownTag::Table(alignments) => {
                             builder.table.start(alignments.clone());
@@ -1973,6 +2055,10 @@ impl Element for MarkdownElement {
                         builder.pop_div();
                         builder.table.end_cell();
                     }
+                    MarkdownTagEnd::FootnoteDefinition => {
+                        builder.pop_div();
+                        builder.pop_div();
+                    }
                     _ => log::debug!("unsupported markdown tag end: {:?}", tag),
                 },
                 MarkdownEvent::Text => {
@@ -2028,7 +2114,12 @@ impl Element for MarkdownElement {
                 MarkdownEvent::TaskListMarker(_) => {
                     // handled inside the `MarkdownTag::Item` case
                 }
-                _ => log::debug!("unsupported markdown event {:?}", event),
+                MarkdownEvent::FootnoteReference(label) => {
+                    builder.push_footnote_ref(label.clone(), range.clone());
+                    builder.push_text_style(self.style.link.clone());
+                    builder.push_text(&format!("[{label}]"), range.clone());
+                    builder.pop_text_style();
+                }
             }
         }
         if self.style.code_block_overflow_x_scroll {
@@ -2270,8 +2361,10 @@ struct MarkdownElementBuilder {
     rendered_lines: Vec<RenderedLine>,
     pending_line: PendingLine,
     rendered_links: Vec<RenderedLink>,
+    rendered_footnote_refs: Vec<RenderedFootnoteRef>,
     current_source_index: usize,
     html_comment: bool,
+    rendered_footnote_separator: bool,
     base_text_style: TextStyle,
     text_style_stack: Vec<TextStyleRefinement>,
     code_block_stack: Vec<Option<Arc<Language>>>,
@@ -2306,8 +2399,10 @@ impl MarkdownElementBuilder {
             rendered_lines: Vec::new(),
             pending_line: PendingLine::default(),
             rendered_links: Vec::new(),
+            rendered_footnote_refs: Vec::new(),
             current_source_index: 0,
             html_comment: false,
+            rendered_footnote_separator: false,
             base_text_style,
             text_style_stack: Vec::new(),
             code_block_stack: Vec::new(),
@@ -2459,6 +2554,13 @@ impl MarkdownElementBuilder {
         });
     }
 
+    fn push_footnote_ref(&mut self, label: SharedString, source_range: Range<usize>) {
+        self.rendered_footnote_refs.push(RenderedFootnoteRef {
+            source_range,
+            label,
+        });
+    }
+
     fn push_text(&mut self, text: &str, source_range: Range<usize>) {
         self.pending_line.source_mappings.push(SourceMapping {
             rendered_index: self.pending_line.text.len(),
@@ -2576,6 +2678,7 @@ impl MarkdownElementBuilder {
             text: RenderedText {
                 lines: self.rendered_lines.into(),
                 links: self.rendered_links.into(),
+                footnote_refs: self.rendered_footnote_refs.into(),
             },
         }
     }
@@ -2690,6 +2793,7 @@ pub struct RenderedMarkdown {
 struct RenderedText {
     lines: Rc<[RenderedLine]>,
     links: Rc<[RenderedLink]>,
+    footnote_refs: Rc<[RenderedFootnoteRef]>,
 }
 
 #[derive(Debug, Clone, Eq, PartialEq)]
@@ -2698,6 +2802,12 @@ struct RenderedLink {
     destination_url: SharedString,
 }
 
+#[derive(Debug, Clone, Eq, PartialEq)]
+struct RenderedFootnoteRef {
+    source_range: Range<usize>,
+    label: SharedString,
+}
+
 impl RenderedText {
     fn source_index_for_position(&self, position: Point<Pixels>) -> Result<usize, usize> {
         let mut lines = self.lines.iter().peekable();
@@ -2844,12 +2954,17 @@ impl RenderedText {
         accumulator
     }
 
-    fn link_for_position(&self, position: Point<Pixels>) -> Option<&RenderedLink> {
-        let source_index = self.source_index_for_position(position).ok()?;
+    fn link_for_source_index(&self, source_index: usize) -> Option<&RenderedLink> {
         self.links
             .iter()
             .find(|link| link.source_range.contains(&source_index))
     }
+
+    fn footnote_ref_for_source_index(&self, source_index: usize) -> Option<&RenderedFootnoteRef> {
+        self.footnote_refs
+            .iter()
+            .find(|fref| fref.source_range.contains(&source_index))
+    }
 }
 
 #[cfg(test)]

crates/markdown/src/parser.rs 🔗

@@ -38,6 +38,7 @@ pub(crate) struct ParsedMarkdownData {
     pub root_block_starts: Vec<usize>,
     pub html_blocks: BTreeMap<usize, html::html_parser::ParsedHtmlBlock>,
     pub heading_slugs: HashMap<SharedString, usize>,
+    pub footnote_definitions: HashMap<SharedString, usize>,
 }
 
 impl ParseState {
@@ -499,9 +500,10 @@ pub(crate) fn parse_markdown_with_options(
             pulldown_cmark::Event::InlineHtml(_) => {
                 state.push_event(range, MarkdownEvent::InlineHtml)
             }
-            pulldown_cmark::Event::FootnoteReference(_) => {
-                state.push_event(range, MarkdownEvent::FootnoteReference)
-            }
+            pulldown_cmark::Event::FootnoteReference(label) => state.push_event(
+                range,
+                MarkdownEvent::FootnoteReference(SharedString::from(label.to_string())),
+            ),
             pulldown_cmark::Event::SoftBreak => state.push_event(range, MarkdownEvent::SoftBreak),
             pulldown_cmark::Event::HardBreak => state.push_event(range, MarkdownEvent::HardBreak),
             pulldown_cmark::Event::Rule => state.push_event(range, MarkdownEvent::Rule),
@@ -517,6 +519,7 @@ pub(crate) fn parse_markdown_with_options(
     } else {
         HashMap::default()
     };
+    let footnote_definitions = build_footnote_definitions(&state.events);
 
     ParsedMarkdownData {
         events: state.events,
@@ -525,7 +528,34 @@ pub(crate) fn parse_markdown_with_options(
         root_block_starts: state.root_block_starts,
         html_blocks,
         heading_slugs,
+        footnote_definitions,
+    }
+}
+
+fn build_footnote_definitions(
+    events: &[(Range<usize>, MarkdownEvent)],
+) -> HashMap<SharedString, usize> {
+    let mut definitions = HashMap::default();
+    let mut current_label: Option<SharedString> = None;
+
+    for (range, event) in events {
+        match event {
+            MarkdownEvent::Start(MarkdownTag::FootnoteDefinition(label)) => {
+                current_label = Some(label.clone());
+            }
+            MarkdownEvent::End(MarkdownTagEnd::FootnoteDefinition) => {
+                current_label = None;
+            }
+            MarkdownEvent::Text if current_label.is_some() => {
+                if let Some(label) = current_label.take() {
+                    definitions.entry(label).or_insert(range.start);
+                }
+            }
+            _ => {}
+        }
     }
+
+    definitions
 }
 
 pub fn parse_links_only(text: &str) -> Vec<(Range<usize>, MarkdownEvent)> {
@@ -589,7 +619,7 @@ pub enum MarkdownEvent {
     /// A reference to a footnote with given label, which may or may not be defined
     /// by an event with a `Tag::FootnoteDefinition` tag. Definitions and references to them may
     /// occur in any order.
-    FootnoteReference,
+    FootnoteReference(SharedString),
     /// A soft line break.
     SoftBreak,
     /// A hard line break.
@@ -1111,6 +1141,48 @@ mod tests {
         assert_eq!(extract_code_block_content_range(input), 3..3);
     }
 
+    #[test]
+    fn test_footnotes() {
+        let parsed = parse_markdown_with_options(
+            "Text with a footnote[^1] and some more text.\n\n[^1]: This is the footnote content.",
+            false,
+            false,
+        );
+        assert_eq!(
+            parsed.events,
+            vec![
+                (0..45, RootStart),
+                (0..45, Start(Paragraph)),
+                (0..20, Text),
+                (20..24, FootnoteReference("1".into())),
+                (24..44, Text),
+                (0..45, End(MarkdownTagEnd::Paragraph)),
+                (0..45, RootEnd(0)),
+                (46..81, RootStart),
+                (46..81, Start(FootnoteDefinition("1".into()))),
+                (52..81, Start(Paragraph)),
+                (52..81, Text),
+                (52..81, End(MarkdownTagEnd::Paragraph)),
+                (46..81, End(MarkdownTagEnd::FootnoteDefinition)),
+                (46..81, RootEnd(1)),
+            ]
+        );
+        assert_eq!(parsed.footnote_definitions.len(), 1);
+        assert_eq!(parsed.footnote_definitions.get("1").copied(), Some(52));
+    }
+
+    #[test]
+    fn test_footnote_definitions_multiple() {
+        let parsed = parse_markdown_with_options(
+            "Text[^a] and[^b].\n\n[^a]: First.\n\n[^b]: Second.",
+            false,
+            false,
+        );
+        assert_eq!(parsed.footnote_definitions.len(), 2);
+        assert!(parsed.footnote_definitions.contains_key("a"));
+        assert!(parsed.footnote_definitions.contains_key("b"));
+    }
+
     #[test]
     fn test_links_split_across_fragments() {
         // This test verifies that links split across multiple text fragments due to escaping or other issues

crates/project/src/git_store.rs 🔗

@@ -594,16 +594,49 @@ impl GitStore {
     pub fn is_local(&self) -> bool {
         matches!(self.state, GitStoreState::Local { .. })
     }
+
+    fn set_active_repo_id(&mut self, repo_id: RepositoryId, cx: &mut Context<Self>) {
+        if self.active_repo_id != Some(repo_id) {
+            self.active_repo_id = Some(repo_id);
+            cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
+        }
+    }
+
     pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
         if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
-            let id = repo.read(cx).id;
-            if self.active_repo_id != Some(id) {
-                self.active_repo_id = Some(id);
-                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
-            }
+            self.set_active_repo_id(repo.read(cx).id, cx);
         }
     }
 
+    pub fn set_active_repo_for_worktree(
+        &mut self,
+        worktree_id: WorktreeId,
+        cx: &mut Context<Self>,
+    ) {
+        let Some(worktree) = self
+            .worktree_store
+            .read(cx)
+            .worktree_for_id(worktree_id, cx)
+        else {
+            return;
+        };
+        let worktree_abs_path = worktree.read(cx).abs_path();
+        let Some(repo_id) = self
+            .repositories
+            .values()
+            .filter(|repo| {
+                let repo_path = &repo.read(cx).work_directory_abs_path;
+                *repo_path == worktree_abs_path || worktree_abs_path.starts_with(repo_path.as_ref())
+            })
+            .max_by_key(|repo| repo.read(cx).work_directory_abs_path.as_os_str().len())
+            .map(|repo| repo.read(cx).id)
+        else {
+            return;
+        };
+
+        self.set_active_repo_id(repo_id, cx);
+    }
+
     pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
         match &mut self.state {
             GitStoreState::Remote {

crates/recent_projects/src/recent_projects.rs 🔗

@@ -168,22 +168,20 @@ fn get_open_folders(workspace: &Workspace, cx: &App) -> Vec<OpenFolderEntry> {
         return Vec::new();
     }
 
-    let active_worktree_id = workspace.active_worktree_override().or_else(|| {
-        if let Some(repo) = project.active_repository(cx) {
-            let repo = repo.read(cx);
-            let repo_path = &repo.work_directory_abs_path;
-            for worktree in project.visible_worktrees(cx) {
-                let worktree_path = worktree.read(cx).abs_path();
-                if worktree_path == *repo_path || worktree_path.starts_with(repo_path.as_ref()) {
-                    return Some(worktree.read(cx).id());
-                }
-            }
-        }
+    let active_worktree_id = if let Some(repo) = project.active_repository(cx) {
+        let repo = repo.read(cx);
+        let repo_path = &repo.work_directory_abs_path;
+        project.visible_worktrees(cx).find_map(|worktree| {
+            let worktree_path = worktree.read(cx).abs_path();
+            (worktree_path == *repo_path || worktree_path.starts_with(repo_path.as_ref()))
+                .then(|| worktree.read(cx).id())
+        })
+    } else {
         project
             .visible_worktrees(cx)
             .next()
             .map(|wt| wt.read(cx).id())
-    });
+    };
 
     let mut all_paths: Vec<PathBuf> = visible_worktrees
         .iter()
@@ -1118,7 +1116,10 @@ impl PickerDelegate for RecentProjectsDelegate {
                 let worktree_id = folder.worktree_id;
                 if let Some(workspace) = self.workspace.upgrade() {
                     workspace.update(cx, |workspace, cx| {
-                        workspace.set_active_worktree_override(Some(worktree_id), cx);
+                        let git_store = workspace.project().read(cx).git_store().clone();
+                        git_store.update(cx, |git_store, cx| {
+                            git_store.set_active_repo_for_worktree(worktree_id, cx);
+                        });
                     });
                 }
                 cx.emit(DismissEvent);

crates/remote/src/transport/docker.rs 🔗

@@ -761,7 +761,8 @@ impl RemoteConnection for DockerExecConnection {
             const TILDE_PREFIX: &'static str = "~/";
             if working_dir.starts_with(TILDE_PREFIX) {
                 let working_dir = working_dir.trim_start_matches("~").trim_start_matches("/");
-                parsed_working_dir = Some(format!("$HOME/{working_dir}"));
+                parsed_working_dir =
+                    Some(format!("{}/{}", self.remote_dir_for_server, working_dir));
             } else {
                 parsed_working_dir = Some(working_dir);
             }

crates/settings/src/vscode_import.rs 🔗

@@ -286,6 +286,7 @@ impl VsCodeSettings {
             }),
             rounded_selection: self.read_bool("editor.roundedSelection"),
             scroll_beyond_last_line: None,
+            mouse_wheel_zoom: self.read_bool("editor.mouseWheelZoom"),
             scroll_sensitivity: self.read_f32("editor.mouseWheelScrollSensitivity"),
             scrollbar: self.scrollbar_content(),
             search: self.search_content(),

crates/settings_content/src/editor.rs 🔗

@@ -89,6 +89,11 @@ pub struct EditorSettingsContent {
     /// Default: 1.0
     #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")]
     pub scroll_sensitivity: Option<f32>,
+    /// Whether to zoom the editor font size with the mouse wheel
+    /// while holding the primary modifier key (Cmd on macOS, Ctrl on other platforms).
+    ///
+    /// Default: false
+    pub mouse_wheel_zoom: Option<bool>,
     /// Scroll sensitivity multiplier for fast scrolling. This multiplier is applied
     /// to both the horizontal and vertical delta values while scrolling. Fast scrolling
     /// happens when a user holds the alt or option key while scrolling.

crates/settings_ui/src/page_data.rs 🔗

@@ -1571,7 +1571,7 @@ fn editor_page() -> SettingsPage {
         ]
     }
 
-    fn scrolling_section() -> [SettingsPageItem; 8] {
+    fn scrolling_section() -> [SettingsPageItem; 9] {
         [
             SettingsPageItem::SectionHeader("Scrolling"),
             SettingsPageItem::SettingItem(SettingItem {
@@ -1632,6 +1632,19 @@ fn editor_page() -> SettingsPage {
                 metadata: None,
                 files: USER,
             }),
+            SettingsPageItem::SettingItem(SettingItem {
+                title: "Mouse Wheel Zoom",
+                description: "Whether to zoom the editor font size with the mouse wheel while holding the primary modifier key.",
+                field: Box::new(SettingField {
+                    json_path: Some("mouse_wheel_zoom"),
+                    pick: |settings_content| settings_content.editor.mouse_wheel_zoom.as_ref(),
+                    write: |settings_content, value| {
+                        settings_content.editor.mouse_wheel_zoom = value;
+                    },
+                }),
+                metadata: None,
+                files: USER,
+            }),
             SettingsPageItem::SettingItem(SettingItem {
                 title: "Fast Scroll Sensitivity",
                 description: "Fast scroll sensitivity multiplier for both horizontal and vertical scrolling.",

crates/title_bar/src/title_bar.rs 🔗

@@ -33,7 +33,6 @@ use onboarding_banner::OnboardingBanner;
 use project::{Project, git_store::GitStoreEvent, trusted_worktrees::TrustedWorktrees};
 use remote::RemoteConnectionOptions;
 use settings::Settings;
-use settings::WorktreeId;
 
 use std::sync::Arc;
 use std::time::Duration;
@@ -377,27 +376,13 @@ impl TitleBar {
                 cx.notify()
             }),
         );
-        subscriptions.push(
-            cx.subscribe(&project, |this, _, event: &project::Event, cx| {
-                if let project::Event::BufferEdited = event {
-                    // Clear override when user types in any editor,
-                    // so the title bar reflects the project they're actually working in
-                    this.clear_active_worktree_override(cx);
-                    cx.notify();
-                }
-            }),
-        );
+
         subscriptions.push(cx.observe(&active_call, |this, _, cx| this.active_call_changed(cx)));
         subscriptions.push(cx.observe_window_activation(window, Self::window_activation_changed));
         subscriptions.push(
-            cx.subscribe(&git_store, move |this, _, event, cx| match event {
-                GitStoreEvent::ActiveRepositoryChanged(_) => {
-                    // Clear override when focus-derived active repo changes
-                    // (meaning the user focused a file from a different project)
-                    this.clear_active_worktree_override(cx);
-                    cx.notify();
-                }
-                GitStoreEvent::RepositoryUpdated(_, _, true) => {
+            cx.subscribe(&git_store, move |_, _, event, cx| match event {
+                GitStoreEvent::ActiveRepositoryChanged(_)
+                | GitStoreEvent::RepositoryUpdated(_, _, true) => {
                     cx.notify();
                 }
                 _ => {}
@@ -451,20 +436,11 @@ impl TitleBar {
     }
 
     /// Returns the worktree to display in the title bar.
-    /// - If there's an override set on the workspace, use that (if still valid)
-    /// - Otherwise, derive from the active repository
+    /// - Prefer the worktree owning the project's active repository
     /// - Fall back to the first visible worktree
     pub fn effective_active_worktree(&self, cx: &App) -> Option<Entity<project::Worktree>> {
         let project = self.project.read(cx);
 
-        if let Some(workspace) = self.workspace.upgrade() {
-            if let Some(override_id) = workspace.read(cx).active_worktree_override() {
-                if let Some(worktree) = project.worktree_for_id(override_id, cx) {
-                    return Some(worktree);
-                }
-            }
-        }
-
         if let Some(repo) = project.active_repository(cx) {
             let repo = repo.read(cx);
             let repo_path = &repo.work_directory_abs_path;
@@ -480,28 +456,6 @@ impl TitleBar {
         project.visible_worktrees(cx).next()
     }
 
-    pub fn set_active_worktree_override(
-        &mut self,
-        worktree_id: WorktreeId,
-        cx: &mut Context<Self>,
-    ) {
-        if let Some(workspace) = self.workspace.upgrade() {
-            workspace.update(cx, |workspace, cx| {
-                workspace.set_active_worktree_override(Some(worktree_id), cx);
-            });
-        }
-        cx.notify();
-    }
-
-    fn clear_active_worktree_override(&mut self, cx: &mut Context<Self>) {
-        if let Some(workspace) = self.workspace.upgrade() {
-            workspace.update(cx, |workspace, cx| {
-                workspace.clear_active_worktree_override(cx);
-            });
-        }
-        cx.notify();
-    }
-
     fn get_repository_for_worktree(
         &self,
         worktree: &Entity<project::Worktree>,

crates/workspace/src/workspace.rs 🔗

@@ -1325,7 +1325,6 @@ pub struct Workspace {
     bottom_dock: Entity<Dock>,
     right_dock: Entity<Dock>,
     panes: Vec<Entity<Pane>>,
-    active_worktree_override: Option<WorktreeId>,
     panes_by_item: HashMap<EntityId, WeakEntity<Pane>>,
     active_pane: Entity<Pane>,
     last_active_center_pane: Option<WeakEntity<Pane>>,
@@ -1758,7 +1757,6 @@ impl Workspace {
             modal_layer,
             toast_layer,
             titlebar_item: None,
-            active_worktree_override: None,
             notifications: Notifications::default(),
             suppressed_notifications: HashSet::default(),
             left_dock,
@@ -2951,27 +2949,6 @@ impl Workspace {
         self.titlebar_item.clone()
     }
 
-    /// Returns the worktree override set by the user (e.g., via the project dropdown).
-    /// When set, git-related operations should use this worktree instead of deriving
-    /// the active worktree from the focused file.
-    pub fn active_worktree_override(&self) -> Option<WorktreeId> {
-        self.active_worktree_override
-    }
-
-    pub fn set_active_worktree_override(
-        &mut self,
-        worktree_id: Option<WorktreeId>,
-        cx: &mut Context<Self>,
-    ) {
-        self.active_worktree_override = worktree_id;
-        cx.notify();
-    }
-
-    pub fn clear_active_worktree_override(&mut self, cx: &mut Context<Self>) {
-        self.active_worktree_override = None;
-        cx.notify();
-    }
-
     /// Call the given callback with a workspace whose project is local or remote via WSL (allowing host access).
     ///
     /// If the given workspace has a local project, then it will be passed

crates/zed/src/zed.rs 🔗

@@ -4144,6 +4144,7 @@ mod tests {
             window.draw(cx).clear();
         });
 
+        // mouse_wheel_zoom is disabled by default — zoom should not work.
         let initial_font_size =
             cx.update(|_, cx| ThemeSettings::get_global(cx).buffer_font_size(cx).as_f32());
 
@@ -4154,6 +4155,34 @@ mod tests {
             ..Default::default()
         });
 
+        let font_size_after_disabled_zoom =
+            cx.update(|_, cx| ThemeSettings::get_global(cx).buffer_font_size(cx).as_f32());
+
+        assert_eq!(
+            initial_font_size, font_size_after_disabled_zoom,
+            "Editor buffer font-size should not change when mouse_wheel_zoom is disabled"
+        );
+
+        // Enable mouse_wheel_zoom and verify zoom works.
+        cx.update(|_, cx| {
+            SettingsStore::update_global(cx, |store, cx| {
+                store.update_user_settings(cx, |settings| {
+                    settings.editor.mouse_wheel_zoom = Some(true);
+                });
+            });
+        });
+
+        cx.update(|window, cx| {
+            window.draw(cx).clear();
+        });
+
+        cx.simulate_event(gpui::ScrollWheelEvent {
+            position: mouse_position,
+            delta: gpui::ScrollDelta::Pixels(point(px(0.), px(1.))),
+            modifiers: event_modifiers,
+            ..Default::default()
+        });
+
         let increased_font_size =
             cx.update(|_, cx| ThemeSettings::get_global(cx).buffer_font_size(cx).as_f32());
 
@@ -4180,6 +4209,37 @@ mod tests {
             decreased_font_size < increased_font_size,
             "Editor buffer font-size should have decreased from scroll-zoom"
         );
+
+        // Disable mouse_wheel_zoom again and verify zoom stops working.
+        cx.update(|_, cx| {
+            SettingsStore::update_global(cx, |store, cx| {
+                store.update_user_settings(cx, |settings| {
+                    settings.editor.mouse_wheel_zoom = Some(false);
+                });
+            });
+        });
+
+        let font_size_before =
+            cx.update(|_, cx| ThemeSettings::get_global(cx).buffer_font_size(cx).as_f32());
+
+        cx.update(|window, cx| {
+            window.draw(cx).clear();
+        });
+
+        cx.simulate_event(gpui::ScrollWheelEvent {
+            position: mouse_position,
+            delta: gpui::ScrollDelta::Pixels(point(px(0.), px(1.))),
+            modifiers: event_modifiers,
+            ..Default::default()
+        });
+
+        let font_size_after =
+            cx.update(|_, cx| ThemeSettings::get_global(cx).buffer_font_size(cx).as_f32());
+
+        assert_eq!(
+            font_size_before, font_size_after,
+            "Editor buffer font-size should not change when mouse_wheel_zoom is re-disabled"
+        );
     }
 
     #[gpui::test]

docs/src/reference/all-settings.md 🔗

@@ -3396,6 +3396,16 @@ List of strings containing any combination of:
 
 Positive `float` values
 
+### Mouse Wheel Zoom
+
+- Description: Whether to zoom the editor font size with the mouse wheel while holding the primary modifier key (Cmd on macOS, Ctrl on other platforms).
+- Setting: `mouse_wheel_zoom`
+- Default: `false`
+
+**Options**
+
+`boolean` values
+
 ### Fast Scroll Sensitivity
 
 - Description: Scroll sensitivity multiplier for fast scrolling. This multiplier is applied to both the horizontal and vertical delta values while scrolling. Fast scrolling happens when a user holds the alt or option key while scrolling.

tooling/xtask/src/tasks/workflows/cherry_pick.rs 🔗

@@ -39,8 +39,16 @@ fn run_cherry_pick(
             .add_env(("BRANCH", branch.to_string()))
             .add_env(("COMMIT", commit.to_string()))
             .add_env(("CHANNEL", channel.to_string()))
-            .add_env(("GIT_COMMITTER_NAME", "Zed Zippy"))
-            .add_env(("GIT_COMMITTER_EMAIL", "hi@zed.dev"))
+            .add_env(("GIT_AUTHOR_NAME", "zed-zippy[bot]"))
+            .add_env((
+                "GIT_AUTHOR_EMAIL",
+                "234243425+zed-zippy[bot]@users.noreply.github.com",
+            ))
+            .add_env(("GIT_COMMITTER_NAME", "zed-zippy[bot]"))
+            .add_env((
+                "GIT_COMMITTER_EMAIL",
+                "234243425+zed-zippy[bot]@users.noreply.github.com",
+            ))
             .add_env(("GITHUB_TOKEN", token))
     }
 

tooling/xtask/src/tasks/workflows/compliance_check.rs 🔗

@@ -1,14 +1,10 @@
-use gh_workflow::{Event, Job, Run, Schedule, Step, Workflow, WorkflowDispatch};
-use indoc::formatdoc;
+use gh_workflow::{Event, Job, Schedule, Workflow, WorkflowDispatch};
 
 use crate::tasks::workflows::{
-    release::{
-        COMPLIANCE_REPORT_PATH, COMPLIANCE_STEP_ID, ComplianceContext,
-        add_compliance_notification_steps,
-    },
+    release::{ComplianceContext, add_compliance_steps},
     runners,
     steps::{self, CommonJobConditions, named},
-    vars::{self, StepOutput},
+    vars::StepOutput,
 };
 
 pub fn compliance_check() -> Workflow {
@@ -37,31 +33,20 @@ fn scheduled_compliance_check() -> steps::NamedJob {
 
     let tag_output = StepOutput::new(&determine_version_step, "tag");
 
-    fn run_compliance_check(tag: &StepOutput) -> Step<Run> {
-        named::bash(
-            formatdoc! {r#"
-                cargo xtask compliance "$LATEST_TAG" --branch main --report-path "{COMPLIANCE_REPORT_PATH}"
-                "#,
-            }
-        )
-        .id(COMPLIANCE_STEP_ID)
-        .add_env(("LATEST_TAG", tag.to_string()))
-        .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID))
-        .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY))
-    }
-
     let job = Job::default()
         .with_repository_owner_guard()
         .runs_on(runners::LINUX_SMALL)
         .add_step(steps::checkout_repo().with_full_history())
         .add_step(steps::cache_rust_dependencies_namespace())
-        .add_step(determine_version_step)
-        .add_step(run_compliance_check(&tag_output));
-
-    named::job(add_compliance_notification_steps(
-        job,
-        ComplianceContext::Scheduled {
-            tag_source: tag_output,
-        },
-    ))
+        .add_step(determine_version_step);
+
+    named::job(
+        add_compliance_steps(
+            job,
+            ComplianceContext::Scheduled {
+                tag_source: tag_output,
+            },
+        )
+        .0,
+    )
 }

tooling/xtask/src/tasks/workflows/release.rs 🔗

@@ -6,7 +6,7 @@ use crate::tasks::workflows::{
     run_tests,
     runners::{self, Arch, Platform},
     steps::{self, FluentBuilder, NamedJob, dependant_job, named, release_job},
-    vars::{self, StepOutput, assets},
+    vars::{self, JobOutput, StepOutput, assets},
 };
 
 const CURRENT_ACTION_RUN_URL: &str =
@@ -22,7 +22,7 @@ pub(crate) fn release() -> Workflow {
     let check_scripts = run_tests::check_scripts();
 
     let create_draft_release = create_draft_release();
-    let compliance = compliance_check();
+    let (non_blocking_compliance_run, job_output) = compliance_check();
 
     let bundle = ReleaseBundleJobs {
         linux_aarch64: bundle_linux(
@@ -58,7 +58,10 @@ pub(crate) fn release() -> Workflow {
     };
 
     let upload_release_assets = upload_release_assets(&[&create_draft_release], &bundle);
-    let validate_release_assets = validate_release_assets(&[&upload_release_assets]);
+    let validate_release_assets = validate_release_assets(
+        &[&upload_release_assets, &non_blocking_compliance_run],
+        job_output,
+    );
 
     let auto_release_preview = auto_release_preview(&[&validate_release_assets]);
 
@@ -93,7 +96,10 @@ pub(crate) fn release() -> Workflow {
         .add_job(windows_clippy.name, windows_clippy.job)
         .add_job(check_scripts.name, check_scripts.job)
         .add_job(create_draft_release.name, create_draft_release.job)
-        .add_job(compliance.name, compliance.job)
+        .add_job(
+            non_blocking_compliance_run.name,
+            non_blocking_compliance_run.job,
+        )
         .map(|mut workflow| {
             for job in bundle.into_jobs() {
                 workflow = workflow.add_job(job.name, job.job);
@@ -156,25 +162,65 @@ pub(crate) const COMPLIANCE_STEP_ID: &str = "run-compliance-check";
 const NEEDS_REVIEW_PULLS_URL: &str = "https://github.com/zed-industries/zed/pulls?q=is%3Apr+is%3Aclosed+label%3A%22PR+state%3Aneeds+review%22";
 
 pub(crate) enum ComplianceContext {
-    Release,
+    Release { non_blocking_outcome: JobOutput },
     ReleaseNonBlocking,
     Scheduled { tag_source: StepOutput },
 }
 
-pub(crate) fn add_compliance_notification_steps(
+impl ComplianceContext {
+    fn tag_source(&self) -> Option<&StepOutput> {
+        match self {
+            ComplianceContext::Scheduled { tag_source } => Some(tag_source),
+            _ => None,
+        }
+    }
+}
+
+pub(crate) fn add_compliance_steps(
     job: gh_workflow::Job,
     context: ComplianceContext,
-) -> gh_workflow::Job {
+) -> (gh_workflow::Job, StepOutput) {
+    fn run_compliance_check(context: &ComplianceContext) -> (Step<Run>, StepOutput) {
+        let job = named::bash(
+            formatdoc! {r#"
+                cargo xtask compliance {target} --report-path "{COMPLIANCE_REPORT_PATH}"
+                "#,
+                target = if context.tag_source().is_some() { r#""$LATEST_TAG" --branch main"# } else { r#""$GITHUB_REF_NAME""# },
+            }
+        )
+        .id(COMPLIANCE_STEP_ID)
+        .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID))
+        .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY))
+        .when_some(context.tag_source(), |step, tag_source| {
+            step.add_env(("LATEST_TAG", tag_source.to_string()))
+        })
+        .when(
+            matches!(
+                context,
+                ComplianceContext::Scheduled { .. } | ComplianceContext::ReleaseNonBlocking
+            ),
+            |step| step.continue_on_error(true),
+        );
+
+        let result = StepOutput::new_unchecked(&job, "outcome");
+        (job, result)
+    }
+
     let upload_step = upload_artifact(COMPLIANCE_REPORT_PATH)
         .if_condition(Expression::new("always()"))
-        .when(matches!(context, ComplianceContext::Release), |step| {
-            step.add_with(("overwrite", true))
-        });
+        .when(
+            matches!(context, ComplianceContext::Release { .. }),
+            |step| step.add_with(("overwrite", true)),
+        );
 
     let (success_prefix, failure_prefix) = match context {
-        ComplianceContext::Release | ComplianceContext::ReleaseNonBlocking => {
+        ComplianceContext::Release { .. } => {
             ("✅ Compliance check passed", "❌ Compliance check failed")
         }
+        ComplianceContext::ReleaseNonBlocking => (
+            "✅ Compliance check passed",
+            "❌ Preliminary compliance check failed (but this can still be fixed while the builds are running!)",
+        ),
         ComplianceContext::Scheduled { .. } => (
             "✅ Scheduled compliance check passed",
             "⚠️ Scheduled compliance check failed",
@@ -198,7 +244,17 @@ pub(crate) fn add_compliance_notification_steps(
 
     let notification_step = Step::new("send_compliance_slack_notification")
         .run(&script)
-        .if_condition(Expression::new("always()"))
+        .if_condition(match &context {
+            ComplianceContext::Release {
+                non_blocking_outcome,
+            } => Expression::new(format!(
+                "failure() || {prior_outcome} != 'success'",
+                prior_outcome = non_blocking_outcome.expr()
+            )),
+            ComplianceContext::Scheduled { .. } | ComplianceContext::ReleaseNonBlocking => {
+                Expression::new("always()")
+            }
+        })
         .add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES))
         .add_env((
             "COMPLIANCE_OUTCOME",
@@ -206,8 +262,8 @@ pub(crate) fn add_compliance_notification_steps(
         ))
         .add_env((
             "COMPLIANCE_TAG",
-            match context {
-                ComplianceContext::Release | ComplianceContext::ReleaseNonBlocking => {
+            match &context {
+                ComplianceContext::Release { .. } | ComplianceContext::ReleaseNonBlocking => {
                     Context::github().ref_name().to_string()
                 }
                 ComplianceContext::Scheduled { tag_source } => tag_source.to_string(),
@@ -218,21 +274,21 @@ pub(crate) fn add_compliance_notification_steps(
             format!("{CURRENT_ACTION_RUN_URL}#artifacts"),
         ));
 
-    job.add_step(upload_step).add_step(notification_step)
-}
+    let (compliance_step, check_result) = run_compliance_check(&context);
 
-fn run_compliance_check() -> Step<Run> {
-    named::bash(formatdoc! {r#"
-        cargo xtask compliance "$GITHUB_REF_NAME" --report-path "{COMPLIANCE_REPORT_PATH}"
-        "#,
-    })
-    .id(COMPLIANCE_STEP_ID)
-    .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID))
-    .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY))
-    .continue_on_error(true)
+    (
+        job.add_step(compliance_step)
+            .add_step(upload_step)
+            .add_step(notification_step)
+            .when(
+                matches!(context, ComplianceContext::ReleaseNonBlocking),
+                |step| step.outputs([("outcome".to_string(), check_result.to_string())]),
+            ),
+        check_result,
+    )
 }
 
-fn compliance_check() -> NamedJob {
+fn compliance_check() -> (NamedJob, JobOutput) {
     let job = release_job(&[])
         .runs_on(runners::LINUX_SMALL)
         .add_step(
@@ -240,16 +296,17 @@ fn compliance_check() -> NamedJob {
                 .with_full_history()
                 .with_ref(Context::github().ref_()),
         )
-        .add_step(steps::cache_rust_dependencies_namespace())
-        .add_step(run_compliance_check());
+        .add_step(steps::cache_rust_dependencies_namespace());
+
+    let (compliance_job, check_result) =
+        add_compliance_steps(job, ComplianceContext::ReleaseNonBlocking);
+    let compliance_job = named::job(compliance_job);
+    let check_result = check_result.as_job_output(&compliance_job);
 
-    named::job(add_compliance_notification_steps(
-        job,
-        ComplianceContext::ReleaseNonBlocking,
-    ))
+    (compliance_job, check_result)
 }
 
-fn validate_release_assets(deps: &[&NamedJob]) -> NamedJob {
+fn validate_release_assets(deps: &[&NamedJob], context_check_result: JobOutput) -> NamedJob {
     let expected_assets: Vec<String> = assets::all().iter().map(|a| format!("\"{a}\"")).collect();
     let expected_assets_json = format!("[{}]", expected_assets.join(", "));
 
@@ -279,13 +336,17 @@ fn validate_release_assets(deps: &[&NamedJob]) -> NamedJob {
                 .with_full_history()
                 .with_ref(Context::github().ref_()),
         )
-        .add_step(steps::cache_rust_dependencies_namespace())
-        .add_step(run_compliance_check());
+        .add_step(steps::cache_rust_dependencies_namespace());
 
-    named::job(add_compliance_notification_steps(
-        job,
-        ComplianceContext::Release,
-    ))
+    named::job(
+        add_compliance_steps(
+            job,
+            ComplianceContext::Release {
+                non_blocking_outcome: context_check_result,
+            },
+        )
+        .0,
+    )
 }
 
 fn auto_release_preview(deps: &[&NamedJob]) -> NamedJob {

tooling/xtask/src/tasks/workflows/vars.rs 🔗

@@ -167,7 +167,7 @@ impl StepOutput {
                 .run
                 .as_ref()
                 .is_none_or(|run_command| run_command.contains(name)),
-            "Step Output name {name} must occur at least once in run command with ID {step_id}!"
+            "Step output with name '{name}' must occur at least once in run command with ID {step_id}!"
         );
 
         Self { name, step_id }