From 6976208e21436e88a4a5a094b440d41c482c5c84 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 18 Dec 2025 15:23:09 -0700 Subject: [PATCH 01/46] Move autofix stuff to zippy (#45304) Although I wanted to avoid the dependency, it's hard to get github to do what we want. Release Notes: - N/A --- .github/workflows/extension_tests.yml | 3 +- .github/workflows/release.yml | 16 +--- .github/workflows/release_nightly.yml | 6 +- .github/workflows/run_tests.yml | 47 ++---------- .../xtask/src/tasks/workflows/run_tests.rs | 73 +------------------ tooling/xtask/src/tasks/workflows/steps.rs | 33 +-------- 6 files changed, 19 insertions(+), 159 deletions(-) diff --git a/.github/workflows/extension_tests.yml b/.github/workflows/extension_tests.yml index 7a7fff9b97d694c1b02dd426f5d59301fe2be81e..9f0917e388c74cffed8f342f7504bc111e6f5147 100644 --- a/.github/workflows/extension_tests.yml +++ b/.github/workflows/extension_tests.yml @@ -61,8 +61,7 @@ jobs: uses: namespacelabs/nscloud-cache-action@v1 with: cache: rust - - id: cargo_fmt - name: steps::cargo_fmt + - name: steps::cargo_fmt run: cargo fmt --all -- --check shell: bash -euxo pipefail {0} - name: extension_tests::run_clippy diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 317d5a8df37a62887ce4ddcdd67c8d77b48d56d6..ffc2554a55e00a5bdb7bd1ee0bfeebd5667755d5 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -26,8 +26,7 @@ jobs: uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 with: node-version: '20' - - id: clippy - name: steps::clippy + - name: steps::clippy run: ./script/clippy shell: bash -euxo pipefail {0} - name: steps::clear_target_dir_if_large @@ -72,15 +71,9 @@ jobs: uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 with: node-version: '20' - - id: clippy - name: steps::clippy + - name: steps::clippy run: ./script/clippy shell: bash -euxo pipefail {0} - - id: record_clippy_failure - name: steps::record_clippy_failure - 
if: always() - run: echo "failed=${{ steps.clippy.outcome == 'failure' }}" >> "$GITHUB_OUTPUT" - shell: bash -euxo pipefail {0} - name: steps::cargo_install_nextest uses: taiki-e/install-action@nextest - name: steps::clear_target_dir_if_large @@ -94,8 +87,6 @@ jobs: run: | rm -rf ./../.cargo shell: bash -euxo pipefail {0} - outputs: - clippy_failed: ${{ steps.record_clippy_failure.outputs.failed == 'true' }} timeout-minutes: 60 run_tests_windows: if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') @@ -114,8 +105,7 @@ jobs: uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 with: node-version: '20' - - id: clippy - name: steps::clippy + - name: steps::clippy run: ./script/clippy.ps1 shell: pwsh - name: steps::clear_target_dir_if_large diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index b23e4b7518a672c0d586ea5ba437db5cf8f94bb6..d76244175accc3e816cbd7d5dc322d2529a0a236 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -20,8 +20,7 @@ jobs: with: clean: false fetch-depth: 0 - - id: cargo_fmt - name: steps::cargo_fmt + - name: steps::cargo_fmt run: cargo fmt --all -- --check shell: bash -euxo pipefail {0} - name: ./script/clippy @@ -45,8 +44,7 @@ jobs: uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 with: node-version: '20' - - id: clippy - name: steps::clippy + - name: steps::clippy run: ./script/clippy.ps1 shell: pwsh - name: steps::clear_target_dir_if_large diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index fac3221d63a080fa53b7ba1c5b7249e6a405c73c..256bb2916a56485c06c2ebc4de8724151d622c4f 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -74,19 +74,12 @@ jobs: uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 with: version: '9' - - id: prettier - name: steps::prettier + - name: steps::prettier run: ./script/prettier 
shell: bash -euxo pipefail {0} - - id: cargo_fmt - name: steps::cargo_fmt + - name: steps::cargo_fmt run: cargo fmt --all -- --check shell: bash -euxo pipefail {0} - - id: record_style_failure - name: steps::record_style_failure - if: always() - run: echo "failed=${{ steps.prettier.outcome == 'failure' || steps.cargo_fmt.outcome == 'failure' }}" >> "$GITHUB_OUTPUT" - shell: bash -euxo pipefail {0} - name: ./script/check-todos run: ./script/check-todos shell: bash -euxo pipefail {0} @@ -97,8 +90,6 @@ jobs: uses: crate-ci/typos@2d0ce569feab1f8752f1dde43cc2f2aa53236e06 with: config: ./typos.toml - outputs: - style_failed: ${{ steps.record_style_failure.outputs.failed == 'true' }} timeout-minutes: 60 run_tests_windows: needs: @@ -119,8 +110,7 @@ jobs: uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 with: node-version: '20' - - id: clippy - name: steps::clippy + - name: steps::clippy run: ./script/clippy.ps1 shell: pwsh - name: steps::clear_target_dir_if_large @@ -167,15 +157,9 @@ jobs: uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 with: node-version: '20' - - id: clippy - name: steps::clippy + - name: steps::clippy run: ./script/clippy shell: bash -euxo pipefail {0} - - id: record_clippy_failure - name: steps::record_clippy_failure - if: always() - run: echo "failed=${{ steps.clippy.outcome == 'failure' }}" >> "$GITHUB_OUTPUT" - shell: bash -euxo pipefail {0} - name: steps::cargo_install_nextest uses: taiki-e/install-action@nextest - name: steps::clear_target_dir_if_large @@ -189,8 +173,6 @@ jobs: run: | rm -rf ./../.cargo shell: bash -euxo pipefail {0} - outputs: - clippy_failed: ${{ steps.record_clippy_failure.outputs.failed == 'true' }} timeout-minutes: 60 run_tests_mac: needs: @@ -211,8 +193,7 @@ jobs: uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 with: node-version: '20' - - id: clippy - name: steps::clippy + - name: steps::clippy run: ./script/clippy shell: bash -euxo pipefail {0} - name: 
steps::clear_target_dir_if_large @@ -592,24 +573,6 @@ jobs: exit $EXIT_CODE shell: bash -euxo pipefail {0} - call_autofix: - needs: - - check_style - - run_tests_linux - if: always() && (needs.check_style.outputs.style_failed == 'true' || needs.run_tests_linux.outputs.clippy_failed == 'true') && github.event_name == 'pull_request' && github.actor != 'zed-zippy[bot]' - runs-on: namespace-profile-2x4-ubuntu-2404 - steps: - - id: get-app-token - name: steps::authenticate_as_zippy - uses: actions/create-github-app-token@bef1eaf1c0ac2b148ee2a0a74c65fbe6db0631f1 - with: - app-id: ${{ secrets.ZED_ZIPPY_APP_ID }} - private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} - - name: run_tests::call_autofix::dispatch_autofix - run: gh workflow run autofix_pr.yml -f pr_number=${{ github.event.pull_request.number }} -f run_clippy=${{ needs.run_tests_linux.outputs.clippy_failed == 'true' }} - shell: bash -euxo pipefail {0} - env: - GITHUB_TOKEN: ${{ steps.get-app-token.outputs.token }} concurrency: group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} cancel-in-progress: true diff --git a/tooling/xtask/src/tasks/workflows/run_tests.rs b/tooling/xtask/src/tasks/workflows/run_tests.rs index d0caab82b057f21735b7f828c8917a358dd548b2..f726f48740eb7819fbbd3fed369e5e4e89c526c9 100644 --- a/tooling/xtask/src/tasks/workflows/run_tests.rs +++ b/tooling/xtask/src/tasks/workflows/run_tests.rs @@ -45,15 +45,11 @@ pub(crate) fn run_tests() -> Workflow { &should_run_tests, ]); - let check_style = check_style(); - let run_tests_linux = run_platform_tests(Platform::Linux); - let call_autofix = call_autofix(&check_style, &run_tests_linux); - let mut jobs = vec![ orchestrate, - check_style, + check_style(), should_run_tests.guard(run_platform_tests(Platform::Windows)), - should_run_tests.guard(run_tests_linux), + should_run_tests.guard(run_platform_tests(Platform::Linux)), should_run_tests.guard(run_platform_tests(Platform::Mac)), 
should_run_tests.guard(doctests()), should_run_tests.guard(check_workspace_binaries()), @@ -110,7 +106,6 @@ pub(crate) fn run_tests() -> Workflow { workflow }) .add_job(tests_pass.name, tests_pass.job) - .add_job(call_autofix.name, call_autofix.job) } // Generates a bash script that checks changed files against regex patterns @@ -226,8 +221,6 @@ pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob { named::job(job) } -pub const STYLE_FAILED_OUTPUT: &str = "style_failed"; - fn check_style() -> NamedJob { fn check_for_typos() -> Step { named::uses( @@ -245,56 +238,12 @@ fn check_style() -> NamedJob { .add_step(steps::setup_pnpm()) .add_step(steps::prettier()) .add_step(steps::cargo_fmt()) - .add_step(steps::record_style_failure()) .add_step(steps::script("./script/check-todos")) .add_step(steps::script("./script/check-keymaps")) - .add_step(check_for_typos()) - .outputs([( - STYLE_FAILED_OUTPUT.to_owned(), - format!( - "${{{{ steps.{}.outputs.failed == 'true' }}}}", - steps::RECORD_STYLE_FAILURE_STEP_ID - ), - )]), + .add_step(check_for_typos()), ) } -fn call_autofix(check_style: &NamedJob, run_tests_linux: &NamedJob) -> NamedJob { - fn dispatch_autofix(run_tests_linux_name: &str) -> Step { - let clippy_failed_expr = format!( - "needs.{}.outputs.{} == 'true'", - run_tests_linux_name, CLIPPY_FAILED_OUTPUT - ); - named::bash(format!( - "gh workflow run autofix_pr.yml -f pr_number=${{{{ github.event.pull_request.number }}}} -f run_clippy=${{{{ {} }}}}", - clippy_failed_expr - )) - .add_env(("GITHUB_TOKEN", "${{ steps.get-app-token.outputs.token }}")) - } - - let style_failed_expr = format!( - "needs.{}.outputs.{} == 'true'", - check_style.name, STYLE_FAILED_OUTPUT - ); - let clippy_failed_expr = format!( - "needs.{}.outputs.{} == 'true'", - run_tests_linux.name, CLIPPY_FAILED_OUTPUT - ); - let (authenticate, _token) = steps::authenticate_as_zippy(); - - let job = Job::default() - .runs_on(runners::LINUX_SMALL) - .cond(Expression::new(format!( - "always() && ({} || {}) && 
github.event_name == 'pull_request' && github.actor != 'zed-zippy[bot]'", - style_failed_expr, clippy_failed_expr - ))) - .needs(vec![check_style.name.clone(), run_tests_linux.name.clone()]) - .add_step(authenticate) - .add_step(dispatch_autofix(&run_tests_linux.name)); - - named::job(job) -} - fn check_dependencies() -> NamedJob { fn install_cargo_machete() -> Step { named::uses( @@ -355,8 +304,6 @@ fn check_workspace_binaries() -> NamedJob { ) } -pub const CLIPPY_FAILED_OUTPUT: &str = "clippy_failed"; - pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob { let runner = match platform { Platform::Windows => runners::WINDOWS_DEFAULT, @@ -378,24 +325,12 @@ pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob { ) .add_step(steps::setup_node()) .add_step(steps::clippy(platform)) - .when(platform == Platform::Linux, |job| { - job.add_step(steps::record_clippy_failure()) - }) .when(platform == Platform::Linux, |job| { job.add_step(steps::cargo_install_nextest()) }) .add_step(steps::clear_target_dir_if_large(platform)) .add_step(steps::cargo_nextest(platform)) - .add_step(steps::cleanup_cargo_config(platform)) - .when(platform == Platform::Linux, |job| { - job.outputs([( - CLIPPY_FAILED_OUTPUT.to_owned(), - format!( - "${{{{ steps.{}.outputs.failed == 'true' }}}}", - steps::RECORD_CLIPPY_FAILURE_STEP_ID - ), - )]) - }), + .add_step(steps::cleanup_cargo_config(platform)), } } diff --git a/tooling/xtask/src/tasks/workflows/steps.rs b/tooling/xtask/src/tasks/workflows/steps.rs index eaa51dc35205f51e7fe3a56668ed0679e92999f0..a0b071cd6c31654b42adddbba47dd24c60da7df2 100644 --- a/tooling/xtask/src/tasks/workflows/steps.rs +++ b/tooling/xtask/src/tasks/workflows/steps.rs @@ -54,25 +54,12 @@ pub fn setup_sentry() -> Step { .add_with(("token", vars::SENTRY_AUTH_TOKEN)) } -pub const PRETTIER_STEP_ID: &str = "prettier"; -pub const CARGO_FMT_STEP_ID: &str = "cargo_fmt"; -pub const RECORD_STYLE_FAILURE_STEP_ID: &str = "record_style_failure"; - pub fn 
prettier() -> Step { - named::bash("./script/prettier").id(PRETTIER_STEP_ID) + named::bash("./script/prettier") } pub fn cargo_fmt() -> Step { - named::bash("cargo fmt --all -- --check").id(CARGO_FMT_STEP_ID) -} - -pub fn record_style_failure() -> Step { - named::bash(format!( - "echo \"failed=${{{{ steps.{}.outcome == 'failure' || steps.{}.outcome == 'failure' }}}}\" >> \"$GITHUB_OUTPUT\"", - PRETTIER_STEP_ID, CARGO_FMT_STEP_ID - )) - .id(RECORD_STYLE_FAILURE_STEP_ID) - .if_condition(Expression::new("always()")) + named::bash("cargo fmt --all -- --check") } pub fn cargo_install_nextest() -> Step { @@ -118,25 +105,13 @@ pub fn clear_target_dir_if_large(platform: Platform) -> Step { } } -pub const CLIPPY_STEP_ID: &str = "clippy"; -pub const RECORD_CLIPPY_FAILURE_STEP_ID: &str = "record_clippy_failure"; - pub fn clippy(platform: Platform) -> Step { match platform { - Platform::Windows => named::pwsh("./script/clippy.ps1").id(CLIPPY_STEP_ID), - _ => named::bash("./script/clippy").id(CLIPPY_STEP_ID), + Platform::Windows => named::pwsh("./script/clippy.ps1"), + _ => named::bash("./script/clippy"), } } -pub fn record_clippy_failure() -> Step { - named::bash(format!( - "echo \"failed=${{{{ steps.{}.outcome == 'failure' }}}}\" >> \"$GITHUB_OUTPUT\"", - CLIPPY_STEP_ID - )) - .id(RECORD_CLIPPY_FAILURE_STEP_ID) - .if_condition(Expression::new("always()")) -} - pub fn cache_rust_dependencies_namespace() -> Step { named::uses("namespacelabs", "nscloud-cache-action", "v1").add_with(("cache", "rust")) } From e0ff995e2d5673af67af275187271776b57436d7 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 18 Dec 2025 22:18:41 -0300 Subject: [PATCH 02/46] agent ui: Make some UI elements more consistent (#45319) - Both the mode, profile, and model selectors have the option to cycle through its options with a keybinding. In the tooltip that shows it, in some of them the "Cycle Through..." label was at the top, and in others at the bottom. 
Now it's all at the bottom. - We used different language in different places for "going to a file". The tool call edit card's header said "_Jump_ to File" while the edit files list said "_Go_ to File". Now it's both "Go to File". Release Notes: - N/A --- crates/agent_ui/src/acp/mode_selector.rs | 14 +++++++------- crates/agent_ui/src/acp/thread_view.rs | 2 +- crates/agent_ui/src/profile_selector.rs | 6 +++--- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/crates/agent_ui/src/acp/mode_selector.rs b/crates/agent_ui/src/acp/mode_selector.rs index 1f50ce74321d393ba6c7f5083bd889bc3dc2c0e1..22af75a6e96edc4f597819e04e2e84b80ba0417a 100644 --- a/crates/agent_ui/src/acp/mode_selector.rs +++ b/crates/agent_ui/src/acp/mode_selector.rs @@ -188,25 +188,25 @@ impl Render for ModeSelector { .gap_1() .child( h_flex() - .pb_1() .gap_2() .justify_between() - .border_b_1() - .border_color(cx.theme().colors().border_variant) - .child(Label::new("Cycle Through Modes")) + .child(Label::new("Toggle Mode Menu")) .child(KeyBinding::for_action_in( - &CycleModeSelector, + &ToggleProfileSelector, &focus_handle, cx, )), ) .child( h_flex() + .pb_1() .gap_2() .justify_between() - .child(Label::new("Toggle Mode Menu")) + .border_b_1() + .border_color(cx.theme().colors().border_variant) + .child(Label::new("Cycle Through Modes")) .child(KeyBinding::for_action_in( - &ToggleProfileSelector, + &CycleModeSelector, &focus_handle, cx, )), diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs index 8364fd8c0f4d8fd55df8f2e74e990e603029db78..32b2de2c0d850676bf7a6a80ee88950d62aa24e0 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/acp/thread_view.rs @@ -2718,7 +2718,7 @@ impl AcpThreadView { ..default_markdown_style(false, true, window, cx) }, )) - .tooltip(Tooltip::text("Jump to File")) + .tooltip(Tooltip::text("Go to File")) .on_click(cx.listener(move |this, _, window, cx| { this.open_tool_call_location(entry_ix, 0, 
window, cx); })) diff --git a/crates/agent_ui/src/profile_selector.rs b/crates/agent_ui/src/profile_selector.rs index ac08070fcefa92854b51bc8a66d4d388d08e087d..327d2c67e2d5e87e67935ecdfa7fb6cd41acbcb5 100644 --- a/crates/agent_ui/src/profile_selector.rs +++ b/crates/agent_ui/src/profile_selector.rs @@ -191,6 +191,9 @@ impl Render for ProfileSelector { let container = || h_flex().gap_1().justify_between(); v_flex() .gap_1() + .child(container().child(Label::new("Toggle Profile Menu")).child( + KeyBinding::for_action_in(&ToggleProfileSelector, &focus_handle, cx), + )) .child( container() .pb_1() @@ -203,9 +206,6 @@ impl Render for ProfileSelector { cx, )), ) - .child(container().child(Label::new("Toggle Profile Menu")).child( - KeyBinding::for_action_in(&ToggleProfileSelector, &focus_handle, cx), - )) .into_any() } }), From 435d4c5f2415f569d192ee27bf8d6ed5157360f6 Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Thu, 18 Dec 2025 20:56:47 -0600 Subject: [PATCH 03/46] vim: Make `vaf` include const for arrow functions in JS/TS/TSX (#45327) Closes #24264 Release Notes: - N/A *or* Added/Fixed/Improved ... 
--- .../src/test/editor_lsp_test_context.rs | 86 ++++ crates/language/src/buffer_tests.rs | 98 +++++ crates/language/src/syntax_map.rs | 61 ++- .../languages/src/javascript/textobjects.scm | 38 +- crates/languages/src/tsx/textobjects.scm | 38 +- .../languages/src/typescript/textobjects.scm | 39 +- crates/vim/src/object.rs | 386 ++++++++++++++++++ crates/vim/src/visual.rs | 16 +- 8 files changed, 742 insertions(+), 20 deletions(-) diff --git a/crates/editor/src/test/editor_lsp_test_context.rs b/crates/editor/src/test/editor_lsp_test_context.rs index 7c4c0e48d36dbb9f74a1c835c63fa2b91c5681d9..3e7c47c2ac5efeedde51f180bcfcb424aec31c86 100644 --- a/crates/editor/src/test/editor_lsp_test_context.rs +++ b/crates/editor/src/test/editor_lsp_test_context.rs @@ -205,6 +205,49 @@ impl EditorLspTestContext { (_ "{" "}" @end) @indent (_ "(" ")" @end) @indent "#})), + text_objects: Some(Cow::from(indoc! {r#" + (function_declaration + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + + (method_definition + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + + ; Arrow function in variable declaration - capture the full declaration + ([ + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) + ]) @function.around + + ([ + (lexical_declaration + (variable_declarator + value: (arrow_function))) + (variable_declaration + (variable_declarator + value: (arrow_function))) + ]) @function.around + + ; Catch-all for arrow functions in other contexts (callbacks, etc.) + ((arrow_function) @function.around (#not-has-parent? @function.around variable_declarator)) + "#})), ..Default::default() }) .expect("Could not parse queries"); @@ -276,6 +319,49 @@ impl EditorLspTestContext { (jsx_opening_element) @start (jsx_closing_element)? 
@end) @indent "#})), + text_objects: Some(Cow::from(indoc! {r#" + (function_declaration + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + + (method_definition + body: (_ + "{" + (_)* @function.inside + "}")) @function.around + + ; Arrow function in variable declaration - capture the full declaration + ([ + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) + ]) @function.around + + ([ + (lexical_declaration + (variable_declarator + value: (arrow_function))) + (variable_declaration + (variable_declarator + value: (arrow_function))) + ]) @function.around + + ; Catch-all for arrow functions in other contexts (callbacks, etc.) + ((arrow_function) @function.around (#not-has-parent? @function.around variable_declarator)) + "#})), ..Default::default() }) .expect("Could not parse queries"); diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 54e2ef4065460547f4a3f86db7d3a3986dff65eb..2c2d93c8239f0f3fcb1de0956de2d3400f13e96b 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -1141,6 +1141,104 @@ fn test_text_objects(cx: &mut App) { ) } +#[gpui::test] +fn test_text_objects_with_has_parent_predicate(cx: &mut App) { + use std::borrow::Cow; + + // Create a language with a custom text_objects query that uses #has-parent? + // This query only matches closure_expression when it's inside a call_expression + let language = Language::new( + LanguageConfig { + name: "Rust".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::LANGUAGE.into()), + ) + .with_queries(LanguageQueries { + text_objects: Some(Cow::from(indoc! 
{r#" + ; Only match closures that are arguments to function calls + (closure_expression) @function.around + (#has-parent? @function.around arguments) + "#})), + ..Default::default() + }) + .expect("Could not parse queries"); + + let (text, ranges) = marked_text_ranges( + indoc! {r#" + fn main() { + let standalone = |x| x + 1; + let result = foo(|y| y * ˇ2); + }"# + }, + false, + ); + + let buffer = cx.new(|cx| Buffer::local(text.clone(), cx).with_language(Arc::new(language), cx)); + let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot()); + + let matches = snapshot + .text_object_ranges(ranges[0].clone(), TreeSitterOptions::default()) + .map(|(range, text_object)| (&text[range], text_object)) + .collect::>(); + + // Should only match the closure inside foo(), not the standalone closure + assert_eq!(matches, &[("|y| y * 2", TextObject::AroundFunction),]); +} + +#[gpui::test] +fn test_text_objects_with_not_has_parent_predicate(cx: &mut App) { + use std::borrow::Cow; + + // Create a language with a custom text_objects query that uses #not-has-parent? + // This query only matches closure_expression when it's NOT inside a call_expression + let language = Language::new( + LanguageConfig { + name: "Rust".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::LANGUAGE.into()), + ) + .with_queries(LanguageQueries { + text_objects: Some(Cow::from(indoc! {r#" + ; Only match closures that are NOT arguments to function calls + (closure_expression) @function.around + (#not-has-parent? @function.around arguments) + "#})), + ..Default::default() + }) + .expect("Could not parse queries"); + + let (text, ranges) = marked_text_ranges( + indoc! 
{r#" + fn main() { + let standalone = |x| x +ˇ 1; + let result = foo(|y| y * 2); + }"# + }, + false, + ); + + let buffer = cx.new(|cx| Buffer::local(text.clone(), cx).with_language(Arc::new(language), cx)); + let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot()); + + let matches = snapshot + .text_object_ranges(ranges[0].clone(), TreeSitterOptions::default()) + .map(|(range, text_object)| (&text[range], text_object)) + .collect::>(); + + // Should only match the standalone closure, not the one inside foo() + assert_eq!(matches, &[("|x| x + 1", TextObject::AroundFunction),]); +} + #[gpui::test] fn test_enclosing_bracket_ranges(cx: &mut App) { #[track_caller] diff --git a/crates/language/src/syntax_map.rs b/crates/language/src/syntax_map.rs index 77e90c4ca89d0b6e5b8cb0a604175ec9a97e719e..db4ab4f459c35a98752bef1eb5be558084b5c906 100644 --- a/crates/language/src/syntax_map.rs +++ b/crates/language/src/syntax_map.rs @@ -19,7 +19,10 @@ use std::{ use streaming_iterator::StreamingIterator; use sum_tree::{Bias, Dimensions, SeekTarget, SumTree}; use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint}; -use tree_sitter::{Node, Query, QueryCapture, QueryCaptures, QueryCursor, QueryMatches, Tree}; +use tree_sitter::{ + Node, Query, QueryCapture, QueryCaptures, QueryCursor, QueryMatch, QueryMatches, + QueryPredicateArg, Tree, +}; pub const MAX_BYTES_TO_QUERY: usize = 16 * 1024; @@ -82,6 +85,7 @@ struct SyntaxMapMatchesLayer<'a> { next_captures: Vec>, has_next: bool, matches: QueryMatches<'a, 'a, TextProvider<'a>, &'a [u8]>, + query: &'a Query, grammar_index: usize, _query_cursor: QueryCursorHandle, } @@ -1163,6 +1167,7 @@ impl<'a> SyntaxMapMatches<'a> { depth: layer.depth, grammar_index, matches, + query, next_pattern_index: 0, next_captures: Vec::new(), has_next: false, @@ -1260,13 +1265,20 @@ impl SyntaxMapCapturesLayer<'_> { impl SyntaxMapMatchesLayer<'_> { fn advance(&mut self) { - if let Some(mat) = self.matches.next() { - 
self.next_captures.clear(); - self.next_captures.extend_from_slice(mat.captures); - self.next_pattern_index = mat.pattern_index; - self.has_next = true; - } else { - self.has_next = false; + loop { + if let Some(mat) = self.matches.next() { + if !satisfies_custom_predicates(self.query, mat) { + continue; + } + self.next_captures.clear(); + self.next_captures.extend_from_slice(mat.captures); + self.next_pattern_index = mat.pattern_index; + self.has_next = true; + return; + } else { + self.has_next = false; + return; + } } } @@ -1295,6 +1307,39 @@ impl<'a> Iterator for SyntaxMapCaptures<'a> { } } +fn satisfies_custom_predicates(query: &Query, mat: &QueryMatch) -> bool { + for predicate in query.general_predicates(mat.pattern_index) { + let satisfied = match predicate.operator.as_ref() { + "has-parent?" => has_parent(&predicate.args, mat), + "not-has-parent?" => !has_parent(&predicate.args, mat), + _ => true, + }; + if !satisfied { + return false; + } + } + true +} + +fn has_parent(args: &[QueryPredicateArg], mat: &QueryMatch) -> bool { + let ( + Some(QueryPredicateArg::Capture(capture_ix)), + Some(QueryPredicateArg::String(parent_kind)), + ) = (args.first(), args.get(1)) + else { + return false; + }; + + let Some(capture) = mat.captures.iter().find(|c| c.index == *capture_ix) else { + return false; + }; + + capture + .node + .parent() + .is_some_and(|p| p.kind() == parent_kind.as_ref()) +} + fn join_ranges( a: impl Iterator>, b: impl Iterator>, diff --git a/crates/languages/src/javascript/textobjects.scm b/crates/languages/src/javascript/textobjects.scm index 1a273ddb5000ba920868272bb4ac31d270095442..eace658e6b9847bcc651deedad2bc27cbfbf6975 100644 --- a/crates/languages/src/javascript/textobjects.scm +++ b/crates/languages/src/javascript/textobjects.scm @@ -18,13 +18,47 @@ (_)* @function.inside "}")) @function.around -(arrow_function +((arrow_function body: (statement_block "{" (_)* @function.inside "}")) @function.around + (#not-has-parent? 
@function.around variable_declarator)) -(arrow_function) @function.around +; Arrow function in variable declaration - capture the full declaration +([ + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) +]) @function.around + +; Arrow function in variable declaration (captures body for expression-bodied arrows) +([ + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) +]) @function.around + +; Catch-all for arrow functions in other contexts (callbacks, etc.) +((arrow_function + body: (_) @function.inside) @function.around + (#not-has-parent? @function.around variable_declarator)) (generator_function body: (_ diff --git a/crates/languages/src/tsx/textobjects.scm b/crates/languages/src/tsx/textobjects.scm index 836fed35ba1c1093b84e48a8da19d89177a69944..628a921f3ac9ea04ff59654d72caf73cebbc9071 100644 --- a/crates/languages/src/tsx/textobjects.scm +++ b/crates/languages/src/tsx/textobjects.scm @@ -18,13 +18,47 @@ (_)* @function.inside "}")) @function.around -(arrow_function +((arrow_function body: (statement_block "{" (_)* @function.inside "}")) @function.around + (#not-has-parent? 
@function.around variable_declarator)) -(arrow_function) @function.around +; Arrow function in variable declaration - capture the full declaration +([ + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) +]) @function.around + +; Arrow function in variable declaration (expression body fallback) +([ + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) +]) @function.around + +; Catch-all for arrow functions in other contexts (callbacks, etc.) +((arrow_function + body: (_) @function.inside) @function.around + (#not-has-parent? @function.around variable_declarator)) (function_signature) @function.around (generator_function diff --git a/crates/languages/src/typescript/textobjects.scm b/crates/languages/src/typescript/textobjects.scm index 836fed35ba1c1093b84e48a8da19d89177a69944..96289f058cd7b605a8f5b4c8966e3c372022d065 100644 --- a/crates/languages/src/typescript/textobjects.scm +++ b/crates/languages/src/typescript/textobjects.scm @@ -18,13 +18,48 @@ (_)* @function.inside "}")) @function.around -(arrow_function +((arrow_function body: (statement_block "{" (_)* @function.inside "}")) @function.around + (#not-has-parent? 
@function.around variable_declarator)) -(arrow_function) @function.around +; Arrow function in variable declaration - capture the full declaration +([ + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) +]) @function.around + +; Arrow function in variable declaration - capture body as @function.inside +; (for statement blocks, the more specific pattern above captures just the contents) +([ + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) +]) @function.around + +; Catch-all for arrow functions in other contexts (callbacks, etc.) +((arrow_function + body: (_) @function.inside) @function.around + (#not-has-parent? @function.around variable_declarator)) (function_signature) @function.around (generator_function diff --git a/crates/vim/src/object.rs b/crates/vim/src/object.rs index 02150332405c6d5ea4d5dd78f477348be968fddf..e9a2f4fc63d31f78a9a7abce8aac785b56eb1fd4 100644 --- a/crates/vim/src/object.rs +++ b/crates/vim/src/object.rs @@ -3407,4 +3407,390 @@ mod test { .assert_eq(" ˇf = (x: unknown) => {"); cx.shared_clipboard().await.assert_eq("const "); } + + #[gpui::test] + async fn test_arrow_function_text_object(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new_typescript(cx).await; + + cx.set_state( + indoc! {" + const foo = () => { + return ˇ1; + }; + "}, + Mode::Normal, + ); + cx.simulate_keystrokes("v a f"); + cx.assert_state( + indoc! {" + «const foo = () => { + return 1; + };ˇ» + "}, + Mode::VisualLine, + ); + + cx.set_state( + indoc! {" + arr.map(() => { + return ˇ1; + }); + "}, + Mode::Normal, + ); + cx.simulate_keystrokes("v a f"); + cx.assert_state( + indoc! 
{" + arr.map(«() => { + return 1; + }ˇ»); + "}, + Mode::VisualLine, + ); + + cx.set_state( + indoc! {" + const foo = () => { + return ˇ1; + }; + "}, + Mode::Normal, + ); + cx.simulate_keystrokes("v i f"); + cx.assert_state( + indoc! {" + const foo = () => { + «return 1;ˇ» + }; + "}, + Mode::Visual, + ); + + cx.set_state( + indoc! {" + (() => { + console.log(ˇ1); + })(); + "}, + Mode::Normal, + ); + cx.simulate_keystrokes("v a f"); + cx.assert_state( + indoc! {" + («() => { + console.log(1); + }ˇ»)(); + "}, + Mode::VisualLine, + ); + + cx.set_state( + indoc! {" + const foo = () => { + return ˇ1; + }; + export { foo }; + "}, + Mode::Normal, + ); + cx.simulate_keystrokes("v a f"); + cx.assert_state( + indoc! {" + «const foo = () => { + return 1; + };ˇ» + export { foo }; + "}, + Mode::VisualLine, + ); + + cx.set_state( + indoc! {" + let bar = () => { + return ˇ2; + }; + "}, + Mode::Normal, + ); + cx.simulate_keystrokes("v a f"); + cx.assert_state( + indoc! {" + «let bar = () => { + return 2; + };ˇ» + "}, + Mode::VisualLine, + ); + + cx.set_state( + indoc! {" + var baz = () => { + return ˇ3; + }; + "}, + Mode::Normal, + ); + cx.simulate_keystrokes("v a f"); + cx.assert_state( + indoc! {" + «var baz = () => { + return 3; + };ˇ» + "}, + Mode::VisualLine, + ); + + cx.set_state( + indoc! {" + const add = (a, b) => a + ˇb; + "}, + Mode::Normal, + ); + cx.simulate_keystrokes("v a f"); + cx.assert_state( + indoc! {" + «const add = (a, b) => a + b;ˇ» + "}, + Mode::VisualLine, + ); + + cx.set_state( + indoc! {" + const add = ˇ(a, b) => a + b; + "}, + Mode::Normal, + ); + cx.simulate_keystrokes("v a f"); + cx.assert_state( + indoc! {" + «const add = (a, b) => a + b;ˇ» + "}, + Mode::VisualLine, + ); + + cx.set_state( + indoc! {" + const add = (a, b) => a + bˇ; + "}, + Mode::Normal, + ); + cx.simulate_keystrokes("v a f"); + cx.assert_state( + indoc! {" + «const add = (a, b) => a + b;ˇ» + "}, + Mode::VisualLine, + ); + + cx.set_state( + indoc! 
{" + const add = (a, b) =ˇ> a + b; + "}, + Mode::Normal, + ); + cx.simulate_keystrokes("v a f"); + cx.assert_state( + indoc! {" + «const add = (a, b) => a + b;ˇ» + "}, + Mode::VisualLine, + ); + } + + #[gpui::test] + async fn test_arrow_function_in_jsx(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new_tsx(cx).await; + + cx.set_state( + indoc! {r#" + export const MyComponent = () => { + return ( +
+
{ + alert("Hello world!"); + console.log(ˇ"clicked"); + }}>Hello world!
+
+ ); + }; + "#}, + Mode::Normal, + ); + cx.simulate_keystrokes("v a f"); + cx.assert_state( + indoc! {r#" + export const MyComponent = () => { + return ( +
+
{ + alert("Hello world!"); + console.log("clicked"); + }ˇ»}>Hello world!
+
+ ); + }; + "#}, + Mode::VisualLine, + ); + + cx.set_state( + indoc! {r#" + export const MyComponent = () => { + return ( +
+
console.log("clickˇed")}>Hello world!
+
+ ); + }; + "#}, + Mode::Normal, + ); + cx.simulate_keystrokes("v a f"); + cx.assert_state( + indoc! {r#" + export const MyComponent = () => { + return ( +
+
console.log("clicked")ˇ»}>Hello world!
+
+ ); + }; + "#}, + Mode::VisualLine, + ); + + cx.set_state( + indoc! {r#" + export const MyComponent = () => { + return ( +
+
console.log("clicked")}>Hello world!
+
+ ); + }; + "#}, + Mode::Normal, + ); + cx.simulate_keystrokes("v a f"); + cx.assert_state( + indoc! {r#" + export const MyComponent = () => { + return ( +
+
console.log("clicked")ˇ»}>Hello world!
+
+ ); + }; + "#}, + Mode::VisualLine, + ); + + cx.set_state( + indoc! {r#" + export const MyComponent = () => { + return ( +
+
console.log("clicked"ˇ)}>Hello world!
+
+ ); + }; + "#}, + Mode::Normal, + ); + cx.simulate_keystrokes("v a f"); + cx.assert_state( + indoc! {r#" + export const MyComponent = () => { + return ( +
+
console.log("clicked")ˇ»}>Hello world!
+
+ ); + }; + "#}, + Mode::VisualLine, + ); + + cx.set_state( + indoc! {r#" + export const MyComponent = () => { + return ( +
+
console.log("clicked")}>Hello world!
+
+ ); + }; + "#}, + Mode::Normal, + ); + cx.simulate_keystrokes("v a f"); + cx.assert_state( + indoc! {r#" + export const MyComponent = () => { + return ( +
+
console.log("clicked")ˇ»}>Hello world!
+
+ ); + }; + "#}, + Mode::VisualLine, + ); + + cx.set_state( + indoc! {r#" + export const MyComponent = () => { + return ( +
+
{ + console.log("cliˇcked"); + }}>Hello world!
+
+ ); + }; + "#}, + Mode::Normal, + ); + cx.simulate_keystrokes("v a f"); + cx.assert_state( + indoc! {r#" + export const MyComponent = () => { + return ( +
+
{ + console.log("clicked"); + }ˇ»}>Hello world!
+
+ ); + }; + "#}, + Mode::VisualLine, + ); + + cx.set_state( + indoc! {r#" + export const MyComponent = () => { + return ( +
+
fˇoo()}>Hello world!
+
+ ); + }; + "#}, + Mode::Normal, + ); + cx.simulate_keystrokes("v a f"); + cx.assert_state( + indoc! {r#" + export const MyComponent = () => { + return ( +
+
foo()ˇ»}>Hello world!
+
+ ); + }; + "#}, + Mode::VisualLine, + ); + } } diff --git a/crates/vim/src/visual.rs b/crates/vim/src/visual.rs index 3c6f237435e3924a907e059ed1a878641c287e7e..5667190bb7239ee3e534a5556d96452a7c68b1ef 100644 --- a/crates/vim/src/visual.rs +++ b/crates/vim/src/visual.rs @@ -522,12 +522,16 @@ impl Vim { selection.start = original_point.to_display_point(map) } } else { - selection.end = movement::saturating_right( - map, - original_point.to_display_point(map), - ); - if original_point.column > 0 { - selection.reversed = true + let original_display_point = + original_point.to_display_point(map); + if selection.end <= original_display_point { + selection.end = movement::saturating_right( + map, + original_display_point, + ); + if original_point.column > 0 { + selection.reversed = true + } } } } From 3f67c5220d3834817e84c454c29a8f92f2688c1d Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Thu, 18 Dec 2025 20:59:05 -0600 Subject: [PATCH 04/46] Remove `zed` dependency from `docs_preprocessor` (#45130) Closes #ISSUE Uses the existing `--dump-all-actions` arg on the Zed binary to generate an asset of all of our actions so that the `docs_preprocessor` can injest it, rather than depending on the Zed crate itself to collect all action names Release Notes: - N/A *or* Added/Fixed/Improved ... 
--------- Co-authored-by: Zed Zippy <234243425+zed-zippy[bot]@users.noreply.github.com> --- .github/workflows/run_tests.yml | 3 + .gitignore | 1 + Cargo.lock | 3 - crates/docs_preprocessor/Cargo.toml | 5 +- crates/docs_preprocessor/src/main.rs | 88 +++++++++++-------- crates/title_bar/src/application_menu.rs | 20 ++--- crates/zed/Cargo.toml | 4 - crates/zed/src/main.rs | 13 +-- crates/zed/src/zed-main.rs | 8 -- crates/zed/src/zed.rs | 1 - script/generate-action-metadata | 10 +++ .../xtask/src/tasks/workflows/run_tests.rs | 1 + 12 files changed, 83 insertions(+), 74 deletions(-) delete mode 100644 crates/zed/src/zed-main.rs create mode 100755 script/generate-action-metadata diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index 256bb2916a56485c06c2ebc4de8724151d622c4f..47a84574e7c33fb8a40a90c67cd4f7dadb356978 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -353,6 +353,9 @@ jobs: - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk shell: bash -euxo pipefail {0} + - name: ./script/generate-action-metadata + run: ./script/generate-action-metadata + shell: bash -euxo pipefail {0} - name: run_tests::check_docs::install_mdbook uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 with: diff --git a/.gitignore b/.gitignore index 54faaf1374299ee8f97925a95a93b375c349d707..c71417c32bff76af9d4c9c67661556e1625c9d15 100644 --- a/.gitignore +++ b/.gitignore @@ -36,6 +36,7 @@ DerivedData/ Packages xcuserdata/ +crates/docs_preprocessor/actions.json # Don't commit any secrets to the repo. 
.env diff --git a/Cargo.lock b/Cargo.lock index 1bb39f2bdf8c5745b3e5c0e5ad1200be34ec6ab0..3f7077e721e934cd6cb05af0cdaefef75602b429 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5021,8 +5021,6 @@ name = "docs_preprocessor" version = "0.1.0" dependencies = [ "anyhow", - "command_palette", - "gpui", "mdbook", "regex", "serde", @@ -5031,7 +5029,6 @@ dependencies = [ "task", "theme", "util", - "zed", "zlog", ] diff --git a/crates/docs_preprocessor/Cargo.toml b/crates/docs_preprocessor/Cargo.toml index e71f9ae3f3f6fcff790db27fb1e377f0d1c20e40..07da23899956822f7577118ae85b6338b4cefae7 100644 --- a/crates/docs_preprocessor/Cargo.toml +++ b/crates/docs_preprocessor/Cargo.toml @@ -7,8 +7,6 @@ license = "GPL-3.0-or-later" [dependencies] anyhow.workspace = true -command_palette.workspace = true -gpui.workspace = true # We are specifically pinning this version of mdbook, as later versions introduce issues with double-nested subdirectories. # Ask @maxdeviant about this before bumping. mdbook = "= 0.4.40" @@ -17,7 +15,6 @@ serde.workspace = true serde_json.workspace = true settings.workspace = true util.workspace = true -zed.workspace = true zlog.workspace = true task.workspace = true theme.workspace = true @@ -27,4 +24,4 @@ workspace = true [[bin]] name = "docs_preprocessor" -path = "src/main.rs" +path = "src/main.rs" \ No newline at end of file diff --git a/crates/docs_preprocessor/src/main.rs b/crates/docs_preprocessor/src/main.rs index b614a8251139413f4b316937db1d4e3c0d551df6..d90dcc10db9fbd8d27a968094ea8d733a79b7e80 100644 --- a/crates/docs_preprocessor/src/main.rs +++ b/crates/docs_preprocessor/src/main.rs @@ -22,16 +22,13 @@ static KEYMAP_WINDOWS: LazyLock = LazyLock::new(|| { load_keymap("keymaps/default-windows.json").expect("Failed to load Windows keymap") }); -static ALL_ACTIONS: LazyLock> = LazyLock::new(dump_all_gpui_actions); +static ALL_ACTIONS: LazyLock> = LazyLock::new(load_all_actions); const FRONT_MATTER_COMMENT: &str = ""; fn main() -> Result<()> { 
zlog::init(); zlog::init_output_stderr(); - // call a zed:: function so everything in `zed` crate is linked and - // all actions in the actual app are registered - zed::stdout_is_a_pty(); let args = std::env::args().skip(1).collect::>(); match args.get(0).map(String::as_str) { @@ -72,8 +69,8 @@ enum PreprocessorError { impl PreprocessorError { fn new_for_not_found_action(action_name: String) -> Self { for action in &*ALL_ACTIONS { - for alias in action.deprecated_aliases { - if alias == &action_name { + for alias in &action.deprecated_aliases { + if alias == action_name.as_str() { return PreprocessorError::DeprecatedActionUsed { used: action_name, should_be: action.name.to_string(), @@ -214,7 +211,7 @@ fn template_and_validate_keybindings(book: &mut Book, errors: &mut HashSet{}", name); }; format!("{}", &action.human_name) }) @@ -257,11 +256,19 @@ fn template_and_validate_actions(book: &mut Book, errors: &mut HashSet Option<&ActionDef> { ALL_ACTIONS - .binary_search_by(|action| action.name.cmp(name)) + .binary_search_by(|action| action.name.as_str().cmp(name)) .ok() .map(|index| &ALL_ACTIONS[index]) } +fn actions_available() -> bool { + !ALL_ACTIONS.is_empty() +} + +fn is_missing_action(name: &str) -> bool { + actions_available() && find_action_by_name(name).is_none() +} + fn find_binding(os: &str, action: &str) -> Option { let keymap = match os { "macos" => &KEYMAP_MACOS, @@ -384,18 +391,13 @@ fn template_and_validate_json_snippets(book: &mut Book, errors: &mut HashSet
, _>>()
-                            .context("Failed to parse keystroke")?;
+                    for (_keystrokes, action) in section.bindings() {
                         if let Some((action_name, _)) = settings::KeymapFile::parse_action(action)
                             .map_err(|err| anyhow::format_err!(err))
                             .context("Failed to parse action")?
                         {
                             anyhow::ensure!(
-                                find_action_by_name(action_name).is_some(),
+                                !is_missing_action(action_name),
                                 "Action not found: {}",
                                 action_name
                             );
@@ -491,27 +493,35 @@ where
     });
 }
 
-#[derive(Debug, serde::Serialize)]
+#[derive(Debug, serde::Serialize, serde::Deserialize)]
 struct ActionDef {
-    name: &'static str,
+    name: String,
     human_name: String,
-    deprecated_aliases: &'static [&'static str],
-    docs: Option<&'static str>,
+    deprecated_aliases: Vec,
+    #[serde(rename = "documentation")]
+    docs: Option,
 }
 
-fn dump_all_gpui_actions() -> Vec {
-    let mut actions = gpui::generate_list_of_all_registered_actions()
-        .map(|action| ActionDef {
-            name: action.name,
-            human_name: command_palette::humanize_action_name(action.name),
-            deprecated_aliases: action.deprecated_aliases,
-            docs: action.documentation,
-        })
-        .collect::>();
-
-    actions.sort_by_key(|a| a.name);
-
-    actions
+fn load_all_actions() -> Vec {
+    let asset_path = concat!(env!("CARGO_MANIFEST_DIR"), "/actions.json");
+    match std::fs::read_to_string(asset_path) {
+        Ok(content) => {
+            let mut actions: Vec =
+                serde_json::from_str(&content).expect("Failed to parse actions.json");
+            actions.sort_by(|a, b| a.name.cmp(&b.name));
+            actions
+        }
+        Err(err) => {
+            if std::env::var("CI").is_ok() {
+                panic!("actions.json not found at {}: {}", asset_path, err);
+            }
+            eprintln!(
+                "Warning: actions.json not found, action validation will be skipped: {}",
+                err
+            );
+            Vec::new()
+        }
+    }
 }
 
 fn handle_postprocessing() -> Result<()> {
@@ -647,7 +657,7 @@ fn generate_big_table_of_actions() -> String {
     let mut output = String::new();
 
     let mut actions_sorted = actions.iter().collect::>();
-    actions_sorted.sort_by_key(|a| a.name);
+    actions_sorted.sort_by_key(|a| a.name.as_str());
 
     // Start the definition list with custom styling for better spacing
     output.push_str("
\n"); @@ -664,7 +674,7 @@ fn generate_big_table_of_actions() -> String { output.push_str("
\n"); // Add the description, escaping HTML if needed - if let Some(description) = action.docs { + if let Some(description) = action.docs.as_ref() { output.push_str( &description .replace("&", "&") @@ -674,7 +684,7 @@ fn generate_big_table_of_actions() -> String { output.push_str("
\n"); } output.push_str("Keymap Name: "); - output.push_str(action.name); + output.push_str(&action.name); output.push_str("
\n"); if !action.deprecated_aliases.is_empty() { output.push_str("Deprecated Alias(es): "); diff --git a/crates/title_bar/src/application_menu.rs b/crates/title_bar/src/application_menu.rs index 817b73c45ecd2df4a76e9a67f425b2b459c0c026..579e4dadbd590981a4aee15019bbe73e2bb28d5c 100644 --- a/crates/title_bar/src/application_menu.rs +++ b/crates/title_bar/src/application_menu.rs @@ -1,12 +1,7 @@ -use gpui::{Entity, OwnedMenu, OwnedMenuItem}; +use gpui::{Action, Entity, OwnedMenu, OwnedMenuItem, actions}; use settings::Settings; -#[cfg(not(target_os = "macos"))] -use gpui::{Action, actions}; - -#[cfg(not(target_os = "macos"))] use schemars::JsonSchema; -#[cfg(not(target_os = "macos"))] use serde::Deserialize; use smallvec::SmallVec; @@ -14,18 +9,23 @@ use ui::{ContextMenu, PopoverMenu, PopoverMenuHandle, Tooltip, prelude::*}; use crate::title_bar_settings::TitleBarSettings; -#[cfg(not(target_os = "macos"))] actions!( app_menu, [ - /// Navigates to the menu item on the right. + /// Activates the menu on the right in the client-side application menu. + /// + /// Does not apply to platform menu bars (e.g. on macOS). ActivateMenuRight, - /// Navigates to the menu item on the left. + /// Activates the menu on the left in the client-side application menu. + /// + /// Does not apply to platform menu bars (e.g. on macOS). ActivateMenuLeft ] ); -#[cfg(not(target_os = "macos"))] +/// Opens the named menu in the client-side application menu. +/// +/// Does not apply to platform menu bars (e.g. on macOS). 
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Default, Action)] #[action(namespace = app_menu)] pub struct OpenApplicationMenu(String); diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index fd160759f4440e2736d57cea62abb6bdb138ae72..80eca20e00309bb8d22552287a1c39cb9891307d 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -15,10 +15,6 @@ tracy = ["ztracing/tracy"] [[bin]] name = "zed" -path = "src/zed-main.rs" - -[lib] -name = "zed" path = "src/main.rs" [dependencies] diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 7008e491c5e2ade35fa96cafbd9d8969c008fa96..312d16f0cd674a6dda81176863a859f3b763c2c0 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -1,3 +1,6 @@ +// Disable command line from opening on release mode +#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] + mod reliability; mod zed; @@ -163,9 +166,9 @@ fn fail_to_open_window(e: anyhow::Error, _cx: &mut App) { .detach(); } } -pub static STARTUP_TIME: OnceLock = OnceLock::new(); +static STARTUP_TIME: OnceLock = OnceLock::new(); -pub fn main() { +fn main() { STARTUP_TIME.get_or_init(|| Instant::now()); #[cfg(unix)] @@ -1301,7 +1304,7 @@ fn init_paths() -> HashMap> { }) } -pub fn stdout_is_a_pty() -> bool { +fn stdout_is_a_pty() -> bool { std::env::var(FORCE_CLI_MODE_ENV_VAR_NAME).ok().is_none() && io::stdout().is_terminal() } @@ -1547,14 +1550,14 @@ fn dump_all_gpui_actions() { struct ActionDef { name: &'static str, human_name: String, - aliases: &'static [&'static str], + deprecated_aliases: &'static [&'static str], documentation: Option<&'static str>, } let mut actions = gpui::generate_list_of_all_registered_actions() .map(|action| ActionDef { name: action.name, human_name: command_palette::humanize_action_name(action.name), - aliases: action.deprecated_aliases, + deprecated_aliases: action.deprecated_aliases, documentation: action.documentation, }) .collect::>(); diff --git a/crates/zed/src/zed-main.rs 
b/crates/zed/src/zed-main.rs deleted file mode 100644 index 6c49c197dda01e97828c3662aa09ecf57804dfbc..0000000000000000000000000000000000000000 --- a/crates/zed/src/zed-main.rs +++ /dev/null @@ -1,8 +0,0 @@ -// Disable command line from opening on release mode -#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] - -pub fn main() { - // separated out so that the file containing the main function can be imported by other crates, - // while having all gpui resources that are registered in main (primarily actions) initialized - zed::main(); -} diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index d088df00839814e32a9c246a3486ac5ad5ca4b9e..3441cb88d96b06dfdbb65a58553d2c58f435d157 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -4780,7 +4780,6 @@ mod tests { "activity_indicator", "agent", "agents", - #[cfg(not(target_os = "macos"))] "app_menu", "assistant", "assistant2", diff --git a/script/generate-action-metadata b/script/generate-action-metadata new file mode 100755 index 0000000000000000000000000000000000000000..146b1f0d78ef92c47322a70dccf0e9e1f3f530d3 --- /dev/null +++ b/script/generate-action-metadata @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +set -euo pipefail + +cd "$(dirname "$0")/.." + +echo "Generating action metadata..." 
+cargo run -p zed -- --dump-all-actions > crates/docs_preprocessor/actions.json + +echo "Generated crates/docs_preprocessor/actions.json with $(grep -c '"name":' crates/docs_preprocessor/actions.json) actions" diff --git a/tooling/xtask/src/tasks/workflows/run_tests.rs b/tooling/xtask/src/tasks/workflows/run_tests.rs index f726f48740eb7819fbbd3fed369e5e4e89c526c9..aceb575b647e7ea0b2d8a74da9fbc153767d149d 100644 --- a/tooling/xtask/src/tasks/workflows/run_tests.rs +++ b/tooling/xtask/src/tasks/workflows/run_tests.rs @@ -448,6 +448,7 @@ fn check_docs() -> NamedJob { lychee_link_check("./docs/src/**/*"), // check markdown links ) .map(steps::install_linux_dependencies) + .add_step(steps::script("./script/generate-action-metadata")) .add_step(install_mdbook()) .add_step(build_docs()) .add_step( From 63c4406137916175ab23a2e65978f3bf508d122c Mon Sep 17 00:00:00 2001 From: Alvaro Parker <64918109+AlvaroParker@users.noreply.github.com> Date: Fri, 19 Dec 2025 00:21:46 -0300 Subject: [PATCH 05/46] git: Add git clone open listener (#41669) --- crates/git_ui/src/clone.rs | 155 ++++++++++++++++++++++++++++ crates/git_ui/src/git_panel.rs | 92 ++--------------- crates/git_ui/src/git_ui.rs | 1 + crates/zed/src/main.rs | 39 +++++++ crates/zed/src/zed/open_listener.rs | 102 ++++++++++++++++++ 5 files changed, 304 insertions(+), 85 deletions(-) create mode 100644 crates/git_ui/src/clone.rs diff --git a/crates/git_ui/src/clone.rs b/crates/git_ui/src/clone.rs new file mode 100644 index 0000000000000000000000000000000000000000..a6767d33304d3f20b7a5e78340f62c89ebe3ae58 --- /dev/null +++ b/crates/git_ui/src/clone.rs @@ -0,0 +1,155 @@ +use gpui::{App, Context, WeakEntity, Window}; +use notifications::status_toast::{StatusToast, ToastIcon}; +use std::sync::Arc; +use ui::{Color, IconName, SharedString}; +use util::ResultExt; +use workspace::{self, Workspace}; + +pub fn clone_and_open( + repo_url: SharedString, + workspace: WeakEntity, + window: &mut Window, + cx: &mut App, + on_success: Arc< 
+ dyn Fn(&mut Workspace, &mut Window, &mut Context) + Send + Sync + 'static, + >, +) { + let destination_prompt = cx.prompt_for_paths(gpui::PathPromptOptions { + files: false, + directories: true, + multiple: false, + prompt: Some("Select as Repository Destination".into()), + }); + + window + .spawn(cx, async move |cx| { + let mut paths = destination_prompt.await.ok()?.ok()??; + let mut destination_dir = paths.pop()?; + + let repo_name = repo_url + .split('/') + .next_back() + .map(|name| name.strip_suffix(".git").unwrap_or(name)) + .unwrap_or("repository") + .to_owned(); + + let clone_task = workspace + .update(cx, |workspace, cx| { + let fs = workspace.app_state().fs.clone(); + let destination_dir = destination_dir.clone(); + let repo_url = repo_url.clone(); + cx.spawn(async move |_workspace, _cx| { + fs.git_clone(&repo_url, destination_dir.as_path()).await + }) + }) + .ok()?; + + if let Err(error) = clone_task.await { + workspace + .update(cx, |workspace, cx| { + let toast = StatusToast::new(error.to_string(), cx, |this, _| { + this.icon(ToastIcon::new(IconName::XCircle).color(Color::Error)) + .dismiss_button(true) + }); + workspace.toggle_status_toast(toast, cx); + }) + .log_err(); + return None; + } + + let has_worktrees = workspace + .read_with(cx, |workspace, cx| { + workspace.project().read(cx).worktrees(cx).next().is_some() + }) + .ok()?; + + let prompt_answer = if has_worktrees { + cx.update(|window, cx| { + window.prompt( + gpui::PromptLevel::Info, + &format!("Git Clone: {}", repo_name), + None, + &["Add repo to project", "Open repo in new project"], + cx, + ) + }) + .ok()? + .await + .ok()? 
+ } else { + // Don't ask if project is empty + 0 + }; + + destination_dir.push(&repo_name); + + match prompt_answer { + 0 => { + workspace + .update_in(cx, |workspace, window, cx| { + let create_task = workspace.project().update(cx, |project, cx| { + project.create_worktree(destination_dir.as_path(), true, cx) + }); + + let workspace_weak = cx.weak_entity(); + let on_success = on_success.clone(); + cx.spawn_in(window, async move |_window, cx| { + if create_task.await.log_err().is_some() { + workspace_weak + .update_in(cx, |workspace, window, cx| { + (on_success)(workspace, window, cx); + }) + .ok(); + } + }) + .detach(); + }) + .ok()?; + } + 1 => { + workspace + .update(cx, move |workspace, cx| { + let app_state = workspace.app_state().clone(); + let destination_path = destination_dir.clone(); + let on_success = on_success.clone(); + + workspace::open_new( + Default::default(), + app_state, + cx, + move |workspace, window, cx| { + cx.activate(true); + + let create_task = + workspace.project().update(cx, |project, cx| { + project.create_worktree( + destination_path.as_path(), + true, + cx, + ) + }); + + let workspace_weak = cx.weak_entity(); + cx.spawn_in(window, async move |_window, cx| { + if create_task.await.log_err().is_some() { + workspace_weak + .update_in(cx, |workspace, window, cx| { + (on_success)(workspace, window, cx); + }) + .ok(); + } + }) + .detach(); + }, + ) + .detach(); + }) + .ok(); + } + _ => {} + } + + Some(()) + }) + .detach(); +} diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 0f967e68d1fab829fb37b626c23ecfebe69fb5dd..532f9a099a823796706be48ed14cc7da820c5d8b 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -2849,93 +2849,15 @@ impl GitPanel { } pub(crate) fn git_clone(&mut self, repo: String, window: &mut Window, cx: &mut Context) { - let path = cx.prompt_for_paths(gpui::PathPromptOptions { - files: false, - directories: true, - multiple: false, - prompt: Some("Select as 
Repository Destination".into()), - }); - let workspace = self.workspace.clone(); - cx.spawn_in(window, async move |this, cx| { - let mut paths = path.await.ok()?.ok()??; - let mut path = paths.pop()?; - let repo_name = repo.split("/").last()?.strip_suffix(".git")?.to_owned(); - - let fs = this.read_with(cx, |this, _| this.fs.clone()).ok()?; - - let prompt_answer = match fs.git_clone(&repo, path.as_path()).await { - Ok(_) => cx.update(|window, cx| { - window.prompt( - PromptLevel::Info, - &format!("Git Clone: {}", repo_name), - None, - &["Add repo to project", "Open repo in new project"], - cx, - ) - }), - Err(e) => { - this.update(cx, |this: &mut GitPanel, cx| { - let toast = StatusToast::new(e.to_string(), cx, |this, _| { - this.icon(ToastIcon::new(IconName::XCircle).color(Color::Error)) - .dismiss_button(true) - }); - - this.workspace - .update(cx, |workspace, cx| { - workspace.toggle_status_toast(toast, cx); - }) - .ok(); - }) - .ok()?; - - return None; - } - } - .ok()?; - - path.push(repo_name); - match prompt_answer.await.ok()? 
{ - 0 => { - workspace - .update(cx, |workspace, cx| { - workspace - .project() - .update(cx, |project, cx| { - project.create_worktree(path.as_path(), true, cx) - }) - .detach(); - }) - .ok(); - } - 1 => { - workspace - .update(cx, move |workspace, cx| { - workspace::open_new( - Default::default(), - workspace.app_state().clone(), - cx, - move |workspace, _, cx| { - cx.activate(true); - workspace - .project() - .update(cx, |project, cx| { - project.create_worktree(&path, true, cx) - }) - .detach(); - }, - ) - .detach(); - }) - .ok(); - } - _ => {} - } - - Some(()) - }) - .detach(); + crate::clone::clone_and_open( + repo.into(), + workspace, + window, + cx, + Arc::new(|_workspace: &mut workspace::Workspace, _window, _cx| {}), + ); } pub(crate) fn git_init(&mut self, window: &mut Window, cx: &mut Context) { diff --git a/crates/git_ui/src/git_ui.rs b/crates/git_ui/src/git_ui.rs index 5f50e4ef8029d8f57cd159bc7da68b668b628f48..053c41bf10c5d97f9f5326fd17d6b5bf91297a03 100644 --- a/crates/git_ui/src/git_ui.rs +++ b/crates/git_ui/src/git_ui.rs @@ -10,6 +10,7 @@ use ui::{ }; mod blame_ui; +pub mod clone; use git::{ repository::{Branch, Upstream, UpstreamTracking, UpstreamTrackingStatus}, diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 312d16f0cd674a6dda81176863a859f3b763c2c0..03e02bb0107d736c07eb3fc9626856943f8d80a6 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -18,11 +18,13 @@ use extension::ExtensionHostProxy; use fs::{Fs, RealFs}; use futures::{StreamExt, channel::oneshot, future}; use git::GitHostingProviderRegistry; +use git_ui::clone::clone_and_open; use gpui::{App, AppContext, Application, AsyncApp, Focusable as _, QuitMode, UpdateGlobal as _}; use gpui_tokio::Tokio; use language::LanguageRegistry; use onboarding::{FIRST_OPEN, show_onboarding_view}; +use project_panel::ProjectPanel; use prompt_store::PromptBuilder; use remote::RemoteConnectionOptions; use reqwest_client::ReqwestClient; @@ -36,10 +38,12 @@ use 
release_channel::{AppCommitSha, AppVersion, ReleaseChannel}; use session::{AppSession, Session}; use settings::{BaseKeymap, Settings, SettingsStore, watch_config_file}; use std::{ + cell::RefCell, env, io::{self, IsTerminal}, path::{Path, PathBuf}, process, + rc::Rc, sync::{Arc, OnceLock}, time::Instant, }; @@ -896,6 +900,41 @@ fn handle_open_request(request: OpenRequest, app_state: Arc, cx: &mut }) .detach_and_log_err(cx); } + OpenRequestKind::GitClone { repo_url } => { + workspace::with_active_or_new_workspace(cx, |_workspace, window, cx| { + if window.is_window_active() { + clone_and_open( + repo_url, + cx.weak_entity(), + window, + cx, + Arc::new(|workspace: &mut workspace::Workspace, window, cx| { + workspace.focus_panel::(window, cx); + }), + ); + return; + } + + let subscription = Rc::new(RefCell::new(None)); + subscription.replace(Some(cx.observe_in(&cx.entity(), window, { + let subscription = subscription.clone(); + let repo_url = repo_url; + move |_, workspace_entity, window, cx| { + if window.is_window_active() && subscription.take().is_some() { + clone_and_open( + repo_url.clone(), + workspace_entity.downgrade(), + window, + cx, + Arc::new(|workspace: &mut workspace::Workspace, window, cx| { + workspace.focus_panel::(window, cx); + }), + ); + } + } + }))); + }); + } OpenRequestKind::GitCommit { sha } => { cx.spawn(async move |cx| { let paths_with_position = diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index d61de0a291f3d3e7869225c0e07424cc3523f69b..842f98520133c70f711d84d3f490bec1ec59e16f 100644 --- a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -25,6 +25,7 @@ use std::path::{Path, PathBuf}; use std::sync::Arc; use std::thread; use std::time::Duration; +use ui::SharedString; use util::ResultExt; use util::paths::PathWithPosition; use workspace::PathList; @@ -58,6 +59,9 @@ pub enum OpenRequestKind { /// `None` opens settings without navigating to a specific path. 
setting_path: Option, }, + GitClone { + repo_url: SharedString, + }, GitCommit { sha: String, }, @@ -113,6 +117,8 @@ impl OpenRequest { this.kind = Some(OpenRequestKind::Setting { setting_path: Some(setting_path.to_string()), }); + } else if let Some(clone_path) = url.strip_prefix("zed://git/clone") { + this.parse_git_clone_url(clone_path)? } else if let Some(commit_path) = url.strip_prefix("zed://git/commit/") { this.parse_git_commit_url(commit_path)? } else if url.starts_with("ssh://") { @@ -143,6 +149,26 @@ impl OpenRequest { } } + fn parse_git_clone_url(&mut self, clone_path: &str) -> Result<()> { + // Format: /?repo= or ?repo= + let clone_path = clone_path.strip_prefix('/').unwrap_or(clone_path); + + let query = clone_path + .strip_prefix('?') + .context("invalid git clone url: missing query string")?; + + let repo_url = url::form_urlencoded::parse(query.as_bytes()) + .find_map(|(key, value)| (key == "repo").then_some(value)) + .filter(|s| !s.is_empty()) + .context("invalid git clone url: missing repo query parameter")? 
+ .to_string() + .into(); + + self.kind = Some(OpenRequestKind::GitClone { repo_url }); + + Ok(()) + } + fn parse_git_commit_url(&mut self, commit_path: &str) -> Result<()> { // Format: ?repo= let (sha, query) = commit_path @@ -1087,4 +1113,80 @@ mod tests { assert!(!errored_reuse); } + + #[gpui::test] + fn test_parse_git_clone_url(cx: &mut TestAppContext) { + let _app_state = init_test(cx); + + let request = cx.update(|cx| { + OpenRequest::parse( + RawOpenRequest { + urls: vec![ + "zed://git/clone/?repo=https://github.com/zed-industries/zed.git".into(), + ], + ..Default::default() + }, + cx, + ) + .unwrap() + }); + + match request.kind { + Some(OpenRequestKind::GitClone { repo_url }) => { + assert_eq!(repo_url, "https://github.com/zed-industries/zed.git"); + } + _ => panic!("Expected GitClone kind"), + } + } + + #[gpui::test] + fn test_parse_git_clone_url_without_slash(cx: &mut TestAppContext) { + let _app_state = init_test(cx); + + let request = cx.update(|cx| { + OpenRequest::parse( + RawOpenRequest { + urls: vec![ + "zed://git/clone?repo=https://github.com/zed-industries/zed.git".into(), + ], + ..Default::default() + }, + cx, + ) + .unwrap() + }); + + match request.kind { + Some(OpenRequestKind::GitClone { repo_url }) => { + assert_eq!(repo_url, "https://github.com/zed-industries/zed.git"); + } + _ => panic!("Expected GitClone kind"), + } + } + + #[gpui::test] + fn test_parse_git_clone_url_with_encoding(cx: &mut TestAppContext) { + let _app_state = init_test(cx); + + let request = cx.update(|cx| { + OpenRequest::parse( + RawOpenRequest { + urls: vec![ + "zed://git/clone/?repo=https%3A%2F%2Fgithub.com%2Fzed-industries%2Fzed.git" + .into(), + ], + ..Default::default() + }, + cx, + ) + .unwrap() + }); + + match request.kind { + Some(OpenRequestKind::GitClone { repo_url }) => { + assert_eq!(repo_url, "https://github.com/zed-industries/zed.git"); + } + _ => panic!("Expected GitClone kind"), + } + } } From 05ce34eea4687ce1006df499bbbdff9527a8e41e Mon Sep 17 00:00:00 
2001 From: Ben Kunkle Date: Thu, 18 Dec 2025 21:40:27 -0600 Subject: [PATCH 06/46] ci: Fix docs build post #45130 (#45330) Closes #ISSUE Release Notes: - N/A *or* Added/Fixed/Improved ... --- .github/actions/build_docs/action.yml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/.github/actions/build_docs/action.yml b/.github/actions/build_docs/action.yml index d2e62d5b22ee49c7dcb9b42085a648098fbdb6bb..1ff271f73ff6b800ec3a94615f31c35a7729bb47 100644 --- a/.github/actions/build_docs/action.yml +++ b/.github/actions/build_docs/action.yml @@ -19,6 +19,18 @@ runs: shell: bash -euxo pipefail {0} run: ./script/linux + - name: Install mold linker + shell: bash -euxo pipefail {0} + run: ./script/install-mold + + - name: Download WASI SDK + shell: bash -euxo pipefail {0} + run: ./script/download-wasi-sdk + + - name: Generate action metadata + shell: bash -euxo pipefail {0} + run: ./script/generate-action-metadata + - name: Check for broken links (in MD) uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332 # v2.4.1 with: From 0531035b86a7a035b1b661c62cc9436a2e1b5394 Mon Sep 17 00:00:00 2001 From: Ryan Steil <183886708+ryansteil@users.noreply.github.com> Date: Thu, 18 Dec 2025 22:47:40 -0600 Subject: [PATCH 07/46] docs: Fix link to Anthropic prompt engineering resource (#45329) --- docs/src/ai/rules.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/ai/rules.md b/docs/src/ai/rules.md index 4169920425e66eb41a895deb60da3a198d74df08..972bbc94e82937502739cf585cc8f60dbcda8808 100644 --- a/docs/src/ai/rules.md +++ b/docs/src/ai/rules.md @@ -46,7 +46,7 @@ Having a series of rules files specifically tailored to prompt engineering can a Here are a couple of helpful resources for writing better rules: -- [Anthropic: Prompt Engineering](https://docs.anthropic.com/en/docs/build-with-claude/prompt-engineering/overview) +- [Anthropic: Prompt 
Engineering](https://platform.claude.com/docs/en/build-with-claude/prompt-engineering/overview) - [OpenAI: Prompt Engineering](https://platform.openai.com/docs/guides/prompt-engineering) ### Editing the Default Rules {#default-rules} From e052127e1c31ce6b8286fc931aaee2217b62ada1 Mon Sep 17 00:00:00 2001 From: rabsef-bicrym <52549148+rabsef-bicrym@users.noreply.github.com> Date: Thu, 18 Dec 2025 22:33:59 -0800 Subject: [PATCH 08/46] terminal: Prevent scrollbar arithmetic underflow panic (#45282) ## Summary Fixes arithmetic underflow panics in `terminal_scrollbar.rs` by converting unsafe subtractions to `saturating_sub`. Closes #45281 ## Problem Two locations perform raw subtraction on `usize` values that panic when underflow occurs: - `offset()`: `state.total_lines - state.viewport_lines - state.display_offset` - `set_offset()`: `state.total_lines - state.viewport_lines` This happens when `total_lines < viewport_lines + display_offset`, which can occur during terminal creation, with small window sizes, or when display state becomes stale. ## Solution Replace the two unsafe subtractions with `saturating_sub`, which returns 0 on underflow instead of panicking. Also standardizes the existing `checked_sub().unwrap_or(0)` in `max_offset()` to `saturating_sub` for consistency across the file. 
## Changes - N/A --- crates/terminal_view/src/terminal_scrollbar.rs | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/crates/terminal_view/src/terminal_scrollbar.rs b/crates/terminal_view/src/terminal_scrollbar.rs index 871bb602306cccc92b8cffe62c4912c42b7a87e2..82ca0b4097dad1be899879b0241aed50d8e60bfa 100644 --- a/crates/terminal_view/src/terminal_scrollbar.rs +++ b/crates/terminal_view/src/terminal_scrollbar.rs @@ -50,28 +50,24 @@ impl ScrollableHandle for TerminalScrollHandle { let state = self.state.borrow(); size( Pixels::ZERO, - state - .total_lines - .checked_sub(state.viewport_lines) - .unwrap_or(0) as f32 - * state.line_height, + state.total_lines.saturating_sub(state.viewport_lines) as f32 * state.line_height, ) } fn offset(&self) -> Point { let state = self.state.borrow(); - let scroll_offset = state.total_lines - state.viewport_lines - state.display_offset; - Point::new( - Pixels::ZERO, - -(scroll_offset as f32 * self.state.borrow().line_height), - ) + let scroll_offset = state + .total_lines + .saturating_sub(state.viewport_lines) + .saturating_sub(state.display_offset); + Point::new(Pixels::ZERO, -(scroll_offset as f32 * state.line_height)) } fn set_offset(&self, point: Point) { let state = self.state.borrow(); let offset_delta = (point.y / state.line_height).round() as i32; - let max_offset = state.total_lines - state.viewport_lines; + let max_offset = state.total_lines.saturating_sub(state.viewport_lines); let display_offset = (max_offset as i32 + offset_delta).clamp(0, max_offset as i32); self.future_display_offset From e44529ed7ba3da10f796a5d37d494a5ef883e17a Mon Sep 17 00:00:00 2001 From: Mustaque Ahmed Date: Fri, 19 Dec 2025 13:54:30 +0530 Subject: [PATCH 09/46] Hide inline overlays when context menu is open (#45266) Closes #23367 **Summary** - Prevents inline diagnostics, code actions, blame annotations, and hover popovers from overlapping with the right-click context menu by checking for `mouse_context_menu` 
presence before rendering these UI elements. PS: Same behaviour is present in other editors like VS Code. **Screen recording** https://github.com/user-attachments/assets/8290412b-0f86-4985-8c70-13440686e530 Release Notes: - N/A *or* Added/Fixed/Improved ... --- crates/editor/src/element.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index f7b6aa949e74dca9bee73419fa2b87899f9986fd..4c3b44335bcad10be4303d545a8d2ad505938098 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -5417,6 +5417,12 @@ impl EditorElement { .max(MIN_POPOVER_LINE_HEIGHT * line_height), // Apply minimum height of 4 lines ); + // Don't show hover popovers when context menu is open to avoid overlap + let has_context_menu = self.editor.read(cx).mouse_context_menu.is_some(); + if has_context_menu { + return; + } + let hover_popovers = self.editor.update(cx, |editor, cx| { editor.hover_state.render( snapshot, From 596826f74113d0db2d2f6794ca1c7d2275da5b96 Mon Sep 17 00:00:00 2001 From: Mayank Verma Date: Fri, 19 Dec 2025 14:13:35 +0530 Subject: [PATCH 10/46] editor: Strip trailing newlines from completion documentation (#45342) Closes #45337 Release Notes: - Fixed broken completion menu layout caused by trailing newlines in ty documentation
Before After
before after
--- crates/editor/src/code_context_menus.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/editor/src/code_context_menus.rs b/crates/editor/src/code_context_menus.rs index 2336a38fa7767fa6184608066f69d3b0520234ff..96739defc506414f573e2454dc31f9c32d8e4adf 100644 --- a/crates/editor/src/code_context_menus.rs +++ b/crates/editor/src/code_context_menus.rs @@ -893,7 +893,7 @@ impl CompletionsMenu { None } else { Some( - Label::new(text.clone()) + Label::new(text.trim().to_string()) .ml_4() .size(LabelSize::Small) .color(Color::Muted), From 6d776c3157a33b947406215e298997b7ea159a1a Mon Sep 17 00:00:00 2001 From: prayansh_chhablani <135210710+prayanshchh@users.noreply.github.com> Date: Fri, 19 Dec 2025 14:41:36 +0530 Subject: [PATCH 11/46] project: Sanitize single-line completions from trailing newlines (#44965) Closes #43991 trim documentation string to prevent completion overlap previous [Screencast from 2025-12-16 14-55-58.webm](https://github.com/user-attachments/assets/d7674d82-63b0-4a85-a90f-b5c5091e4a82) after change [Screencast from 2025-12-16 14-50-05.webm](https://github.com/user-attachments/assets/109c22b5-3fff-49c8-a2ec-b1af467d6320) Release Notes: - Fixed an issue where completions in the completion menu would span multiple lines. 
--- crates/project/src/lsp_store.rs | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 7e8624daad628fd653326647537eb51dad208a02..5841be02b2db80b2fa15667833b8a3d3eec4ec11 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -13776,7 +13776,7 @@ impl From for CompletionDocumentation { match docs { lsp::Documentation::String(text) => { if text.lines().count() <= 1 { - CompletionDocumentation::SingleLine(text.into()) + CompletionDocumentation::SingleLine(text.trim().to_string().into()) } else { CompletionDocumentation::MultiLinePlainText(text.into()) } @@ -14368,4 +14368,22 @@ mod tests { ) ); } + + #[test] + fn test_trailing_newline_in_completion_documentation() { + let doc = lsp::Documentation::String( + "Inappropriate argument value (of correct type).\n".to_string(), + ); + let completion_doc: CompletionDocumentation = doc.into(); + assert!( + matches!(completion_doc, CompletionDocumentation::SingleLine(s) if s == "Inappropriate argument value (of correct type).") + ); + + let doc = lsp::Documentation::String(" some value \n".to_string()); + let completion_doc: CompletionDocumentation = doc.into(); + assert!(matches!( + completion_doc, + CompletionDocumentation::SingleLine(s) if s == "some value" + )); + } } From f2495a6f98524f589b384db91d938c96c3c7819e Mon Sep 17 00:00:00 2001 From: Korbin de Man <113640462+korbindeman@users.noreply.github.com> Date: Fri, 19 Dec 2025 11:12:01 +0100 Subject: [PATCH 12/46] Add Restore File action in project_panel for git modified files (#42490) Co-authored-by: cameron --- Cargo.lock | 1 + crates/project_panel/Cargo.toml | 1 + crates/project_panel/src/project_panel.rs | 115 ++++++++++++++++++++++ 3 files changed, 117 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index 3f7077e721e934cd6cb05af0cdaefef75602b429..4beb6c11f427fb86b5586c2833c50b7cd5b9dd01 100644 --- a/Cargo.lock +++ b/Cargo.lock 
@@ -12570,6 +12570,7 @@ dependencies = [ "gpui", "language", "menu", + "notifications", "pretty_assertions", "project", "rayon", diff --git a/crates/project_panel/Cargo.toml b/crates/project_panel/Cargo.toml index 2c47efd0b0e2490bbfd6125069fa5ca1438ffb51..0385c3789e923da95a1eca7a5a469bad00020639 100644 --- a/crates/project_panel/Cargo.toml +++ b/crates/project_panel/Cargo.toml @@ -45,6 +45,7 @@ workspace.workspace = true language.workspace = true zed_actions.workspace = true telemetry.workspace = true +notifications.workspace = true [dev-dependencies] client = { workspace = true, features = ["test-support"] } diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 00aba96ef428eea643e8868e513ab9c3aaa1b910..43f63d90789a65bce54814f3adbc6f1d53235568 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -29,6 +29,7 @@ use gpui::{ }; use language::DiagnosticSeverity; use menu::{Confirm, SelectFirst, SelectLast, SelectNext, SelectPrevious}; +use notifications::status_toast::{StatusToast, ToastIcon}; use project::{ Entry, EntryKind, Fs, GitEntry, GitEntryRef, GitTraversal, Project, ProjectEntryId, ProjectPath, Worktree, WorktreeId, @@ -1140,6 +1141,12 @@ impl ProjectPanel { "Copy Relative Path", Box::new(zed_actions::workspace::CopyRelativePath), ) + .when(!is_dir && self.has_git_changes(entry_id), |menu| { + menu.separator().action( + "Restore File", + Box::new(git::RestoreFile { skip_prompt: false }), + ) + }) .when(has_git_repo, |menu| { menu.separator() .action("View File History", Box::new(git::FileHistory)) @@ -1180,6 +1187,19 @@ impl ProjectPanel { cx.notify(); } + fn has_git_changes(&self, entry_id: ProjectEntryId) -> bool { + for visible in &self.state.visible_entries { + if let Some(git_entry) = visible.entries.iter().find(|e| e.id == entry_id) { + let total_modified = + git_entry.git_summary.index.modified + git_entry.git_summary.worktree.modified; + let 
total_deleted = + git_entry.git_summary.index.deleted + git_entry.git_summary.worktree.deleted; + return total_modified > 0 || total_deleted > 0; + } + } + false + } + fn is_unfoldable(&self, entry: &Entry, worktree: &Worktree) -> bool { if !entry.is_dir() || self.state.unfolded_dir_ids.contains(&entry.id) { return false; @@ -2041,6 +2061,100 @@ impl ProjectPanel { self.remove(false, action.skip_prompt, window, cx); } + fn restore_file( + &mut self, + action: &git::RestoreFile, + window: &mut Window, + cx: &mut Context, + ) { + maybe!({ + let selection = self.state.selection?; + let project = self.project.read(cx); + + let (_worktree, entry) = self.selected_sub_entry(cx)?; + if entry.is_dir() { + return None; + } + + let project_path = project.path_for_entry(selection.entry_id, cx)?; + + let git_store = project.git_store(); + let (repository, repo_path) = git_store + .read(cx) + .repository_and_path_for_project_path(&project_path, cx)?; + + let snapshot = repository.read(cx).snapshot(); + let status = snapshot.status_for_path(&repo_path)?; + if !status.status.is_modified() && !status.status.is_deleted() { + return None; + } + + let file_name = entry.path.file_name()?.to_string(); + + let answer = if !action.skip_prompt { + let prompt = format!("Discard changes to {}?", file_name); + Some(window.prompt(PromptLevel::Info, &prompt, None, &["Restore", "Cancel"], cx)) + } else { + None + }; + + cx.spawn_in(window, async move |panel, cx| { + if let Some(answer) = answer + && answer.await != Ok(0) + { + return anyhow::Ok(()); + } + + let task = panel.update(cx, |_panel, cx| { + repository.update(cx, |repo, cx| { + repo.checkout_files("HEAD", vec![repo_path], cx) + }) + })?; + + if let Err(e) = task.await { + panel + .update(cx, |panel, cx| { + let message = format!("Failed to restore {}: {}", file_name, e); + let toast = StatusToast::new(message, cx, |this, _| { + this.icon(ToastIcon::new(IconName::XCircle).color(Color::Error)) + .dismiss_button(true) + }); + panel + 
.workspace + .update(cx, |workspace, cx| { + workspace.toggle_status_toast(toast, cx); + }) + .ok(); + }) + .ok(); + } + + panel + .update(cx, |panel, cx| { + panel.project.update(cx, |project, cx| { + if let Some(buffer_id) = project + .buffer_store() + .read(cx) + .buffer_id_for_project_path(&project_path) + { + if let Some(buffer) = project.buffer_for_id(*buffer_id, cx) { + buffer.update(cx, |buffer, cx| { + let _ = buffer.reload(cx); + }); + } + } + }) + }) + .ok(); + + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + + Some(()) + }); + } + fn remove( &mut self, trash: bool, @@ -5631,6 +5745,7 @@ impl Render for ProjectPanel { .on_action(cx.listener(Self::copy)) .on_action(cx.listener(Self::paste)) .on_action(cx.listener(Self::duplicate)) + .on_action(cx.listener(Self::restore_file)) .when(!project.is_remote(), |el| { el.on_action(cx.listener(Self::trash)) }) From 7ee56e1a18c8bc557aa7f050a516419861f3a32e Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Fri, 19 Dec 2025 11:18:36 +0100 Subject: [PATCH 13/46] chore: Add worktree_benchmarks to cargo workspace (#45344) Idk why it was missing, but Release Notes: - N/A --- Cargo.lock | 10 ++++++++++ Cargo.toml | 1 + crates/worktree_benchmarks/src/main.rs | 4 ++-- 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4beb6c11f427fb86b5586c2833c50b7cd5b9dd01..f9acd6989be8734b6c5b528435fccea62d10f027 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -20265,6 +20265,16 @@ dependencies = [ "zlog", ] +[[package]] +name = "worktree_benchmarks" +version = "0.1.0" +dependencies = [ + "fs", + "gpui", + "settings", + "worktree", +] + [[package]] name = "writeable" version = "0.6.1" diff --git a/Cargo.toml b/Cargo.toml index 825dc79e08978d8ccd03cea93883f698986ee12f..b507e8824484ea670619b5225fef9cfd41c81d4c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -198,6 +198,7 @@ members = [ "crates/web_search_providers", "crates/workspace", "crates/worktree", + 
"crates/worktree_benchmarks", "crates/x_ai", "crates/zed", "crates/zed_actions", diff --git a/crates/worktree_benchmarks/src/main.rs b/crates/worktree_benchmarks/src/main.rs index 00f268b75fc5f1e7d6033ec46f3718ea39cdccda..c1b76f9e3c483ec6c989cc255a11c5320d4b49f7 100644 --- a/crates/worktree_benchmarks/src/main.rs +++ b/crates/worktree_benchmarks/src/main.rs @@ -5,8 +5,7 @@ use std::{ use fs::RealFs; use gpui::Application; -use settings::Settings; -use worktree::{Worktree, WorktreeSettings}; +use worktree::Worktree; fn main() { let Some(worktree_root_path) = std::env::args().nth(1) else { @@ -27,6 +26,7 @@ fn main() { true, fs, Arc::new(AtomicUsize::new(0)), + true, cx, ) .await From 3104482c6c7c06c02be7d63927487c64695ea290 Mon Sep 17 00:00:00 2001 From: Angelo Verlain <37999241+vixalien@users.noreply.github.com> Date: Fri, 19 Dec 2025 12:34:40 +0200 Subject: [PATCH 14/46] languages: Detect `.bst` files as YAML (#45015) These files are used by the BuildStream build project: https://buildstream.build/index.html Release Notes: - Added recognition for .bst files as yaml. 
--- crates/languages/src/yaml/config.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/languages/src/yaml/config.toml b/crates/languages/src/yaml/config.toml index 51e8e1224a40904e0dfbb0204eb531e6b2664825..9a07a560b06766ac00dd73b6210023c4cddd491d 100644 --- a/crates/languages/src/yaml/config.toml +++ b/crates/languages/src/yaml/config.toml @@ -1,6 +1,6 @@ name = "YAML" grammar = "yaml" -path_suffixes = ["yml", "yaml", "pixi.lock", "clang-format", "clangd"] +path_suffixes = ["yml", "yaml", "pixi.lock", "clang-format", "clangd", "bst"] line_comments = ["# "] autoclose_before = ",]}" brackets = [ From 1ac170e663339e05457261959bcb0870961d127b Mon Sep 17 00:00:00 2001 From: Lena <241371603+zelenenka@users.noreply.github.com> Date: Fri, 19 Dec 2025 12:46:20 +0100 Subject: [PATCH 15/46] Upgrade stalebot and make testing it easier (#45350) - adjust wording for the upcoming simplified process - upgrade to the github action version that has a fix for configuring issue types the bot should look at - add two inputs for the manual runs of stalebot that help testing it in a safe and controlled manner Release Notes: - N/A --- .../community_close_stale_issues.yml | 23 ++++++++++++++----- 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/.github/workflows/community_close_stale_issues.yml b/.github/workflows/community_close_stale_issues.yml index 14c1a0a08338ee513a8269094b41ee404beef726..113e5ed131d1443c5481ff2966fac6a234561a20 100644 --- a/.github/workflows/community_close_stale_issues.yml +++ b/.github/workflows/community_close_stale_issues.yml @@ -3,27 +3,38 @@ on: schedule: - cron: "0 8 31 DEC *" workflow_dispatch: + inputs: + debug-only: + description: "Run in dry-run mode (no changes made)" + type: boolean + default: false + operations-per-run: + description: "Max number of issues to process (default: 1000)" + type: number + default: 1000 jobs: stale: if: github.repository_owner == 'zed-industries' runs-on: ubuntu-latest steps: - - uses: 
actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9 + - uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10 with: repo-token: ${{ secrets.GITHUB_TOKEN }} stale-issue-message: > - Hi there! 👋 - - We're working to clean up our issue tracker by closing older bugs that might not be relevant anymore. If you are able to reproduce this issue in the latest version of Zed, please let us know by commenting on this issue, and it will be kept open. If you can't reproduce it, feel free to close the issue yourself. Otherwise, it will close automatically in 14 days. + Hi there! + Zed development moves fast and a significant number of bugs become outdated. + If you can reproduce this bug on the latest stable Zed, please let us know by leaving a comment with the Zed version. + If the bug doesn't appear for you anymore, feel free to close the issue yourself; otherwise, the bot will close it in a couple of weeks. Thanks for your help! - close-issue-message: "This issue was closed due to inactivity. If you're still experiencing this problem, please open a new issue with a link to this issue." + close-issue-message: "This issue was closed due to inactivity. If you're still experiencing this problem, please leave a comment with your Zed version so that we can reopen the issue." days-before-stale: 60 days-before-close: 14 only-issue-types: "Bug,Crash" - operations-per-run: 1000 + operations-per-run: ${{ inputs.operations-per-run || 1000 }} ascending: true enable-statistics: true + debug-only: ${{ inputs.debug-only }} stale-issue-label: "stale" exempt-issue-labels: "never stale" From 95ae388c0ce8ae4a0b8d149ace3ecba1ea417491 Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Fri, 19 Dec 2025 09:19:04 -0300 Subject: [PATCH 16/46] Fix title bar spacing when building on the macOS Tahoe SDK (#45351) The size and spacing around the traffic light buttons changes after macOS SDK 26. 
Our official builds aren't using this SDK yet, but dev builds sometimes are, and the official builds will in the future.
Before After
CleanShot 2025-12-19 at 08 58 53@2x CleanShot 2025-12-19 at 08 57 02@2x
CleanShot 2025-12-19 at 08 59 40@2x CleanShot 2025-12-19 at 09 01 17@2x
Release Notes: - N/A --- crates/title_bar/build.rs | 28 +++++++++ .../title_bar/src/platforms/platform_mac.rs | 14 +++-- crates/title_bar/src/title_bar.rs | 58 ++++++++++--------- 3 files changed, 68 insertions(+), 32 deletions(-) create mode 100644 crates/title_bar/build.rs diff --git a/crates/title_bar/build.rs b/crates/title_bar/build.rs new file mode 100644 index 0000000000000000000000000000000000000000..ef70268ad3127baf113824348cb3e8685392a52b --- /dev/null +++ b/crates/title_bar/build.rs @@ -0,0 +1,28 @@ +#![allow(clippy::disallowed_methods, reason = "build scripts are exempt")] + +fn main() { + println!("cargo::rustc-check-cfg=cfg(macos_sdk_26)"); + + #[cfg(target_os = "macos")] + { + use std::process::Command; + + let output = Command::new("xcrun") + .args(["--sdk", "macosx", "--show-sdk-version"]) + .output() + .unwrap(); + + let sdk_version = String::from_utf8(output.stdout).unwrap(); + let major_version: Option = sdk_version + .trim() + .split('.') + .next() + .and_then(|v| v.parse().ok()); + + if let Some(major) = major_version + && major >= 26 + { + println!("cargo:rustc-cfg=macos_sdk_26"); + } + } +} diff --git a/crates/title_bar/src/platforms/platform_mac.rs b/crates/title_bar/src/platforms/platform_mac.rs index c7becde6c1af48bf37e06c0d2dcf991ad3c9f19f..5e8e4e5087054e59f66527915ae97e352a9ff525 100644 --- a/crates/title_bar/src/platforms/platform_mac.rs +++ b/crates/title_bar/src/platforms/platform_mac.rs @@ -1,6 +1,10 @@ -/// Use pixels here instead of a rem-based size because the macOS traffic -/// lights are a static size, and don't scale with the rest of the UI. -/// -/// Magic number: There is one extra pixel of padding on the left side due to -/// the 1px border around the window on macOS apps. +// Use pixels here instead of a rem-based size because the macOS traffic +// lights are a static size, and don't scale with the rest of the UI. 
+// +// Magic number: There is one extra pixel of padding on the left side due to +// the 1px border around the window on macOS apps. +#[cfg(macos_sdk_26)] +pub const TRAFFIC_LIGHT_PADDING: f32 = 78.; + +#[cfg(not(macos_sdk_26))] pub const TRAFFIC_LIGHT_PADDING: f32 = 71.; diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index 23572677919509d859a141cb09cce8f5822697ef..d7759b0df8019eed2ad59b73bcaffaa3ffcfb866 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -447,34 +447,38 @@ impl TitleBar { return None; } - Some( - Button::new("restricted_mode_trigger", "Restricted Mode") - .style(ButtonStyle::Tinted(TintColor::Warning)) - .label_size(LabelSize::Small) - .color(Color::Warning) - .icon(IconName::Warning) - .icon_color(Color::Warning) - .icon_size(IconSize::Small) - .icon_position(IconPosition::Start) - .tooltip(|_, cx| { - Tooltip::with_meta( - "You're in Restricted Mode", - Some(&ToggleWorktreeSecurity), - "Mark this project as trusted and unlock all features", - cx, - ) - }) - .on_click({ - cx.listener(move |this, _, window, cx| { - this.workspace - .update(cx, |workspace, cx| { - workspace.show_worktree_trust_security_modal(true, window, cx) - }) - .log_err(); - }) + let button = Button::new("restricted_mode_trigger", "Restricted Mode") + .style(ButtonStyle::Tinted(TintColor::Warning)) + .label_size(LabelSize::Small) + .color(Color::Warning) + .icon(IconName::Warning) + .icon_color(Color::Warning) + .icon_size(IconSize::Small) + .icon_position(IconPosition::Start) + .tooltip(|_, cx| { + Tooltip::with_meta( + "You're in Restricted Mode", + Some(&ToggleWorktreeSecurity), + "Mark this project as trusted and unlock all features", + cx, + ) + }) + .on_click({ + cx.listener(move |this, _, window, cx| { + this.workspace + .update(cx, |workspace, cx| { + workspace.show_worktree_trust_security_modal(true, window, cx) + }) + .log_err(); }) - .into_any_element(), - ) + }); + + if cfg!(macos_sdk_26) { 
+ // Make up for Tahoe's traffic light buttons having less spacing around them + Some(div().child(button).ml_0p5().into_any_element()) + } else { + Some(button.into_any_element()) + } } pub fn render_project_host(&self, cx: &mut Context) -> Option { From b9aef75f2df4ae8fa1707d348b4d588c3784526b Mon Sep 17 00:00:00 2001 From: Lena <241371603+zelenenka@users.noreply.github.com> Date: Fri, 19 Dec 2025 13:41:03 +0100 Subject: [PATCH 17/46] Turn on the fixed stalebot (#45355) It will run weekly and it promised not to touch issues of the wrong types anymore. Release Notes: - N/A --- .github/workflows/community_close_stale_issues.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/community_close_stale_issues.yml b/.github/workflows/community_close_stale_issues.yml index 113e5ed131d1443c5481ff2966fac6a234561a20..6347b713257f49c02f981774faa0d0359e05e4d3 100644 --- a/.github/workflows/community_close_stale_issues.yml +++ b/.github/workflows/community_close_stale_issues.yml @@ -1,7 +1,7 @@ name: "Close Stale Issues" on: schedule: - - cron: "0 8 31 DEC *" + - cron: "0 2 * * 5" workflow_dispatch: inputs: debug-only: From 1dc5de4592ebf0ec51ba77bbd41c495aee67184e Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Fri, 19 Dec 2025 13:54:30 +0100 Subject: [PATCH 18/46] workspace: Auto-switch git context when focus changed (#45354) Closes #44955 Release Notes: - Fixed workspace incorrectly automatically switching Git repository/branch context in multi-repository projects when repo/branch switched manually from the Git panel. 
--- crates/workspace/src/workspace.rs | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 0c5c9ffa5d0bfb1f70ce6a861b0209f321222fc0..139fa88359c574e1565ed778fdfbd5fa1c8f7944 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -4223,7 +4223,7 @@ impl Workspace { cx: &mut Context, ) { self.active_pane = pane.clone(); - self.active_item_path_changed(window, cx); + self.active_item_path_changed(true, window, cx); self.last_active_center_pane = Some(pane.downgrade()); } @@ -4280,7 +4280,7 @@ impl Workspace { } serialize_workspace = *focus_changed || pane != self.active_pane(); if pane == self.active_pane() { - self.active_item_path_changed(window, cx); + self.active_item_path_changed(*focus_changed, window, cx); self.update_active_view_for_followers(window, cx); } else if *local { self.set_active_pane(pane, window, cx); @@ -4296,7 +4296,7 @@ impl Workspace { } pane::Event::ChangeItemTitle => { if *pane == self.active_pane { - self.active_item_path_changed(window, cx); + self.active_item_path_changed(false, window, cx); } serialize_workspace = false; } @@ -4465,7 +4465,7 @@ impl Workspace { cx.notify(); } else { - self.active_item_path_changed(window, cx); + self.active_item_path_changed(true, window, cx); } cx.emit(Event::PaneRemoved); } @@ -4719,14 +4719,19 @@ impl Workspace { self.follower_states.contains_key(&id.into()) } - fn active_item_path_changed(&mut self, window: &mut Window, cx: &mut Context) { + fn active_item_path_changed( + &mut self, + focus_changed: bool, + window: &mut Window, + cx: &mut Context, + ) { cx.emit(Event::ActiveItemChanged); let active_entry = self.active_project_path(cx); self.project.update(cx, |project, cx| { project.set_active_path(active_entry.clone(), cx) }); - if let Some(project_path) = &active_entry { + if focus_changed && let Some(project_path) = &active_entry { let git_store_entity = 
self.project.read(cx).git_store().clone(); git_store_entity.update(cx, |git_store, cx| { git_store.set_active_repo_for_path(project_path, cx); From 69f6eeaa3ae821c43b7bf0dfc241c2792573b338 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Fri, 19 Dec 2025 14:06:15 +0100 Subject: [PATCH 19/46] toolchains: Fix persistence by not relying on unstable worktree id (#45357) Closes #42268 We've migrated user selections when a given workspace has a single worktree (as then we could determine what the target worktree is). Release Notes: - python: Fixed selected virtual environments not being persisted/deserialized correctly within long-running Zed sessions (where multiple different projects might've been opened). This is a breaking change for users of multi-worktree projects - your selected toolchain for those projects will be reset. Co-authored-by: Dino --- crates/language/src/toolchain.rs | 7 +- crates/project/src/project.rs | 7 +- crates/project/src/toolchain_store.rs | 26 +++++- crates/project/src/x.py | 1 + .../src/active_toolchain.rs | 9 ++- .../src/toolchain_selector.rs | 43 +++++++--- crates/workspace/src/persistence.rs | 81 ++++++++++++++----- crates/workspace/src/workspace.rs | 31 ++++++- 8 files changed, 165 insertions(+), 40 deletions(-) create mode 100644 crates/project/src/x.py diff --git a/crates/language/src/toolchain.rs b/crates/language/src/toolchain.rs index 5717ffb5143e38bce736c354b43febc86e321f32..815ece30a1ed46ae65ec4af2ba64501ff3489718 100644 --- a/crates/language/src/toolchain.rs +++ b/crates/language/src/toolchain.rs @@ -4,7 +4,10 @@ //! which is a set of tools used to interact with the projects written in said language. //! For example, a Python project can have an associated virtual environment; a Rust project can have a toolchain override. 
-use std::{path::PathBuf, sync::Arc}; +use std::{ + path::{Path, PathBuf}, + sync::Arc, +}; use async_trait::async_trait; use collections::HashMap; @@ -36,7 +39,7 @@ pub struct Toolchain { /// - Only in the subproject they're currently in. #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)] pub enum ToolchainScope { - Subproject(WorktreeId, Arc), + Subproject(Arc, Arc), Project, /// Available in all projects on this box. It wouldn't make sense to show suggestions across machines. Global, diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 5e31f2a90cf137f1e4d788952832e1eb2ee0ec35..25a19788fdb464f5f289ef3bc3513f21743e3a9a 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1330,7 +1330,12 @@ impl Project { cx.subscribe(&buffer_store, Self::on_buffer_store_event) .detach(); let toolchain_store = cx.new(|cx| { - ToolchainStore::remote(REMOTE_SERVER_PROJECT_ID, remote.read(cx).proto_client(), cx) + ToolchainStore::remote( + REMOTE_SERVER_PROJECT_ID, + worktree_store.clone(), + remote.read(cx).proto_client(), + cx, + ) }); let task_store = cx.new(|cx| { TaskStore::remote( diff --git a/crates/project/src/toolchain_store.rs b/crates/project/src/toolchain_store.rs index 21b74bd784d1d9af12fe43e3fe82051afc103b0d..7afc70827f85e1a1bafcad436409936876fd3b45 100644 --- a/crates/project/src/toolchain_store.rs +++ b/crates/project/src/toolchain_store.rs @@ -32,6 +32,7 @@ use crate::{ pub struct ToolchainStore { mode: ToolchainStoreInner, user_toolchains: BTreeMap>, + worktree_store: Entity, _sub: Subscription, } @@ -66,7 +67,7 @@ impl ToolchainStore { ) -> Self { let entity = cx.new(|_| LocalToolchainStore { languages, - worktree_store, + worktree_store: worktree_store.clone(), project_environment, active_toolchains: Default::default(), manifest_tree, @@ -77,12 +78,18 @@ impl ToolchainStore { }); Self { mode: ToolchainStoreInner::Local(entity), + worktree_store, user_toolchains: Default::default(), _sub, } } - 
pub(super) fn remote(project_id: u64, client: AnyProtoClient, cx: &mut Context) -> Self { + pub(super) fn remote( + project_id: u64, + worktree_store: Entity, + client: AnyProtoClient, + cx: &mut Context, + ) -> Self { let entity = cx.new(|_| RemoteToolchainStore { client, project_id }); let _sub = cx.subscribe(&entity, |_, _, e: &ToolchainStoreEvent, cx| { cx.emit(e.clone()) @@ -90,6 +97,7 @@ impl ToolchainStore { Self { mode: ToolchainStoreInner::Remote(entity), user_toolchains: Default::default(), + worktree_store, _sub, } } @@ -165,12 +173,22 @@ impl ToolchainStore { language_name: LanguageName, cx: &mut Context, ) -> Task> { + let Some(worktree) = self + .worktree_store + .read(cx) + .worktree_for_id(path.worktree_id, cx) + else { + return Task::ready(None); + }; + let target_root_path = worktree.read_with(cx, |this, _| this.abs_path()); + let user_toolchains = self .user_toolchains .iter() .filter(|(scope, _)| { - if let ToolchainScope::Subproject(worktree_id, relative_path) = scope { - path.worktree_id == *worktree_id && relative_path.starts_with(&path.path) + if let ToolchainScope::Subproject(subproject_root_path, relative_path) = scope { + target_root_path == *subproject_root_path + && relative_path.starts_with(&path.path) } else { true } diff --git a/crates/project/src/x.py b/crates/project/src/x.py new file mode 100644 index 0000000000000000000000000000000000000000..58947a58a41bcc2e2f8c2046b5e1c8c38c0fbbb8 --- /dev/null +++ b/crates/project/src/x.py @@ -0,0 +1 @@ +Gliwice makerspace \ No newline at end of file diff --git a/crates/toolchain_selector/src/active_toolchain.rs b/crates/toolchain_selector/src/active_toolchain.rs index 03c152e3fd3df0c62ab2f5c7e4a4746875ac955a..06f7d1cdf3e27f43bdb5013038b943b9e5193680 100644 --- a/crates/toolchain_selector/src/active_toolchain.rs +++ b/crates/toolchain_selector/src/active_toolchain.rs @@ -198,10 +198,17 @@ impl ActiveToolchain { .or_else(|| toolchains.toolchains.first()) .cloned(); if let Some(toolchain) = 
&default_choice { + let worktree_root_path = project + .read_with(cx, |this, cx| { + this.worktree_for_id(worktree_id, cx) + .map(|worktree| worktree.read(cx).abs_path()) + }) + .ok() + .flatten()?; workspace::WORKSPACE_DB .set_toolchain( workspace_id, - worktree_id, + worktree_root_path, relative_path.clone(), toolchain.clone(), ) diff --git a/crates/toolchain_selector/src/toolchain_selector.rs b/crates/toolchain_selector/src/toolchain_selector.rs index f7262c248f15f0f68fcd7a903ee01cac6b22d0af..36ef2b960a8abfe684628cea465b68e6eab5e463 100644 --- a/crates/toolchain_selector/src/toolchain_selector.rs +++ b/crates/toolchain_selector/src/toolchain_selector.rs @@ -1,6 +1,7 @@ mod active_toolchain; pub use active_toolchain::ActiveToolchain; +use anyhow::Context as _; use convert_case::Casing as _; use editor::Editor; use file_finder::OpenPathDelegate; @@ -62,6 +63,7 @@ struct AddToolchainState { language_name: LanguageName, root_path: ProjectPath, weak: WeakEntity, + worktree_root_path: Arc, } struct ScopePickerState { @@ -99,12 +101,17 @@ impl AddToolchainState { root_path: ProjectPath, window: &mut Window, cx: &mut Context, - ) -> Entity { + ) -> anyhow::Result> { let weak = cx.weak_entity(); - - cx.new(|cx| { + let worktree_root_path = project + .read(cx) + .worktree_for_id(root_path.worktree_id, cx) + .map(|worktree| worktree.read(cx).abs_path()) + .context("Could not find worktree")?; + Ok(cx.new(|cx| { let (lister, rx) = Self::create_path_browser_delegate(project.clone(), cx); let picker = cx.new(|cx| Picker::uniform_list(lister, window, cx)); + Self { state: AddState::Path { _subscription: cx.subscribe(&picker, |_, _, _: &DismissEvent, cx| { @@ -118,8 +125,9 @@ impl AddToolchainState { language_name, root_path, weak, + worktree_root_path, } - }) + })) } fn create_path_browser_delegate( @@ -237,7 +245,15 @@ impl AddToolchainState { // Suggest a default scope based on the applicability. 
let scope = if let Some(project_path) = resolved_toolchain_path { if !root_path.path.as_ref().is_empty() && project_path.starts_with(&root_path) { - ToolchainScope::Subproject(root_path.worktree_id, root_path.path) + let worktree_root_path = project + .read_with(cx, |this, cx| { + this.worktree_for_id(root_path.worktree_id, cx) + .map(|worktree| worktree.read(cx).abs_path()) + }) + .ok() + .flatten() + .context("Could not find a worktree with a given worktree ID")?; + ToolchainScope::Subproject(worktree_root_path, root_path.path) } else { ToolchainScope::Project } @@ -400,7 +416,7 @@ impl Render for AddToolchainState { ToolchainScope::Global, ToolchainScope::Project, ToolchainScope::Subproject( - self.root_path.worktree_id, + self.worktree_root_path.clone(), self.root_path.path.clone(), ), ]; @@ -693,7 +709,7 @@ impl ToolchainSelector { cx: &mut Context, ) { if matches!(self.state, State::Search(_)) { - self.state = State::AddToolchain(AddToolchainState::new( + let Ok(state) = AddToolchainState::new( self.project.clone(), self.language_name.clone(), ProjectPath { @@ -702,7 +718,10 @@ impl ToolchainSelector { }, window, cx, - )); + ) else { + return; + }; + self.state = State::AddToolchain(state); self.state.focus_handle(cx).focus(window, cx); cx.notify(); } @@ -899,11 +918,17 @@ impl PickerDelegate for ToolchainSelectorDelegate { { let workspace = self.workspace.clone(); let worktree_id = self.worktree_id; + let worktree_abs_path_root = self.worktree_abs_path_root.clone(); let path = self.relative_path.clone(); let relative_path = self.relative_path.clone(); cx.spawn_in(window, async move |_, cx| { workspace::WORKSPACE_DB - .set_toolchain(workspace_id, worktree_id, relative_path, toolchain.clone()) + .set_toolchain( + workspace_id, + worktree_abs_path_root, + relative_path, + toolchain.clone(), + ) .await .log_err(); workspace diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index 
094d03494e726677dc43235d96fc62c076673bf5..8d20339ec952020416e4b8d5846bf44f5f8e9b98 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -24,7 +24,6 @@ use project::{ }; use language::{LanguageName, Toolchain, ToolchainScope}; -use project::WorktreeId; use remote::{ DockerConnectionOptions, RemoteConnectionOptions, SshConnectionOptions, WslConnectionOptions, }; @@ -845,6 +844,44 @@ impl Domain for WorkspaceDb { host_name TEXT ) STRICT; ), + sql!(CREATE TABLE toolchains2 ( + workspace_id INTEGER, + worktree_root_path TEXT NOT NULL, + language_name TEXT NOT NULL, + name TEXT NOT NULL, + path TEXT NOT NULL, + raw_json TEXT NOT NULL, + relative_worktree_path TEXT NOT NULL, + PRIMARY KEY (workspace_id, worktree_root_path, language_name, relative_worktree_path)) STRICT; + INSERT OR REPLACE INTO toolchains2 + // The `instr(paths, '\n') = 0` part allows us to find all + // workspaces that have a single worktree, as `\n` is used as a + // separator when serializing the workspace paths, so if no `\n` is + // found, we know we have a single worktree. 
+ SELECT toolchains.workspace_id, paths, language_name, name, path, raw_json, relative_worktree_path FROM toolchains INNER JOIN workspaces ON toolchains.workspace_id = workspaces.workspace_id AND instr(paths, '\n') = 0; + DROP TABLE toolchains; + ALTER TABLE toolchains2 RENAME TO toolchains; + ), + sql!(CREATE TABLE user_toolchains2 ( + remote_connection_id INTEGER, + workspace_id INTEGER NOT NULL, + worktree_root_path TEXT NOT NULL, + relative_worktree_path TEXT NOT NULL, + language_name TEXT NOT NULL, + name TEXT NOT NULL, + path TEXT NOT NULL, + raw_json TEXT NOT NULL, + + PRIMARY KEY (workspace_id, worktree_root_path, relative_worktree_path, language_name, name, path, raw_json)) STRICT; + INSERT OR REPLACE INTO user_toolchains2 + // The `instr(paths, '\n') = 0` part allows us to find all + // workspaces that have a single worktree, as `\n` is used as a + // separator when serializing the workspace paths, so if no `\n` is + // found, we know we have a single worktree. + SELECT user_toolchains.remote_connection_id, user_toolchains.workspace_id, paths, relative_worktree_path, language_name, name, path, raw_json FROM user_toolchains INNER JOIN workspaces ON user_toolchains.workspace_id = workspaces.workspace_id AND instr(paths, '\n') = 0; + DROP TABLE user_toolchains; + ALTER TABLE user_toolchains2 RENAME TO user_toolchains; + ), ]; // Allow recovering from bad migration that was initially shipped to nightly @@ -1030,11 +1067,11 @@ impl WorkspaceDb { workspace_id: WorkspaceId, remote_connection_id: Option, ) -> BTreeMap> { - type RowKind = (WorkspaceId, u64, String, String, String, String, String); + type RowKind = (WorkspaceId, String, String, String, String, String, String); let toolchains: Vec = self .select_bound(sql! 
{ - SELECT workspace_id, worktree_id, relative_worktree_path, + SELECT workspace_id, worktree_root_path, relative_worktree_path, language_name, name, path, raw_json FROM user_toolchains WHERE remote_connection_id IS ?1 AND ( workspace_id IN (0, ?2) @@ -1048,7 +1085,7 @@ impl WorkspaceDb { for ( _workspace_id, - worktree_id, + worktree_root_path, relative_worktree_path, language_name, name, @@ -1058,22 +1095,24 @@ impl WorkspaceDb { { // INTEGER's that are primary keys (like workspace ids, remote connection ids and such) start at 1, so we're safe to let scope = if _workspace_id == WorkspaceId(0) { - debug_assert_eq!(worktree_id, u64::MAX); + debug_assert_eq!(worktree_root_path, String::default()); debug_assert_eq!(relative_worktree_path, String::default()); ToolchainScope::Global } else { debug_assert_eq!(workspace_id, _workspace_id); debug_assert_eq!( - worktree_id == u64::MAX, + worktree_root_path == String::default(), relative_worktree_path == String::default() ); let Some(relative_path) = RelPath::unix(&relative_worktree_path).log_err() else { continue; }; - if worktree_id != u64::MAX && relative_worktree_path != String::default() { + if worktree_root_path != String::default() + && relative_worktree_path != String::default() + { ToolchainScope::Subproject( - WorktreeId::from_usize(worktree_id as usize), + Arc::from(worktree_root_path.as_ref()), relative_path.into(), ) } else { @@ -1159,13 +1198,13 @@ impl WorkspaceDb { for (scope, toolchains) in workspace.user_toolchains { for toolchain in toolchains { - let query = sql!(INSERT OR REPLACE INTO user_toolchains(remote_connection_id, workspace_id, worktree_id, relative_worktree_path, language_name, name, path, raw_json) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8)); - let (workspace_id, worktree_id, relative_worktree_path) = match scope { - ToolchainScope::Subproject(worktree_id, ref path) => (Some(workspace.id), Some(worktree_id), Some(path.as_unix_str().to_owned())), + let query = sql!(INSERT OR REPLACE INTO 
user_toolchains(remote_connection_id, workspace_id, worktree_root_path, relative_worktree_path, language_name, name, path, raw_json) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8)); + let (workspace_id, worktree_root_path, relative_worktree_path) = match scope { + ToolchainScope::Subproject(ref worktree_root_path, ref path) => (Some(workspace.id), Some(worktree_root_path.to_string_lossy().into_owned()), Some(path.as_unix_str().to_owned())), ToolchainScope::Project => (Some(workspace.id), None, None), ToolchainScope::Global => (None, None, None), }; - let args = (remote_connection_id, workspace_id.unwrap_or(WorkspaceId(0)), worktree_id.map_or(usize::MAX,|id| id.to_usize()), relative_worktree_path.unwrap_or_default(), + let args = (remote_connection_id, workspace_id.unwrap_or(WorkspaceId(0)), worktree_root_path.unwrap_or_default(), relative_worktree_path.unwrap_or_default(), toolchain.language_name.as_ref().to_owned(), toolchain.name.to_string(), toolchain.path.to_string(), toolchain.as_json.to_string()); if let Err(err) = conn.exec_bound(query)?(args) { log::error!("{err}"); @@ -1844,24 +1883,24 @@ impl WorkspaceDb { pub(crate) async fn toolchains( &self, workspace_id: WorkspaceId, - ) -> Result)>> { + ) -> Result, Arc)>> { self.write(move |this| { let mut select = this .select_bound(sql!( SELECT - name, path, worktree_id, relative_worktree_path, language_name, raw_json + name, path, worktree_root_path, relative_worktree_path, language_name, raw_json FROM toolchains WHERE workspace_id = ? 
)) .context("select toolchains")?; - let toolchain: Vec<(String, String, u64, String, String, String)> = + let toolchain: Vec<(String, String, String, String, String, String)> = select(workspace_id)?; Ok(toolchain .into_iter() .filter_map( - |(name, path, worktree_id, relative_worktree_path, language, json)| { + |(name, path, worktree_root_path, relative_worktree_path, language, json)| { Some(( Toolchain { name: name.into(), @@ -1869,7 +1908,7 @@ impl WorkspaceDb { language_name: LanguageName::new(&language), as_json: serde_json::Value::from_str(&json).ok()?, }, - WorktreeId::from_proto(worktree_id), + Arc::from(worktree_root_path.as_ref()), RelPath::from_proto(&relative_worktree_path).log_err()?, )) }, @@ -1882,18 +1921,18 @@ impl WorkspaceDb { pub async fn set_toolchain( &self, workspace_id: WorkspaceId, - worktree_id: WorktreeId, + worktree_root_path: Arc, relative_worktree_path: Arc, toolchain: Toolchain, ) -> Result<()> { log::debug!( - "Setting toolchain for workspace, worktree: {worktree_id:?}, relative path: {relative_worktree_path:?}, toolchain: {}", + "Setting toolchain for workspace, worktree: {worktree_root_path:?}, relative path: {relative_worktree_path:?}, toolchain: {}", toolchain.name ); self.write(move |conn| { let mut insert = conn .exec_bound(sql!( - INSERT INTO toolchains(workspace_id, worktree_id, relative_worktree_path, language_name, name, path, raw_json) VALUES (?, ?, ?, ?, ?, ?, ?) + INSERT INTO toolchains(workspace_id, worktree_root_path, relative_worktree_path, language_name, name, path, raw_json) VALUES (?, ?, ?, ?, ?, ?, ?) 
ON CONFLICT DO UPDATE SET name = ?5, @@ -1904,7 +1943,7 @@ impl WorkspaceDb { insert(( workspace_id, - worktree_id.to_usize(), + worktree_root_path.to_string_lossy().into_owned(), relative_worktree_path.as_unix_str(), toolchain.language_name.as_ref(), toolchain.name.as_ref(), diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 139fa88359c574e1565ed778fdfbd5fa1c8f7944..53b0cc0623fa4b3ce7de5f1d8e3fd2262210a09a 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -1697,8 +1697,22 @@ impl Workspace { let toolchains = DB.toolchains(workspace_id).await?; - for (toolchain, worktree_id, path) in toolchains { + for (toolchain, worktree_path, path) in toolchains { let toolchain_path = PathBuf::from(toolchain.path.clone().to_string()); + let Some(worktree_id) = project_handle.read_with(cx, |this, cx| { + this.find_worktree(&worktree_path, cx) + .and_then(|(worktree, rel_path)| { + if rel_path.is_empty() { + Some(worktree.read(cx).id()) + } else { + None + } + }) + })? + else { + // We did not find a worktree with a given path, but that's whatever. + continue; + }; if !app_state.fs.is_file(toolchain_path.as_path()).await { continue; } @@ -8217,9 +8231,22 @@ async fn open_remote_project_inner( cx: &mut AsyncApp, ) -> Result>>> { let toolchains = DB.toolchains(workspace_id).await?; - for (toolchain, worktree_id, path) in toolchains { + for (toolchain, worktree_path, path) in toolchains { project .update(cx, |this, cx| { + let Some(worktree_id) = + this.find_worktree(&worktree_path, cx) + .and_then(|(worktree, rel_path)| { + if rel_path.is_empty() { + Some(worktree.read(cx).id()) + } else { + None + } + }) + else { + return Task::ready(None); + }; + this.activate_toolchain(ProjectPath { worktree_id, path }, toolchain, cx) })? 
.await; From 62d36b22fd1c497ae586f89f21b7a80ea6d8091a Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 19 Dec 2025 10:25:19 -0300 Subject: [PATCH 20/46] gpui: Add `text_ellipsis_start` method (#45122) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR is an additive change introducing the `truncate_start` method to labels, which gives us the ability to add an ellipsis at the beginning of the text as opposed to the regular `truncate`. This will be generally used for truncating file paths, where the end is typically more relevant than the beginning, but given it's a general method, there's the possibility to be used anywhere else, too. Screenshot 2025-12-17 at 12  35@2x Release Notes: - N/A --------- Co-authored-by: Lukas Wirth --- crates/editor/src/code_context_menus.rs | 8 +- crates/gpui/src/elements/text.rs | 14 +- crates/gpui/src/style.rs | 8 +- crates/gpui/src/styled.rs | 10 +- crates/gpui/src/text_system/line_wrapper.rs | 228 ++++++++++++++++--- crates/ui/src/components/label/label.rs | 9 +- crates/ui/src/components/label/label_like.rs | 13 +- 7 files changed, 248 insertions(+), 42 deletions(-) diff --git a/crates/editor/src/code_context_menus.rs b/crates/editor/src/code_context_menus.rs index 96739defc506414f573e2454dc31f9c32d8e4adf..e5520be88e34307220126ebafdba6c6371a5db12 100644 --- a/crates/editor/src/code_context_menus.rs +++ b/crates/editor/src/code_context_menus.rs @@ -1615,8 +1615,12 @@ impl CodeActionsMenu { window.text_style().font(), window.text_style().font_size.to_pixels(window.rem_size()), ); - let is_truncated = - line_wrapper.should_truncate_line(&label, CODE_ACTION_MENU_MAX_WIDTH, "…"); + let is_truncated = line_wrapper.should_truncate_line( + &label, + CODE_ACTION_MENU_MAX_WIDTH, + "…", + gpui::TruncateFrom::End, + ); if is_truncated.is_none() { return None; diff --git a/crates/gpui/src/elements/text.rs b/crates/gpui/src/elements/text.rs index 
1b1bfd778c7bc746c67551eb31cf70f60b1485ea..942a0a326526431dc65f389e9cff67bac252d571 100644 --- a/crates/gpui/src/elements/text.rs +++ b/crates/gpui/src/elements/text.rs @@ -2,8 +2,8 @@ use crate::{ ActiveTooltip, AnyView, App, Bounds, DispatchPhase, Element, ElementId, GlobalElementId, HighlightStyle, Hitbox, HitboxBehavior, InspectorElementId, IntoElement, LayoutId, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, Point, SharedString, Size, TextOverflow, - TextRun, TextStyle, TooltipId, WhiteSpace, Window, WrappedLine, WrappedLineLayout, - register_tooltip_mouse_handlers, set_tooltip_on_window, + TextRun, TextStyle, TooltipId, TruncateFrom, WhiteSpace, Window, WrappedLine, + WrappedLineLayout, register_tooltip_mouse_handlers, set_tooltip_on_window, }; use anyhow::Context as _; use itertools::Itertools; @@ -354,7 +354,7 @@ impl TextLayout { None }; - let (truncate_width, truncation_suffix) = + let (truncate_width, truncation_affix, truncate_from) = if let Some(text_overflow) = text_style.text_overflow.clone() { let width = known_dimensions.width.or(match available_space.width { crate::AvailableSpace::Definite(x) => match text_style.line_clamp { @@ -365,10 +365,11 @@ impl TextLayout { }); match text_overflow { - TextOverflow::Truncate(s) => (width, s), + TextOverflow::Truncate(s) => (width, s, TruncateFrom::End), + TextOverflow::TruncateStart(s) => (width, s, TruncateFrom::Start), } } else { - (None, "".into()) + (None, "".into(), TruncateFrom::End) }; if let Some(text_layout) = element_state.0.borrow().as_ref() @@ -383,8 +384,9 @@ impl TextLayout { line_wrapper.truncate_line( text.clone(), truncate_width, - &truncation_suffix, + &truncation_affix, &runs, + truncate_from, ) } else { (text.clone(), Cow::Borrowed(&*runs)) diff --git a/crates/gpui/src/style.rs b/crates/gpui/src/style.rs index 4d6e6f490d81d967692a3e9d8316af75a7a4d306..7481b8001e5752599b90625450d7adb0c66ea2ca 100644 --- a/crates/gpui/src/style.rs +++ b/crates/gpui/src/style.rs @@ -334,9 +334,13 @@ 
pub enum WhiteSpace { /// How to truncate text that overflows the width of the element #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] pub enum TextOverflow { - /// Truncate the text when it doesn't fit, and represent this truncation by displaying the - /// provided string. + /// Truncate the text at the end when it doesn't fit, and represent this truncation by + /// displaying the provided string (e.g., "very long te…"). Truncate(SharedString), + /// Truncate the text at the start when it doesn't fit, and represent this truncation by + /// displaying the provided string at the beginning (e.g., "…ong text here"). + /// Typically more adequate for file paths where the end is more important than the beginning. + TruncateStart(SharedString), } /// How to align text within the element diff --git a/crates/gpui/src/styled.rs b/crates/gpui/src/styled.rs index e8088a84d7fc141d0a320988c6399afe2b93ce07..c5eef0d4496edea4d30c665c82dc0a9f00bb83be 100644 --- a/crates/gpui/src/styled.rs +++ b/crates/gpui/src/styled.rs @@ -75,13 +75,21 @@ pub trait Styled: Sized { self } - /// Sets the truncate overflowing text with an ellipsis (…) if needed. + /// Sets the truncate overflowing text with an ellipsis (…) at the end if needed. /// [Docs](https://tailwindcss.com/docs/text-overflow#ellipsis) fn text_ellipsis(mut self) -> Self { self.text_style().text_overflow = Some(TextOverflow::Truncate(ELLIPSIS)); self } + /// Sets the truncate overflowing text with an ellipsis (…) at the start if needed. + /// Typically more adequate for file paths where the end is more important than the beginning. + /// Note: This doesn't exist in Tailwind CSS. + fn text_ellipsis_start(mut self) -> Self { + self.text_style().text_overflow = Some(TextOverflow::TruncateStart(ELLIPSIS)); + self + } + /// Sets the text overflow behavior of the element. 
fn text_overflow(mut self, overflow: TextOverflow) -> Self { self.text_style().text_overflow = Some(overflow); diff --git a/crates/gpui/src/text_system/line_wrapper.rs b/crates/gpui/src/text_system/line_wrapper.rs index 95cd55d04443c6b2c351bf8533ccb57d49e8dcd9..457316f353a48fa112de1736b2b7eaa2d4c72313 100644 --- a/crates/gpui/src/text_system/line_wrapper.rs +++ b/crates/gpui/src/text_system/line_wrapper.rs @@ -2,6 +2,15 @@ use crate::{FontId, FontRun, Pixels, PlatformTextSystem, SharedString, TextRun, use collections::HashMap; use std::{borrow::Cow, iter, sync::Arc}; +/// Determines whether to truncate text from the start or end. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum TruncateFrom { + /// Truncate text from the start. + Start, + /// Truncate text from the end. + End, +} + /// The GPUI line wrapper, used to wrap lines of text to a given width. pub struct LineWrapper { platform_text_system: Arc, @@ -129,29 +138,50 @@ impl LineWrapper { } /// Determines if a line should be truncated based on its width. + /// + /// Returns the truncation index in `line`. 
pub fn should_truncate_line( &mut self, line: &str, truncate_width: Pixels, - truncation_suffix: &str, + truncation_affix: &str, + truncate_from: TruncateFrom, ) -> Option { let mut width = px(0.); - let suffix_width = truncation_suffix + let suffix_width = truncation_affix .chars() .map(|c| self.width_for_char(c)) .fold(px(0.0), |a, x| a + x); let mut truncate_ix = 0; - for (ix, c) in line.char_indices() { - if width + suffix_width < truncate_width { - truncate_ix = ix; + match truncate_from { + TruncateFrom::Start => { + for (ix, c) in line.char_indices().rev() { + if width + suffix_width < truncate_width { + truncate_ix = ix; + } + + let char_width = self.width_for_char(c); + width += char_width; + + if width.floor() > truncate_width { + return Some(truncate_ix); + } + } } + TruncateFrom::End => { + for (ix, c) in line.char_indices() { + if width + suffix_width < truncate_width { + truncate_ix = ix; + } - let char_width = self.width_for_char(c); - width += char_width; + let char_width = self.width_for_char(c); + width += char_width; - if width.floor() > truncate_width { - return Some(truncate_ix); + if width.floor() > truncate_width { + return Some(truncate_ix); + } + } } } @@ -163,16 +193,23 @@ impl LineWrapper { &mut self, line: SharedString, truncate_width: Pixels, - truncation_suffix: &str, + truncation_affix: &str, runs: &'a [TextRun], + truncate_from: TruncateFrom, ) -> (SharedString, Cow<'a, [TextRun]>) { if let Some(truncate_ix) = - self.should_truncate_line(&line, truncate_width, truncation_suffix) + self.should_truncate_line(&line, truncate_width, truncation_affix, truncate_from) { - let result = - SharedString::from(format!("{}{}", &line[..truncate_ix], truncation_suffix)); + let result = match truncate_from { + TruncateFrom::Start => { + SharedString::from(format!("{truncation_affix}{}", &line[truncate_ix + 1..])) + } + TruncateFrom::End => { + SharedString::from(format!("{}{truncation_affix}", &line[..truncate_ix])) + } + }; let mut runs = 
runs.to_vec(); - update_runs_after_truncation(&result, truncation_suffix, &mut runs); + update_runs_after_truncation(&result, truncation_affix, &mut runs, truncate_from); (result, Cow::Owned(runs)) } else { (line, Cow::Borrowed(runs)) @@ -245,15 +282,35 @@ impl LineWrapper { } } -fn update_runs_after_truncation(result: &str, ellipsis: &str, runs: &mut Vec) { +fn update_runs_after_truncation( + result: &str, + ellipsis: &str, + runs: &mut Vec, + truncate_from: TruncateFrom, +) { let mut truncate_at = result.len() - ellipsis.len(); - for (run_index, run) in runs.iter_mut().enumerate() { - if run.len <= truncate_at { - truncate_at -= run.len; - } else { - run.len = truncate_at + ellipsis.len(); - runs.truncate(run_index + 1); - break; + match truncate_from { + TruncateFrom::Start => { + for (run_index, run) in runs.iter_mut().enumerate().rev() { + if run.len <= truncate_at { + truncate_at -= run.len; + } else { + run.len = truncate_at + ellipsis.len(); + runs.splice(..run_index, std::iter::empty()); + break; + } + } + } + TruncateFrom::End => { + for (run_index, run) in runs.iter_mut().enumerate() { + if run.len <= truncate_at { + truncate_at -= run.len; + } else { + run.len = truncate_at + ellipsis.len(); + runs.truncate(run_index + 1); + break; + } + } } } } @@ -503,7 +560,7 @@ mod tests { } #[test] - fn test_truncate_line() { + fn test_truncate_line_end() { let mut wrapper = build_wrapper(); fn perform_test( @@ -514,8 +571,13 @@ mod tests { ) { let dummy_run_lens = vec![text.len()]; let dummy_runs = generate_test_runs(&dummy_run_lens); - let (result, dummy_runs) = - wrapper.truncate_line(text.into(), px(220.), ellipsis, &dummy_runs); + let (result, dummy_runs) = wrapper.truncate_line( + text.into(), + px(220.), + ellipsis, + &dummy_runs, + TruncateFrom::End, + ); assert_eq!(result, expected); assert_eq!(dummy_runs.first().unwrap().len, result.len()); } @@ -541,7 +603,50 @@ mod tests { } #[test] - fn test_truncate_multiple_runs() { + fn test_truncate_line_start() { 
+ let mut wrapper = build_wrapper(); + + fn perform_test( + wrapper: &mut LineWrapper, + text: &'static str, + expected: &'static str, + ellipsis: &str, + ) { + let dummy_run_lens = vec![text.len()]; + let dummy_runs = generate_test_runs(&dummy_run_lens); + let (result, dummy_runs) = wrapper.truncate_line( + text.into(), + px(220.), + ellipsis, + &dummy_runs, + TruncateFrom::Start, + ); + assert_eq!(result, expected); + assert_eq!(dummy_runs.first().unwrap().len, result.len()); + } + + perform_test( + &mut wrapper, + "aaaa bbbb cccc ddddd eeee fff gg", + "cccc ddddd eeee fff gg", + "", + ); + perform_test( + &mut wrapper, + "aaaa bbbb cccc ddddd eeee fff gg", + "…ccc ddddd eeee fff gg", + "…", + ); + perform_test( + &mut wrapper, + "aaaa bbbb cccc ddddd eeee fff gg", + "......dddd eeee fff gg", + "......", + ); + } + + #[test] + fn test_truncate_multiple_runs_end() { let mut wrapper = build_wrapper(); fn perform_test( @@ -554,7 +659,7 @@ mod tests { ) { let dummy_runs = generate_test_runs(run_lens); let (result, dummy_runs) = - wrapper.truncate_line(text.into(), line_width, "…", &dummy_runs); + wrapper.truncate_line(text.into(), line_width, "…", &dummy_runs, TruncateFrom::End); assert_eq!(result, expected); for (run, result_len) in dummy_runs.iter().zip(result_run_len) { assert_eq!(run.len, *result_len); @@ -600,10 +705,75 @@ mod tests { } #[test] - fn test_update_run_after_truncation() { + fn test_truncate_multiple_runs_start() { + let mut wrapper = build_wrapper(); + + #[track_caller] + fn perform_test( + wrapper: &mut LineWrapper, + text: &'static str, + expected: &str, + run_lens: &[usize], + result_run_len: &[usize], + line_width: Pixels, + ) { + let dummy_runs = generate_test_runs(run_lens); + let (result, dummy_runs) = wrapper.truncate_line( + text.into(), + line_width, + "…", + &dummy_runs, + TruncateFrom::Start, + ); + assert_eq!(result, expected); + for (run, result_len) in dummy_runs.iter().zip(result_run_len) { + assert_eq!(run.len, *result_len); + } + 
} + // Case 0: Normal + // Text: abcdefghijkl + // Runs: Run0 { len: 12, ... } + // + // Truncate res: …ijkl (truncate_at = 9) + // Run res: Run0 { string: …ijkl, len: 7, ... } + perform_test(&mut wrapper, "abcdefghijkl", "…ijkl", &[12], &[7], px(50.)); + // Case 1: Drop some runs + // Text: abcdefghijkl + // Runs: Run0 { len: 4, ... }, Run1 { len: 4, ... }, Run2 { len: 4, ... } + // + // Truncate res: …ghijkl (truncate_at = 7) + // Runs res: Run0 { string: …gh, len: 5, ... }, Run1 { string: ijkl, len: + // 4, ... } + perform_test( + &mut wrapper, + "abcdefghijkl", + "…ghijkl", + &[4, 4, 4], + &[5, 4], + px(70.), + ); + // Case 2: Truncate at start of some run + // Text: abcdefghijkl + // Runs: Run0 { len: 4, ... }, Run1 { len: 4, ... }, Run2 { len: 4, ... } + // + // Truncate res: abcdefgh… (truncate_at = 3) + // Runs res: Run0 { string: …, len: 3, ... }, Run1 { string: efgh, len: + // 4, ... }, Run2 { string: ijkl, len: 4, ... } + perform_test( + &mut wrapper, + "abcdefghijkl", + "…efghijkl", + &[4, 4, 4], + &[3, 4, 4], + px(90.), + ); + } + + #[test] + fn test_update_run_after_truncation_end() { fn perform_test(result: &str, run_lens: &[usize], result_run_lens: &[usize]) { let mut dummy_runs = generate_test_runs(run_lens); - update_runs_after_truncation(result, "…", &mut dummy_runs); + update_runs_after_truncation(result, "…", &mut dummy_runs, TruncateFrom::End); for (run, result_len) in dummy_runs.iter().zip(result_run_lens) { assert_eq!(run.len, *result_len); } diff --git a/crates/ui/src/components/label/label.rs b/crates/ui/src/components/label/label.rs index 49e2de94a1f86196c10e41879797b02070517e65..d0f50c00336eb971621e2da7bbaf53cf09569caa 100644 --- a/crates/ui/src/components/label/label.rs +++ b/crates/ui/src/components/label/label.rs @@ -56,6 +56,12 @@ impl Label { pub fn set_text(&mut self, text: impl Into) { self.label = text.into(); } + + /// Truncates the label from the start, keeping the end visible. 
+ pub fn truncate_start(mut self) -> Self { + self.base = self.base.truncate_start(); + self + } } // Style methods. @@ -256,7 +262,8 @@ impl Component for Label { "Special Cases", vec![ single_example("Single Line", Label::new("Line 1\nLine 2\nLine 3").single_line().into_any_element()), - single_example("Text Ellipsis", div().max_w_24().child(Label::new("This is a very long file name that should be truncated: very_long_file_name_with_many_words.rs").truncate()).into_any_element()), + single_example("Regular Truncation", div().max_w_24().child(Label::new("This is a very long file name that should be truncated: very_long_file_name_with_many_words.rs").truncate()).into_any_element()), + single_example("Start Truncation", div().max_w_24().child(Label::new("zed/crates/ui/src/components/label/truncate/label/label.rs").truncate_start()).into_any_element()), ], ), ]) diff --git a/crates/ui/src/components/label/label_like.rs b/crates/ui/src/components/label/label_like.rs index 31fb7bfd88f1343ac6145c86f228bdcbd6a22e10..10d54845dabf371b8da6fed5ebbcd2b8d82ea711 100644 --- a/crates/ui/src/components/label/label_like.rs +++ b/crates/ui/src/components/label/label_like.rs @@ -88,6 +88,7 @@ pub struct LabelLike { underline: bool, single_line: bool, truncate: bool, + truncate_start: bool, } impl Default for LabelLike { @@ -113,6 +114,7 @@ impl LabelLike { underline: false, single_line: false, truncate: false, + truncate_start: false, } } } @@ -126,6 +128,12 @@ impl LabelLike { gpui::margin_style_methods!({ visibility: pub }); + + /// Truncates overflowing text with an ellipsis (`…`) at the start if needed. + pub fn truncate_start(mut self) -> Self { + self.truncate_start = true; + self + } } impl LabelCommon for LabelLike { @@ -169,7 +177,7 @@ impl LabelCommon for LabelLike { self } - /// Truncates overflowing text with an ellipsis (`…`) if needed. + /// Truncates overflowing text with an ellipsis (`…`) at the end if needed. 
fn truncate(mut self) -> Self { self.truncate = true; self @@ -235,6 +243,9 @@ impl RenderOnce for LabelLike { .when(self.truncate, |this| { this.overflow_x_hidden().text_ellipsis() }) + .when(self.truncate_start, |this| { + this.overflow_x_hidden().text_ellipsis_start() + }) .text_color(color) .font_weight( self.weight From 4e0471cf663b88f38246440f5ae06ca4828225d8 Mon Sep 17 00:00:00 2001 From: ozzy <109994179+ddoemonn@users.noreply.github.com> Date: Fri, 19 Dec 2025 16:50:35 +0300 Subject: [PATCH 21/46] git panel: Truncate file paths from the left (#43462) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit https://github.com/user-attachments/assets/758e1ec9-6c34-4e13-b605-cf00c18ca16f Release Notes: - Improved: Git panel now truncates long file paths from the left, showing "…path/filename" when space is limited, keeping filenames always visible. @cole-miller @mattermill --------- Co-authored-by: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Co-authored-by: Danilo Leal --- crates/git_ui/src/git_panel.rs | 2 +- crates/ui/src/components/label/label_like.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 532f9a099a823796706be48ed14cc7da820c5d8b..1323ee014f76ebde42b8dff436b2abed851d13f0 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -5203,7 +5203,7 @@ impl GitPanel { this.child( self.entry_label(path_name, path_color) - .truncate() + .truncate_start() .when(strikethrough, Label::strikethrough), ) }) diff --git a/crates/ui/src/components/label/label_like.rs b/crates/ui/src/components/label/label_like.rs index 10d54845dabf371b8da6fed5ebbcd2b8d82ea711..f6e7a1b893d54fff425618d5c604f591144a7385 100644 --- a/crates/ui/src/components/label/label_like.rs +++ b/crates/ui/src/components/label/label_like.rs @@ -56,7 +56,7 @@ pub trait LabelCommon { /// Sets the alpha property of the label, overwriting 
the alpha value of the color. fn alpha(self, alpha: f32) -> Self; - /// Truncates overflowing text with an ellipsis (`…`) if needed. + /// Truncates overflowing text with an ellipsis (`…`) at the end if needed. fn truncate(self) -> Self; /// Sets the label to render as a single line. From ae44c3c8811472dfb105932eaf4e24a2f2853a4e Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Fri, 19 Dec 2025 09:39:58 -0500 Subject: [PATCH 22/46] Fix extra terminal being created when a task replaces a terminal in the center pane (#45317) Closes https://github.com/zed-industries/zed/issues/21144 Release Notes: - Fixed spawned tasks creating an extra terminal in the dock in some cases. --- crates/terminal_view/src/terminal_panel.rs | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/crates/terminal_view/src/terminal_panel.rs b/crates/terminal_view/src/terminal_panel.rs index ed43d94e9d3d7c08c1ff4570e08726310360cd93..738a0b4502642423377bdf69b49d26250536761f 100644 --- a/crates/terminal_view/src/terminal_panel.rs +++ b/crates/terminal_view/src/terminal_panel.rs @@ -939,7 +939,6 @@ impl TerminalPanel { cx: &mut Context, ) -> Task>> { let reveal = spawn_task.reveal; - let reveal_target = spawn_task.reveal_target; let task_workspace = self.workspace.clone(); cx.spawn_in(window, async move |terminal_panel, cx| { let project = terminal_panel.update(cx, |this, cx| { @@ -955,6 +954,14 @@ impl TerminalPanel { terminal_to_replace.set_terminal(new_terminal.clone(), window, cx); })?; + let reveal_target = terminal_panel.update(cx, |panel, _| { + if panel.center.panes().iter().any(|p| **p == task_pane) { + RevealTarget::Dock + } else { + RevealTarget::Center + } + })?; + match reveal { RevealStrategy::Always => match reveal_target { RevealTarget::Center => { From 7427924405dabda02cca87c24143f2d5f27eb433 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Yara=20=F0=9F=8F=B3=EF=B8=8F=E2=80=8D=E2=9A=A7=EF=B8=8F?= Date: Fri, 19 Dec 2025 16:03:35 +0100 Subject: [PATCH 23/46] adjusted 
scheduler prioritization algorithm (#45367) This fixes a number of issues where zed depends on the order of polling which changed when switching scheduler. We have adjusted the algorithm so it matches the previous order while keeping the prioritization feature. Release Notes: - N/A --- crates/gpui/src/queue.rs | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/crates/gpui/src/queue.rs b/crates/gpui/src/queue.rs index 9e9da710977ee80df1853791918eebe5e7f01096..1ecec183c6c58a86e305343f7bcd1056cda7a581 100644 --- a/crates/gpui/src/queue.rs +++ b/crates/gpui/src/queue.rs @@ -1,4 +1,5 @@ use std::{ + collections::VecDeque, fmt, iter::FusedIterator, sync::{Arc, atomic::AtomicUsize}, @@ -9,9 +10,9 @@ use rand::{Rng, SeedableRng, rngs::SmallRng}; use crate::Priority; struct PriorityQueues { - high_priority: Vec, - medium_priority: Vec, - low_priority: Vec, + high_priority: VecDeque, + medium_priority: VecDeque, + low_priority: VecDeque, } impl PriorityQueues { @@ -42,9 +43,9 @@ impl PriorityQueueState { let mut queues = self.queues.lock(); match priority { Priority::Realtime(_) => unreachable!(), - Priority::High => queues.high_priority.push(item), - Priority::Medium => queues.medium_priority.push(item), - Priority::Low => queues.low_priority.push(item), + Priority::High => queues.high_priority.push_back(item), + Priority::Medium => queues.medium_priority.push_back(item), + Priority::Low => queues.low_priority.push_back(item), }; self.condvar.notify_one(); Ok(()) @@ -141,9 +142,9 @@ impl PriorityQueueReceiver { pub(crate) fn new() -> (PriorityQueueSender, Self) { let state = PriorityQueueState { queues: parking_lot::Mutex::new(PriorityQueues { - high_priority: Vec::new(), - medium_priority: Vec::new(), - low_priority: Vec::new(), + high_priority: VecDeque::new(), + medium_priority: VecDeque::new(), + low_priority: VecDeque::new(), }), condvar: parking_lot::Condvar::new(), receiver_count: AtomicUsize::new(1), @@ -226,7 +227,7 @@ 
impl PriorityQueueReceiver { if !queues.high_priority.is_empty() { let flip = self.rand.random_ratio(P::High.probability(), mass); if flip { - return Ok(queues.high_priority.pop()); + return Ok(queues.high_priority.pop_front()); } mass -= P::High.probability(); } @@ -234,7 +235,7 @@ impl PriorityQueueReceiver { if !queues.medium_priority.is_empty() { let flip = self.rand.random_ratio(P::Medium.probability(), mass); if flip { - return Ok(queues.medium_priority.pop()); + return Ok(queues.medium_priority.pop_front()); } mass -= P::Medium.probability(); } @@ -242,7 +243,7 @@ impl PriorityQueueReceiver { if !queues.low_priority.is_empty() { let flip = self.rand.random_ratio(P::Low.probability(), mass); if flip { - return Ok(queues.low_priority.pop()); + return Ok(queues.low_priority.pop_front()); } } From b603372f44a60f8ed98ae95b36094582ead47b89 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 19 Dec 2025 16:06:28 +0100 Subject: [PATCH 24/46] Reduce GPU usage by activating VRR optimization only during high-rate input (#45369) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes #29073 This PR reduces unnecessary GPU usage by being more selective about when we present frames to prevent display underclocking (VRR optimization). ## Problem Previously, we would keep presenting frames for 1 second after *any* input event, regardless of whether it triggered a re-render. This caused unnecessary GPU work when the user was idle or during low-frequency interactions. ## Solution 1. **Only track input that triggers re-renders**: We now only record input timestamps when the input actually causes the window to become dirty, rather than on every input event. 2. **Rate-based activation**: The VRR optimization now only activates when input arrives at a high rate (≥ 60fps over the last 100ms). This means casual mouse movements or occasional keystrokes won't trigger continuous frame presentation. 3. 
**Sustained optimization**: Once high-rate input is detected (e.g., during scrolling or dragging), we sustain frame presentation for 1 second to prevent display underclocking, even if input briefly pauses. ## Implementation Added `InputRateTracker` which: - Tracks input timestamps in a 100ms sliding window - Activates when the window contains ≥ 6 events (60fps × 0.1s) - Extends a `sustain_until` timestamp by 1 second each time high rate is detected Release Notes: - Reduced GPU usage when idle by only presenting frames during bursts of high-frequency input. --- crates/gpui/src/window.rs | 70 +++++++++++++++++++++++++++++++++------ 1 file changed, 59 insertions(+), 11 deletions(-) diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 2ccd7edac86bced89048cbe5dbf196d8fbcf95f3..8df421feb968677be0abbb642a7127871881bcf3 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -876,7 +876,9 @@ pub struct Window { active: Rc>, hovered: Rc>, pub(crate) needs_present: Rc>, - pub(crate) last_input_timestamp: Rc>, + /// Tracks recent input event timestamps to determine if input is arriving at a high rate. + /// Used to selectively enable VRR optimization only when input rate exceeds 60fps. + pub(crate) input_rate_tracker: Rc>, last_input_modality: InputModality, pub(crate) refreshing: bool, pub(crate) activation_observers: SubscriberSet<(), AnyObserver>, @@ -897,6 +899,51 @@ struct ModifierState { saw_keystroke: bool, } +/// Tracks input event timestamps to determine if input is arriving at a high rate. +/// Used for selective VRR (Variable Refresh Rate) optimization. 
+#[derive(Clone, Debug)] +pub(crate) struct InputRateTracker { + timestamps: Vec, + window: Duration, + inputs_per_second: u32, + sustain_until: Instant, + sustain_duration: Duration, +} + +impl Default for InputRateTracker { + fn default() -> Self { + Self { + timestamps: Vec::new(), + window: Duration::from_millis(100), + inputs_per_second: 60, + sustain_until: Instant::now(), + sustain_duration: Duration::from_secs(1), + } + } +} + +impl InputRateTracker { + pub fn record_input(&mut self) { + let now = Instant::now(); + self.timestamps.push(now); + self.prune_old_timestamps(now); + + let min_events = self.inputs_per_second as u128 * self.window.as_millis() / 1000; + if self.timestamps.len() as u128 >= min_events { + self.sustain_until = now + self.sustain_duration; + } + } + + pub fn is_high_rate(&self) -> bool { + Instant::now() < self.sustain_until + } + + fn prune_old_timestamps(&mut self, now: Instant) { + self.timestamps + .retain(|&t| now.duration_since(t) <= self.window); + } +} + #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub(crate) enum DrawPhase { None, @@ -1047,7 +1094,7 @@ impl Window { let hovered = Rc::new(Cell::new(platform_window.is_hovered())); let needs_present = Rc::new(Cell::new(false)); let next_frame_callbacks: Rc>> = Default::default(); - let last_input_timestamp = Rc::new(Cell::new(Instant::now())); + let input_rate_tracker = Rc::new(RefCell::new(InputRateTracker::default())); platform_window .request_decorations(window_decorations.unwrap_or(WindowDecorations::Server)); @@ -1075,7 +1122,7 @@ impl Window { let active = active.clone(); let needs_present = needs_present.clone(); let next_frame_callbacks = next_frame_callbacks.clone(); - let last_input_timestamp = last_input_timestamp.clone(); + let input_rate_tracker = input_rate_tracker.clone(); move |request_frame_options| { let next_frame_callbacks = next_frame_callbacks.take(); if !next_frame_callbacks.is_empty() { @@ -1088,12 +1135,12 @@ impl Window { .log_err(); } - // Keep 
presenting the current scene for 1 extra second since the - // last input to prevent the display from underclocking the refresh rate. + // Keep presenting if input was recently arriving at a high rate (>= 60fps). + // Once high-rate input is detected, we sustain presentation for 1 second + // to prevent display underclocking during active input. let needs_present = request_frame_options.require_presentation || needs_present.get() - || (active.get() - && last_input_timestamp.get().elapsed() < Duration::from_secs(1)); + || (active.get() && input_rate_tracker.borrow_mut().is_high_rate()); if invalidator.is_dirty() || request_frame_options.force_render { measure("frame duration", || { @@ -1101,7 +1148,6 @@ impl Window { .update(&mut cx, |_, window, cx| { let arena_clear_needed = window.draw(cx); window.present(); - // drop the arena elements after present to reduce latency arena_clear_needed.clear(); }) .log_err(); @@ -1299,7 +1345,7 @@ impl Window { active, hovered, needs_present, - last_input_timestamp, + input_rate_tracker, last_input_modality: InputModality::Mouse, refreshing: false, activation_observers: SubscriberSet::new(), @@ -3691,8 +3737,6 @@ impl Window { /// Dispatch a mouse or keyboard event on the window. 
#[profiling::function] pub fn dispatch_event(&mut self, event: PlatformInput, cx: &mut App) -> DispatchEventResult { - self.last_input_timestamp.set(Instant::now()); - // Track whether this input was keyboard-based for focus-visible styling self.last_input_modality = match &event { PlatformInput::KeyDown(_) | PlatformInput::ModifiersChanged(_) => { @@ -3793,6 +3837,10 @@ impl Window { self.dispatch_key_event(any_key_event, cx); } + if self.invalidator.is_dirty() { + self.input_rate_tracker.borrow_mut().record_input(); + } + DispatchEventResult { propagate: cx.propagate_event, default_prevented: self.default_prevented, From 8001877df2104f86ee236f9c813e2405346e716f Mon Sep 17 00:00:00 2001 From: AidanV <84053180+AidanV@users.noreply.github.com> Date: Fri, 19 Dec 2025 10:31:16 -0500 Subject: [PATCH 25/46] vim: Add `:r[ead] [name]` command (#45332) This adds the following Vim commands: - `:r[ead] [name]` - `:{range}r[ead] [name]` The most important parts of this feature are outlined [here](https://vimhelp.org/insert.txt.html#%3Ar). The only intentional difference between this and Vim is that Vim only allows `:read` (no filename) for buffers with a file attached. I am allowing it for all buffers because I think that could be useful. Release Notes: - vim: Added the [`:r[ead] [name]` Vim command](https://vimhelp.org/insert.txt.html#:read) --------- Co-authored-by: Ben Kunkle --- crates/vim/src/command.rs | 200 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 200 insertions(+) diff --git a/crates/vim/src/command.rs b/crates/vim/src/command.rs index 205097130d152fe255feb02a449956124586d8e6..2228c23f02beb954bdb26b2b36f078249e423d7d 100644 --- a/crates/vim/src/command.rs +++ b/crates/vim/src/command.rs @@ -230,6 +230,14 @@ struct VimEdit { pub filename: String, } +/// Pastes the specified file's contents. 
+#[derive(Clone, PartialEq, Action)] +#[action(namespace = vim, no_json, no_register)] +struct VimRead { + pub range: Option, + pub filename: String, +} + #[derive(Clone, PartialEq, Action)] #[action(namespace = vim, no_json, no_register)] struct VimNorm { @@ -643,6 +651,107 @@ pub fn register(editor: &mut Editor, cx: &mut Context) { }); }); + Vim::action(editor, cx, |vim, action: &VimRead, window, cx| { + vim.update_editor(cx, |vim, editor, cx| { + let snapshot = editor.buffer().read(cx).snapshot(cx); + let end = if let Some(range) = action.range.clone() { + let Some(multi_range) = range.buffer_range(vim, editor, window, cx).log_err() + else { + return; + }; + + match &range.start { + // inserting text above the first line uses the command ":0r {name}" + Position::Line { row: 0, offset: 0 } if range.end.is_none() => { + snapshot.clip_point(Point::new(0, 0), Bias::Right) + } + _ => snapshot.clip_point(Point::new(multi_range.end.0 + 1, 0), Bias::Right), + } + } else { + let end_row = editor + .selections + .newest::(&editor.display_snapshot(cx)) + .range() + .end + .row; + snapshot.clip_point(Point::new(end_row + 1, 0), Bias::Right) + }; + let is_end_of_file = end == snapshot.max_point(); + let edit_range = snapshot.anchor_before(end)..snapshot.anchor_before(end); + + let mut text = if is_end_of_file { + String::from('\n') + } else { + String::new() + }; + + let mut task = None; + if action.filename.is_empty() { + text.push_str( + &editor + .buffer() + .read(cx) + .as_singleton() + .map(|buffer| buffer.read(cx).text()) + .unwrap_or_default(), + ); + } else { + if let Some(project) = editor.project().cloned() { + project.update(cx, |project, cx| { + let Some(worktree) = project.visible_worktrees(cx).next() else { + return; + }; + let path_style = worktree.read(cx).path_style(); + let Some(path) = + RelPath::new(Path::new(&action.filename), path_style).log_err() + else { + return; + }; + task = + Some(worktree.update(cx, |worktree, cx| worktree.load_file(&path, cx))); 
+ }); + } else { + return; + } + }; + + cx.spawn_in(window, async move |editor, cx| { + if let Some(task) = task { + text.push_str( + &task + .await + .log_err() + .map(|loaded_file| loaded_file.text) + .unwrap_or_default(), + ); + } + + if !text.is_empty() && !is_end_of_file { + text.push('\n'); + } + + let _ = editor.update_in(cx, |editor, window, cx| { + editor.transact(window, cx, |editor, window, cx| { + editor.edit([(edit_range.clone(), text)], cx); + let snapshot = editor.buffer().read(cx).snapshot(cx); + editor.change_selections(Default::default(), window, cx, |s| { + let point = if is_end_of_file { + Point::new( + edit_range.start.to_point(&snapshot).row.saturating_add(1), + 0, + ) + } else { + Point::new(edit_range.start.to_point(&snapshot).row, 0) + }; + s.select_ranges([point..point]); + }) + }); + }); + }) + .detach(); + }); + }); + Vim::action(editor, cx, |vim, action: &VimNorm, window, cx| { let keystrokes = action .command @@ -1338,6 +1447,27 @@ fn generate_commands(_: &App) -> Vec { VimCommand::new(("e", "dit"), editor::actions::ReloadFile) .bang(editor::actions::ReloadFile) .filename(|_, filename| Some(VimEdit { filename }.boxed_clone())), + VimCommand::new( + ("r", "ead"), + VimRead { + range: None, + filename: "".into(), + }, + ) + .filename(|_, filename| { + Some( + VimRead { + range: None, + filename, + } + .boxed_clone(), + ) + }) + .range(|action, range| { + let mut action: VimRead = action.as_any().downcast_ref::().unwrap().clone(); + action.range.replace(range.clone()); + Some(Box::new(action)) + }), VimCommand::new(("sp", "lit"), workspace::SplitHorizontal).filename(|_, filename| { Some( VimSplit { @@ -2575,6 +2705,76 @@ mod test { assert_eq!(fs.load(path).await.unwrap().replace("\r\n", "\n"), "@@\n"); } + #[gpui::test] + async fn test_command_read(cx: &mut TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + let fs = cx.workspace(|workspace, _, cx| workspace.project().read(cx).fs().clone()); + let path = 
Path::new(path!("/root/dir/other.rs")); + fs.as_fake().insert_file(path, "1\n2\n3".into()).await; + + cx.workspace(|workspace, _, cx| { + assert_active_item(workspace, path!("/root/dir/file.rs"), "", cx); + }); + + // File without trailing newline + cx.set_state("one\ntwo\nthreeˇ", Mode::Normal); + cx.simulate_keystrokes(": r space d i r / o t h e r . r s"); + cx.simulate_keystrokes("enter"); + cx.assert_state("one\ntwo\nthree\nˇ1\n2\n3", Mode::Normal); + + cx.set_state("oneˇ\ntwo\nthree", Mode::Normal); + cx.simulate_keystrokes(": r space d i r / o t h e r . r s"); + cx.simulate_keystrokes("enter"); + cx.assert_state("one\nˇ1\n2\n3\ntwo\nthree", Mode::Normal); + + cx.set_state("one\nˇtwo\nthree", Mode::Normal); + cx.simulate_keystrokes(": 0 r space d i r / o t h e r . r s"); + cx.simulate_keystrokes("enter"); + cx.assert_state("ˇ1\n2\n3\none\ntwo\nthree", Mode::Normal); + + cx.set_state("one\n«ˇtwo\nthree\nfour»\nfive", Mode::Visual); + cx.simulate_keystrokes(": r space d i r / o t h e r . r s"); + cx.simulate_keystrokes("enter"); + cx.run_until_parked(); + cx.assert_state("one\ntwo\nthree\nfour\nˇ1\n2\n3\nfive", Mode::Normal); + + // Empty filename + cx.set_state("oneˇ\ntwo\nthree", Mode::Normal); + cx.simulate_keystrokes(": r"); + cx.simulate_keystrokes("enter"); + cx.assert_state("one\nˇone\ntwo\nthree\ntwo\nthree", Mode::Normal); + + // File with trailing newline + fs.as_fake().insert_file(path, "1\n2\n3\n".into()).await; + cx.set_state("one\ntwo\nthreeˇ", Mode::Normal); + cx.simulate_keystrokes(": r space d i r / o t h e r . r s"); + cx.simulate_keystrokes("enter"); + cx.assert_state("one\ntwo\nthree\nˇ1\n2\n3\n", Mode::Normal); + + cx.set_state("oneˇ\ntwo\nthree", Mode::Normal); + cx.simulate_keystrokes(": r space d i r / o t h e r . r s"); + cx.simulate_keystrokes("enter"); + cx.assert_state("one\nˇ1\n2\n3\n\ntwo\nthree", Mode::Normal); + + cx.set_state("one\n«ˇtwo\nthree\nfour»\nfive", Mode::Visual); + cx.simulate_keystrokes(": r space d i r / o t h e r . 
r s"); + cx.simulate_keystrokes("enter"); + cx.assert_state("one\ntwo\nthree\nfour\nˇ1\n2\n3\n\nfive", Mode::Normal); + + cx.set_state("«one\ntwo\nthreeˇ»", Mode::Visual); + cx.simulate_keystrokes(": r space d i r / o t h e r . r s"); + cx.simulate_keystrokes("enter"); + cx.assert_state("one\ntwo\nthree\nˇ1\n2\n3\n", Mode::Normal); + + // Empty file + fs.as_fake().insert_file(path, "".into()).await; + cx.set_state("ˇone\ntwo\nthree", Mode::Normal); + cx.simulate_keystrokes(": r space d i r / o t h e r . r s"); + cx.simulate_keystrokes("enter"); + cx.assert_state("one\nˇtwo\nthree", Mode::Normal); + } + #[gpui::test] async fn test_command_quit(cx: &mut TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; From a7d43063d45d616573b26733778fb7af0fbbbf4b Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 19 Dec 2025 13:01:48 -0300 Subject: [PATCH 26/46] workspace: Make title bar pickers render nearby the trigger when mouse-triggered (#45361) From Zed's title bar, you can click on buttons to open three modal pickers: remote projects, projects, and branches. All of these pickers use the modal layer, which by default, renders them centered on the UI. However, a UX issue we've been bothered by is that when you _click_ to open them, they show up just way too far from where your mouse likely is (nearby the trigger you just clicked). So, this PR introduces a `ModalPlacement` enum to the modal layer, so that we can pick between the "centered" and "anchored" options to render the picker. This way, we can make the pickers use anchored positioning when triggered through a mouse click and use the default centered positioning when triggered through the keybinding. 
One thing to note is that the anchored positioning here is not as polished as regular popovers/dropdowns, because it simply uses the x and y coordinates of the click to place the picker as opposed to using GPUI's `Corner` enum, thus making them more connected to their triggers. I chose to do it this way for now because it's a simpler and more contained change, given it wouldn't require a tighter connection at the code level between trigger and picker. But maybe we will want to do that in the near future because we can bake in some other related behaviors like automatically hiding the button trigger tooltip if the picker is open and changing its text color to communicate which button triggered the open picker. https://github.com/user-attachments/assets/30d9c26a-24de-4702-8b7d-018b397f77e1 Release Notes: - Improved the UX of title bar modal pickers (remote projects, projects, and branches) by making them open closer to the trigger when triggering them with the mouse. --- crates/title_bar/src/title_bar.rs | 272 +++++++++++++++++----------- crates/workspace/src/modal_layer.rs | 77 ++++++-- crates/workspace/src/workspace.rs | 17 +- 3 files changed, 240 insertions(+), 126 deletions(-) diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index d7759b0df8019eed2ad59b73bcaffaa3ffcfb866..9b75d35eccafa3c30f23329d9c0ee890ed2b2405 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -166,11 +166,11 @@ impl Render for TitleBar { .when(title_bar_settings.show_project_items, |title_bar| { title_bar .children(self.render_restricted_mode(cx)) - .children(self.render_project_host(cx)) - .child(self.render_project_name(cx)) + .children(self.render_project_host(window, cx)) + .child(self.render_project_name(window, cx)) }) .when(title_bar_settings.show_branch_name, |title_bar| { - title_bar.children(self.render_project_repo(cx)) + title_bar.children(self.render_project_repo(window, cx)) }) }) }) @@ -350,7 +350,14 @@ 
impl TitleBar { .next() } - fn render_remote_project_connection(&self, cx: &mut Context) -> Option { + fn render_remote_project_connection( + &self, + window: &mut Window, + cx: &mut Context, + ) -> Option { + let workspace = self.workspace.clone(); + let is_picker_open = self.is_picker_open(window, cx); + let options = self.project.read(cx).remote_connection_options(cx)?; let host: SharedString = options.display_name().into(); @@ -395,7 +402,7 @@ impl TitleBar { let meta = SharedString::from(meta); Some( - ButtonLike::new("ssh-server-icon") + ButtonLike::new("remote_project") .child( h_flex() .gap_2() @@ -410,26 +417,35 @@ impl TitleBar { ) .child(Label::new(nickname).size(LabelSize::Small).truncate()), ) - .tooltip(move |_window, cx| { - Tooltip::with_meta( - tooltip_title, - Some(&OpenRemote { - from_existing_connection: false, - create_new_window: false, - }), - meta.clone(), - cx, - ) + .when(!is_picker_open, |this| { + this.tooltip(move |_window, cx| { + Tooltip::with_meta( + tooltip_title, + Some(&OpenRemote { + from_existing_connection: false, + create_new_window: false, + }), + meta.clone(), + cx, + ) + }) }) - .on_click(|_, window, cx| { - window.dispatch_action( - OpenRemote { - from_existing_connection: false, - create_new_window: false, - } - .boxed_clone(), - cx, - ); + .on_click(move |event, window, cx| { + let position = event.position(); + let _ = workspace.update(cx, |this, cx| { + this.set_next_modal_placement(workspace::ModalPlacement::Anchored { + position, + }); + + window.dispatch_action( + OpenRemote { + from_existing_connection: false, + create_new_window: false, + } + .boxed_clone(), + cx, + ); + }); }) .into_any_element(), ) @@ -481,9 +497,13 @@ impl TitleBar { } } - pub fn render_project_host(&self, cx: &mut Context) -> Option { + pub fn render_project_host( + &self, + window: &mut Window, + cx: &mut Context, + ) -> Option { if self.project.read(cx).is_via_remote_server() { - return self.render_remote_project_connection(cx); + return 
self.render_remote_project_connection(window, cx); } if self.project.read(cx).is_disconnected(cx) { @@ -491,7 +511,6 @@ impl TitleBar { Button::new("disconnected", "Disconnected") .disabled(true) .color(Color::Disabled) - .style(ButtonStyle::Subtle) .label_size(LabelSize::Small) .into_any_element(), ); @@ -504,15 +523,19 @@ impl TitleBar { .read(cx) .participant_indices() .get(&host_user.id)?; + Some( Button::new("project_owner_trigger", host_user.github_login.clone()) .color(Color::Player(participant_index.0)) - .style(ButtonStyle::Subtle) .label_size(LabelSize::Small) - .tooltip(Tooltip::text(format!( - "{} is sharing this project. Click to follow.", - host_user.github_login - ))) + .tooltip(move |_, cx| { + let tooltip_title = format!( + "{} is sharing this project. Click to follow.", + host_user.github_login + ); + + Tooltip::with_meta(tooltip_title, None, "Click to Follow", cx) + }) .on_click({ let host_peer_id = host.peer_id; cx.listener(move |this, _, window, cx| { @@ -527,7 +550,14 @@ impl TitleBar { ) } - pub fn render_project_name(&self, cx: &mut Context) -> impl IntoElement { + pub fn render_project_name( + &self, + window: &mut Window, + cx: &mut Context, + ) -> impl IntoElement { + let workspace = self.workspace.clone(); + let is_picker_open = self.is_picker_open(window, cx); + let name = self.project_name(cx); let is_project_selected = name.is_some(); let name = if let Some(name) = name { @@ -537,19 +567,25 @@ impl TitleBar { }; Button::new("project_name_trigger", name) - .when(!is_project_selected, |b| b.color(Color::Muted)) - .style(ButtonStyle::Subtle) .label_size(LabelSize::Small) - .tooltip(move |_window, cx| { - Tooltip::for_action( - "Recent Projects", - &zed_actions::OpenRecent { - create_new_window: false, - }, - cx, - ) + .when(!is_project_selected, |s| s.color(Color::Muted)) + .when(!is_picker_open, |this| { + this.tooltip(move |_window, cx| { + Tooltip::for_action( + "Recent Projects", + &zed_actions::OpenRecent { + create_new_window: 
false, + }, + cx, + ) + }) }) - .on_click(cx.listener(move |_, _, window, cx| { + .on_click(move |event, window, cx| { + let position = event.position(); + let _ = workspace.update(cx, |this, _cx| { + this.set_next_modal_placement(workspace::ModalPlacement::Anchored { position }) + }); + window.dispatch_action( OpenRecent { create_new_window: false, @@ -557,84 +593,102 @@ impl TitleBar { .boxed_clone(), cx, ); - })) + }) } - pub fn render_project_repo(&self, cx: &mut Context) -> Option { - let settings = TitleBarSettings::get_global(cx); + pub fn render_project_repo( + &self, + window: &mut Window, + cx: &mut Context, + ) -> Option { let repository = self.project.read(cx).active_repository(cx)?; let repository_count = self.project.read(cx).repositories(cx).len(); let workspace = self.workspace.upgrade()?; - let repo = repository.read(cx); - let branch_name = repo - .branch - .as_ref() - .map(|branch| branch.name()) - .map(|name| util::truncate_and_trailoff(name, MAX_BRANCH_NAME_LENGTH)) - .or_else(|| { - repo.head_commit.as_ref().map(|commit| { - commit - .sha - .chars() - .take(MAX_SHORT_SHA_LENGTH) - .collect::() - }) - })?; - let project_name = self.project_name(cx); - let repo_name = repo - .work_directory_abs_path - .file_name() - .and_then(|name| name.to_str()) - .map(SharedString::new); - let show_repo_name = - repository_count > 1 && repo.branch.is_some() && repo_name != project_name; - let branch_name = if let Some(repo_name) = repo_name.filter(|_| show_repo_name) { - format!("{repo_name}/{branch_name}") - } else { - branch_name + + let (branch_name, icon_info) = { + let repo = repository.read(cx); + let branch_name = repo + .branch + .as_ref() + .map(|branch| branch.name()) + .map(|name| util::truncate_and_trailoff(name, MAX_BRANCH_NAME_LENGTH)) + .or_else(|| { + repo.head_commit.as_ref().map(|commit| { + commit + .sha + .chars() + .take(MAX_SHORT_SHA_LENGTH) + .collect::() + }) + }); + + let branch_name = branch_name?; + + let project_name = 
self.project_name(cx); + let repo_name = repo + .work_directory_abs_path + .file_name() + .and_then(|name| name.to_str()) + .map(SharedString::new); + let show_repo_name = + repository_count > 1 && repo.branch.is_some() && repo_name != project_name; + let branch_name = if let Some(repo_name) = repo_name.filter(|_| show_repo_name) { + format!("{repo_name}/{branch_name}") + } else { + branch_name + }; + + let status = repo.status_summary(); + let tracked = status.index + status.worktree; + let icon_info = if status.conflict > 0 { + (IconName::Warning, Color::VersionControlConflict) + } else if tracked.modified > 0 { + (IconName::SquareDot, Color::VersionControlModified) + } else if tracked.added > 0 || status.untracked > 0 { + (IconName::SquarePlus, Color::VersionControlAdded) + } else if tracked.deleted > 0 { + (IconName::SquareMinus, Color::VersionControlDeleted) + } else { + (IconName::GitBranch, Color::Muted) + }; + + (branch_name, icon_info) }; + let is_picker_open = self.is_picker_open(window, cx); + let settings = TitleBarSettings::get_global(cx); + Some( Button::new("project_branch_trigger", branch_name) - .color(Color::Muted) - .style(ButtonStyle::Subtle) .label_size(LabelSize::Small) - .tooltip(move |_window, cx| { - Tooltip::with_meta( - "Recent Branches", - Some(&zed_actions::git::Branch), - "Local branches only", - cx, - ) - }) - .on_click(move |_, window, cx| { - let _ = workspace.update(cx, |this, cx| { - window.focus(&this.active_pane().focus_handle(cx), cx); - window.dispatch_action(zed_actions::git::Branch.boxed_clone(), cx); - }); + .color(Color::Muted) + .when(!is_picker_open, |this| { + this.tooltip(move |_window, cx| { + Tooltip::with_meta( + "Recent Branches", + Some(&zed_actions::git::Branch), + "Local branches only", + cx, + ) + }) }) .when(settings.show_branch_icon, |branch_button| { - let (icon, icon_color) = { - let status = repo.status_summary(); - let tracked = status.index + status.worktree; - if status.conflict > 0 { - 
(IconName::Warning, Color::VersionControlConflict) - } else if tracked.modified > 0 { - (IconName::SquareDot, Color::VersionControlModified) - } else if tracked.added > 0 || status.untracked > 0 { - (IconName::SquarePlus, Color::VersionControlAdded) - } else if tracked.deleted > 0 { - (IconName::SquareMinus, Color::VersionControlDeleted) - } else { - (IconName::GitBranch, Color::Muted) - } - }; - + let (icon, icon_color) = icon_info; branch_button .icon(icon) .icon_position(IconPosition::Start) .icon_color(icon_color) .icon_size(IconSize::Indicator) + }) + .on_click(move |event, window, cx| { + let position = event.position(); + let _ = workspace.update(cx, |this, cx| { + this.set_next_modal_placement(workspace::ModalPlacement::Anchored { + position, + }); + window.focus(&this.active_pane().focus_handle(cx), cx); + window.dispatch_action(zed_actions::git::Branch.boxed_clone(), cx); + }); }), ) } @@ -726,7 +780,7 @@ impl TitleBar { pub fn render_sign_in_button(&mut self, _: &mut Context) -> Button { let client = self.client.clone(); - Button::new("sign_in", "Sign in") + Button::new("sign_in", "Sign In") .label_size(LabelSize::Small) .on_click(move |_, window, cx| { let client = client.clone(); @@ -848,4 +902,10 @@ impl TitleBar { }) .anchor(gpui::Corner::TopRight) } + + fn is_picker_open(&self, window: &mut Window, cx: &mut Context) -> bool { + self.workspace + .update(cx, |workspace, cx| workspace.has_active_modal(window, cx)) + .unwrap_or(false) + } } diff --git a/crates/workspace/src/modal_layer.rs b/crates/workspace/src/modal_layer.rs index 58667e7ffa8ad4fe5a22d293e4fc4aa71015a3bd..4087e1a398ac2b89257fea6b4dce53278d0872a8 100644 --- a/crates/workspace/src/modal_layer.rs +++ b/crates/workspace/src/modal_layer.rs @@ -1,9 +1,18 @@ use gpui::{ AnyView, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable as _, ManagedView, - MouseButton, Subscription, + MouseButton, Pixels, Point, Subscription, }; use ui::prelude::*; +#[derive(Debug, Clone, Copy, Default)] 
+pub enum ModalPlacement { + #[default] + Centered, + Anchored { + position: Point, + }, +} + #[derive(Debug)] pub enum DismissDecision { Dismiss(bool), @@ -58,6 +67,7 @@ pub struct ActiveModal { _subscriptions: [Subscription; 2], previous_focus_handle: Option, focus_handle: FocusHandle, + placement: ModalPlacement, } pub struct ModalLayer { @@ -87,6 +97,19 @@ impl ModalLayer { where V: ModalView, B: FnOnce(&mut Window, &mut Context) -> V, + { + self.toggle_modal_with_placement(window, cx, ModalPlacement::Centered, build_view); + } + + pub fn toggle_modal_with_placement( + &mut self, + window: &mut Window, + cx: &mut Context, + placement: ModalPlacement, + build_view: B, + ) where + V: ModalView, + B: FnOnce(&mut Window, &mut Context) -> V, { if let Some(active_modal) = &self.active_modal { let is_close = active_modal.modal.view().downcast::().is_ok(); @@ -96,12 +119,17 @@ impl ModalLayer { } } let new_modal = cx.new(|cx| build_view(window, cx)); - self.show_modal(new_modal, window, cx); + self.show_modal(new_modal, placement, window, cx); cx.emit(ModalOpenedEvent); } - fn show_modal(&mut self, new_modal: Entity, window: &mut Window, cx: &mut Context) - where + fn show_modal( + &mut self, + new_modal: Entity, + placement: ModalPlacement, + window: &mut Window, + cx: &mut Context, + ) where V: ModalView, { let focus_handle = cx.focus_handle(); @@ -123,6 +151,7 @@ impl ModalLayer { ], previous_focus_handle: window.focused(cx), focus_handle, + placement, }); cx.defer_in(window, move |_, window, cx| { window.focus(&new_modal.focus_handle(cx), cx); @@ -183,6 +212,30 @@ impl Render for ModalLayer { return active_modal.modal.view().into_any_element(); } + let content = h_flex() + .occlude() + .child(active_modal.modal.view()) + .on_mouse_down(MouseButton::Left, |_, _, cx| { + cx.stop_propagation(); + }); + + let positioned = match active_modal.placement { + ModalPlacement::Centered => v_flex() + .h(px(0.0)) + .top_20() + .items_center() + 
.track_focus(&active_modal.focus_handle) + .child(content) + .into_any_element(), + ModalPlacement::Anchored { position } => div() + .absolute() + .left(position.x) + .top(position.y - px(20.)) + .track_focus(&active_modal.focus_handle) + .child(content) + .into_any_element(), + }; + div() .absolute() .size_full() @@ -199,21 +252,7 @@ impl Render for ModalLayer { this.hide_modal(window, cx); }), ) - .child( - v_flex() - .h(px(0.0)) - .top_20() - .items_center() - .track_focus(&active_modal.focus_handle) - .child( - h_flex() - .occlude() - .child(active_modal.modal.view()) - .on_mouse_down(MouseButton::Left, |_, _, cx| { - cx.stop_propagation(); - }), - ), - ) + .child(positioned) .into_any_element() } } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 53b0cc0623fa4b3ce7de5f1d8e3fd2262210a09a..c88386281e73b243dbddd6cb00c80fb26595409e 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -1204,6 +1204,7 @@ pub struct Workspace { last_open_dock_positions: Vec, removing: bool, utility_panes: UtilityPaneState, + next_modal_placement: Option, } impl EventEmitter for Workspace {} @@ -1620,6 +1621,7 @@ impl Workspace { last_open_dock_positions: Vec::new(), removing: false, utility_panes: UtilityPaneState::default(), + next_modal_placement: None, } } @@ -6326,12 +6328,25 @@ impl Workspace { self.modal_layer.read(cx).active_modal() } + pub fn is_modal_open(&self, cx: &App) -> bool { + self.modal_layer.read(cx).active_modal::().is_some() + } + + pub fn set_next_modal_placement(&mut self, placement: ModalPlacement) { + self.next_modal_placement = Some(placement); + } + + fn take_next_modal_placement(&mut self) -> ModalPlacement { + self.next_modal_placement.take().unwrap_or_default() + } + pub fn toggle_modal(&mut self, window: &mut Window, cx: &mut App, build: B) where B: FnOnce(&mut Window, &mut Context) -> V, { + let placement = self.take_next_modal_placement(); self.modal_layer.update(cx, 
|modal_layer, cx| { - modal_layer.toggle_modal(window, cx, build) + modal_layer.toggle_modal_with_placement(window, cx, placement, build) }) } From ea34cc5324c12ee44d7128f15f9bc5de42d3f358 Mon Sep 17 00:00:00 2001 From: feeiyu <158308373+feeiyu@users.noreply.github.com> Date: Sat, 20 Dec 2025 00:06:16 +0800 Subject: [PATCH 27/46] Fix terminal doesn't switch to project directory when opening remote project on Windows (#45328) Closes #45253 Release Notes: - Fixed terminal doesn't switch to project directory when opening remote project on Windows --- crates/remote/src/transport/ssh.rs | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/crates/remote/src/transport/ssh.rs b/crates/remote/src/transport/ssh.rs index 6c8eb49c1c2158322a275e064162b53e2f5f3d5e..d13e1c4934947e39b08e05eb32e2787548e621e1 100644 --- a/crates/remote/src/transport/ssh.rs +++ b/crates/remote/src/transport/ssh.rs @@ -32,8 +32,7 @@ use tempfile::TempDir; use util::{ paths::{PathStyle, RemotePathBuf}, rel_path::RelPath, - shell::{Shell, ShellKind}, - shell_builder::ShellBuilder, + shell::ShellKind, }; pub(crate) struct SshRemoteConnection { @@ -1544,8 +1543,6 @@ fn build_command( } else { write!(exec, "{ssh_shell} -l")?; }; - let (command, command_args) = ShellBuilder::new(&Shell::Program(ssh_shell.to_owned()), false) - .build(Some(exec.clone()), &[]); let mut args = Vec::new(); args.extend(ssh_args); @@ -1556,8 +1553,7 @@ fn build_command( } args.push("-t".into()); - args.push(command); - args.extend(command_args); + args.push(exec); Ok(CommandTemplate { program: "ssh".into(), @@ -1597,9 +1593,6 @@ mod tests { "-p", "2222", "-t", - "/bin/fish", - "-i", - "-c", "cd \"$HOME/work\" && exec env INPUT_VA=val remote_program arg1 arg2" ] ); @@ -1632,9 +1625,6 @@ mod tests { "-L", "1:foo:2", "-t", - "/bin/fish", - "-i", - "-c", "cd && exec env INPUT_VA=val /bin/fish -l" ] ); From a7e07010e58bc53bcc8a33adbb0e5e31d251c432 Mon Sep 17 00:00:00 2001 From: "Raduan A." 
<36044389+0xRaduan@users.noreply.github.com> Date: Fri, 19 Dec 2025 17:14:02 +0100 Subject: [PATCH 28/46] editor: Add automatic markdown list continuation on newline and indent on tab (#42800) Closes #5089 Release notes: - Markdown lists now continue automatically when you press Enter (unordered, ordered, and task lists). This can be configured with `extend_list_on_newline` (default: true). - You can now indent list markers with Tab to quickly create nested lists. This can be configured with `indent_list_on_tab` (default: true). --------- Co-authored-by: Claude Co-authored-by: Smit Barmase --- assets/settings/default.json | 4 + crates/editor/src/editor.rs | 459 +++++++++++--- crates/editor/src/editor_tests.rs | 588 ++++++++++++++++-- crates/language/src/language.rs | 45 ++ crates/language/src/language_settings.rs | 6 + crates/languages/src/markdown/config.toml | 3 + .../settings/src/settings_content/language.rs | 8 + crates/settings/src/vscode_import.rs | 2 + docs/src/configuring-zed.md | 20 + docs/src/languages/markdown.md | 34 + 10 files changed, 1021 insertions(+), 148 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 154fe2d6e34e6573e95e7ffedbb46df8bbf10634..746ccb5986d0fd1d5ef11df525303e344a7393d2 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -1178,6 +1178,10 @@ "remove_trailing_whitespace_on_save": true, // Whether to start a new line with a comment when a previous line is a comment as well. "extend_comment_on_newline": true, + // Whether to continue markdown lists when pressing enter. + "extend_list_on_newline": true, + // Whether to indent list items when pressing tab after a list marker. + "indent_list_on_tab": true, // Removes any lines containing only whitespace at the end of the file and // ensures just one newline at the end. 
"ensure_final_newline_on_save": true, diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 8560705802264dad55b87dbf21e1f9aa7625edf8..6e4744335b8e9fba50a6c2c8b241607b0e05d276 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -163,6 +163,7 @@ use project::{ project_settings::{DiagnosticSeverity, GoToDiagnosticSeverityFilter, ProjectSettings}, }; use rand::seq::SliceRandom; +use regex::Regex; use rpc::{ErrorCode, ErrorExt, proto::PeerId}; use scroll::{Autoscroll, OngoingScroll, ScrollAnchor, ScrollManager}; use selections_collection::{MutableSelectionsCollection, SelectionsCollection}; @@ -4787,82 +4788,146 @@ impl Editor { let end = selection.end; let selection_is_empty = start == end; let language_scope = buffer.language_scope_at(start); - let (comment_delimiter, doc_delimiter, newline_formatting) = - if let Some(language) = &language_scope { - let mut newline_formatting = - NewlineFormatting::new(&buffer, start..end, language); - - // Comment extension on newline is allowed only for cursor selections - let comment_delimiter = maybe!({ - if !selection_is_empty { - return None; - } + let (delimiter, newline_config) = if let Some(language) = &language_scope { + let needs_extra_newline = NewlineConfig::insert_extra_newline_brackets( + &buffer, + start..end, + language, + ) + || NewlineConfig::insert_extra_newline_tree_sitter( + &buffer, + start..end, + ); - if !multi_buffer.language_settings(cx).extend_comment_on_newline - { - return None; - } + let mut newline_config = NewlineConfig::Newline { + additional_indent: IndentSize::spaces(0), + extra_line_additional_indent: if needs_extra_newline { + Some(IndentSize::spaces(0)) + } else { + None + }, + prevent_auto_indent: false, + }; - return comment_delimiter_for_newline( - &start_point, - &buffer, - language, - ); - }); + let comment_delimiter = maybe!({ + if !selection_is_empty { + return None; + } - let doc_delimiter = maybe!({ - if !selection_is_empty { - return 
None; - } + if !multi_buffer.language_settings(cx).extend_comment_on_newline { + return None; + } - if !multi_buffer.language_settings(cx).extend_comment_on_newline - { - return None; - } + return comment_delimiter_for_newline( + &start_point, + &buffer, + language, + ); + }); - return documentation_delimiter_for_newline( - &start_point, - &buffer, - language, - &mut newline_formatting, - ); - }); + let doc_delimiter = maybe!({ + if !selection_is_empty { + return None; + } - (comment_delimiter, doc_delimiter, newline_formatting) - } else { - (None, None, NewlineFormatting::default()) - }; + if !multi_buffer.language_settings(cx).extend_comment_on_newline { + return None; + } - let prevent_auto_indent = doc_delimiter.is_some(); - let delimiter = comment_delimiter.or(doc_delimiter); + return documentation_delimiter_for_newline( + &start_point, + &buffer, + language, + &mut newline_config, + ); + }); - let capacity_for_delimiter = - delimiter.as_deref().map(str::len).unwrap_or_default(); - let mut new_text = String::with_capacity( - 1 + capacity_for_delimiter - + existing_indent.len as usize - + newline_formatting.indent_on_newline.len as usize - + newline_formatting.indent_on_extra_newline.len as usize, - ); - new_text.push('\n'); - new_text.extend(existing_indent.chars()); - new_text.extend(newline_formatting.indent_on_newline.chars()); + let list_delimiter = maybe!({ + if !selection_is_empty { + return None; + } - if let Some(delimiter) = &delimiter { - new_text.push_str(delimiter); - } + if !multi_buffer.language_settings(cx).extend_list_on_newline { + return None; + } - if newline_formatting.insert_extra_newline { - new_text.push('\n'); - new_text.extend(existing_indent.chars()); - new_text.extend(newline_formatting.indent_on_extra_newline.chars()); - } + return list_delimiter_for_newline( + &start_point, + &buffer, + language, + &mut newline_config, + ); + }); + + ( + comment_delimiter.or(doc_delimiter).or(list_delimiter), + newline_config, + ) + } else { + ( + 
None, + NewlineConfig::Newline { + additional_indent: IndentSize::spaces(0), + extra_line_additional_indent: None, + prevent_auto_indent: false, + }, + ) + }; + + let (edit_start, new_text, prevent_auto_indent) = match &newline_config { + NewlineConfig::ClearCurrentLine => { + let row_start = + buffer.point_to_offset(Point::new(start_point.row, 0)); + (row_start, String::new(), false) + } + NewlineConfig::UnindentCurrentLine { continuation } => { + let row_start = + buffer.point_to_offset(Point::new(start_point.row, 0)); + let tab_size = buffer.language_settings_at(start, cx).tab_size; + let tab_size_indent = IndentSize::spaces(tab_size.get()); + let reduced_indent = + existing_indent.with_delta(Ordering::Less, tab_size_indent); + let mut new_text = String::new(); + new_text.extend(reduced_indent.chars()); + new_text.push_str(continuation); + (row_start, new_text, true) + } + NewlineConfig::Newline { + additional_indent, + extra_line_additional_indent, + prevent_auto_indent, + } => { + let capacity_for_delimiter = + delimiter.as_deref().map(str::len).unwrap_or_default(); + let extra_line_len = extra_line_additional_indent + .map(|i| 1 + existing_indent.len as usize + i.len as usize) + .unwrap_or(0); + let mut new_text = String::with_capacity( + 1 + capacity_for_delimiter + + existing_indent.len as usize + + additional_indent.len as usize + + extra_line_len, + ); + new_text.push('\n'); + new_text.extend(existing_indent.chars()); + new_text.extend(additional_indent.chars()); + if let Some(delimiter) = &delimiter { + new_text.push_str(delimiter); + } + if let Some(extra_indent) = extra_line_additional_indent { + new_text.push('\n'); + new_text.extend(existing_indent.chars()); + new_text.extend(extra_indent.chars()); + } + (start, new_text, *prevent_auto_indent) + } + }; let anchor = buffer.anchor_after(end); let new_selection = selection.map(|_| anchor); ( - ((start..end, new_text), prevent_auto_indent), - (newline_formatting.insert_extra_newline, new_selection), + 
((edit_start..end, new_text), prevent_auto_indent), + (newline_config.has_extra_line(), new_selection), ) }) .unzip() @@ -10387,6 +10452,22 @@ impl Editor { } prev_edited_row = selection.end.row; + // If cursor is after a list prefix, make selection non-empty to trigger line indent + if selection.is_empty() { + let cursor = selection.head(); + let settings = buffer.language_settings_at(cursor, cx); + if settings.indent_list_on_tab { + if let Some(language) = snapshot.language_scope_at(Point::new(cursor.row, 0)) { + if is_list_prefix_row(MultiBufferRow(cursor.row), &snapshot, &language) { + row_delta = Self::indent_selection( + buffer, &snapshot, selection, &mut edits, row_delta, cx, + ); + continue; + } + } + } + } + // If the selection is non-empty, then increase the indentation of the selected lines. if !selection.is_empty() { row_delta = @@ -23355,7 +23436,7 @@ fn documentation_delimiter_for_newline( start_point: &Point, buffer: &MultiBufferSnapshot, language: &LanguageScope, - newline_formatting: &mut NewlineFormatting, + newline_config: &mut NewlineConfig, ) -> Option> { let BlockCommentConfig { start: start_tag, @@ -23407,6 +23488,9 @@ fn documentation_delimiter_for_newline( } }; + let mut needs_extra_line = false; + let mut extra_line_additional_indent = IndentSize::spaces(0); + let cursor_is_before_end_tag_if_exists = { let mut char_position = 0u32; let mut end_tag_offset = None; @@ -23424,11 +23508,11 @@ fn documentation_delimiter_for_newline( let cursor_is_before_end_tag = column <= end_tag_offset; if cursor_is_after_start_tag { if cursor_is_before_end_tag { - newline_formatting.insert_extra_newline = true; + needs_extra_line = true; } let cursor_is_at_start_of_end_tag = column == end_tag_offset; if cursor_is_at_start_of_end_tag { - newline_formatting.indent_on_extra_newline.len = *len; + extra_line_additional_indent.len = *len; } } cursor_is_before_end_tag @@ -23440,39 +23524,240 @@ fn documentation_delimiter_for_newline( if (cursor_is_after_start_tag || 
cursor_is_after_delimiter) && cursor_is_before_end_tag_if_exists { - if cursor_is_after_start_tag { - newline_formatting.indent_on_newline.len = *len; - } + let additional_indent = if cursor_is_after_start_tag { + IndentSize::spaces(*len) + } else { + IndentSize::spaces(0) + }; + + *newline_config = NewlineConfig::Newline { + additional_indent, + extra_line_additional_indent: if needs_extra_line { + Some(extra_line_additional_indent) + } else { + None + }, + prevent_auto_indent: true, + }; Some(delimiter.clone()) } else { None } } -#[derive(Debug, Default)] -struct NewlineFormatting { - insert_extra_newline: bool, - indent_on_newline: IndentSize, - indent_on_extra_newline: IndentSize, +const ORDERED_LIST_MAX_MARKER_LEN: usize = 16; + +fn list_delimiter_for_newline( + start_point: &Point, + buffer: &MultiBufferSnapshot, + language: &LanguageScope, + newline_config: &mut NewlineConfig, +) -> Option> { + let (snapshot, range) = buffer.buffer_line_for_row(MultiBufferRow(start_point.row))?; + + let num_of_whitespaces = snapshot + .chars_for_range(range.clone()) + .take_while(|c| c.is_whitespace()) + .count(); + + let task_list_entries: Vec<_> = language + .task_list() + .into_iter() + .flat_map(|config| { + config + .prefixes + .iter() + .map(|prefix| (prefix.as_ref(), config.continuation.as_ref())) + }) + .collect(); + let unordered_list_entries: Vec<_> = language + .unordered_list() + .iter() + .map(|marker| (marker.as_ref(), marker.as_ref())) + .collect(); + + let all_entries: Vec<_> = task_list_entries + .into_iter() + .chain(unordered_list_entries) + .collect(); + + if let Some(max_prefix_len) = all_entries.iter().map(|(p, _)| p.len()).max() { + let candidate: String = snapshot + .chars_for_range(range.clone()) + .skip(num_of_whitespaces) + .take(max_prefix_len) + .collect(); + + if let Some((prefix, continuation)) = all_entries + .iter() + .filter(|(prefix, _)| candidate.starts_with(*prefix)) + .max_by_key(|(prefix, _)| prefix.len()) + { + let end_of_prefix = 
num_of_whitespaces + prefix.len(); + let cursor_is_after_prefix = end_of_prefix <= start_point.column as usize; + let has_content_after_marker = snapshot + .chars_for_range(range) + .skip(end_of_prefix) + .any(|c| !c.is_whitespace()); + + if has_content_after_marker && cursor_is_after_prefix { + return Some((*continuation).into()); + } + + if start_point.column as usize == end_of_prefix { + if num_of_whitespaces == 0 { + *newline_config = NewlineConfig::ClearCurrentLine; + } else { + *newline_config = NewlineConfig::UnindentCurrentLine { + continuation: (*continuation).into(), + }; + } + } + + return None; + } + } + + let candidate: String = snapshot + .chars_for_range(range.clone()) + .skip(num_of_whitespaces) + .take(ORDERED_LIST_MAX_MARKER_LEN) + .collect(); + + for ordered_config in language.ordered_list() { + let regex = match Regex::new(&ordered_config.pattern) { + Ok(r) => r, + Err(_) => continue, + }; + + if let Some(captures) = regex.captures(&candidate) { + let full_match = captures.get(0)?; + let marker_len = full_match.len(); + let end_of_prefix = num_of_whitespaces + marker_len; + let cursor_is_after_prefix = end_of_prefix <= start_point.column as usize; + + let has_content_after_marker = snapshot + .chars_for_range(range) + .skip(end_of_prefix) + .any(|c| !c.is_whitespace()); + + if has_content_after_marker && cursor_is_after_prefix { + let number: u32 = captures.get(1)?.as_str().parse().ok()?; + let continuation = ordered_config + .format + .replace("{1}", &(number + 1).to_string()); + return Some(continuation.into()); + } + + if start_point.column as usize == end_of_prefix { + let continuation = ordered_config.format.replace("{1}", "1"); + if num_of_whitespaces == 0 { + *newline_config = NewlineConfig::ClearCurrentLine; + } else { + *newline_config = NewlineConfig::UnindentCurrentLine { + continuation: continuation.into(), + }; + } + } + + return None; + } + } + + None } -impl NewlineFormatting { - fn new( - buffer: &MultiBufferSnapshot, - range: 
Range, - language: &LanguageScope, - ) -> Self { - Self { - insert_extra_newline: Self::insert_extra_newline_brackets( - buffer, - range.clone(), - language, - ) || Self::insert_extra_newline_tree_sitter(buffer, range), - indent_on_newline: IndentSize::spaces(0), - indent_on_extra_newline: IndentSize::spaces(0), +fn is_list_prefix_row( + row: MultiBufferRow, + buffer: &MultiBufferSnapshot, + language: &LanguageScope, +) -> bool { + let Some((snapshot, range)) = buffer.buffer_line_for_row(row) else { + return false; + }; + + let num_of_whitespaces = snapshot + .chars_for_range(range.clone()) + .take_while(|c| c.is_whitespace()) + .count(); + + let task_list_prefixes: Vec<_> = language + .task_list() + .into_iter() + .flat_map(|config| { + config + .prefixes + .iter() + .map(|p| p.as_ref()) + .collect::>() + }) + .collect(); + let unordered_list_markers: Vec<_> = language + .unordered_list() + .iter() + .map(|marker| marker.as_ref()) + .collect(); + let all_prefixes: Vec<_> = task_list_prefixes + .into_iter() + .chain(unordered_list_markers) + .collect(); + if let Some(max_prefix_len) = all_prefixes.iter().map(|p| p.len()).max() { + let candidate: String = snapshot + .chars_for_range(range.clone()) + .skip(num_of_whitespaces) + .take(max_prefix_len) + .collect(); + if all_prefixes + .iter() + .any(|prefix| candidate.starts_with(*prefix)) + { + return true; } } + let ordered_list_candidate: String = snapshot + .chars_for_range(range) + .skip(num_of_whitespaces) + .take(ORDERED_LIST_MAX_MARKER_LEN) + .collect(); + for ordered_config in language.ordered_list() { + let regex = match Regex::new(&ordered_config.pattern) { + Ok(r) => r, + Err(_) => continue, + }; + if let Some(captures) = regex.captures(&ordered_list_candidate) { + return captures.get(0).is_some(); + } + } + + false +} + +#[derive(Debug)] +enum NewlineConfig { + /// Insert newline with optional additional indent and optional extra blank line + Newline { + additional_indent: IndentSize, + 
extra_line_additional_indent: Option, + prevent_auto_indent: bool, + }, + /// Clear the current line + ClearCurrentLine, + /// Unindent the current line and add continuation + UnindentCurrentLine { continuation: Arc }, +} + +impl NewlineConfig { + fn has_extra_line(&self) -> bool { + matches!( + self, + Self::Newline { + extra_line_additional_indent: Some(_), + .. + } + ) + } + fn insert_extra_newline_brackets( buffer: &MultiBufferSnapshot, range: Range, diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index c0112c5eda406c9cb3b3b9d004d20853b710f6e1..87674d8c507b1c294779b1f9ddba458320fc7671 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -28021,7 +28021,7 @@ async fn test_markdown_indents(cx: &mut gpui::TestAppContext) { " }); - // Case 2: Test adding new line after nested list preserves indent of previous line + // Case 2: Test adding new line after nested list continues the list with unchecked task cx.set_state(&indoc! {" - [ ] Item 1 - [ ] Item 1.a @@ -28038,32 +28038,12 @@ async fn test_markdown_indents(cx: &mut gpui::TestAppContext) { - [x] Item 2 - [x] Item 2.a - [x] Item 2.b - ˇ" + - [ ] ˇ" }); - // Case 3: Test adding a new nested list item preserves indent - cx.set_state(&indoc! {" - - [ ] Item 1 - - [ ] Item 1.a - - [x] Item 2 - - [x] Item 2.a - - [x] Item 2.b - ˇ" - }); - cx.update_editor(|editor, window, cx| { - editor.handle_input("-", window, cx); - }); - cx.run_until_parked(); - cx.assert_editor_state(indoc! {" - - [ ] Item 1 - - [ ] Item 1.a - - [x] Item 2 - - [x] Item 2.a - - [x] Item 2.b - -ˇ" - }); + // Case 3: Test adding content to continued list item cx.update_editor(|editor, window, cx| { - editor.handle_input(" [x] Item 2.c", window, cx); + editor.handle_input("Item 2.c", window, cx); }); cx.run_until_parked(); cx.assert_editor_state(indoc! 
{" @@ -28072,10 +28052,10 @@ async fn test_markdown_indents(cx: &mut gpui::TestAppContext) { - [x] Item 2 - [x] Item 2.a - [x] Item 2.b - - [x] Item 2.cˇ" + - [ ] Item 2.cˇ" }); - // Case 4: Test adding new line after nested ordered list preserves indent of previous line + // Case 4: Test adding new line after nested ordered list continues with next number cx.set_state(indoc! {" 1. Item 1 1. Item 1.a @@ -28092,44 +28072,12 @@ async fn test_markdown_indents(cx: &mut gpui::TestAppContext) { 2. Item 2 1. Item 2.a 2. Item 2.b - ˇ" + 3. ˇ" }); - // Case 5: Adding new ordered list item preserves indent - cx.set_state(indoc! {" - 1. Item 1 - 1. Item 1.a - 2. Item 2 - 1. Item 2.a - 2. Item 2.b - ˇ" - }); - cx.update_editor(|editor, window, cx| { - editor.handle_input("3", window, cx); - }); - cx.run_until_parked(); - cx.assert_editor_state(indoc! {" - 1. Item 1 - 1. Item 1.a - 2. Item 2 - 1. Item 2.a - 2. Item 2.b - 3ˇ" - }); - cx.update_editor(|editor, window, cx| { - editor.handle_input(".", window, cx); - }); - cx.run_until_parked(); - cx.assert_editor_state(indoc! {" - 1. Item 1 - 1. Item 1.a - 2. Item 2 - 1. Item 2.a - 2. Item 2.b - 3.ˇ" - }); + // Case 5: Adding content to continued ordered list item cx.update_editor(|editor, window, cx| { - editor.handle_input(" Item 2.c", window, cx); + editor.handle_input("Item 2.c", window, cx); }); cx.run_until_parked(); cx.assert_editor_state(indoc! 
{" @@ -29497,6 +29445,524 @@ async fn test_find_references_single_case(cx: &mut TestAppContext) { cx.assert_editor_state(after); } +#[gpui::test] +async fn test_newline_task_list_continuation(cx: &mut TestAppContext) { + init_test(cx, |settings| { + settings.defaults.tab_size = Some(2.try_into().unwrap()); + }); + + let markdown_language = languages::language("markdown", tree_sitter_md::LANGUAGE.into()); + let mut cx = EditorTestContext::new(cx).await; + cx.update_buffer(|buffer, cx| buffer.set_language(Some(markdown_language), cx)); + + // Case 1: Adding newline after (whitespace + prefix + any non-whitespace) adds marker + cx.set_state(indoc! {" + - [ ] taskˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + - [ ] task + - [ ] ˇ + "}); + + // Case 2: Works with checked task items too + cx.set_state(indoc! {" + - [x] completed taskˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + - [x] completed task + - [ ] ˇ + "}); + + // Case 3: Cursor position doesn't matter - content after marker is what counts + cx.set_state(indoc! {" + - [ ] taˇsk + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + - [ ] ta + - [ ] ˇsk + "}); + + // Case 4: Adding newline after (whitespace + prefix + some whitespace) does NOT add marker + cx.set_state(indoc! {" + - [ ] ˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state( + indoc! {" + - [ ]$$ + ˇ + "} + .replace("$", " ") + .as_str(), + ); + + // Case 5: Adding newline with content adds marker preserving indentation + cx.set_state(indoc! 
{" + - [ ] task + - [ ] indentedˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + - [ ] task + - [ ] indented + - [ ] ˇ + "}); + + // Case 6: Adding newline with cursor right after prefix, unindents + cx.set_state(indoc! {" + - [ ] task + - [ ] sub task + - [ ] ˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + - [ ] task + - [ ] sub task + - [ ] ˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + + // Case 7: Adding newline with cursor right after prefix, removes marker + cx.assert_editor_state(indoc! {" + - [ ] task + - [ ] sub task + - [ ] ˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + - [ ] task + - [ ] sub task + ˇ + "}); + + // Case 8: Cursor before or inside prefix does not add marker + cx.set_state(indoc! {" + ˇ- [ ] task + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + + ˇ- [ ] task + "}); + + cx.set_state(indoc! {" + - [ˇ ] task + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! 
{" + - [ + ˇ + ] task + "}); +} + +#[gpui::test] +async fn test_newline_unordered_list_continuation(cx: &mut TestAppContext) { + init_test(cx, |settings| { + settings.defaults.tab_size = Some(2.try_into().unwrap()); + }); + + let markdown_language = languages::language("markdown", tree_sitter_md::LANGUAGE.into()); + let mut cx = EditorTestContext::new(cx).await; + cx.update_buffer(|buffer, cx| buffer.set_language(Some(markdown_language), cx)); + + // Case 1: Adding newline after (whitespace + marker + any non-whitespace) adds marker + cx.set_state(indoc! {" + - itemˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + - item + - ˇ + "}); + + // Case 2: Works with different markers + cx.set_state(indoc! {" + * starred itemˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + * starred item + * ˇ + "}); + + cx.set_state(indoc! {" + + plus itemˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + + plus item + + ˇ + "}); + + // Case 3: Cursor position doesn't matter - content after marker is what counts + cx.set_state(indoc! {" + - itˇem + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + - it + - ˇem + "}); + + // Case 4: Adding newline after (whitespace + marker + some whitespace) does NOT add marker + cx.set_state(indoc! {" + - ˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state( + indoc! {" + - $ + ˇ + "} + .replace("$", " ") + .as_str(), + ); + + // Case 5: Adding newline with content adds marker preserving indentation + cx.set_state(indoc! 
{" + - item + - indentedˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + - item + - indented + - ˇ + "}); + + // Case 6: Adding newline with cursor right after marker, unindents + cx.set_state(indoc! {" + - item + - sub item + - ˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + - item + - sub item + - ˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + + // Case 7: Adding newline with cursor right after marker, removes marker + cx.assert_editor_state(indoc! {" + - item + - sub item + - ˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + - item + - sub item + ˇ + "}); + + // Case 8: Cursor before or inside prefix does not add marker + cx.set_state(indoc! {" + ˇ- item + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + + ˇ- item + "}); + + cx.set_state(indoc! {" + -ˇ item + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + - + ˇitem + "}); +} + +#[gpui::test] +async fn test_newline_ordered_list_continuation(cx: &mut TestAppContext) { + init_test(cx, |settings| { + settings.defaults.tab_size = Some(2.try_into().unwrap()); + }); + + let markdown_language = languages::language("markdown", tree_sitter_md::LANGUAGE.into()); + let mut cx = EditorTestContext::new(cx).await; + cx.update_buffer(|buffer, cx| buffer.set_language(Some(markdown_language), cx)); + + // Case 1: Adding newline after (whitespace + marker + any non-whitespace) increments number + cx.set_state(indoc! {" + 1. 
first itemˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + 1. first item + 2. ˇ + "}); + + // Case 2: Works with larger numbers + cx.set_state(indoc! {" + 10. tenth itemˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + 10. tenth item + 11. ˇ + "}); + + // Case 3: Cursor position doesn't matter - content after marker is what counts + cx.set_state(indoc! {" + 1. itˇem + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + 1. it + 2. ˇem + "}); + + // Case 4: Adding newline after (whitespace + marker + some whitespace) does NOT add marker + cx.set_state(indoc! {" + 1. ˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state( + indoc! {" + 1. $ + ˇ + "} + .replace("$", " ") + .as_str(), + ); + + // Case 5: Adding newline with content adds marker preserving indentation + cx.set_state(indoc! {" + 1. item + 2. indentedˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + 1. item + 2. indented + 3. ˇ + "}); + + // Case 6: Adding newline with cursor right after marker, unindents + cx.set_state(indoc! {" + 1. item + 2. sub item + 3. ˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + 1. item + 2. sub item + 1. ˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + + // Case 7: Adding newline with cursor right after marker, removes marker + cx.assert_editor_state(indoc! {" + 1. item + 2. sub item + 1. 
ˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + 1. item + 2. sub item + ˇ + "}); + + // Case 8: Cursor before or inside prefix does not add marker + cx.set_state(indoc! {" + ˇ1. item + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + + ˇ1. item + "}); + + cx.set_state(indoc! {" + 1ˇ. item + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + 1 + ˇ. item + "}); +} + +#[gpui::test] +async fn test_newline_should_not_autoindent_ordered_list(cx: &mut TestAppContext) { + init_test(cx, |settings| { + settings.defaults.tab_size = Some(2.try_into().unwrap()); + }); + + let markdown_language = languages::language("markdown", tree_sitter_md::LANGUAGE.into()); + let mut cx = EditorTestContext::new(cx).await; + cx.update_buffer(|buffer, cx| buffer.set_language(Some(markdown_language), cx)); + + // Case 1: Adding newline after (whitespace + marker + any non-whitespace) increments number + cx.set_state(indoc! {" + 1. first item + 1. sub first item + 2. sub second item + 3. ˇ + "}); + cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx)); + cx.wait_for_autoindent_applied().await; + cx.assert_editor_state(indoc! {" + 1. first item + 1. sub first item + 2. sub second item + 1. 
ˇ + "}); +} + +#[gpui::test] +async fn test_tab_list_indent(cx: &mut TestAppContext) { + init_test(cx, |settings| { + settings.defaults.tab_size = Some(2.try_into().unwrap()); + }); + + let markdown_language = languages::language("markdown", tree_sitter_md::LANGUAGE.into()); + let mut cx = EditorTestContext::new(cx).await; + cx.update_buffer(|buffer, cx| buffer.set_language(Some(markdown_language), cx)); + + // Case 1: Unordered list - cursor after prefix, adds indent before prefix + cx.set_state(indoc! {" + - ˇitem + "}); + cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx)); + cx.wait_for_autoindent_applied().await; + let expected = indoc! {" + $$- ˇitem + "}; + cx.assert_editor_state(expected.replace("$", " ").as_str()); + + // Case 2: Task list - cursor after prefix + cx.set_state(indoc! {" + - [ ] ˇtask + "}); + cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx)); + cx.wait_for_autoindent_applied().await; + let expected = indoc! {" + $$- [ ] ˇtask + "}; + cx.assert_editor_state(expected.replace("$", " ").as_str()); + + // Case 3: Ordered list - cursor after prefix + cx.set_state(indoc! {" + 1. ˇfirst + "}); + cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx)); + cx.wait_for_autoindent_applied().await; + let expected = indoc! {" + $$1. ˇfirst + "}; + cx.assert_editor_state(expected.replace("$", " ").as_str()); + + // Case 4: With existing indentation - adds more indent + let initial = indoc! {" + $$- ˇitem + "}; + cx.set_state(initial.replace("$", " ").as_str()); + cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx)); + cx.wait_for_autoindent_applied().await; + let expected = indoc! {" + $$$$- ˇitem + "}; + cx.assert_editor_state(expected.replace("$", " ").as_str()); + + // Case 5: Empty list item + cx.set_state(indoc! {" + - ˇ + "}); + cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx)); + cx.wait_for_autoindent_applied().await; + let expected = indoc! 
{" + $$- ˇ + "}; + cx.assert_editor_state(expected.replace("$", " ").as_str()); + + // Case 6: Cursor at end of line with content + cx.set_state(indoc! {" + - itemˇ + "}); + cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx)); + cx.wait_for_autoindent_applied().await; + let expected = indoc! {" + $$- itemˇ + "}; + cx.assert_editor_state(expected.replace("$", " ").as_str()); + + // Case 7: Cursor at start of list item, indents it + cx.set_state(indoc! {" + - item + ˇ - sub item + "}); + cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx)); + cx.wait_for_autoindent_applied().await; + let expected = indoc! {" + - item + ˇ - sub item + "}; + cx.assert_editor_state(expected); + + // Case 8: Cursor at start of list item, moves the cursor when "indent_list_on_tab" is false + cx.update_editor(|_, _, cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + settings.project.all_languages.defaults.indent_list_on_tab = Some(false); + }); + }); + }); + cx.set_state(indoc! {" + - item + ˇ - sub item + "}); + cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx)); + cx.wait_for_autoindent_applied().await; + let expected = indoc! {" + - item + ˇ- sub item + "}; + cx.assert_editor_state(expected); +} + #[gpui::test] async fn test_local_worktree_trust(cx: &mut TestAppContext) { init_test(cx, |_| {}); diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index a573e3d78a4de03c6ccf382c80bc33eaf0b5690d..290cad4e4497015ef63f79e58a0dacf231168c9f 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -827,6 +827,15 @@ pub struct LanguageConfig { /// Delimiters and configuration for recognizing and formatting documentation comments. #[serde(default, alias = "documentation")] pub documentation_comment: Option, + /// List markers that are inserted unchanged on newline (e.g., `- `, `* `, `+ `). 
+ #[serde(default)] + pub unordered_list: Vec>, + /// Configuration for ordered lists with auto-incrementing numbers on newline (e.g., `1. ` becomes `2. `). + #[serde(default)] + pub ordered_list: Vec, + /// Configuration for task lists where multiple markers map to a single continuation prefix (e.g., `- [x] ` continues as `- [ ] `). + #[serde(default)] + pub task_list: Option, /// A list of additional regex patterns that should be treated as prefixes /// for creating boundaries during rewrapping, ensuring content from one /// prefixed section doesn't merge with another (e.g., markdown list items). @@ -898,6 +907,24 @@ pub struct DecreaseIndentConfig { pub valid_after: Vec, } +/// Configuration for continuing ordered lists with auto-incrementing numbers. +#[derive(Clone, Debug, Deserialize, JsonSchema)] +pub struct OrderedListConfig { + /// A regex pattern with a capture group for the number portion (e.g., `(\\d+)\\. `). + pub pattern: String, + /// A format string where `{1}` is replaced with the incremented number (e.g., `{1}. `). + pub format: String, +} + +/// Configuration for continuing task lists on newline. +#[derive(Clone, Debug, Deserialize, JsonSchema)] +pub struct TaskListConfig { + /// The list markers to match (e.g., `- [ ] `, `- [x] `). + pub prefixes: Vec>, + /// The marker to insert when continuing the list on a new line (e.g., `- [ ] `). + pub continuation: Arc, +} + #[derive(Clone, Debug, Serialize, Deserialize, Default, JsonSchema)] pub struct LanguageMatcher { /// Given a list of `LanguageConfig`'s, the language of a file can be determined based on the path extension matching any of the `path_suffixes`. 
@@ -1068,6 +1095,9 @@ impl Default for LanguageConfig { line_comments: Default::default(), block_comment: Default::default(), documentation_comment: Default::default(), + unordered_list: Default::default(), + ordered_list: Default::default(), + task_list: Default::default(), rewrap_prefixes: Default::default(), scope_opt_in_language_servers: Default::default(), overrides: Default::default(), @@ -2153,6 +2183,21 @@ impl LanguageScope { self.language.config.documentation_comment.as_ref() } + /// Returns list markers that are inserted unchanged on newline (e.g., `- `, `* `, `+ `). + pub fn unordered_list(&self) -> &[Arc] { + &self.language.config.unordered_list + } + + /// Returns configuration for ordered lists with auto-incrementing numbers (e.g., `1. ` becomes `2. `). + pub fn ordered_list(&self) -> &[OrderedListConfig] { + &self.language.config.ordered_list + } + + /// Returns configuration for task list continuation, if any (e.g., `- [x] ` continues as `- [ ] `). + pub fn task_list(&self) -> Option<&TaskListConfig> { + self.language.config.task_list.as_ref() + } + /// Returns additional regex patterns that act as prefix markers for creating /// boundaries during rewrapping. /// diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index fccaa545b79c1f24589889df8fcd163fbc5b6c7d..205f2431c6d9deeaa7661b583caa516bdc77ae79 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -122,6 +122,10 @@ pub struct LanguageSettings { pub whitespace_map: WhitespaceMap, /// Whether to start a new line with a comment when a previous line is a comment as well. pub extend_comment_on_newline: bool, + /// Whether to continue markdown lists when pressing enter. + pub extend_list_on_newline: bool, + /// Whether to indent list items when pressing tab after a list marker. + pub indent_list_on_tab: bool, /// Inlay hint related settings. 
pub inlay_hints: InlayHintSettings, /// Whether to automatically close brackets. @@ -567,6 +571,8 @@ impl settings::Settings for AllLanguageSettings { tab: SharedString::new(whitespace_map.tab.unwrap().to_string()), }, extend_comment_on_newline: settings.extend_comment_on_newline.unwrap(), + extend_list_on_newline: settings.extend_list_on_newline.unwrap(), + indent_list_on_tab: settings.indent_list_on_tab.unwrap(), inlay_hints: InlayHintSettings { enabled: inlay_hints.enabled.unwrap(), show_value_hints: inlay_hints.show_value_hints.unwrap(), diff --git a/crates/languages/src/markdown/config.toml b/crates/languages/src/markdown/config.toml index 84c79d2538a0af470ec16d55fe9cf2d1ae05805b..423a4c008f6e8a64f3c4e883b0d6e2bde65c88ae 100644 --- a/crates/languages/src/markdown/config.toml +++ b/crates/languages/src/markdown/config.toml @@ -20,6 +20,9 @@ rewrap_prefixes = [ ">\\s*", "[-*+]\\s+\\[[\\sx]\\]\\s+" ] +unordered_list = ["- ", "* ", "+ "] +ordered_list = [{ pattern = "(\\d+)\\. ", format = "{1}. " }] +task_list = { prefixes = ["- [ ] ", "- [x] "], continuation = "- [ ] " } auto_indent_on_paste = false auto_indent_using_last_non_empty_line = false diff --git a/crates/settings/src/settings_content/language.rs b/crates/settings/src/settings_content/language.rs index f9c85f18f380a7ad82b0d8bc202fe3763ba3a832..cf8cf7b63589e84a96e6b9d92f23a4488479d1f3 100644 --- a/crates/settings/src/settings_content/language.rs +++ b/crates/settings/src/settings_content/language.rs @@ -363,6 +363,14 @@ pub struct LanguageSettingsContent { /// /// Default: true pub extend_comment_on_newline: Option, + /// Whether to continue markdown lists when pressing enter. + /// + /// Default: true + pub extend_list_on_newline: Option, + /// Whether to indent list items when pressing tab after a list marker. + /// + /// Default: true + pub indent_list_on_tab: Option, /// Inlay hint related settings. pub inlay_hints: Option, /// Whether to automatically type closing characters for you. 
For example, diff --git a/crates/settings/src/vscode_import.rs b/crates/settings/src/vscode_import.rs index d77754f611e8eb1746ee9061ce5b5e1dfdbdafdb..64343b05fd57c33eb9cfb0d8cb8674971266b464 100644 --- a/crates/settings/src/vscode_import.rs +++ b/crates/settings/src/vscode_import.rs @@ -430,6 +430,8 @@ impl VsCodeSettings { enable_language_server: None, ensure_final_newline_on_save: self.read_bool("files.insertFinalNewline"), extend_comment_on_newline: None, + extend_list_on_newline: None, + indent_list_on_tab: None, format_on_save: self.read_bool("editor.guides.formatOnSave").map(|b| { if b { FormatOnSave::On diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 8a638d9f7857e1a55aaa5589a77110a7b803bbfe..81318aa8885fe883acc394e7fe983d7721dd33a5 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -1585,6 +1585,26 @@ Positive `integer` value between 1 and 32. Values outside of this range will be `boolean` values +## Extend List On Newline + +- Description: Whether to continue lists when pressing Enter at the end of a list item. Supports unordered, ordered, and task lists. Pressing Enter on an empty list item removes the marker and exits the list. +- Setting: `extend_list_on_newline` +- Default: `true` + +**Options** + +`boolean` values + +## Indent List On Tab + +- Description: Whether to indent list items when pressing Tab on a line containing only a list marker. This enables quick creation of nested lists. +- Setting: `indent_list_on_tab` +- Default: `true` + +**Options** + +`boolean` values + ## Status Bar - Description: Control various elements in the status bar. Note that some items in the status bar have their own settings set elsewhere. 
diff --git a/docs/src/languages/markdown.md b/docs/src/languages/markdown.md index 36ce734f7cfbcc066bb8026568209738655a6be9..64c9e7070569a23daa5bcb8aa4dace12e0021b03 100644 --- a/docs/src/languages/markdown.md +++ b/docs/src/languages/markdown.md @@ -33,6 +33,40 @@ Zed supports using Prettier to automatically re-format Markdown documents. You c }, ``` +### List Continuation + +Zed automatically continues lists when you press Enter at the end of a list item. Supported list types: + +- Unordered lists (`-`, `*`, or `+` markers) +- Ordered lists (numbers are auto-incremented) +- Task lists (`- [ ]` and `- [x]`) + +Pressing Enter on an empty list item removes the marker and exits the list. + +To disable this behavior: + +```json [settings] + "languages": { + "Markdown": { + "extend_list_on_newline": false + } + }, +``` + +### List Indentation + +Zed indents list items when you press Tab while the cursor is on a line containing only a list marker. This allows you to quickly create nested lists. + +To disable this behavior: + +```json [settings] + "languages": { + "Markdown": { + "indent_list_on_tab": false + } + }, +``` + ### Trailing Whitespace By default Zed will remove trailing whitespace on save. If you rely on invisible trailing whitespace being converted to `
` in Markdown files you can disable this behavior with: From 32600f255aea17ffc4557b39f2fc8c275db3b59a Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 19 Dec 2025 13:14:31 -0300 Subject: [PATCH 29/46] gpui: Fix truncation flickering (#45373) It's been a little that we've noticed some flickering and other weird resizing behavior with text truncation in Zed: https://github.com/user-attachments/assets/4d5691a3-cd3d-45e0-8b96-74a4e0e273d2 https://github.com/user-attachments/assets/d1d0e587-7676-4da0-8818-f4e50f0e294e Initially, we suspected this could be due to how we calculate the length of a line to insert truncation, which is based first on the length of each individual character, and then second goes through a pass calculating the line length as a whole. This could cause mismatch and culminate in our bug. However, even though that felt like a reasonable suspicion, I realized something rather simple at some point: the `truncate` and `truncate_start` methods in the `Label` didn't use `whitespace_nowrap`. If you take Tailwind as an example, their `truncate` utility class takes `overflow: hidden; text-overflow: ellipsis; white-space: nowrap;`. This pointed out to a potential bug with `whitespace_nowrap` where that was blocking truncation entirely, even though that's technically part of what's necessary to truncate as you don't want text that will be truncated to wrap. Ultimately, what was happening was that the text element was caching its layout based on its `wrap_width` but not considering its `truncate_width`. The truncate width is essentially the new definitive width of the text based on the available space, which was never being computed. So the fix here was to add `truncate_width.is_none()` to the cache validation check, so that it only uses the cached text element size _if the truncation width is untouched_. But if that changes, we need to account for the new width. 
Then, in the Label component, we added `min_w_0` to allow the label div to shrink below its original size, and finally, we added `whitespace_nowrap()` as the cache check fundamentally fixed that method's problem. In a future PR, we can basically remove the `single_line()` label method because: 1) whenever you want a single label, you most likely want it to truncate, and 2) most instances of `truncate` are already followed by `single_line` in Zed today, so we can cut that part. Result is no flickering with truncated labels! https://github.com/user-attachments/assets/ae17cbde-0de7-42ca-98a4-22fcb452016b Release Notes: - Fixed a bug in GPUI where truncated text would flicker as you resized the container in which the text was in. Co-authored-by: Lukas Wirth --- crates/gpui/src/elements/text.rs | 10 ++++++++-- crates/ui/src/components/label/label_like.rs | 10 ++++++++-- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/crates/gpui/src/elements/text.rs b/crates/gpui/src/elements/text.rs index 942a0a326526431dc65f389e9cff67bac252d571..770c1f871432afbecc9ffd4e903dfeddcfcba6ee 100644 --- a/crates/gpui/src/elements/text.rs +++ b/crates/gpui/src/elements/text.rs @@ -372,11 +372,17 @@ impl TextLayout { (None, "".into(), TruncateFrom::End) }; + // Only use cached layout if: + // 1. We have a cached size + // 2. wrap_width matches (or both are None) + // 3. 
truncate_width is None (if truncate_width is Some, we need to re-layout + // because the previous layout may have been computed without truncation) if let Some(text_layout) = element_state.0.borrow().as_ref() - && text_layout.size.is_some() + && let Some(size) = text_layout.size && (wrap_width.is_none() || wrap_width == text_layout.wrap_width) + && truncate_width.is_none() { - return text_layout.size.unwrap(); + return size; } let mut line_wrapper = cx.text_system().line_wrapper(text_style.font(), font_size); diff --git a/crates/ui/src/components/label/label_like.rs b/crates/ui/src/components/label/label_like.rs index f6e7a1b893d54fff425618d5c604f591144a7385..03fde4083d5e9a8e07f38c830edd5116f14e6d70 100644 --- a/crates/ui/src/components/label/label_like.rs +++ b/crates/ui/src/components/label/label_like.rs @@ -241,10 +241,16 @@ impl RenderOnce for LabelLike { .when(self.strikethrough, |this| this.line_through()) .when(self.single_line, |this| this.whitespace_nowrap()) .when(self.truncate, |this| { - this.overflow_x_hidden().text_ellipsis() + this.min_w_0() + .overflow_x_hidden() + .whitespace_nowrap() + .text_ellipsis() }) .when(self.truncate_start, |this| { - this.overflow_x_hidden().text_ellipsis_start() + this.min_w_0() + .overflow_x_hidden() + .whitespace_nowrap() + .text_ellipsis_start() }) .text_color(color) .font_weight( From e05dcecac40566c0a49b9754934d9030e4e7ad76 Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Fri, 19 Dec 2025 10:21:56 -0600 Subject: [PATCH 30/46] Make `pane::CloseAllItems` best effort (#45368) Closes #ISSUE Release Notes: - Fixed an issue where the `pane: close all items` action would give up if you hit "Cancel" on the prompt for what to do with a dirty buffer --- crates/workspace/src/pane.rs | 81 ++++++++++++++++++++++++++----- crates/workspace/src/workspace.rs | 26 ++++++---- 2 files changed, 85 insertions(+), 22 deletions(-) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 
f6256aee46b9e2b5c29c020e9ee12f6ff510210f..dd17c338a935571f4d0fe9d46b3b10fac9ffe218 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -1846,6 +1846,7 @@ impl Pane { } for item_to_close in items_to_close { + let mut should_close = true; let mut should_save = true; if save_intent == SaveIntent::Close { workspace.update(cx, |workspace, cx| { @@ -1861,7 +1862,7 @@ impl Pane { { Ok(success) => { if !success { - break; + should_close = false; } } Err(err) => { @@ -1880,23 +1881,25 @@ impl Pane { })?; match answer.await { Ok(0) => {} - Ok(1..) | Err(_) => break, + Ok(1..) | Err(_) => should_close = false, } } } } // Remove the item from the pane. - pane.update_in(cx, |pane, window, cx| { - pane.remove_item( - item_to_close.item_id(), - false, - pane.close_pane_if_empty, - window, - cx, - ); - }) - .ok(); + if should_close { + pane.update_in(cx, |pane, window, cx| { + pane.remove_item( + item_to_close.item_id(), + false, + pane.close_pane_if_empty, + window, + cx, + ); + }) + .ok(); + } } pane.update(cx, |_, cx| cx.notify()).ok(); @@ -6614,6 +6617,60 @@ mod tests { cx.simulate_prompt_answer("Discard all"); save.await.unwrap(); assert_item_labels(&pane, [], cx); + + add_labeled_item(&pane, "A", true, cx).update(cx, |item, cx| { + item.project_items + .push(TestProjectItem::new_dirty(1, "A.txt", cx)) + }); + add_labeled_item(&pane, "B", true, cx).update(cx, |item, cx| { + item.project_items + .push(TestProjectItem::new_dirty(2, "B.txt", cx)) + }); + add_labeled_item(&pane, "C", true, cx).update(cx, |item, cx| { + item.project_items + .push(TestProjectItem::new_dirty(3, "C.txt", cx)) + }); + assert_item_labels(&pane, ["A^", "B^", "C*^"], cx); + + let close_task = pane.update_in(cx, |pane, window, cx| { + pane.close_all_items( + &CloseAllItems { + save_intent: None, + close_pinned: false, + }, + window, + cx, + ) + }); + + cx.executor().run_until_parked(); + cx.simulate_prompt_answer("Discard all"); + close_task.await.unwrap(); + 
assert_item_labels(&pane, [], cx); + + add_labeled_item(&pane, "Clean1", false, cx); + add_labeled_item(&pane, "Dirty", true, cx).update(cx, |item, cx| { + item.project_items + .push(TestProjectItem::new_dirty(1, "Dirty.txt", cx)) + }); + add_labeled_item(&pane, "Clean2", false, cx); + assert_item_labels(&pane, ["Clean1", "Dirty^", "Clean2*"], cx); + + let close_task = pane.update_in(cx, |pane, window, cx| { + pane.close_all_items( + &CloseAllItems { + save_intent: None, + close_pinned: false, + }, + window, + cx, + ) + }); + + cx.executor().run_until_parked(); + cx.simulate_prompt_answer("Cancel"); + close_task.await.unwrap(); + assert_item_labels(&pane, ["Dirty*^"], cx); } #[gpui::test] diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index c88386281e73b243dbddd6cb00c80fb26595409e..fa8e3a3dc2af33054907ea8a8c1ba095a3259207 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -9424,7 +9424,7 @@ mod tests { let right_pane = right_pane.await.unwrap(); cx.focus(&right_pane); - let mut close = right_pane.update_in(cx, |pane, window, cx| { + let close = right_pane.update_in(cx, |pane, window, cx| { pane.close_all_items(&CloseAllItems::default(), window, cx) .unwrap() }); @@ -9436,9 +9436,16 @@ mod tests { assert!(!msg.contains("3.txt")); assert!(!msg.contains("4.txt")); + // With best-effort close, cancelling item 1 keeps it open but items 4 + // and (3,4) still close since their entries exist in left pane. cx.simulate_prompt_answer("Cancel"); close.await; + right_pane.read_with(cx, |pane, _| { + assert_eq!(pane.items_len(), 1); + }); + + // Remove item 3 from left pane, making (2,3) the only item with entry 3. 
left_pane .update_in(cx, |left_pane, window, cx| { left_pane.close_item_by_id( @@ -9451,26 +9458,25 @@ mod tests { .await .unwrap(); - close = right_pane.update_in(cx, |pane, window, cx| { + let close = left_pane.update_in(cx, |pane, window, cx| { pane.close_all_items(&CloseAllItems::default(), window, cx) .unwrap() }); cx.executor().run_until_parked(); let details = cx.pending_prompt().unwrap().1; - assert!(details.contains("1.txt")); - assert!(!details.contains("2.txt")); + assert!(details.contains("0.txt")); assert!(details.contains("3.txt")); - // ideally this assertion could be made, but today we can only - // save whole items not project items, so the orphaned item 3 causes - // 4 to be saved too. - // assert!(!details.contains("4.txt")); + assert!(details.contains("4.txt")); + // Ideally 2.txt wouldn't appear since entry 2 still exists in item 2. + // But we can only save whole items, so saving (2,3) for entry 3 includes 2. + // assert!(!details.contains("2.txt")); cx.simulate_prompt_answer("Save all"); - cx.executor().run_until_parked(); close.await; - right_pane.read_with(cx, |pane, _| { + + left_pane.read_with(cx, |pane, _| { assert_eq!(pane.items_len(), 0); }); } From d7e41f74fb3e0279b24f190c19b4ccd3680d01b5 Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Fri, 19 Dec 2025 13:31:27 -0300 Subject: [PATCH 31/46] search: Respect macOS' find pasteboard (#45311) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #17467 Release Notes: - On macOS, buffer search now syncs with the system find pasteboard, allowing ⌘E and ⌘G to work seamlessly across Zed and other apps. 
--- crates/gpui/src/app.rs | 36 +- crates/gpui/src/platform.rs | 12 +- crates/gpui/src/platform/mac.rs | 3 +- .../src/platform/mac/attributed_string.rs | 129 ------ crates/gpui/src/platform/mac/pasteboard.rs | 344 +++++++++++++++ crates/gpui/src/platform/mac/platform.rs | 397 ++---------------- crates/gpui/src/platform/test/platform.rs | 24 +- crates/search/src/buffer_search.rs | 89 +++- 8 files changed, 499 insertions(+), 535 deletions(-) delete mode 100644 crates/gpui/src/platform/mac/attributed_string.rs create mode 100644 crates/gpui/src/platform/mac/pasteboard.rs diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 75600a9ee1b440a092a89456cbe8fbabe6fdccfa..96f815ac0b592600f22b3c9b9686571487ff77a2 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -1077,11 +1077,9 @@ impl App { self.platform.window_appearance() } - /// Writes data to the primary selection buffer. - /// Only available on Linux. - #[cfg(any(target_os = "linux", target_os = "freebsd"))] - pub fn write_to_primary(&self, item: ClipboardItem) { - self.platform.write_to_primary(item) + /// Reads data from the platform clipboard. + pub fn read_from_clipboard(&self) -> Option { + self.platform.read_from_clipboard() } /// Writes data to the platform clipboard. @@ -1096,9 +1094,31 @@ impl App { self.platform.read_from_primary() } - /// Reads data from the platform clipboard. - pub fn read_from_clipboard(&self) -> Option { - self.platform.read_from_clipboard() + /// Writes data to the primary selection buffer. + /// Only available on Linux. + #[cfg(any(target_os = "linux", target_os = "freebsd"))] + pub fn write_to_primary(&self, item: ClipboardItem) { + self.platform.write_to_primary(item) + } + + /// Reads data from macOS's "Find" pasteboard. + /// + /// Used to share the current search string between apps. 
+ /// + /// https://developer.apple.com/documentation/appkit/nspasteboard/name-swift.struct/find + #[cfg(target_os = "macos")] + pub fn read_from_find_pasteboard(&self) -> Option { + self.platform.read_from_find_pasteboard() + } + + /// Writes data to macOS's "Find" pasteboard. + /// + /// Used to share the current search string between apps. + /// + /// https://developer.apple.com/documentation/appkit/nspasteboard/name-swift.struct/find + #[cfg(target_os = "macos")] + pub fn write_to_find_pasteboard(&self, item: ClipboardItem) { + self.platform.write_to_find_pasteboard(item) } /// Writes credentials to the platform keychain. diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index 22f4c46921132a7b8badfb7afd4fd38058c638b4..112775890ef6e478f0b2d347bc9c9ae56dac3c73 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -262,12 +262,18 @@ pub(crate) trait Platform: 'static { fn set_cursor_style(&self, style: CursorStyle); fn should_auto_hide_scrollbars(&self) -> bool; - #[cfg(any(target_os = "linux", target_os = "freebsd"))] - fn write_to_primary(&self, item: ClipboardItem); + fn read_from_clipboard(&self) -> Option; fn write_to_clipboard(&self, item: ClipboardItem); + #[cfg(any(target_os = "linux", target_os = "freebsd"))] fn read_from_primary(&self) -> Option; - fn read_from_clipboard(&self) -> Option; + #[cfg(any(target_os = "linux", target_os = "freebsd"))] + fn write_to_primary(&self, item: ClipboardItem); + + #[cfg(target_os = "macos")] + fn read_from_find_pasteboard(&self) -> Option; + #[cfg(target_os = "macos")] + fn write_to_find_pasteboard(&self, item: ClipboardItem); fn write_credentials(&self, url: &str, username: &str, password: &[u8]) -> Task>; fn read_credentials(&self, url: &str) -> Task)>>>; diff --git a/crates/gpui/src/platform/mac.rs b/crates/gpui/src/platform/mac.rs index aa056846e6bc56e53d95c41a44444dbb89a16237..a229ec7dce928597ec73b1f4be50edd1ea3e5114 100644 --- a/crates/gpui/src/platform/mac.rs +++ 
b/crates/gpui/src/platform/mac.rs @@ -5,6 +5,7 @@ mod display; mod display_link; mod events; mod keyboard; +mod pasteboard; #[cfg(feature = "screen-capture")] mod screen_capture; @@ -21,8 +22,6 @@ use metal_renderer as renderer; #[cfg(feature = "macos-blade")] use crate::platform::blade as renderer; -mod attributed_string; - #[cfg(feature = "font-kit")] mod open_type; diff --git a/crates/gpui/src/platform/mac/attributed_string.rs b/crates/gpui/src/platform/mac/attributed_string.rs deleted file mode 100644 index 42fe1e5bf7a396a4eaa8ade26977a207d43b49b5..0000000000000000000000000000000000000000 --- a/crates/gpui/src/platform/mac/attributed_string.rs +++ /dev/null @@ -1,129 +0,0 @@ -use cocoa::base::id; -use cocoa::foundation::NSRange; -use objc::{class, msg_send, sel, sel_impl}; - -/// The `cocoa` crate does not define NSAttributedString (and related Cocoa classes), -/// which are needed for copying rich text (that is, text intermingled with images) -/// to the clipboard. This adds access to those APIs. 
-#[allow(non_snake_case)] -pub trait NSAttributedString: Sized { - unsafe fn alloc(_: Self) -> id { - msg_send![class!(NSAttributedString), alloc] - } - - unsafe fn init_attributed_string(self, string: id) -> id; - unsafe fn appendAttributedString_(self, attr_string: id); - unsafe fn RTFDFromRange_documentAttributes_(self, range: NSRange, attrs: id) -> id; - unsafe fn RTFFromRange_documentAttributes_(self, range: NSRange, attrs: id) -> id; - unsafe fn string(self) -> id; -} - -impl NSAttributedString for id { - unsafe fn init_attributed_string(self, string: id) -> id { - msg_send![self, initWithString: string] - } - - unsafe fn appendAttributedString_(self, attr_string: id) { - let _: () = msg_send![self, appendAttributedString: attr_string]; - } - - unsafe fn RTFDFromRange_documentAttributes_(self, range: NSRange, attrs: id) -> id { - msg_send![self, RTFDFromRange: range documentAttributes: attrs] - } - - unsafe fn RTFFromRange_documentAttributes_(self, range: NSRange, attrs: id) -> id { - msg_send![self, RTFFromRange: range documentAttributes: attrs] - } - - unsafe fn string(self) -> id { - msg_send![self, string] - } -} - -pub trait NSMutableAttributedString: NSAttributedString { - unsafe fn alloc(_: Self) -> id { - msg_send![class!(NSMutableAttributedString), alloc] - } -} - -impl NSMutableAttributedString for id {} - -#[cfg(test)] -mod tests { - use crate::platform::mac::ns_string; - - use super::*; - use cocoa::appkit::NSImage; - use cocoa::base::nil; - use cocoa::foundation::NSAutoreleasePool; - #[test] - #[ignore] // This was SIGSEGV-ing on CI but not locally; need to investigate https://github.com/zed-industries/zed/actions/runs/10362363230/job/28684225486?pr=15782#step:4:1348 - fn test_nsattributed_string() { - // TODO move these to parent module once it's actually ready to be used - #[allow(non_snake_case)] - pub trait NSTextAttachment: Sized { - unsafe fn alloc(_: Self) -> id { - msg_send![class!(NSTextAttachment), alloc] - } - } - - impl 
NSTextAttachment for id {} - - unsafe { - let image: id = { - let img: id = msg_send![class!(NSImage), alloc]; - let img: id = msg_send![img, initWithContentsOfFile: ns_string("test.jpeg")]; - let img: id = msg_send![img, autorelease]; - img - }; - let _size = image.size(); - - let string = ns_string("Test String"); - let attr_string = NSMutableAttributedString::alloc(nil) - .init_attributed_string(string) - .autorelease(); - let hello_string = ns_string("Hello World"); - let hello_attr_string = NSAttributedString::alloc(nil) - .init_attributed_string(hello_string) - .autorelease(); - attr_string.appendAttributedString_(hello_attr_string); - - let attachment: id = msg_send![NSTextAttachment::alloc(nil), autorelease]; - let _: () = msg_send![attachment, setImage: image]; - let image_attr_string = - msg_send![class!(NSAttributedString), attributedStringWithAttachment: attachment]; - attr_string.appendAttributedString_(image_attr_string); - - let another_string = ns_string("Another String"); - let another_attr_string = NSAttributedString::alloc(nil) - .init_attributed_string(another_string) - .autorelease(); - attr_string.appendAttributedString_(another_attr_string); - - let _len: cocoa::foundation::NSUInteger = msg_send![attr_string, length]; - - /////////////////////////////////////////////////// - // pasteboard.clearContents(); - - let rtfd_data = attr_string.RTFDFromRange_documentAttributes_( - NSRange::new(0, msg_send![attr_string, length]), - nil, - ); - assert_ne!(rtfd_data, nil); - // if rtfd_data != nil { - // pasteboard.setData_forType(rtfd_data, NSPasteboardTypeRTFD); - // } - - // let rtf_data = attributed_string.RTFFromRange_documentAttributes_( - // NSRange::new(0, attributed_string.length()), - // nil, - // ); - // if rtf_data != nil { - // pasteboard.setData_forType(rtf_data, NSPasteboardTypeRTF); - // } - - // let plain_text = attributed_string.string(); - // pasteboard.setString_forType(plain_text, NSPasteboardTypeString); - } - } -} diff --git 
a/crates/gpui/src/platform/mac/pasteboard.rs b/crates/gpui/src/platform/mac/pasteboard.rs new file mode 100644 index 0000000000000000000000000000000000000000..38710951f15b25515d906afc738c5b971b1bb135 --- /dev/null +++ b/crates/gpui/src/platform/mac/pasteboard.rs @@ -0,0 +1,344 @@ +use core::slice; +use std::ffi::c_void; + +use cocoa::{ + appkit::{NSPasteboard, NSPasteboardTypePNG, NSPasteboardTypeString, NSPasteboardTypeTIFF}, + base::{id, nil}, + foundation::NSData, +}; +use objc::{msg_send, runtime::Object, sel, sel_impl}; +use strum::IntoEnumIterator as _; + +use crate::{ + ClipboardEntry, ClipboardItem, ClipboardString, Image, ImageFormat, asset_cache::hash, + platform::mac::ns_string, +}; + +pub struct Pasteboard { + inner: id, + text_hash_type: id, + metadata_type: id, +} + +impl Pasteboard { + pub fn general() -> Self { + unsafe { Self::new(NSPasteboard::generalPasteboard(nil)) } + } + + pub fn find() -> Self { + unsafe { Self::new(NSPasteboard::pasteboardWithName(nil, NSPasteboardNameFind)) } + } + + #[cfg(test)] + pub fn unique() -> Self { + unsafe { Self::new(NSPasteboard::pasteboardWithUniqueName(nil)) } + } + + unsafe fn new(inner: id) -> Self { + Self { + inner, + text_hash_type: unsafe { ns_string("zed-text-hash") }, + metadata_type: unsafe { ns_string("zed-metadata") }, + } + } + + pub fn read(&self) -> Option { + // First, see if it's a string. + unsafe { + let pasteboard_types: id = self.inner.types(); + let string_type: id = ns_string("public.utf8-plain-text"); + + if msg_send![pasteboard_types, containsObject: string_type] { + let data = self.inner.dataForType(string_type); + if data == nil { + return None; + } else if data.bytes().is_null() { + // https://developer.apple.com/documentation/foundation/nsdata/1410616-bytes?language=objc + // "If the length of the NSData object is 0, this property returns nil." 
+ return Some(self.read_string(&[])); + } else { + let bytes = + slice::from_raw_parts(data.bytes() as *mut u8, data.length() as usize); + + return Some(self.read_string(bytes)); + } + } + + // If it wasn't a string, try the various supported image types. + for format in ImageFormat::iter() { + if let Some(item) = self.read_image(format) { + return Some(item); + } + } + } + + // If it wasn't a string or a supported image type, give up. + None + } + + fn read_image(&self, format: ImageFormat) -> Option { + let mut ut_type: UTType = format.into(); + + unsafe { + let types: id = self.inner.types(); + if msg_send![types, containsObject: ut_type.inner()] { + self.data_for_type(ut_type.inner_mut()).map(|bytes| { + let bytes = bytes.to_vec(); + let id = hash(&bytes); + + ClipboardItem { + entries: vec![ClipboardEntry::Image(Image { format, bytes, id })], + } + }) + } else { + None + } + } + } + + fn read_string(&self, text_bytes: &[u8]) -> ClipboardItem { + unsafe { + let text = String::from_utf8_lossy(text_bytes).to_string(); + let metadata = self + .data_for_type(self.text_hash_type) + .and_then(|hash_bytes| { + let hash_bytes = hash_bytes.try_into().ok()?; + let hash = u64::from_be_bytes(hash_bytes); + let metadata = self.data_for_type(self.metadata_type)?; + + if hash == ClipboardString::text_hash(&text) { + String::from_utf8(metadata.to_vec()).ok() + } else { + None + } + }); + + ClipboardItem { + entries: vec![ClipboardEntry::String(ClipboardString { text, metadata })], + } + } + } + + unsafe fn data_for_type(&self, kind: id) -> Option<&[u8]> { + unsafe { + let data = self.inner.dataForType(kind); + if data == nil { + None + } else { + Some(slice::from_raw_parts( + data.bytes() as *mut u8, + data.length() as usize, + )) + } + } + } + + pub fn write(&self, item: ClipboardItem) { + unsafe { + match item.entries.as_slice() { + [] => { + // Writing an empty list of entries just clears the clipboard. 
+ self.inner.clearContents(); + } + [ClipboardEntry::String(string)] => { + self.write_plaintext(string); + } + [ClipboardEntry::Image(image)] => { + self.write_image(image); + } + [ClipboardEntry::ExternalPaths(_)] => {} + _ => { + // Agus NB: We're currently only writing string entries to the clipboard when we have more than one. + // + // This was the existing behavior before I refactored the outer clipboard code: + // https://github.com/zed-industries/zed/blob/65f7412a0265552b06ce122655369d6cc7381dd6/crates/gpui/src/platform/mac/platform.rs#L1060-L1110 + // + // Note how `any_images` is always `false`. We should fix that, but that's orthogonal to the refactor. + + let mut combined = ClipboardString { + text: String::new(), + metadata: None, + }; + + for entry in item.entries { + match entry { + ClipboardEntry::String(text) => { + combined.text.push_str(&text.text()); + if combined.metadata.is_none() { + combined.metadata = text.metadata; + } + } + _ => {} + } + } + + self.write_plaintext(&combined); + } + } + } + } + + fn write_plaintext(&self, string: &ClipboardString) { + unsafe { + self.inner.clearContents(); + + let text_bytes = NSData::dataWithBytes_length_( + nil, + string.text.as_ptr() as *const c_void, + string.text.len() as u64, + ); + self.inner + .setData_forType(text_bytes, NSPasteboardTypeString); + + if let Some(metadata) = string.metadata.as_ref() { + let hash_bytes = ClipboardString::text_hash(&string.text).to_be_bytes(); + let hash_bytes = NSData::dataWithBytes_length_( + nil, + hash_bytes.as_ptr() as *const c_void, + hash_bytes.len() as u64, + ); + self.inner.setData_forType(hash_bytes, self.text_hash_type); + + let metadata_bytes = NSData::dataWithBytes_length_( + nil, + metadata.as_ptr() as *const c_void, + metadata.len() as u64, + ); + self.inner + .setData_forType(metadata_bytes, self.metadata_type); + } + } + } + + unsafe fn write_image(&self, image: &Image) { + unsafe { + self.inner.clearContents(); + + let bytes = 
NSData::dataWithBytes_length_( + nil, + image.bytes.as_ptr() as *const c_void, + image.bytes.len() as u64, + ); + + self.inner + .setData_forType(bytes, Into::::into(image.format).inner_mut()); + } + } +} + +#[link(name = "AppKit", kind = "framework")] +unsafe extern "C" { + /// [Apple's documentation](https://developer.apple.com/documentation/appkit/nspasteboardnamefind?language=objc) + pub static NSPasteboardNameFind: id; +} + +impl From for UTType { + fn from(value: ImageFormat) -> Self { + match value { + ImageFormat::Png => Self::png(), + ImageFormat::Jpeg => Self::jpeg(), + ImageFormat::Tiff => Self::tiff(), + ImageFormat::Webp => Self::webp(), + ImageFormat::Gif => Self::gif(), + ImageFormat::Bmp => Self::bmp(), + ImageFormat::Svg => Self::svg(), + ImageFormat::Ico => Self::ico(), + } + } +} + +// See https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/ +pub struct UTType(id); + +impl UTType { + pub fn png() -> Self { + // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/png + Self(unsafe { NSPasteboardTypePNG }) // This is a rare case where there's a built-in NSPasteboardType + } + + pub fn jpeg() -> Self { + // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/jpeg + Self(unsafe { ns_string("public.jpeg") }) + } + + pub fn gif() -> Self { + // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/gif + Self(unsafe { ns_string("com.compuserve.gif") }) + } + + pub fn webp() -> Self { + // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/webp + Self(unsafe { ns_string("org.webmproject.webp") }) + } + + pub fn bmp() -> Self { + // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/bmp + Self(unsafe { ns_string("com.microsoft.bmp") }) + } + + pub fn svg() -> Self { + // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/svg + 
Self(unsafe { ns_string("public.svg-image") }) + } + + pub fn ico() -> Self { + // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/ico + Self(unsafe { ns_string("com.microsoft.ico") }) + } + + pub fn tiff() -> Self { + // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/tiff + Self(unsafe { NSPasteboardTypeTIFF }) // This is a rare case where there's a built-in NSPasteboardType + } + + fn inner(&self) -> *const Object { + self.0 + } + + pub fn inner_mut(&self) -> *mut Object { + self.0 as *mut _ + } +} + +#[cfg(test)] +mod tests { + use cocoa::{appkit::NSPasteboardTypeString, foundation::NSData}; + + use crate::{ClipboardEntry, ClipboardItem, ClipboardString}; + + use super::*; + + #[test] + fn test_string() { + let pasteboard = Pasteboard::unique(); + assert_eq!(pasteboard.read(), None); + + let item = ClipboardItem::new_string("1".to_string()); + pasteboard.write(item.clone()); + assert_eq!(pasteboard.read(), Some(item)); + + let item = ClipboardItem { + entries: vec![ClipboardEntry::String( + ClipboardString::new("2".to_string()).with_json_metadata(vec![3, 4]), + )], + }; + pasteboard.write(item.clone()); + assert_eq!(pasteboard.read(), Some(item)); + + let text_from_other_app = "text from other app"; + unsafe { + let bytes = NSData::dataWithBytes_length_( + nil, + text_from_other_app.as_ptr() as *const c_void, + text_from_other_app.len() as u64, + ); + pasteboard + .inner + .setData_forType(bytes, NSPasteboardTypeString); + } + assert_eq!( + pasteboard.read(), + Some(ClipboardItem::new_string(text_from_other_app.to_string())) + ); + } +} diff --git a/crates/gpui/src/platform/mac/platform.rs b/crates/gpui/src/platform/mac/platform.rs index ee67f465e34bd8109246f68b311e225aa8f9fd0a..9b32c6735bf6215fecc0455defc4237fd25e8cb0 100644 --- a/crates/gpui/src/platform/mac/platform.rs +++ b/crates/gpui/src/platform/mac/platform.rs @@ -1,29 +1,24 @@ use super::{ - BoolExt, MacKeyboardLayout, 
MacKeyboardMapper, - attributed_string::{NSAttributedString, NSMutableAttributedString}, - events::key_to_native, - ns_string, renderer, + BoolExt, MacKeyboardLayout, MacKeyboardMapper, events::key_to_native, ns_string, renderer, }; use crate::{ - Action, AnyWindowHandle, BackgroundExecutor, ClipboardEntry, ClipboardItem, ClipboardString, - CursorStyle, ForegroundExecutor, Image, ImageFormat, KeyContext, Keymap, MacDispatcher, - MacDisplay, MacWindow, Menu, MenuItem, OsMenu, OwnedMenu, PathPromptOptions, Platform, - PlatformDisplay, PlatformKeyboardLayout, PlatformKeyboardMapper, PlatformTextSystem, - PlatformWindow, Result, SystemMenuType, Task, WindowAppearance, WindowParams, hash, + Action, AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, ForegroundExecutor, + KeyContext, Keymap, MacDispatcher, MacDisplay, MacWindow, Menu, MenuItem, OsMenu, OwnedMenu, + PathPromptOptions, Platform, PlatformDisplay, PlatformKeyboardLayout, PlatformKeyboardMapper, + PlatformTextSystem, PlatformWindow, Result, SystemMenuType, Task, WindowAppearance, + WindowParams, platform::mac::pasteboard::Pasteboard, }; use anyhow::{Context as _, anyhow}; use block::ConcreteBlock; use cocoa::{ appkit::{ NSApplication, NSApplicationActivationPolicy::NSApplicationActivationPolicyRegular, - NSEventModifierFlags, NSMenu, NSMenuItem, NSModalResponse, NSOpenPanel, NSPasteboard, - NSPasteboardTypePNG, NSPasteboardTypeRTF, NSPasteboardTypeRTFD, NSPasteboardTypeString, - NSPasteboardTypeTIFF, NSSavePanel, NSVisualEffectState, NSVisualEffectView, NSWindow, + NSEventModifierFlags, NSMenu, NSMenuItem, NSModalResponse, NSOpenPanel, NSSavePanel, + NSVisualEffectState, NSVisualEffectView, NSWindow, }, base::{BOOL, NO, YES, id, nil, selector}, foundation::{ - NSArray, NSAutoreleasePool, NSBundle, NSData, NSInteger, NSProcessInfo, NSRange, NSString, - NSUInteger, NSURL, + NSArray, NSAutoreleasePool, NSBundle, NSInteger, NSProcessInfo, NSString, NSUInteger, NSURL, }, }; use core_foundation::{ @@ 
-49,7 +44,6 @@ use ptr::null_mut; use semver::Version; use std::{ cell::Cell, - convert::TryInto, ffi::{CStr, OsStr, c_void}, os::{raw::c_char, unix::ffi::OsStrExt}, path::{Path, PathBuf}, @@ -58,7 +52,6 @@ use std::{ slice, str, sync::{Arc, OnceLock}, }; -use strum::IntoEnumIterator; use util::{ ResultExt, command::{new_smol_command, new_std_command}, @@ -164,9 +157,8 @@ pub(crate) struct MacPlatformState { text_system: Arc, renderer_context: renderer::Context, headless: bool, - pasteboard: id, - text_hash_pasteboard_type: id, - metadata_pasteboard_type: id, + general_pasteboard: Pasteboard, + find_pasteboard: Pasteboard, reopen: Option>, on_keyboard_layout_change: Option>, quit: Option>, @@ -206,9 +198,8 @@ impl MacPlatform { background_executor: BackgroundExecutor::new(dispatcher.clone()), foreground_executor: ForegroundExecutor::new(dispatcher), renderer_context: renderer::Context::default(), - pasteboard: unsafe { NSPasteboard::generalPasteboard(nil) }, - text_hash_pasteboard_type: unsafe { ns_string("zed-text-hash") }, - metadata_pasteboard_type: unsafe { ns_string("zed-metadata") }, + general_pasteboard: Pasteboard::general(), + find_pasteboard: Pasteboard::find(), reopen: None, quit: None, menu_command: None, @@ -224,20 +215,6 @@ impl MacPlatform { })) } - unsafe fn read_from_pasteboard(&self, pasteboard: *mut Object, kind: id) -> Option<&[u8]> { - unsafe { - let data = pasteboard.dataForType(kind); - if data == nil { - None - } else { - Some(slice::from_raw_parts( - data.bytes() as *mut u8, - data.length() as usize, - )) - } - } - } - unsafe fn create_menu_bar( &self, menus: &Vec, @@ -1034,119 +1011,24 @@ impl Platform for MacPlatform { } } - fn write_to_clipboard(&self, item: ClipboardItem) { - use crate::ClipboardEntry; - - unsafe { - // We only want to use NSAttributedString if there are multiple entries to write. 
- if item.entries.len() <= 1 { - match item.entries.first() { - Some(entry) => match entry { - ClipboardEntry::String(string) => { - self.write_plaintext_to_clipboard(string); - } - ClipboardEntry::Image(image) => { - self.write_image_to_clipboard(image); - } - ClipboardEntry::ExternalPaths(_) => {} - }, - None => { - // Writing an empty list of entries just clears the clipboard. - let state = self.0.lock(); - state.pasteboard.clearContents(); - } - } - } else { - let mut any_images = false; - let attributed_string = { - let mut buf = NSMutableAttributedString::alloc(nil) - // TODO can we skip this? Or at least part of it? - .init_attributed_string(ns_string("")) - .autorelease(); - - for entry in item.entries { - if let ClipboardEntry::String(ClipboardString { text, metadata: _ }) = entry - { - let to_append = NSAttributedString::alloc(nil) - .init_attributed_string(ns_string(&text)) - .autorelease(); - - buf.appendAttributedString_(to_append); - } - } - - buf - }; - - let state = self.0.lock(); - state.pasteboard.clearContents(); - - // Only set rich text clipboard types if we actually have 1+ images to include. - if any_images { - let rtfd_data = attributed_string.RTFDFromRange_documentAttributes_( - NSRange::new(0, msg_send![attributed_string, length]), - nil, - ); - if rtfd_data != nil { - state - .pasteboard - .setData_forType(rtfd_data, NSPasteboardTypeRTFD); - } - - let rtf_data = attributed_string.RTFFromRange_documentAttributes_( - NSRange::new(0, attributed_string.length()), - nil, - ); - if rtf_data != nil { - state - .pasteboard - .setData_forType(rtf_data, NSPasteboardTypeRTF); - } - } - - let plain_text = attributed_string.string(); - state - .pasteboard - .setString_forType(plain_text, NSPasteboardTypeString); - } - } - } - fn read_from_clipboard(&self) -> Option { let state = self.0.lock(); - let pasteboard = state.pasteboard; - - // First, see if it's a string. 
- unsafe { - let types: id = pasteboard.types(); - let string_type: id = ns_string("public.utf8-plain-text"); - - if msg_send![types, containsObject: string_type] { - let data = pasteboard.dataForType(string_type); - if data == nil { - return None; - } else if data.bytes().is_null() { - // https://developer.apple.com/documentation/foundation/nsdata/1410616-bytes?language=objc - // "If the length of the NSData object is 0, this property returns nil." - return Some(self.read_string_from_clipboard(&state, &[])); - } else { - let bytes = - slice::from_raw_parts(data.bytes() as *mut u8, data.length() as usize); + state.general_pasteboard.read() + } - return Some(self.read_string_from_clipboard(&state, bytes)); - } - } + fn write_to_clipboard(&self, item: ClipboardItem) { + let state = self.0.lock(); + state.general_pasteboard.write(item); + } - // If it wasn't a string, try the various supported image types. - for format in ImageFormat::iter() { - if let Some(item) = try_clipboard_image(pasteboard, format) { - return Some(item); - } - } - } + fn read_from_find_pasteboard(&self) -> Option { + let state = self.0.lock(); + state.find_pasteboard.read() + } - // If it wasn't a string or a supported image type, give up. 
- None + fn write_to_find_pasteboard(&self, item: ClipboardItem) { + let state = self.0.lock(); + state.find_pasteboard.write(item); } fn write_credentials(&self, url: &str, username: &str, password: &[u8]) -> Task> { @@ -1255,116 +1137,6 @@ impl Platform for MacPlatform { } } -impl MacPlatform { - unsafe fn read_string_from_clipboard( - &self, - state: &MacPlatformState, - text_bytes: &[u8], - ) -> ClipboardItem { - unsafe { - let text = String::from_utf8_lossy(text_bytes).to_string(); - let metadata = self - .read_from_pasteboard(state.pasteboard, state.text_hash_pasteboard_type) - .and_then(|hash_bytes| { - let hash_bytes = hash_bytes.try_into().ok()?; - let hash = u64::from_be_bytes(hash_bytes); - let metadata = self - .read_from_pasteboard(state.pasteboard, state.metadata_pasteboard_type)?; - - if hash == ClipboardString::text_hash(&text) { - String::from_utf8(metadata.to_vec()).ok() - } else { - None - } - }); - - ClipboardItem { - entries: vec![ClipboardEntry::String(ClipboardString { text, metadata })], - } - } - } - - unsafe fn write_plaintext_to_clipboard(&self, string: &ClipboardString) { - unsafe { - let state = self.0.lock(); - state.pasteboard.clearContents(); - - let text_bytes = NSData::dataWithBytes_length_( - nil, - string.text.as_ptr() as *const c_void, - string.text.len() as u64, - ); - state - .pasteboard - .setData_forType(text_bytes, NSPasteboardTypeString); - - if let Some(metadata) = string.metadata.as_ref() { - let hash_bytes = ClipboardString::text_hash(&string.text).to_be_bytes(); - let hash_bytes = NSData::dataWithBytes_length_( - nil, - hash_bytes.as_ptr() as *const c_void, - hash_bytes.len() as u64, - ); - state - .pasteboard - .setData_forType(hash_bytes, state.text_hash_pasteboard_type); - - let metadata_bytes = NSData::dataWithBytes_length_( - nil, - metadata.as_ptr() as *const c_void, - metadata.len() as u64, - ); - state - .pasteboard - .setData_forType(metadata_bytes, state.metadata_pasteboard_type); - } - } - } - - unsafe fn 
write_image_to_clipboard(&self, image: &Image) { - unsafe { - let state = self.0.lock(); - state.pasteboard.clearContents(); - - let bytes = NSData::dataWithBytes_length_( - nil, - image.bytes.as_ptr() as *const c_void, - image.bytes.len() as u64, - ); - - state - .pasteboard - .setData_forType(bytes, Into::::into(image.format).inner_mut()); - } - } -} - -fn try_clipboard_image(pasteboard: id, format: ImageFormat) -> Option { - let mut ut_type: UTType = format.into(); - - unsafe { - let types: id = pasteboard.types(); - if msg_send![types, containsObject: ut_type.inner()] { - let data = pasteboard.dataForType(ut_type.inner_mut()); - if data == nil { - None - } else { - let bytes = Vec::from(slice::from_raw_parts( - data.bytes() as *mut u8, - data.length() as usize, - )); - let id = hash(&bytes); - - Some(ClipboardItem { - entries: vec![ClipboardEntry::Image(Image { format, bytes, id })], - }) - } - } else { - None - } - } -} - unsafe fn path_from_objc(path: id) -> PathBuf { let len = msg_send![path, lengthOfBytesUsingEncoding: NSUTF8StringEncoding]; let bytes = unsafe { path.UTF8String() as *const u8 }; @@ -1605,120 +1377,3 @@ mod security { pub const errSecUserCanceled: OSStatus = -128; pub const errSecItemNotFound: OSStatus = -25300; } - -impl From for UTType { - fn from(value: ImageFormat) -> Self { - match value { - ImageFormat::Png => Self::png(), - ImageFormat::Jpeg => Self::jpeg(), - ImageFormat::Tiff => Self::tiff(), - ImageFormat::Webp => Self::webp(), - ImageFormat::Gif => Self::gif(), - ImageFormat::Bmp => Self::bmp(), - ImageFormat::Svg => Self::svg(), - ImageFormat::Ico => Self::ico(), - } - } -} - -// See https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/ -struct UTType(id); - -impl UTType { - pub fn png() -> Self { - // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/png - Self(unsafe { NSPasteboardTypePNG }) // This is a rare case where there's a built-in NSPasteboardType - } - 
- pub fn jpeg() -> Self { - // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/jpeg - Self(unsafe { ns_string("public.jpeg") }) - } - - pub fn gif() -> Self { - // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/gif - Self(unsafe { ns_string("com.compuserve.gif") }) - } - - pub fn webp() -> Self { - // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/webp - Self(unsafe { ns_string("org.webmproject.webp") }) - } - - pub fn bmp() -> Self { - // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/bmp - Self(unsafe { ns_string("com.microsoft.bmp") }) - } - - pub fn svg() -> Self { - // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/svg - Self(unsafe { ns_string("public.svg-image") }) - } - - pub fn ico() -> Self { - // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/ico - Self(unsafe { ns_string("com.microsoft.ico") }) - } - - pub fn tiff() -> Self { - // https://developer.apple.com/documentation/uniformtypeidentifiers/uttype-swift.struct/tiff - Self(unsafe { NSPasteboardTypeTIFF }) // This is a rare case where there's a built-in NSPasteboardType - } - - fn inner(&self) -> *const Object { - self.0 - } - - fn inner_mut(&self) -> *mut Object { - self.0 as *mut _ - } -} - -#[cfg(test)] -mod tests { - use crate::ClipboardItem; - - use super::*; - - #[test] - fn test_clipboard() { - let platform = build_platform(); - assert_eq!(platform.read_from_clipboard(), None); - - let item = ClipboardItem::new_string("1".to_string()); - platform.write_to_clipboard(item.clone()); - assert_eq!(platform.read_from_clipboard(), Some(item)); - - let item = ClipboardItem { - entries: vec![ClipboardEntry::String( - ClipboardString::new("2".to_string()).with_json_metadata(vec![3, 4]), - )], - }; - platform.write_to_clipboard(item.clone()); - 
assert_eq!(platform.read_from_clipboard(), Some(item)); - - let text_from_other_app = "text from other app"; - unsafe { - let bytes = NSData::dataWithBytes_length_( - nil, - text_from_other_app.as_ptr() as *const c_void, - text_from_other_app.len() as u64, - ); - platform - .0 - .lock() - .pasteboard - .setData_forType(bytes, NSPasteboardTypeString); - } - assert_eq!( - platform.read_from_clipboard(), - Some(ClipboardItem::new_string(text_from_other_app.to_string())) - ); - } - - fn build_platform() -> MacPlatform { - let platform = MacPlatform::new(false); - platform.0.lock().pasteboard = unsafe { NSPasteboard::pasteboardWithUniqueName(nil) }; - platform - } -} diff --git a/crates/gpui/src/platform/test/platform.rs b/crates/gpui/src/platform/test/platform.rs index dfada364667989792325e02f8530e6c91bdf4716..ca9d5e2c3b7d405e40f208f5406f879467eafc5c 100644 --- a/crates/gpui/src/platform/test/platform.rs +++ b/crates/gpui/src/platform/test/platform.rs @@ -32,6 +32,8 @@ pub(crate) struct TestPlatform { current_clipboard_item: Mutex>, #[cfg(any(target_os = "linux", target_os = "freebsd"))] current_primary_item: Mutex>, + #[cfg(target_os = "macos")] + current_find_pasteboard_item: Mutex>, pub(crate) prompts: RefCell, screen_capture_sources: RefCell>, pub opened_url: RefCell>, @@ -117,6 +119,8 @@ impl TestPlatform { current_clipboard_item: Mutex::new(None), #[cfg(any(target_os = "linux", target_os = "freebsd"))] current_primary_item: Mutex::new(None), + #[cfg(target_os = "macos")] + current_find_pasteboard_item: Mutex::new(None), weak: weak.clone(), opened_url: Default::default(), #[cfg(target_os = "windows")] @@ -398,9 +402,8 @@ impl Platform for TestPlatform { false } - #[cfg(any(target_os = "linux", target_os = "freebsd"))] - fn write_to_primary(&self, item: ClipboardItem) { - *self.current_primary_item.lock() = Some(item); + fn read_from_clipboard(&self) -> Option { + self.current_clipboard_item.lock().clone() } fn write_to_clipboard(&self, item: ClipboardItem) { @@ 
-412,8 +415,19 @@ impl Platform for TestPlatform { self.current_primary_item.lock().clone() } - fn read_from_clipboard(&self) -> Option { - self.current_clipboard_item.lock().clone() + #[cfg(any(target_os = "linux", target_os = "freebsd"))] + fn write_to_primary(&self, item: ClipboardItem) { + *self.current_primary_item.lock() = Some(item); + } + + #[cfg(target_os = "macos")] + fn read_from_find_pasteboard(&self) -> Option { + self.current_find_pasteboard_item.lock().clone() + } + + #[cfg(target_os = "macos")] + fn write_to_find_pasteboard(&self, item: ClipboardItem) { + *self.current_find_pasteboard_item.lock() = Some(item); } fn write_credentials(&self, _url: &str, _username: &str, _password: &[u8]) -> Task> { diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 12b283ab22937b7952d18d63b1378d2914211f9b..be3331048bc78a91a8d3c5a3637d6bf6ea007e4d 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -106,7 +106,10 @@ pub struct BufferSearchBar { replacement_editor_focused: bool, active_searchable_item: Option>, active_match_index: Option, - active_searchable_item_subscription: Option, + #[cfg(target_os = "macos")] + active_searchable_item_subscriptions: Option<[Subscription; 2]>, + #[cfg(not(target_os = "macos"))] + active_searchable_item_subscriptions: Option, active_search: Option>, searchable_items_with_matches: HashMap, AnyVec>, pending_search: Option>, @@ -472,7 +475,7 @@ impl ToolbarItemView for BufferSearchBar { cx: &mut Context, ) -> ToolbarItemLocation { cx.notify(); - self.active_searchable_item_subscription.take(); + self.active_searchable_item_subscriptions.take(); self.active_searchable_item.take(); self.pending_search.take(); @@ -482,18 +485,58 @@ impl ToolbarItemView for BufferSearchBar { { let this = cx.entity().downgrade(); - self.active_searchable_item_subscription = - Some(searchable_item_handle.subscribe_to_search_events( - window, - cx, - Box::new(move |search_event, 
window, cx| { - if let Some(this) = this.upgrade() { - this.update(cx, |this, cx| { - this.on_active_searchable_item_event(search_event, window, cx) - }); + let search_event_subscription = searchable_item_handle.subscribe_to_search_events( + window, + cx, + Box::new(move |search_event, window, cx| { + if let Some(this) = this.upgrade() { + this.update(cx, |this, cx| { + this.on_active_searchable_item_event(search_event, window, cx) + }); + } + }), + ); + + #[cfg(target_os = "macos")] + { + let item_focus_handle = searchable_item_handle.item_focus_handle(cx); + + self.active_searchable_item_subscriptions = Some([ + search_event_subscription, + cx.on_focus(&item_focus_handle, window, |this, window, cx| { + if this.query_editor_focused || this.replacement_editor_focused { + // no need to read pasteboard since focus came from toolbar + return; } + + cx.defer_in(window, |this, window, cx| { + if let Some(item) = cx.read_from_find_pasteboard() + && let Some(text) = item.text() + { + if this.query(cx) != text { + let search_options = item + .metadata() + .and_then(|m| m.parse().ok()) + .and_then(SearchOptions::from_bits) + .unwrap_or(this.search_options); + + drop(this.search( + &text, + Some(search_options), + true, + window, + cx, + )); + } + } + }); }), - )); + ]); + } + #[cfg(not(target_os = "macos"))] + { + self.active_searchable_item_subscriptions = Some(search_event_subscription); + } let is_project_search = searchable_item_handle.supported_options(cx).find_in_results; self.active_searchable_item = Some(searchable_item_handle); @@ -663,7 +706,7 @@ impl BufferSearchBar { replacement_editor, replacement_editor_focused: false, active_searchable_item: None, - active_searchable_item_subscription: None, + active_searchable_item_subscriptions: None, active_match_index: None, searchable_items_with_matches: Default::default(), default_options: search_options, @@ -904,11 +947,21 @@ impl BufferSearchBar { }); self.set_search_options(options, cx); self.clear_matches(window, 
cx); + #[cfg(target_os = "macos")] + self.update_find_pasteboard(cx); cx.notify(); } self.update_matches(!updated, add_to_history, window, cx) } + #[cfg(target_os = "macos")] + pub fn update_find_pasteboard(&mut self, cx: &mut App) { + cx.write_to_find_pasteboard(gpui::ClipboardItem::new_string_with_metadata( + self.query(cx), + self.search_options.bits().to_string(), + )); + } + pub fn focus_editor(&mut self, _: &FocusEditor, window: &mut Window, cx: &mut Context) { if let Some(active_editor) = self.active_searchable_item.as_ref() { let handle = active_editor.item_focus_handle(cx); @@ -1098,11 +1151,12 @@ impl BufferSearchBar { cx.spawn_in(window, async move |this, cx| { if search.await.is_ok() { this.update_in(cx, |this, window, cx| { - this.activate_current_match(window, cx) - }) - } else { - Ok(()) + this.activate_current_match(window, cx); + #[cfg(target_os = "macos")] + this.update_find_pasteboard(cx); + })?; } + anyhow::Ok(()) }) .detach_and_log_err(cx); } @@ -1293,6 +1347,7 @@ impl BufferSearchBar { .insert(active_searchable_item.downgrade(), matches); this.update_match_index(window, cx); + if add_to_history { this.search_history .add(&mut this.search_history_cursor, query_text); From 361b8e0ba9b11b68c456d43d03adabf53d7f54f0 Mon Sep 17 00:00:00 2001 From: Julia Ryan Date: Fri, 19 Dec 2025 09:04:15 -0800 Subject: [PATCH 32/46] Fix sticky header scroll offset (#45377) Closes #43319 Release Notes: - Sticky headers no longer obscure the cursor when it moves. 
--------- Co-authored-by: HactarCE <6060305+HactarCE@users.noreply.github.com> --- crates/editor/src/scroll/autoscroll.rs | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/crates/editor/src/scroll/autoscroll.rs b/crates/editor/src/scroll/autoscroll.rs index 28fd9442193bbec663d3f72eaa805214375dd8ca..fc2ecb9205109532da2b43c97821b5352f27aff2 100644 --- a/crates/editor/src/scroll/autoscroll.rs +++ b/crates/editor/src/scroll/autoscroll.rs @@ -5,7 +5,7 @@ use crate::{ }; use gpui::{Bounds, Context, Pixels, Window}; use language::Point; -use multi_buffer::Anchor; +use multi_buffer::{Anchor, ToPoint}; use std::cmp; #[derive(Debug, PartialEq, Eq, Clone, Copy)] @@ -186,6 +186,19 @@ impl Editor { } } + let style = self.style(cx).clone(); + let sticky_headers = self.sticky_headers(&style, cx).unwrap_or_default(); + let visible_sticky_headers = sticky_headers + .iter() + .filter(|h| { + let buffer_snapshot = display_map.buffer_snapshot(); + let buffer_range = + h.range.start.to_point(buffer_snapshot)..h.range.end.to_point(buffer_snapshot); + + buffer_range.contains(&Point::new(target_top as u32, 0)) + }) + .count(); + let margin = if matches!(self.mode, EditorMode::AutoHeight { .. }) { 0. 
} else { @@ -218,7 +231,7 @@ impl Editor { let was_autoscrolled = match strategy { AutoscrollStrategy::Fit | AutoscrollStrategy::Newest => { let margin = margin.min(self.scroll_manager.vertical_scroll_margin); - let target_top = (target_top - margin).max(0.0); + let target_top = (target_top - margin - visible_sticky_headers as f64).max(0.0); let target_bottom = target_bottom + margin; let start_row = scroll_position.y; let end_row = start_row + visible_lines; From bfe3c66c3e38b79b7c24f94e0dcf4cf781a6dbd5 Mon Sep 17 00:00:00 2001 From: morgankrey Date: Fri, 19 Dec 2025 11:19:12 -0600 Subject: [PATCH 33/46] docs: Automatic Documentation Github Action using Droid (#45374) Adds a multi-step agentic loop to github actions for opening a once-daily documentation PR that can be merged only be a Zedi Release Notes: - N/A --------- Co-authored-by: factory-droid[bot] <138933559+factory-droid[bot]@users.noreply.github.com> --- .../prompts/docs-automation/phase2-explore.md | 55 +++ .../prompts/docs-automation/phase3-analyze.md | 57 +++ .../prompts/docs-automation/phase4-plan.md | 76 ++++ .../prompts/docs-automation/phase5-apply.md | 67 ++++ .../docs-automation/phase6-summarize.md | 54 +++ .../prompts/docs-automation/phase7-commit.md | 67 ++++ .github/workflows/docs_automation.yml | 256 +++++++++++++ docs/AGENTS.md | 353 ++++++++++++++++++ 8 files changed, 985 insertions(+) create mode 100644 .factory/prompts/docs-automation/phase2-explore.md create mode 100644 .factory/prompts/docs-automation/phase3-analyze.md create mode 100644 .factory/prompts/docs-automation/phase4-plan.md create mode 100644 .factory/prompts/docs-automation/phase5-apply.md create mode 100644 .factory/prompts/docs-automation/phase6-summarize.md create mode 100644 .factory/prompts/docs-automation/phase7-commit.md create mode 100644 .github/workflows/docs_automation.yml create mode 100644 docs/AGENTS.md diff --git a/.factory/prompts/docs-automation/phase2-explore.md 
b/.factory/prompts/docs-automation/phase2-explore.md new file mode 100644 index 0000000000000000000000000000000000000000..e8f0b1861912f9665d70e42dd02e1bb8a398b01e --- /dev/null +++ b/.factory/prompts/docs-automation/phase2-explore.md @@ -0,0 +1,55 @@ +# Phase 2: Explore Repository + +You are analyzing a codebase to understand its structure before reviewing documentation impact. + +## Objective +Produce a structured overview of the repository to inform subsequent documentation analysis. + +## Instructions + +1. **Identify Primary Languages and Frameworks** + - Scan for Cargo.toml, package.json, or other manifest files + - Note the primary language(s) and key dependencies + +2. **Map Documentation Structure** + - This project uses **mdBook** (https://rust-lang.github.io/mdBook/) + - Documentation is in `docs/src/` + - Table of contents: `docs/src/SUMMARY.md` (mdBook format: https://rust-lang.github.io/mdBook/format/summary.html) + - Style guide: `docs/.rules` + - Agent guidelines: `docs/AGENTS.md` + - Formatting: Prettier (config in `docs/.prettierrc`) + +3. **Identify Build and Tooling** + - Note build systems (cargo, npm, etc.) + - Identify documentation tooling (mdbook, etc.) + +4. 
**Output Format** +Produce a JSON summary: + +```json +{ + "primary_language": "Rust", + "frameworks": ["GPUI"], + "documentation": { + "system": "mdBook", + "location": "docs/src/", + "toc_file": "docs/src/SUMMARY.md", + "toc_format": "https://rust-lang.github.io/mdBook/format/summary.html", + "style_guide": "docs/.rules", + "agent_guidelines": "docs/AGENTS.md", + "formatter": "prettier", + "formatter_config": "docs/.prettierrc", + "custom_preprocessor": "docs_preprocessor (handles {#kb action::Name} syntax)" + }, + "key_directories": { + "source": "crates/", + "docs": "docs/src/", + "extensions": "extensions/" + } +} +``` + +## Constraints +- Read-only: Do not modify any files +- Focus on structure, not content details +- Complete within 2 minutes diff --git a/.factory/prompts/docs-automation/phase3-analyze.md b/.factory/prompts/docs-automation/phase3-analyze.md new file mode 100644 index 0000000000000000000000000000000000000000..8fc8622434d3be2e6be7997e9773c1b2435202c6 --- /dev/null +++ b/.factory/prompts/docs-automation/phase3-analyze.md @@ -0,0 +1,57 @@ +# Phase 3: Analyze Changes + +You are analyzing code changes to understand their nature and scope. + +## Objective +Produce a clear, neutral summary of what changed in the codebase. + +## Input +You will receive: +- List of changed files from the triggering commit/PR +- Repository structure from Phase 2 + +## Instructions + +1. **Categorize Changed Files** + - Source code (which crates/modules) + - Configuration + - Tests + - Documentation (already existing) + - Other + +2. **Analyze Each Change** + - Review diffs for files likely to impact documentation + - Focus on: public APIs, settings, keybindings, commands, user-visible behavior + +3. **Identify What Did NOT Change** + - Note stable interfaces or behaviors + - Important for avoiding unnecessary documentation updates + +4. 
**Output Format** +Produce a markdown summary: + +```markdown +## Change Analysis + +### Changed Files Summary +| Category | Files | Impact Level | +| --- | --- | --- | +| Source - [crate] | file1.rs, file2.rs | High/Medium/Low | +| Settings | settings.json | Medium | +| Tests | test_*.rs | None | + +### Behavioral Changes +- **[Feature/Area]**: Description of what changed from user perspective +- **[Feature/Area]**: Description... + +### Unchanged Areas +- [Area]: Confirmed no changes to [specific behavior] + +### Files Requiring Deeper Review +- `path/to/file.rs`: Reason for deeper review +``` + +## Constraints +- Read-only: Do not modify any files +- Neutral tone: Describe what changed, not whether it's good/bad +- Do not propose documentation changes yet diff --git a/.factory/prompts/docs-automation/phase4-plan.md b/.factory/prompts/docs-automation/phase4-plan.md new file mode 100644 index 0000000000000000000000000000000000000000..9e6a15814e7813cbf27afb0413987afa539feaf6 --- /dev/null +++ b/.factory/prompts/docs-automation/phase4-plan.md @@ -0,0 +1,76 @@ +# Phase 4: Plan Documentation Impact + +You are determining whether and how documentation should be updated based on code changes. + +## Objective +Produce a structured documentation plan that will guide Phase 5 execution. + +## Documentation System +This is an **mdBook** site (https://rust-lang.github.io/mdBook/): +- `docs/src/SUMMARY.md` defines book structure per https://rust-lang.github.io/mdBook/format/summary.html +- If adding new pages, they MUST be added to SUMMARY.md +- Use `{#kb action::ActionName}` syntax for keybindings (custom preprocessor expands these) +- Prettier formatting (80 char width) will be applied automatically + +## Input +You will receive: +- Change analysis from Phase 3 +- Repository structure from Phase 2 +- Documentation guidelines from `docs/AGENTS.md` + +## Instructions + +1. 
**Review AGENTS.md** + - Load and apply all rules from `docs/AGENTS.md` + - Respect scope boundaries (in-scope vs out-of-scope) + +2. **Evaluate Documentation Impact** + For each behavioral change from Phase 3: + - Does existing documentation cover this area? + - Is the documentation now inaccurate or incomplete? + - Classify per AGENTS.md "Change Classification" section + +3. **Identify Specific Updates** + For each required update: + - Exact file path + - Specific section or heading + - Type of change (update existing, add new, deprecate) + - Description of the change + +4. **Flag Uncertainty** + Explicitly mark: + - Assumptions you're making + - Areas where human confirmation is needed + - Ambiguous requirements + +5. **Output Format** +Use the exact format specified in `docs/AGENTS.md` Phase 4 section: + +```markdown +## Documentation Impact Assessment + +### Summary +Brief description of code changes analyzed. + +### Documentation Updates Required: [Yes/No] + +### Planned Changes + +#### 1. 
[File Path] +- **Section**: [Section name or "New section"] +- **Change Type**: [Update/Add/Deprecate] +- **Reason**: Why this change is needed +- **Description**: What will be added/modified + +### Uncertainty Flags +- [ ] [Description of any assumptions or areas needing confirmation] + +### No Changes Needed +- [List files reviewed but not requiring updates, with brief reason] +``` + +## Constraints +- Read-only: Do not modify any files +- Conservative: When uncertain, flag for human review rather than planning changes +- Scoped: Only plan changes that trace directly to code changes from Phase 3 +- No scope expansion: Do not plan "improvements" unrelated to triggering changes diff --git a/.factory/prompts/docs-automation/phase5-apply.md b/.factory/prompts/docs-automation/phase5-apply.md new file mode 100644 index 0000000000000000000000000000000000000000..9cc63071fccf880443b729baa06f0ddbd769276b --- /dev/null +++ b/.factory/prompts/docs-automation/phase5-apply.md @@ -0,0 +1,67 @@ +# Phase 5: Apply Documentation Plan + +You are executing a pre-approved documentation plan for an **mdBook** documentation site. + +## Objective +Implement exactly the changes specified in the documentation plan from Phase 4. + +## Documentation System +- **mdBook**: https://rust-lang.github.io/mdBook/ +- **SUMMARY.md**: Follows mdBook format (https://rust-lang.github.io/mdBook/format/summary.html) +- **Prettier**: Will be run automatically after this phase (80 char line width) +- **Custom preprocessor**: Use `{#kb action::ActionName}` for keybindings instead of hardcoding + +## Input +You will receive: +- Documentation plan from Phase 4 +- Documentation guidelines from `docs/AGENTS.md` +- Style rules from `docs/.rules` + +## Instructions + +1. **Validate Plan** + - Confirm all planned files are within scope per AGENTS.md + - Verify no out-of-scope files are targeted + +2. 
**Execute Each Planned Change** + For each item in "Planned Changes": + - Navigate to the specified file + - Locate the specified section + - Apply the described change + - Follow style rules from `docs/.rules` + +3. **Style Compliance** + Every edit must follow `docs/.rules`: + - Second person, present tense + - No hedging words ("simply", "just", "easily") + - Proper keybinding format (`Cmd+Shift+P`) + - Settings Editor first, JSON second + - Correct terminology (folder not directory, etc.) + +4. **Preserve Context** + - Maintain surrounding content structure + - Keep consistent heading levels + - Preserve existing cross-references + +## Constraints +- Execute ONLY changes listed in the plan +- Do not discover new documentation targets +- Do not make stylistic improvements outside planned sections +- Do not expand scope beyond what Phase 4 specified +- If a planned change cannot be applied (file missing, section not found), skip and note it + +## Output +After applying changes, output a summary: + +```markdown +## Applied Changes + +### Successfully Applied +- `path/to/file.md`: [Brief description of change] + +### Skipped (Could Not Apply) +- `path/to/file.md`: [Reason - e.g., "Section not found"] + +### Warnings +- [Any issues encountered during application] +``` diff --git a/.factory/prompts/docs-automation/phase6-summarize.md b/.factory/prompts/docs-automation/phase6-summarize.md new file mode 100644 index 0000000000000000000000000000000000000000..b1480ac9431702539fa2a570c2b456bcdfae46af --- /dev/null +++ b/.factory/prompts/docs-automation/phase6-summarize.md @@ -0,0 +1,54 @@ +# Phase 6: Summarize Changes + +You are generating a summary of documentation updates for PR review. + +## Objective +Create a clear, reviewable summary of all documentation changes made. + +## Input +You will receive: +- Applied changes report from Phase 5 +- Original change analysis from Phase 3 +- Git diff of documentation changes + +## Instructions + +1. 
**Gather Change Information** + - List all modified documentation files + - Identify the corresponding code changes that triggered each update + +2. **Generate Summary** + Use the format specified in `docs/AGENTS.md` Phase 6 section: + +```markdown +## Documentation Update Summary + +### Changes Made +| File | Change | Related Code | +| --- | --- | --- | +| docs/src/path.md | Brief description | PR #123 or commit SHA | + +### Rationale +Brief explanation of why these updates were made, linking back to the triggering code changes. + +### Review Notes +- Items reviewers should pay special attention to +- Any uncertainty flags from Phase 4 that were addressed +- Assumptions made during documentation +``` + +3. **Add Context for Reviewers** + - Highlight any changes that might be controversial + - Note if any planned changes were skipped and why + - Flag areas where reviewer expertise is especially needed + +## Output Format +The summary should be suitable for: +- PR description body +- Commit message (condensed version) +- Team communication + +## Constraints +- Read-only (documentation changes already applied in Phase 5) +- Factual: Describe what was done, not justify why it's good +- Complete: Account for all changes, including skipped items diff --git a/.factory/prompts/docs-automation/phase7-commit.md b/.factory/prompts/docs-automation/phase7-commit.md new file mode 100644 index 0000000000000000000000000000000000000000..adfd92eec7d3058af3917f2663228b4f6ee5c445 --- /dev/null +++ b/.factory/prompts/docs-automation/phase7-commit.md @@ -0,0 +1,67 @@ +# Phase 7: Commit and Open PR + +You are creating a git branch, committing documentation changes, and opening a PR. + +## Objective +Package documentation updates into a reviewable pull request. + +## Input +You will receive: +- Summary from Phase 6 +- List of modified files + +## Instructions + +1. 
**Create Branch** + ```sh + git checkout -b docs/auto-update-{date} + ``` + Use format: `docs/auto-update-YYYY-MM-DD` or `docs/auto-update-{short-sha}` + +2. **Stage and Commit** + - Stage only documentation files in `docs/src/` + - Do not stage any other files + + Commit message format: + ``` + docs: auto-update documentation for [brief description] + + [Summary from Phase 6, condensed] + + Triggered by: [commit SHA or PR reference] + + Co-authored-by: factory-droid[bot] <138933559+factory-droid[bot]@users.noreply.github.com> + ``` + +3. **Push Branch** + ```sh + git push -u origin docs/auto-update-{date} + ``` + +4. **Create Pull Request** + Use the Phase 6 summary as the PR body. + + PR Title: `docs: [Brief description of documentation updates]` + + Labels (if available): `documentation`, `automated` + + Base branch: `main` + +## Constraints +- Do NOT auto-merge +- Do NOT request specific reviewers (let CODEOWNERS handle it) +- Do NOT modify files outside `docs/src/` +- If no changes to commit, exit gracefully with message "No documentation changes to commit" + +## Output +```markdown +## PR Created + +- **Branch**: docs/auto-update-{date} +- **PR URL**: https://github.com/zed-industries/zed/pull/XXXX +- **Status**: Ready for review + +### Commit +- SHA: {commit-sha} +- Files: {count} documentation files modified +``` diff --git a/.github/workflows/docs_automation.yml b/.github/workflows/docs_automation.yml new file mode 100644 index 0000000000000000000000000000000000000000..e4aa79c7fc09d6d7735ac82e2315d68b923d5323 --- /dev/null +++ b/.github/workflows/docs_automation.yml @@ -0,0 +1,256 @@ +name: Documentation Automation + +on: + push: + branches: [main] + paths: + - 'crates/**' + - 'extensions/**' + workflow_dispatch: + inputs: + pr_number: + description: 'PR number to analyze (gets full PR diff)' + required: false + type: string + trigger_sha: + description: 'Commit SHA to analyze (ignored if pr_number is set)' + required: false + type: string + +permissions: 
+ contents: write + pull-requests: write + +env: + FACTORY_API_KEY: ${{ secrets.FACTORY_API_KEY }} + DROID_MODEL: claude-opus-4-5 + +jobs: + docs-automation: + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Install Droid CLI + run: | + curl -fsSL https://cli.factory.ai/install.sh | bash + echo "${HOME}/.factory/bin" >> "$GITHUB_PATH" + + - name: Setup Node.js (for Prettier) + uses: actions/setup-node@v4 + with: + node-version: '20' + + - name: Install Prettier + run: npm install -g prettier + + - name: Get changed files + id: changed + run: | + if [ -n "${{ inputs.pr_number }}" ]; then + # Get full PR diff + echo "Analyzing PR #${{ inputs.pr_number }}" + echo "source=pr" >> "$GITHUB_OUTPUT" + echo "ref=${{ inputs.pr_number }}" >> "$GITHUB_OUTPUT" + gh pr diff "${{ inputs.pr_number }}" --name-only > /tmp/changed_files.txt + elif [ -n "${{ inputs.trigger_sha }}" ]; then + # Get single commit diff + SHA="${{ inputs.trigger_sha }}" + echo "Analyzing commit $SHA" + echo "source=commit" >> "$GITHUB_OUTPUT" + echo "ref=$SHA" >> "$GITHUB_OUTPUT" + git diff --name-only "${SHA}^" "$SHA" > /tmp/changed_files.txt + else + # Default to current commit + SHA="${{ github.sha }}" + echo "Analyzing commit $SHA" + echo "source=commit" >> "$GITHUB_OUTPUT" + echo "ref=$SHA" >> "$GITHUB_OUTPUT" + git diff --name-only "${SHA}^" "$SHA" > /tmp/changed_files.txt || git diff --name-only HEAD~1 HEAD > /tmp/changed_files.txt + fi + + echo "Changed files:" + cat /tmp/changed_files.txt + env: + GH_TOKEN: ${{ github.token }} + + # Phase 0: Guardrails are loaded via AGENTS.md in each phase + + # Phase 2: Explore Repository (Read-Only) + - name: "Phase 2: Explore Repository" + id: phase2 + run: | + droid exec \ + --model "$DROID_MODEL" \ + --autonomy read-only \ + --prompt-file .factory/prompts/docs-automation/phase2-explore.md \ + --output /tmp/phase2-output.json \ + --format json + echo 
"Repository exploration complete" + cat /tmp/phase2-output.json + + # Phase 3: Analyze Changes (Read-Only) + - name: "Phase 3: Analyze Changes" + id: phase3 + run: | + CHANGED_FILES=$(tr '\n' ' ' < /tmp/changed_files.txt) + droid exec \ + --model "$DROID_MODEL" \ + --autonomy read-only \ + --prompt-file .factory/prompts/docs-automation/phase3-analyze.md \ + --context "Changed files: $CHANGED_FILES" \ + --context-file /tmp/phase2-output.json \ + --output /tmp/phase3-output.md \ + --format markdown + echo "Change analysis complete" + cat /tmp/phase3-output.md + + # Phase 4: Plan Documentation Impact (Read-Only) + - name: "Phase 4: Plan Documentation Impact" + id: phase4 + run: | + droid exec \ + --model "$DROID_MODEL" \ + --autonomy read-only \ + --prompt-file .factory/prompts/docs-automation/phase4-plan.md \ + --context-file /tmp/phase3-output.md \ + --context-file docs/AGENTS.md \ + --output /tmp/phase4-plan.md \ + --format markdown + echo "Documentation plan complete" + cat /tmp/phase4-plan.md + + # Check if updates are required + if grep -q "Documentation Updates Required: No" /tmp/phase4-plan.md; then + echo "updates_required=false" >> "$GITHUB_OUTPUT" + else + echo "updates_required=true" >> "$GITHUB_OUTPUT" + fi + + # Phase 5: Apply Plan (Write-Enabled) + - name: "Phase 5: Apply Documentation Plan" + id: phase5 + if: steps.phase4.outputs.updates_required == 'true' + run: | + droid exec \ + --model "$DROID_MODEL" \ + --autonomy medium \ + --prompt-file .factory/prompts/docs-automation/phase5-apply.md \ + --context-file /tmp/phase4-plan.md \ + --context-file docs/AGENTS.md \ + --context-file docs/.rules \ + --output /tmp/phase5-report.md \ + --format markdown + echo "Documentation updates applied" + cat /tmp/phase5-report.md + + # Phase 5b: Format with Prettier + - name: "Phase 5b: Format with Prettier" + id: phase5b + if: steps.phase4.outputs.updates_required == 'true' + run: | + echo "Formatting documentation with Prettier..." 
+ cd docs && prettier --write src/ + + echo "Verifying Prettier formatting passes..." + cd docs && prettier --check src/ + + echo "Prettier formatting complete" + + # Phase 6: Summarize Changes + - name: "Phase 6: Summarize Changes" + id: phase6 + if: steps.phase4.outputs.updates_required == 'true' + run: | + # Get git diff of docs + git diff docs/src/ > /tmp/docs-diff.txt || true + + droid exec \ + --model "$DROID_MODEL" \ + --autonomy read-only \ + --prompt-file .factory/prompts/docs-automation/phase6-summarize.md \ + --context-file /tmp/phase5-report.md \ + --context-file /tmp/phase3-output.md \ + --context "Trigger SHA: ${{ steps.changed.outputs.sha }}" \ + --output /tmp/phase6-summary.md \ + --format markdown + echo "Summary generated" + cat /tmp/phase6-summary.md + + # Phase 7: Commit and Open PR + - name: "Phase 7: Create PR" + id: phase7 + if: steps.phase4.outputs.updates_required == 'true' + run: | + # Check if there are actual changes + if git diff --quiet docs/src/; then + echo "No documentation changes detected" + exit 0 + fi + + # Configure git + git config user.name "factory-droid[bot]" + git config user.email "138933559+factory-droid[bot]@users.noreply.github.com" + + # Daily batch branch - one branch per day, multiple commits accumulate + BRANCH_NAME="docs/auto-update-$(date +%Y-%m-%d)" + + # Check if branch already exists on remote + if git ls-remote --exit-code --heads origin "$BRANCH_NAME" > /dev/null 2>&1; then + echo "Branch $BRANCH_NAME exists, checking out and updating..." + git fetch origin "$BRANCH_NAME" + git checkout -B "$BRANCH_NAME" "origin/$BRANCH_NAME" + else + echo "Creating new branch $BRANCH_NAME..." 
+ git checkout -b "$BRANCH_NAME" + fi + + # Stage and commit + git add docs/src/ + SUMMARY=$(head -50 < /tmp/phase6-summary.md) + git commit -m "docs: auto-update documentation + + ${SUMMARY} + + Triggered by: ${{ steps.changed.outputs.source }} ${{ steps.changed.outputs.ref }} + + Co-authored-by: factory-droid[bot] <138933559+factory-droid[bot]@users.noreply.github.com>" + + # Push + git push -u origin "$BRANCH_NAME" + + # Check if PR already exists for this branch + EXISTING_PR=$(gh pr list --head "$BRANCH_NAME" --json number --jq '.[0].number' || echo "") + + if [ -n "$EXISTING_PR" ]; then + echo "PR #$EXISTING_PR already exists for branch $BRANCH_NAME, updated with new commit" + else + # Create new PR + gh pr create \ + --title "docs: automated documentation update ($(date +%Y-%m-%d))" \ + --body-file /tmp/phase6-summary.md \ + --base main || true + echo "PR created on branch: $BRANCH_NAME" + fi + env: + GH_TOKEN: ${{ github.token }} + + # Summary output + - name: "Summary" + if: always() + run: | + echo "## Documentation Automation Summary" >> "$GITHUB_STEP_SUMMARY" + echo "" >> "$GITHUB_STEP_SUMMARY" + + if [ "${{ steps.phase4.outputs.updates_required }}" == "false" ]; then + echo "No documentation updates required for this change." >> "$GITHUB_STEP_SUMMARY" + elif [ -f /tmp/phase6-summary.md ]; then + cat /tmp/phase6-summary.md >> "$GITHUB_STEP_SUMMARY" + else + echo "Workflow completed. Check individual phase outputs for details." >> "$GITHUB_STEP_SUMMARY" + fi diff --git a/docs/AGENTS.md b/docs/AGENTS.md new file mode 100644 index 0000000000000000000000000000000000000000..fdd61ff6aeaf8cd09ae0b017c5199e7033fba964 --- /dev/null +++ b/docs/AGENTS.md @@ -0,0 +1,353 @@ +# Documentation Automation Agent Guidelines + +This file governs automated documentation updates triggered by code changes. All automation phases must comply with these rules. + +## Documentation System + +This documentation uses **mdBook** (https://rust-lang.github.io/mdBook/). 
+ +### Key Files + +- **`docs/src/SUMMARY.md`**: Table of contents following mdBook format (https://rust-lang.github.io/mdBook/format/summary.html) +- **`docs/book.toml`**: mdBook configuration +- **`docs/.prettierrc`**: Prettier config (80 char line width) + +### SUMMARY.md Format + +The `SUMMARY.md` file defines the book structure. Format rules: + +- Chapter titles are links: `[Title](./path/to/file.md)` +- Nesting via indentation (2 spaces per level) +- Separators: `---` for horizontal rules between sections +- Draft chapters: `[Title]()` (empty parens, not yet written) + +Example: + +```markdown +# Section Title + +- [Chapter](./chapter.md) + - [Nested Chapter](./nested.md) + +--- + +# Another Section +``` + +### Custom Preprocessor + +The docs use a custom preprocessor (`docs_preprocessor`) that expands special commands: + +| Syntax | Purpose | Example | +| ----------------------------- | ------------------------------------- | ------------------------------- | +| `{#kb action::ActionName}` | Keybinding for action | `{#kb agent::ToggleFocus}` | +| `{#action agent::ActionName}` | Action reference (renders as command) | `{#action agent::OpenSettings}` | + +**Rules:** + +- Always use preprocessor syntax for keybindings instead of hardcoding +- Action names use `snake_case` in the namespace, `PascalCase` for the action +- Common namespaces: `agent::`, `editor::`, `assistant::`, `vim::` + +### Formatting Requirements + +All documentation must pass **Prettier** formatting: + +```sh +cd docs && npx prettier --check src/ +``` + +Before any documentation change is considered complete: + +1. Run Prettier to format: `cd docs && npx prettier --write src/` +2. 
Verify it passes: `cd docs && npx prettier --check src/` + +Prettier config: 80 character line width (`docs/.prettierrc`) + +### Section Anchors + +Use `{#anchor-id}` syntax for linkable section headers: + +```markdown +## Getting Started {#getting-started} + +### Custom Models {#anthropic-custom-models} +``` + +Anchor IDs should be: + +- Lowercase with hyphens +- Unique within the page +- Descriptive (can include parent context like `anthropic-custom-models`) + +### Code Block Annotations + +Use annotations after the language identifier to indicate file context: + +```markdown +\`\`\`json [settings] +{ +"agent": { ... } +} +\`\`\` + +\`\`\`json [keymap] +[ +{ "bindings": { ... } } +] +\`\`\` +``` + +Valid annotations: `[settings]` (for settings.json), `[keymap]` (for keymap.json) + +### Blockquote Formatting + +Use bold labels for callouts: + +```markdown +> **Note:** Important information the user should know. + +> **Tip:** Helpful advice that saves time or improves workflow. + +> **Warn:** Caution about potential issues or gotchas. +``` + +### Image References + +Images are hosted externally. Reference format: + +```markdown +![Alt text description](https://zed.dev/img/path/to/image.webp) +``` + +### Cross-Linking + +- Relative links for same-directory: `[Agent Panel](./agent-panel.md)` +- With anchors: `[Custom Models](./llm-providers.md#anthropic-custom-models)` +- Parent directory: `[Telemetry](../telemetry.md)` + +## Scope + +### In-Scope Documentation + +- All Markdown files in `docs/src/` +- `docs/src/SUMMARY.md` (mdBook table of contents) +- Language-specific docs in `docs/src/languages/` +- Feature docs (AI, extensions, configuration, etc.) 
+ +### Out-of-Scope (Do Not Modify) + +- `CHANGELOG.md`, `CONTRIBUTING.md`, `README.md` at repo root +- Inline code comments and rustdoc +- `CLAUDE.md`, `GEMINI.md`, or other AI instruction files +- Build configuration (`book.toml`, theme files, `docs_preprocessor`) +- Any file outside `docs/src/` + +## Page Structure Patterns + +### Standard Page Layout + +Most documentation pages follow this structure: + +1. **Title** (H1) - Single sentence or phrase +2. **Overview/Introduction** - 1-3 paragraphs explaining what this is +3. **Getting Started** `{#getting-started}` - Prerequisites and first steps +4. **Main Content** - Feature details, organized by topic +5. **Advanced/Configuration** - Power user options +6. **See Also** (optional) - Related documentation links + +### Settings Documentation Pattern + +When documenting settings: + +1. Show the Settings Editor (UI) approach first +2. Then show JSON as "Or add this to your settings.json:" +3. Always show complete, valid JSON with surrounding structure: + +```json [settings] +{ + "agent": { + "default_model": { + "provider": "anthropic", + "model": "claude-sonnet-4" + } + } +} +``` + +### Provider/Feature Documentation Pattern + +For each provider or distinct feature: + +1. H3 heading with anchor: `### Provider Name {#provider-name}` +2. Brief description (1-2 sentences) +3. Setup steps (numbered list) +4. Configuration example (JSON code block) +5. Custom models section if applicable: `#### Custom Models {#provider-custom-models}` + +## Style Rules + +Inherit all conventions from `docs/.rules`. 
Key points: + +### Voice + +- Second person ("you"), present tense +- Direct and concise—no hedging ("simply", "just", "easily") +- Honest about limitations; no promotional language + +### Formatting + +- Keybindings: backticks with `+` for simultaneous keys (`Cmd+Shift+P`) +- Show both macOS and Linux/Windows variants when they differ +- Use `sh` code blocks for terminal commands +- Settings: show Settings Editor UI first, JSON as secondary + +### Terminology + +| Use | Instead of | +| --------------- | -------------------------------------- | +| folder | directory | +| project | workspace | +| Settings Editor | settings UI | +| command palette | command bar | +| panel | sidebar (be specific: "Project Panel") | + +## Zed-Specific Conventions + +### Recognized Rules Files + +When documenting rules/instructions for AI, note that Zed recognizes these files (in priority order): + +- `.rules` +- `.cursorrules` +- `.windsurfrules` +- `.clinerules` +- `.github/copilot-instructions.md` +- `AGENT.md` +- `AGENTS.md` +- `CLAUDE.md` +- `GEMINI.md` + +### Settings File Locations + +- macOS: `~/.config/zed/settings.json` +- Linux: `~/.config/zed/settings.json` +- Windows: `%AppData%\Zed\settings.json` + +### Keymap File Locations + +- macOS: `~/.config/zed/keymap.json` +- Linux: `~/.config/zed/keymap.json` +- Windows: `%AppData%\Zed\keymap.json` + +## Safety Constraints + +### Must Not + +- Delete existing documentation files +- Remove sections documenting existing functionality +- Change URLs or anchor links without verifying references +- Modify `SUMMARY.md` structure without corresponding content +- Add speculative documentation for unreleased features +- Include internal implementation details not relevant to users + +### Must + +- Preserve existing structure when updating content +- Maintain backward compatibility of documented settings/commands +- Flag uncertainty explicitly rather than guessing +- Link to related documentation when adding new sections + +## Change 
Classification + +### Requires Documentation Update + +- New user-facing features or commands +- Changed keybindings or default behaviors +- Modified settings schema or options +- Deprecated or removed functionality +- API changes affecting extensions + +### Does Not Require Documentation Update + +- Internal refactoring without behavioral changes +- Performance optimizations (unless user-visible) +- Bug fixes that restore documented behavior +- Test changes +- CI/CD changes + +## Output Format + +### Phase 4 Documentation Plan + +When generating a documentation plan, use this structure: + +```markdown +## Documentation Impact Assessment + +### Summary + +Brief description of code changes analyzed. + +### Documentation Updates Required: [Yes/No] + +### Planned Changes + +#### 1. [File Path] + +- **Section**: [Section name or "New section"] +- **Change Type**: [Update/Add/Deprecate] +- **Reason**: Why this change is needed +- **Description**: What will be added/modified + +#### 2. [File Path] + +... + +### Uncertainty Flags + +- [ ] [Description of any assumptions or areas needing confirmation] + +### No Changes Needed + +- [List files reviewed but not requiring updates, with brief reason] +``` + +### Phase 6 Summary Format + +```markdown +## Documentation Update Summary + +### Changes Made + +| File | Change | Related Code | +| -------------- | ----------------- | ----------------- | +| path/to/doc.md | Brief description | link to PR/commit | + +### Rationale + +Brief explanation of why these updates were made. + +### Review Notes + +Any items reviewers should pay special attention to. 
+``` + +## Behavioral Guidelines + +### Conservative by Default + +- When uncertain whether to document something, flag it for human review +- Prefer smaller, focused updates over broad rewrites +- Do not "improve" documentation unrelated to the triggering code change + +### Traceability + +- Every documentation change should trace to a specific code change +- Include references to relevant commits, PRs, or issues in summaries + +### Incremental Updates + +- Update existing sections rather than creating parallel documentation +- Maintain consistency with surrounding content +- Follow the established patterns in each documentation area From 4ef5d2c8148e555388668b094e059633c5bc405a Mon Sep 17 00:00:00 2001 From: Andrew Farkas <6060305+HactarCE@users.noreply.github.com> Date: Fri, 19 Dec 2025 12:32:38 -0500 Subject: [PATCH 34/46] Fix relative line numbers in sticky headers (#45164) Closes #42586 This includes a rewrite of `calculate_relative_line_numbers()`. Now it's linear-time with respect to the number of rows displayed, instead of linear time with respect to the number of rows displayed _plus_ the distance to the base row. 
Release Notes: - Improved performance when using relative line numbers in large files - Fixed relative line numbers not appearing in sticky headers --- crates/editor/src/editor.rs | 66 ++++++++++- crates/editor/src/editor_tests.rs | 127 ++++++++++++++++++++- crates/editor/src/element.rs | 176 ++++++++++++------------------ 3 files changed, 258 insertions(+), 111 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 6e4744335b8e9fba50a6c2c8b241607b0e05d276..d985a4d269f2eaeb3fa6056192095b7913b579b6 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -20475,7 +20475,7 @@ impl Editor { EditorSettings::get_global(cx).gutter.line_numbers } - pub fn relative_line_numbers(&self, cx: &mut App) -> RelativeLineNumbers { + pub fn relative_line_numbers(&self, cx: &App) -> RelativeLineNumbers { match ( self.use_relative_line_numbers, EditorSettings::get_global(cx).relative_line_numbers, @@ -25294,6 +25294,70 @@ impl EditorSnapshot { let digit_count = self.widest_line_number().ilog10() + 1; column_pixels(style, digit_count as usize, window) } + + /// Returns the line delta from `base` to `line` in the multibuffer, ignoring wrapped lines. + /// + /// This is positive if `base` is before `line`. + fn relative_line_delta(&self, base: DisplayRow, line: DisplayRow) -> i64 { + let point = DisplayPoint::new(line, 0).to_point(self); + self.relative_line_delta_to_point(base, point) + } + + /// Returns the line delta from `base` to `point` in the multibuffer, ignoring wrapped lines. + /// + /// This is positive if `base` is before `point`. + pub fn relative_line_delta_to_point(&self, base: DisplayRow, point: Point) -> i64 { + let base_point = DisplayPoint::new(base, 0).to_point(self); + point.row as i64 - base_point.row as i64 + } + + /// Returns the line delta from `base` to `line` in the multibuffer, counting wrapped lines. + /// + /// This is positive if `base` is before `line`. 
+ fn relative_wrapped_line_delta(&self, base: DisplayRow, line: DisplayRow) -> i64 { + let point = DisplayPoint::new(line, 0).to_point(self); + self.relative_wrapped_line_delta_to_point(base, point) + } + + /// Returns the line delta from `base` to `point` in the multibuffer, counting wrapped lines. + /// + /// This is positive if `base` is before `point`. + pub fn relative_wrapped_line_delta_to_point(&self, base: DisplayRow, point: Point) -> i64 { + let base_point = DisplayPoint::new(base, 0).to_point(self); + let wrap_snapshot = self.wrap_snapshot(); + let base_wrap_row = wrap_snapshot.make_wrap_point(base_point, Bias::Left).row(); + let wrap_row = wrap_snapshot.make_wrap_point(point, Bias::Left).row(); + wrap_row.0 as i64 - base_wrap_row.0 as i64 + } + + /// Returns the unsigned relative line number to display for each row in `rows`. + /// + /// Wrapped rows are excluded from the hashmap if `count_relative_lines` is `false`. + pub fn calculate_relative_line_numbers( + &self, + rows: &Range, + relative_to: DisplayRow, + count_wrapped_lines: bool, + ) -> HashMap { + let initial_offset = if count_wrapped_lines { + self.relative_wrapped_line_delta(relative_to, rows.start) + } else { + self.relative_line_delta(relative_to, rows.start) + }; + let display_row_infos = self + .row_infos(rows.start) + .take(rows.len()) + .enumerate() + .map(|(i, row_info)| (DisplayRow(rows.start.0 + i as u32), row_info)); + display_row_infos + .filter(|(_row, row_info)| { + row_info.buffer_row.is_some() + || (count_wrapped_lines && row_info.wrapped_buffer_row.is_some()) + }) + .enumerate() + .map(|(i, (row, _row_info))| (row, (initial_offset + i as i64).unsigned_abs() as u32)) + .collect() + } } pub fn column_pixels(style: &EditorStyle, column: usize, window: &Window) -> Pixels { diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 87674d8c507b1c294779b1f9ddba458320fc7671..613850428a8720ed37efa447a1312c262a05571a 100644 --- 
a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -36,7 +36,8 @@ use languages::markdown_lang; use languages::rust_lang; use lsp::CompletionParams; use multi_buffer::{ - IndentGuide, MultiBufferFilterMode, MultiBufferOffset, MultiBufferOffsetUtf16, PathKey, + ExcerptRange, IndentGuide, MultiBuffer, MultiBufferFilterMode, MultiBufferOffset, + MultiBufferOffsetUtf16, PathKey, }; use parking_lot::Mutex; use pretty_assertions::{assert_eq, assert_ne}; @@ -28633,6 +28634,130 @@ async fn test_sticky_scroll(cx: &mut TestAppContext) { assert_eq!(sticky_headers(10.0), vec![]); } +#[gpui::test] +fn test_relative_line_numbers(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let buffer_1 = cx.new(|cx| Buffer::local("aaaaaaaaaa\nbbb\n", cx)); + let buffer_2 = cx.new(|cx| Buffer::local("cccccccccc\nddd\n", cx)); + let buffer_3 = cx.new(|cx| Buffer::local("eee\nffffffffff\n", cx)); + + let multibuffer = cx.new(|cx| { + let mut multibuffer = MultiBuffer::new(ReadWrite); + multibuffer.push_excerpts( + buffer_1.clone(), + [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))], + cx, + ); + multibuffer.push_excerpts( + buffer_2.clone(), + [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))], + cx, + ); + multibuffer.push_excerpts( + buffer_3.clone(), + [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))], + cx, + ); + multibuffer + }); + + // wrapped contents of multibuffer: + // aaa + // aaa + // aaa + // a + // bbb + // + // ccc + // ccc + // ccc + // c + // ddd + // + // eee + // fff + // fff + // fff + // f + + let (editor, cx) = cx.add_window_view(|window, cx| build_editor(multibuffer, window, cx)); + editor.update_in(cx, |editor, window, cx| { + editor.set_wrap_width(Some(30.0.into()), cx); // every 3 characters + + // includes trailing newlines. 
+ let expected_line_numbers = [2, 6, 7, 10, 14, 15, 18, 19, 23]; + let expected_wrapped_line_numbers = [ + 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 18, 19, 20, 21, 22, 23, + ]; + + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges([ + Point::new(7, 0)..Point::new(7, 1), // second row of `ccc` + ]); + }); + + let snapshot = editor.snapshot(window, cx); + + // these are all 0-indexed + let base_display_row = DisplayRow(11); + let base_row = 3; + let wrapped_base_row = 7; + + // test not counting wrapped lines + let expected_relative_numbers = expected_line_numbers + .into_iter() + .enumerate() + .map(|(i, row)| (DisplayRow(row), i.abs_diff(base_row) as u32)) + .collect_vec(); + let actual_relative_numbers = snapshot + .calculate_relative_line_numbers( + &(DisplayRow(0)..DisplayRow(24)), + base_display_row, + false, + ) + .into_iter() + .sorted() + .collect_vec(); + assert_eq!(expected_relative_numbers, actual_relative_numbers); + // check `calculate_relative_line_numbers()` against `relative_line_delta()` for each line + for (display_row, relative_number) in expected_relative_numbers { + assert_eq!( + relative_number, + snapshot + .relative_line_delta(display_row, base_display_row) + .unsigned_abs() as u32, + ); + } + + // test counting wrapped lines + let expected_wrapped_relative_numbers = expected_wrapped_line_numbers + .into_iter() + .enumerate() + .map(|(i, row)| (DisplayRow(row), i.abs_diff(wrapped_base_row) as u32)) + .collect_vec(); + let actual_relative_numbers = snapshot + .calculate_relative_line_numbers( + &(DisplayRow(0)..DisplayRow(24)), + base_display_row, + true, + ) + .into_iter() + .sorted() + .collect_vec(); + assert_eq!(expected_wrapped_relative_numbers, actual_relative_numbers); + // check `calculate_relative_line_numbers()` against `relative_wrapped_line_delta()` for each line + for (display_row, relative_number) in expected_wrapped_relative_numbers { + assert_eq!( + relative_number, + snapshot + 
.relative_wrapped_line_delta(display_row, base_display_row) + .unsigned_abs() as u32, + ); + } + }); +} + #[gpui::test] async fn test_scroll_by_clicking_sticky_header(cx: &mut TestAppContext) { init_test(cx, |_| {}); diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 4c3b44335bcad10be4303d545a8d2ad505938098..b2e355dc5158214eabd07d519649591be8a325a8 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -66,7 +66,7 @@ use project::{ }; use settings::{ GitGutterSetting, GitHunkStyleSetting, IndentGuideBackgroundColoring, IndentGuideColoring, - Settings, + RelativeLineNumbers, Settings, }; use smallvec::{SmallVec, smallvec}; use std::{ @@ -194,8 +194,6 @@ pub struct EditorElement { style: EditorStyle, } -type DisplayRowDelta = u32; - impl EditorElement { pub(crate) const SCROLLBAR_WIDTH: Pixels = px(15.); @@ -3225,64 +3223,6 @@ impl EditorElement { .collect() } - fn calculate_relative_line_numbers( - &self, - snapshot: &EditorSnapshot, - rows: &Range, - relative_to: Option, - count_wrapped_lines: bool, - ) -> HashMap { - let mut relative_rows: HashMap = Default::default(); - let Some(relative_to) = relative_to else { - return relative_rows; - }; - - let start = rows.start.min(relative_to); - let end = rows.end.max(relative_to); - - let buffer_rows = snapshot - .row_infos(start) - .take(1 + end.minus(start) as usize) - .collect::>(); - - let head_idx = relative_to.minus(start); - let mut delta = 1; - let mut i = head_idx + 1; - let should_count_line = |row_info: &RowInfo| { - if count_wrapped_lines { - row_info.buffer_row.is_some() || row_info.wrapped_buffer_row.is_some() - } else { - row_info.buffer_row.is_some() - } - }; - while i < buffer_rows.len() as u32 { - if should_count_line(&buffer_rows[i as usize]) { - if rows.contains(&DisplayRow(i + start.0)) { - relative_rows.insert(DisplayRow(i + start.0), delta); - } - delta += 1; - } - i += 1; - } - delta = 1; - i = head_idx.min(buffer_rows.len().saturating_sub(1) as 
u32); - while i > 0 && buffer_rows[i as usize].buffer_row.is_none() && !count_wrapped_lines { - i -= 1; - } - - while i > 0 { - i -= 1; - if should_count_line(&buffer_rows[i as usize]) { - if rows.contains(&DisplayRow(i + start.0)) { - relative_rows.insert(DisplayRow(i + start.0), delta); - } - delta += 1; - } - } - - relative_rows - } - fn layout_line_numbers( &self, gutter_hitbox: Option<&Hitbox>, @@ -3292,7 +3232,7 @@ impl EditorElement { rows: Range, buffer_rows: &[RowInfo], active_rows: &BTreeMap, - newest_selection_head: Option, + relative_line_base: Option, snapshot: &EditorSnapshot, window: &mut Window, cx: &mut App, @@ -3304,32 +3244,16 @@ impl EditorElement { return Arc::default(); } - let (newest_selection_head, relative) = self.editor.update(cx, |editor, cx| { - let newest_selection_head = newest_selection_head.unwrap_or_else(|| { - let newest = editor - .selections - .newest::(&editor.display_snapshot(cx)); - SelectionLayout::new( - newest, - editor.selections.line_mode(), - editor.cursor_offset_on_selection, - editor.cursor_shape, - &snapshot.display_snapshot, - true, - true, - None, - ) - .head - }); - let relative = editor.relative_line_numbers(cx); - (newest_selection_head, relative) - }); + let relative = self.editor.read(cx).relative_line_numbers(cx); let relative_line_numbers_enabled = relative.enabled(); - let relative_to = relative_line_numbers_enabled.then(|| newest_selection_head.row()); + let relative_rows = if relative_line_numbers_enabled && let Some(base) = relative_line_base + { + snapshot.calculate_relative_line_numbers(&rows, base, relative.wrapped()) + } else { + Default::default() + }; - let relative_rows = - self.calculate_relative_line_numbers(snapshot, &rows, relative_to, relative.wrapped()); let mut line_number = String::new(); let segments = buffer_rows.iter().enumerate().flat_map(|(ix, row_info)| { let display_row = DisplayRow(rows.start.0 + ix as u32); @@ -4652,6 +4576,8 @@ impl EditorElement { gutter_hitbox: &Hitbox, 
text_hitbox: &Hitbox, style: &EditorStyle, + relative_line_numbers: RelativeLineNumbers, + relative_to: Option, window: &mut Window, cx: &mut App, ) -> Option { @@ -4681,9 +4607,21 @@ impl EditorElement { ); let line_number = show_line_numbers.then(|| { - let number = (start_point.row + 1).to_string(); + let relative_number = relative_to.and_then(|base| match relative_line_numbers { + RelativeLineNumbers::Disabled => None, + RelativeLineNumbers::Enabled => { + Some(snapshot.relative_line_delta_to_point(base, start_point)) + } + RelativeLineNumbers::Wrapped => { + Some(snapshot.relative_wrapped_line_delta_to_point(base, start_point)) + } + }); + let number = relative_number + .filter(|&delta| delta != 0) + .map(|delta| delta.unsigned_abs() as u32) + .unwrap_or(start_point.row + 1); let color = cx.theme().colors().editor_line_number; - self.shape_line_number(SharedString::from(number), color, window) + self.shape_line_number(SharedString::from(number.to_string()), color, window) }); lines.push(StickyHeaderLine::new( @@ -9436,6 +9374,28 @@ impl Element for EditorElement { window, cx, ); + + // relative rows are based on newest selection, even outside the visible area + let relative_row_base = self.editor.update(cx, |editor, cx| { + if editor.selections.count()==0 { + return None; + } + let newest = editor + .selections + .newest::(&editor.display_snapshot(cx)); + Some(SelectionLayout::new( + newest, + editor.selections.line_mode(), + editor.cursor_offset_on_selection, + editor.cursor_shape, + &snapshot.display_snapshot, + true, + true, + None, + ) + .head.row()) + }); + let mut breakpoint_rows = self.editor.update(cx, |editor, cx| { editor.active_breakpoints(start_row..end_row, window, cx) }); @@ -9453,7 +9413,7 @@ impl Element for EditorElement { start_row..end_row, &row_infos, &active_rows, - newest_selection_head, + relative_row_base, &snapshot, window, cx, @@ -9773,6 +9733,7 @@ impl Element for EditorElement { && is_singleton && 
EditorSettings::get_global(cx).sticky_scroll.enabled { + let relative = self.editor.read(cx).relative_line_numbers(cx); self.layout_sticky_headers( &snapshot, editor_width, @@ -9784,6 +9745,8 @@ impl Element for EditorElement { &gutter_hitbox, &text_hitbox, &style, + relative, + relative_row_base, window, cx, ) @@ -11631,7 +11594,7 @@ mod tests { } #[gpui::test] - fn test_shape_line_numbers(cx: &mut TestAppContext) { + fn test_layout_line_numbers(cx: &mut TestAppContext) { init_test(cx, |_| {}); let window = cx.add_window(|window, cx| { let buffer = MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx); @@ -11671,7 +11634,7 @@ mod tests { }) .collect::>(), &BTreeMap::default(), - Some(DisplayPoint::new(DisplayRow(0), 0)), + Some(DisplayRow(0)), &snapshot, window, cx, @@ -11683,10 +11646,9 @@ mod tests { let relative_rows = window .update(cx, |editor, window, cx| { let snapshot = editor.snapshot(window, cx); - element.calculate_relative_line_numbers( - &snapshot, + snapshot.calculate_relative_line_numbers( &(DisplayRow(0)..DisplayRow(6)), - Some(DisplayRow(3)), + DisplayRow(3), false, ) }) @@ -11702,10 +11664,9 @@ mod tests { let relative_rows = window .update(cx, |editor, window, cx| { let snapshot = editor.snapshot(window, cx); - element.calculate_relative_line_numbers( - &snapshot, + snapshot.calculate_relative_line_numbers( &(DisplayRow(3)..DisplayRow(6)), - Some(DisplayRow(1)), + DisplayRow(1), false, ) }) @@ -11719,10 +11680,9 @@ mod tests { let relative_rows = window .update(cx, |editor, window, cx| { let snapshot = editor.snapshot(window, cx); - element.calculate_relative_line_numbers( - &snapshot, + snapshot.calculate_relative_line_numbers( &(DisplayRow(0)..DisplayRow(3)), - Some(DisplayRow(6)), + DisplayRow(6), false, ) }) @@ -11759,7 +11719,7 @@ mod tests { }) .collect::>(), &BTreeMap::default(), - Some(DisplayPoint::new(DisplayRow(0), 0)), + Some(DisplayRow(0)), &snapshot, window, cx, @@ -11774,7 +11734,7 @@ mod tests { } #[gpui::test] - fn 
test_shape_line_numbers_wrapping(cx: &mut TestAppContext) { + fn test_layout_line_numbers_wrapping(cx: &mut TestAppContext) { init_test(cx, |_| {}); let window = cx.add_window(|window, cx| { let buffer = MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx); @@ -11819,7 +11779,7 @@ mod tests { }) .collect::>(), &BTreeMap::default(), - Some(DisplayPoint::new(DisplayRow(0), 0)), + Some(DisplayRow(0)), &snapshot, window, cx, @@ -11831,10 +11791,9 @@ mod tests { let relative_rows = window .update(cx, |editor, window, cx| { let snapshot = editor.snapshot(window, cx); - element.calculate_relative_line_numbers( - &snapshot, + snapshot.calculate_relative_line_numbers( &(DisplayRow(0)..DisplayRow(6)), - Some(DisplayRow(3)), + DisplayRow(3), true, ) }) @@ -11871,7 +11830,7 @@ mod tests { }) .collect::>(), &BTreeMap::from_iter([(DisplayRow(0), LineHighlightSpec::default())]), - Some(DisplayPoint::new(DisplayRow(0), 0)), + Some(DisplayRow(0)), &snapshot, window, cx, @@ -11886,10 +11845,9 @@ mod tests { let relative_rows = window .update(cx, |editor, window, cx| { let snapshot = editor.snapshot(window, cx); - element.calculate_relative_line_numbers( - &snapshot, + snapshot.calculate_relative_line_numbers( &(DisplayRow(0)..DisplayRow(6)), - Some(DisplayRow(3)), + DisplayRow(3), true, ) }) From b53f661515af405c339400fda585e2372b96bb1b Mon Sep 17 00:00:00 2001 From: morgankrey Date: Fri, 19 Dec 2025 11:53:39 -0600 Subject: [PATCH 35/46] docs: Fix auto docs GitHub Action (#45383) Small fixes to Droid workflow Release Notes: - N/A --------- Co-authored-by: factory-droid[bot] <138933559+factory-droid[bot]@users.noreply.github.com> --- .github/workflows/docs_automation.yml | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/.github/workflows/docs_automation.yml b/.github/workflows/docs_automation.yml index e4aa79c7fc09d6d7735ac82e2315d68b923d5323..bf0d27ba632e72eb79253c602bf6363bd5fa8e79 100644 --- a/.github/workflows/docs_automation.yml +++ 
b/.github/workflows/docs_automation.yml @@ -37,9 +37,13 @@ jobs: fetch-depth: 0 - name: Install Droid CLI + id: install-droid run: | curl -fsSL https://cli.factory.ai/install.sh | bash echo "${HOME}/.factory/bin" >> "$GITHUB_PATH" + echo "DROID_BIN=${HOME}/.factory/bin/droid" >> "$GITHUB_ENV" + # Verify installation + "${HOME}/.factory/bin/droid" --version - name: Setup Node.js (for Prettier) uses: actions/setup-node@v4 @@ -85,7 +89,7 @@ jobs: - name: "Phase 2: Explore Repository" id: phase2 run: | - droid exec \ + "$DROID_BIN" exec \ --model "$DROID_MODEL" \ --autonomy read-only \ --prompt-file .factory/prompts/docs-automation/phase2-explore.md \ @@ -99,7 +103,7 @@ jobs: id: phase3 run: | CHANGED_FILES=$(tr '\n' ' ' < /tmp/changed_files.txt) - droid exec \ + "$DROID_BIN" exec \ --model "$DROID_MODEL" \ --autonomy read-only \ --prompt-file .factory/prompts/docs-automation/phase3-analyze.md \ @@ -114,7 +118,7 @@ jobs: - name: "Phase 4: Plan Documentation Impact" id: phase4 run: | - droid exec \ + "$DROID_BIN" exec \ --model "$DROID_MODEL" \ --autonomy read-only \ --prompt-file .factory/prompts/docs-automation/phase4-plan.md \ @@ -137,7 +141,7 @@ jobs: id: phase5 if: steps.phase4.outputs.updates_required == 'true' run: | - droid exec \ + "$DROID_BIN" exec \ --model "$DROID_MODEL" \ --autonomy medium \ --prompt-file .factory/prompts/docs-automation/phase5-apply.md \ @@ -170,7 +174,7 @@ jobs: # Get git diff of docs git diff docs/src/ > /tmp/docs-diff.txt || true - droid exec \ + "$DROID_BIN" exec \ --model "$DROID_MODEL" \ --autonomy read-only \ --prompt-file .factory/prompts/docs-automation/phase6-summarize.md \ From 1edd050baf20cfde8bfc8885d632ab163b302370 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Raz=20Guzm=C3=A1n=20Macedo?= Date: Fri, 19 Dec 2025 12:09:40 -0600 Subject: [PATCH 36/46] Add script/triage_watcher.jl (#45384) Release Notes: - N/A --- script/triage_watcher.jl | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 
100644 script/triage_watcher.jl diff --git a/script/triage_watcher.jl b/script/triage_watcher.jl new file mode 100644 index 0000000000000000000000000000000000000000..905d2fdd4eb73c7bcbed304f9d4d94d0d94943f2 --- /dev/null +++ b/script/triage_watcher.jl @@ -0,0 +1,38 @@ +## Triage Watcher v0.1 +# This is a small script to watch for new issues on the Zed repository and open them in a new browser tab interactively. +# +## Installing Julia +# +# You need Julia installed on your system: +# curl -fsSL https://install.julialang.org | sh +# +## Running this script: +# 1. It only works on Macos/Linux +# Open a new Julia repl with `julia` inside the `zed` repo +# 2. Paste the following code +# 3. Whenever you close your computer, just type the Up arrow on the REPL + enter to rerun the loop again to resume +function get_issues() + entries = filter(x -> occursin("state:needs triage", x), split(read(`gh issue list -L 10`, String), '\n')) + top = findfirst.('\t', entries) .- 1 + [entries[i][begin:top[i]] for i in eachindex(entries)] +end + +nums = get_issues(); +while true + new_nums = get_issues() + # Open each new issue in a new browser tab + for issue_num in setdiff(new_nums, nums) + url = "https://github.com/zed-industries/zed/issues/" * issue_num + println("\nOpening $url") + open_tab = `/Applications/Google\ Chrome.app/Contents/MacOS/Google\ Chrome $url` + try + sound_file = "/Users/mrg/Downloads/mario_coin_sound.mp3" + run(`afplay -v 0.02 $sound_file`) + finally + end + run(open_tab) + end + nums = new_nums + print("🧘🏼") + sleep(60) +end From 22916311cd2b7d98d89d1ba72b5a567373472e2d Mon Sep 17 00:00:00 2001 From: morgankrey Date: Fri, 19 Dec 2025 12:12:14 -0600 Subject: [PATCH 37/46] ci: Fix Factory CLI installation URL (#45386) Change from cli.factory.ai/install.sh to app.factory.ai/cli per official Factory documentation. 
Release Notes: - N/A Co-authored-by: factory-droid[bot] <138933559+factory-droid[bot]@users.noreply.github.com> --- .github/workflows/docs_automation.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docs_automation.yml b/.github/workflows/docs_automation.yml index bf0d27ba632e72eb79253c602bf6363bd5fa8e79..a59022e6f641fa5a351c29240fc6bdcc560228f5 100644 --- a/.github/workflows/docs_automation.yml +++ b/.github/workflows/docs_automation.yml @@ -39,7 +39,7 @@ jobs: - name: Install Droid CLI id: install-droid run: | - curl -fsSL https://cli.factory.ai/install.sh | bash + curl -fsSL https://app.factory.ai/cli | sh echo "${HOME}/.factory/bin" >> "$GITHUB_PATH" echo "DROID_BIN=${HOME}/.factory/bin/droid" >> "$GITHUB_ENV" # Verify installation From 1bc3fa81543f6159a5f607a9b01c83c2ae5c309a Mon Sep 17 00:00:00 2001 From: Ichimura Tomoo Date: Sat, 20 Dec 2025 03:18:20 +0900 Subject: [PATCH 38/46] Correct UTF-16 saving and add heuristic encoding detection (#45243) This commit fixes an issue where saving UTF-16 files resulted in UTF-8 bytes due to `encoding_rs` default behavior. It also introduces a heuristic to detect BOM-less UTF-16 and binary files. Changes: - Manually implement UTF-16LE/BE encoding during file save to avoid implicit UTF-8 conversion. - Add `analyze_byte_content` to guess UTF-16LE/BE or Binary based on null byte distribution. - Prevent loading binary files as text by returning an error when binary content is detected. Special thanks to @CrazyboyQCD for pointing out the `encoding_rs` behavior and providing the fix, and to @ConradIrwin for the suggestion on the detection heuristic. Closes #14654 Release Notes: - (nightly only) Fixed an issue where saving files with UTF-16 encoding incorrectly wrote them as UTF-8. Also improved detection for binary files and BOM-less UTF-16. 
--- crates/language/src/buffer.rs | 18 +- crates/worktree/src/worktree.rs | 140 +++++++++--- crates/worktree/src/worktree_tests.rs | 306 +++++++++++++++++--------- 3 files changed, 331 insertions(+), 133 deletions(-) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 99e0c8d4ebdad709eea0e9ab6dbdf9d889d54ec5..5f46340b41a876443f1d12724450d2d8b30f9b33 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1490,19 +1490,23 @@ impl Buffer { let (tx, rx) = futures::channel::oneshot::channel(); let prev_version = self.text.version(); self.reload_task = Some(cx.spawn(async move |this, cx| { - let Some((new_mtime, new_text)) = this.update(cx, |this, cx| { + let Some((new_mtime, load_bytes_task, encoding)) = this.update(cx, |this, cx| { let file = this.file.as_ref()?.as_local()?; - - Some((file.disk_state().mtime(), file.load(cx))) + Some(( + file.disk_state().mtime(), + file.load_bytes(cx), + this.encoding, + )) })? else { return Ok(()); }; - let new_text = new_text.await?; - let diff = this - .update(cx, |this, cx| this.diff(new_text.clone(), cx))? - .await; + let bytes = load_bytes_task.await?; + let (cow, _encoding_used, _has_errors) = encoding.decode(&bytes); + let new_text = cow.into_owned(); + + let diff = this.update(cx, |this, cx| this.diff(new_text, cx))?.await; this.update(cx, |this, cx| { if this.version() == diff.base_version { this.finalize_last_transaction(); diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 7145bccd514fbb5d6093efda765a826162c91260..f5f632e65d71b683d1a491b1fc9e9a612f5c24a5 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -1361,7 +1361,7 @@ impl LocalWorktree { } let content = fs.load_bytes(&abs_path).await?; - let (text, encoding, has_bom) = decode_byte(content); + let (text, encoding, has_bom) = decode_byte(content)?; let worktree = this.upgrade().context("worktree was dropped")?; let file = match entry.await? 
{ @@ -1489,25 +1489,12 @@ impl LocalWorktree { let fs = fs.clone(); let abs_path = abs_path.clone(); async move { - let bom_bytes = if has_bom { - if encoding == encoding_rs::UTF_16LE { - vec![0xFF, 0xFE] - } else if encoding == encoding_rs::UTF_16BE { - vec![0xFE, 0xFF] - } else if encoding == encoding_rs::UTF_8 { - vec![0xEF, 0xBB, 0xBF] - } else { - vec![] - } - } else { - vec![] - }; - // For UTF-8, use the optimized `fs.save` which writes Rope chunks directly to disk // without allocating a contiguous string. if encoding == encoding_rs::UTF_8 && !has_bom { return fs.save(&abs_path, &text, line_ending).await; } + // For legacy encodings (e.g. Shift-JIS), we fall back to converting the entire Rope // to a String/Bytes in memory before writing. // @@ -1520,13 +1507,45 @@ impl LocalWorktree { LineEnding::Windows => text_string.replace('\n', "\r\n"), }; - let (cow, _, _) = encoding.encode(&normalized_text); - let bytes = if !bom_bytes.is_empty() { - let mut bytes = bom_bytes; - bytes.extend_from_slice(&cow); - bytes.into() + // Create the byte vector manually for UTF-16 encodings because encoding_rs encodes to UTF-8 by default (per WHATWG standards), + // which is not what we want for saving files. + let bytes = if encoding == encoding_rs::UTF_16BE { + let mut data = Vec::with_capacity(normalized_text.len() * 2 + 2); + if has_bom { + data.extend_from_slice(&[0xFE, 0xFF]); // BOM + } + let utf16be_bytes = + normalized_text.encode_utf16().flat_map(|u| u.to_be_bytes()); + data.extend(utf16be_bytes); + data.into() + } else if encoding == encoding_rs::UTF_16LE { + let mut data = Vec::with_capacity(normalized_text.len() * 2 + 2); + if has_bom { + data.extend_from_slice(&[0xFF, 0xFE]); // BOM + } + let utf16le_bytes = + normalized_text.encode_utf16().flat_map(|u| u.to_le_bytes()); + data.extend(utf16le_bytes); + data.into() } else { - cow + // For other encodings (Shift-JIS, UTF-8 with BOM, etc.), delegate to encoding_rs. 
+ let bom_bytes = if has_bom { + if encoding == encoding_rs::UTF_8 { + vec![0xEF, 0xBB, 0xBF] + } else { + vec![] + } + } else { + vec![] + }; + let (cow, _, _) = encoding.encode(&normalized_text); + if !bom_bytes.is_empty() { + let mut bytes = bom_bytes; + bytes.extend_from_slice(&cow); + bytes.into() + } else { + cow + } }; fs.write(&abs_path, &bytes).await @@ -5842,11 +5861,28 @@ impl fs::Watcher for NullWatcher { } } -fn decode_byte(bytes: Vec) -> (String, &'static Encoding, bool) { +fn decode_byte(bytes: Vec) -> anyhow::Result<(String, &'static Encoding, bool)> { // check BOM if let Some((encoding, _bom_len)) = Encoding::for_bom(&bytes) { let (cow, _) = encoding.decode_with_bom_removal(&bytes); - return (cow.into_owned(), encoding, true); + return Ok((cow.into_owned(), encoding, true)); + } + + match analyze_byte_content(&bytes) { + ByteContent::Utf16Le => { + let encoding = encoding_rs::UTF_16LE; + let (cow, _, _) = encoding.decode(&bytes); + return Ok((cow.into_owned(), encoding, false)); + } + ByteContent::Utf16Be => { + let encoding = encoding_rs::UTF_16BE; + let (cow, _, _) = encoding.decode(&bytes); + return Ok((cow.into_owned(), encoding, false)); + } + ByteContent::Binary => { + anyhow::bail!("Binary files are not supported"); + } + ByteContent::Unknown => {} } fn detect_encoding(bytes: Vec) -> (String, &'static Encoding) { @@ -5867,14 +5903,66 @@ fn decode_byte(bytes: Vec) -> (String, &'static Encoding, bool) { // displaying raw escape sequences instead of the correct characters. if text.contains('\x1b') { let (s, enc) = detect_encoding(text.into_bytes()); - (s, enc, false) + Ok((s, enc, false)) } else { - (text, encoding_rs::UTF_8, false) + Ok((text, encoding_rs::UTF_8, false)) } } Err(e) => { let (s, enc) = detect_encoding(e.into_bytes()); - (s, enc, false) + Ok((s, enc, false)) } } } + +#[derive(PartialEq)] +enum ByteContent { + Utf16Le, + Utf16Be, + Binary, + Unknown, +} +// Heuristic check using null byte distribution. 
+// NOTE: This relies on the presence of ASCII characters (which become `0x00` in UTF-16). +// Files consisting purely of non-ASCII characters (like Japanese) may not be detected here +// and will result in `Unknown`. +fn analyze_byte_content(bytes: &[u8]) -> ByteContent { + if bytes.len() < 2 { + return ByteContent::Unknown; + } + + let check_len = bytes.len().min(1024); + let sample = &bytes[..check_len]; + + if !sample.contains(&0) { + return ByteContent::Unknown; + } + + let mut even_nulls = 0; + let mut odd_nulls = 0; + + for (i, &byte) in sample.iter().enumerate() { + if byte == 0 { + if i % 2 == 0 { + even_nulls += 1; + } else { + odd_nulls += 1; + } + } + } + + let total_nulls = even_nulls + odd_nulls; + if total_nulls < check_len / 10 { + return ByteContent::Unknown; + } + + if even_nulls > odd_nulls * 4 { + return ByteContent::Utf16Be; + } + + if odd_nulls > even_nulls * 4 { + return ByteContent::Utf16Le; + } + + ByteContent::Binary +} diff --git a/crates/worktree/src/worktree_tests.rs b/crates/worktree/src/worktree_tests.rs index 094a6d52ea4168752578eab06cea511a57e65c10..45d39710c6ea825aded4d29f447124ee4c2ecb33 100644 --- a/crates/worktree/src/worktree_tests.rs +++ b/crates/worktree/src/worktree_tests.rs @@ -1,5 +1,5 @@ use crate::{Entry, EntryKind, Event, PathChange, Worktree, WorktreeModelHandle}; -use anyhow::{Context as _, Result}; +use anyhow::Result; use encoding_rs; use fs::{FakeFs, Fs, RealFs, RemoveOptions}; use git::{DOT_GIT, GITIGNORE, REPO_EXCLUDE}; @@ -2568,71 +2568,87 @@ fn init_test(cx: &mut gpui::TestAppContext) { #[gpui::test] async fn test_load_file_encoding(cx: &mut TestAppContext) { init_test(cx); - let test_cases: Vec<(&str, &[u8], &str)> = vec![ - ("utf8.txt", "こんにちは".as_bytes(), "こんにちは"), // "こんにちは" is Japanese "Hello" - ( - "sjis.txt", - &[0x82, 0xb1, 0x82, 0xf1, 0x82, 0xc9, 0x82, 0xbf, 0x82, 0xcd], - "こんにちは", - ), - ( - "eucjp.txt", - &[0xa4, 0xb3, 0xa4, 0xf3, 0xa4, 0xcb, 0xa4, 0xc1, 0xa4, 0xcf], - "こんにちは", - ), - ( - 
"iso2022jp.txt", - &[ + + struct TestCase { + name: &'static str, + bytes: Vec, + expected_text: &'static str, + } + + // --- Success Cases --- + let success_cases = vec![ + TestCase { + name: "utf8.txt", + bytes: "こんにちは".as_bytes().to_vec(), + expected_text: "こんにちは", + }, + TestCase { + name: "sjis.txt", + bytes: vec![0x82, 0xb1, 0x82, 0xf1, 0x82, 0xc9, 0x82, 0xbf, 0x82, 0xcd], + expected_text: "こんにちは", + }, + TestCase { + name: "eucjp.txt", + bytes: vec![0xa4, 0xb3, 0xa4, 0xf3, 0xa4, 0xcb, 0xa4, 0xc1, 0xa4, 0xcf], + expected_text: "こんにちは", + }, + TestCase { + name: "iso2022jp.txt", + bytes: vec![ 0x1b, 0x24, 0x42, 0x24, 0x33, 0x24, 0x73, 0x24, 0x4b, 0x24, 0x41, 0x24, 0x4f, 0x1b, 0x28, 0x42, ], - "こんにちは", - ), - // Western Europe (Windows-1252) - // "Café" -> 0xE9 is 'é' in Windows-1252 (it is typically 0xC3 0xA9 in UTF-8) - ("win1252.txt", &[0x43, 0x61, 0x66, 0xe9], "Café"), - // Chinese Simplified (GBK) - // Note: We use a slightly longer string here because short byte sequences can be ambiguous - // in multi-byte encodings. Providing more context helps the heuristic detector guess correctly. 
- // Text: "今天天气不错" (Today's weather is not bad / nice) - // Bytes: - // 今: BD F1 - // 天: CC EC - // 天: CC EC - // 气: C6 F8 - // 不: B2 BB - // 错: B4 ED - ( - "gbk.txt", - &[ + expected_text: "こんにちは", + }, + TestCase { + name: "win1252.txt", + bytes: vec![0x43, 0x61, 0x66, 0xe9], + expected_text: "Café", + }, + TestCase { + name: "gbk.txt", + bytes: vec![ 0xbd, 0xf1, 0xcc, 0xec, 0xcc, 0xec, 0xc6, 0xf8, 0xb2, 0xbb, 0xb4, 0xed, ], - "今天天气不错", - ), - ( - "utf16le_bom.txt", - &[ + expected_text: "今天天气不错", + }, + // UTF-16LE with BOM + TestCase { + name: "utf16le_bom.txt", + bytes: vec![ 0xFF, 0xFE, // BOM - 0x53, 0x30, // こ - 0x93, 0x30, // ん - 0x6B, 0x30, // に - 0x61, 0x30, // ち - 0x6F, 0x30, // は + 0x53, 0x30, 0x93, 0x30, 0x6B, 0x30, 0x61, 0x30, 0x6F, 0x30, ], - "こんにちは", - ), - ( - "utf8_bom.txt", - &[ - 0xEF, 0xBB, 0xBF, // UTF-8 BOM - 0xE3, 0x81, 0x93, // こ - 0xE3, 0x82, 0x93, // ん - 0xE3, 0x81, 0xAB, // に - 0xE3, 0x81, 0xA1, // ち - 0xE3, 0x81, 0xAF, // は + expected_text: "こんにちは", + }, + // UTF-16BE with BOM + TestCase { + name: "utf16be_bom.txt", + bytes: vec![ + 0xFE, 0xFF, // BOM + 0x30, 0x53, 0x30, 0x93, 0x30, 0x6B, 0x30, 0x61, 0x30, 0x6F, ], - "こんにちは", - ), + expected_text: "こんにちは", + }, + // UTF-16LE without BOM (ASCII only) + // This relies on the "null byte heuristic" we implemented. 
+ // "ABC" -> 41 00 42 00 43 00 + TestCase { + name: "utf16le_ascii_no_bom.txt", + bytes: vec![0x41, 0x00, 0x42, 0x00, 0x43, 0x00], + expected_text: "ABC", + }, + ]; + + // --- Failure Cases --- + let failure_cases = vec![ + // Binary File (Should be detected by heuristic and return Error) + // Contains random bytes and mixed nulls that don't match UTF-16 patterns + TestCase { + name: "binary.bin", + bytes: vec![0x00, 0xFF, 0x12, 0x00, 0x99, 0x88, 0x77, 0x66, 0x00], + expected_text: "", // Not used + }, ]; let root_path = if cfg!(windows) { @@ -2642,15 +2658,11 @@ async fn test_load_file_encoding(cx: &mut TestAppContext) { }; let fs = FakeFs::new(cx.background_executor.clone()); + fs.create_dir(root_path).await.unwrap(); - let mut files_json = serde_json::Map::new(); - for (name, _, _) in &test_cases { - files_json.insert(name.to_string(), serde_json::Value::String("".to_string())); - } - - for (name, bytes, _) in &test_cases { - let path = root_path.join(name); - fs.write(&path, bytes).await.unwrap(); + for case in success_cases.iter().chain(failure_cases.iter()) { + let path = root_path.join(case.name); + fs.write(&path, &case.bytes).await.unwrap(); } let tree = Worktree::local( @@ -2667,34 +2679,54 @@ async fn test_load_file_encoding(cx: &mut TestAppContext) { cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) .await; - for (name, _, expected) in test_cases { - let loaded = tree - .update(cx, |tree, cx| tree.load_file(rel_path(name), cx)) - .await - .with_context(|| format!("Failed to load {}", name)) - .unwrap(); + let rel_path = |name: &str| { + RelPath::new(&Path::new(name), PathStyle::local()) + .unwrap() + .into_arc() + }; + // Run Success Tests + for case in success_cases { + let loaded = tree + .update(cx, |tree, cx| tree.load_file(&rel_path(case.name), cx)) + .await; + if let Err(e) = &loaded { + panic!("Failed to load success case '{}': {:?}", case.name, e); + } + let loaded = loaded.unwrap(); assert_eq!( - loaded.text, expected, + 
loaded.text, case.expected_text, "Encoding mismatch for file: {}", - name + case.name ); } + + // Run Failure Tests + for case in failure_cases { + let loaded = tree + .update(cx, |tree, cx| tree.load_file(&rel_path(case.name), cx)) + .await; + assert!( + loaded.is_err(), + "Failure case '{}' unexpectedly succeeded! It should have been detected as binary.", + case.name + ); + let err_msg = loaded.unwrap_err().to_string(); + println!("Got expected error for {}: {}", case.name, err_msg); + } } #[gpui::test] async fn test_write_file_encoding(cx: &mut gpui::TestAppContext) { init_test(cx); let fs = FakeFs::new(cx.executor()); + let root_path = if cfg!(windows) { Path::new("C:\\root") } else { Path::new("/root") }; fs.create_dir(root_path).await.unwrap(); - let file_path = root_path.join("test.txt"); - - fs.insert_file(&file_path, "initial".into()).await; let worktree = Worktree::local( root_path, @@ -2707,33 +2739,107 @@ async fn test_write_file_encoding(cx: &mut gpui::TestAppContext) { .await .unwrap(); - let path: Arc = Path::new("test.txt").into(); - let rel_path = RelPath::new(&path, PathStyle::local()).unwrap().into_arc(); + // Define test case structure + struct TestCase { + name: &'static str, + text: &'static str, + encoding: &'static encoding_rs::Encoding, + has_bom: bool, + expected_bytes: Vec, + } - let text = text::Rope::from("こんにちは"); + let cases = vec![ + // Shift_JIS with Japanese + TestCase { + name: "Shift_JIS with Japanese", + text: "こんにちは", + encoding: encoding_rs::SHIFT_JIS, + has_bom: false, + expected_bytes: vec![0x82, 0xb1, 0x82, 0xf1, 0x82, 0xc9, 0x82, 0xbf, 0x82, 0xcd], + }, + // UTF-8 No BOM + TestCase { + name: "UTF-8 No BOM", + text: "AB", + encoding: encoding_rs::UTF_8, + has_bom: false, + expected_bytes: vec![0x41, 0x42], + }, + // UTF-8 with BOM + TestCase { + name: "UTF-8 with BOM", + text: "AB", + encoding: encoding_rs::UTF_8, + has_bom: true, + expected_bytes: vec![0xEF, 0xBB, 0xBF, 0x41, 0x42], + }, + // UTF-16LE No BOM with Japanese 
+ // NOTE: This passes thanks to the manual encoding fix implemented in `write_file`. + TestCase { + name: "UTF-16LE No BOM with Japanese", + text: "こんにちは", + encoding: encoding_rs::UTF_16LE, + has_bom: false, + expected_bytes: vec![0x53, 0x30, 0x93, 0x30, 0x6b, 0x30, 0x61, 0x30, 0x6f, 0x30], + }, + // UTF-16LE with BOM + TestCase { + name: "UTF-16LE with BOM", + text: "A", + encoding: encoding_rs::UTF_16LE, + has_bom: true, + expected_bytes: vec![0xFF, 0xFE, 0x41, 0x00], + }, + // UTF-16BE No BOM with Japanese + // NOTE: This passes thanks to the manual encoding fix. + TestCase { + name: "UTF-16BE No BOM with Japanese", + text: "こんにちは", + encoding: encoding_rs::UTF_16BE, + has_bom: false, + expected_bytes: vec![0x30, 0x53, 0x30, 0x93, 0x30, 0x6b, 0x30, 0x61, 0x30, 0x6f], + }, + // UTF-16BE with BOM + TestCase { + name: "UTF-16BE with BOM", + text: "A", + encoding: encoding_rs::UTF_16BE, + has_bom: true, + expected_bytes: vec![0xFE, 0xFF, 0x00, 0x41], + }, + ]; - let task = worktree.update(cx, |wt, cx| { - wt.write_file( - rel_path, - text, - text::LineEnding::Unix, - encoding_rs::SHIFT_JIS, - false, - cx, - ) - }); + for (i, case) in cases.into_iter().enumerate() { + let file_name = format!("test_{}.txt", i); + let path: Arc = Path::new(&file_name).into(); + let file_path = root_path.join(&file_name); - task.await.unwrap(); + fs.insert_file(&file_path, "".into()).await; - let bytes = fs.load_bytes(&file_path).await.unwrap(); + let rel_path = RelPath::new(&path, PathStyle::local()).unwrap().into_arc(); + let text = text::Rope::from(case.text); - let expected_bytes = vec![ - 0x82, 0xb1, // こ - 0x82, 0xf1, // ん - 0x82, 0xc9, // に - 0x82, 0xbf, // ち - 0x82, 0xcd, // は - ]; + let task = worktree.update(cx, |wt, cx| { + wt.write_file( + rel_path, + text, + text::LineEnding::Unix, + case.encoding, + case.has_bom, + cx, + ) + }); + + if let Err(e) = task.await { + panic!("Unexpected error in case '{}': {:?}", case.name, e); + } + + let bytes = 
fs.load_bytes(&file_path).await.unwrap(); - assert_eq!(bytes, expected_bytes, "Should be saved as Shift-JIS"); + assert_eq!( + bytes, case.expected_bytes, + "case '{}' mismatch. Expected {:?}, but got {:?}", + case.name, case.expected_bytes, bytes + ); + } } From e61f9081d44a99f4f04259c7b8efdcd1cc8c0ca7 Mon Sep 17 00:00:00 2001 From: morgankrey Date: Fri, 19 Dec 2025 12:24:23 -0600 Subject: [PATCH 39/46] docs: More droid docs debugging (#45388) Path issues Release Notes: - N/A --------- Co-authored-by: factory-droid[bot] <138933559+factory-droid[bot]@users.noreply.github.com> --- .github/workflows/docs_automation.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docs_automation.yml b/.github/workflows/docs_automation.yml index a59022e6f641fa5a351c29240fc6bdcc560228f5..ba0236fe91d3efe5975777e3df31eedf884e224d 100644 --- a/.github/workflows/docs_automation.yml +++ b/.github/workflows/docs_automation.yml @@ -40,10 +40,10 @@ jobs: id: install-droid run: | curl -fsSL https://app.factory.ai/cli | sh - echo "${HOME}/.factory/bin" >> "$GITHUB_PATH" - echo "DROID_BIN=${HOME}/.factory/bin/droid" >> "$GITHUB_ENV" + echo "${HOME}/.local/bin" >> "$GITHUB_PATH" + echo "DROID_BIN=${HOME}/.local/bin/droid" >> "$GITHUB_ENV" # Verify installation - "${HOME}/.factory/bin/droid" --version + "${HOME}/.local/bin/droid" --version - name: Setup Node.js (for Prettier) uses: actions/setup-node@v4 From 07db88a327baa2904d6c3ed9bc17cda7ff1f0e86 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Fri, 19 Dec 2025 14:08:49 -0500 Subject: [PATCH 40/46] git: Optimistically stage hunks when staging a file, take 2 (#45278) Relanding #43434 with an improved approach. 
Release Notes: - N/A --------- Co-authored-by: Ramon <55579979+van-sprundel@users.noreply.github.com> --- crates/buffer_diff/src/buffer_diff.rs | 28 ++++ crates/project/src/git_store.rs | 229 +++++++++++++++++--------- crates/project/src/project_tests.rs | 143 ++++++++++++++++ 3 files changed, 321 insertions(+), 79 deletions(-) diff --git a/crates/buffer_diff/src/buffer_diff.rs b/crates/buffer_diff/src/buffer_diff.rs index 22525096d3cbca456aa114b5acc9b4239b570dda..111b18233b6500de7de4485c8a408eec1e8cb822 100644 --- a/crates/buffer_diff/src/buffer_diff.rs +++ b/crates/buffer_diff/src/buffer_diff.rs @@ -1159,6 +1159,34 @@ impl BufferDiff { new_index_text } + pub fn stage_or_unstage_all_hunks( + &mut self, + stage: bool, + buffer: &text::BufferSnapshot, + file_exists: bool, + cx: &mut Context, + ) { + let hunks = self + .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer, cx) + .collect::>(); + let Some(secondary) = self.secondary_diff.as_ref() else { + return; + }; + self.inner.stage_or_unstage_hunks_impl( + &secondary.read(cx).inner, + stage, + &hunks, + buffer, + file_exists, + ); + if let Some((first, last)) = hunks.first().zip(hunks.last()) { + let changed_range = first.buffer_range.start..last.buffer_range.end; + cx.emit(BufferDiffEvent::DiffChanged { + changed_range: Some(changed_range), + }); + } + } + pub fn range_to_hunk_range( &self, range: Range, diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 85ff38ab67f873d8197729de9577075951676597..d490a2cfdc843a1984bf3f719692af2dcf39aaaa 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -4205,74 +4205,29 @@ impl Repository { entries: Vec, cx: &mut Context, ) -> Task> { - if entries.is_empty() { - return Task::ready(Ok(())); - } - let id = self.id; - let save_tasks = self.save_buffers(&entries, cx); - let paths = entries - .iter() - .map(|p| p.as_unix_str()) - .collect::>() - .join(" "); - let status = format!("git add {paths}"); - let 
job_key = GitJobKey::WriteIndex(entries.clone()); - - self.spawn_job_with_tracking( - entries.clone(), - pending_op::GitStatus::Staged, - cx, - async move |this, cx| { - for save_task in save_tasks { - save_task.await?; - } - - this.update(cx, |this, _| { - this.send_keyed_job( - Some(job_key), - Some(status.into()), - move |git_repo, _cx| async move { - match git_repo { - RepositoryState::Local(LocalRepositoryState { - backend, - environment, - .. - }) => backend.stage_paths(entries, environment.clone()).await, - RepositoryState::Remote(RemoteRepositoryState { - project_id, - client, - }) => { - client - .request(proto::Stage { - project_id: project_id.0, - repository_id: id.to_proto(), - paths: entries - .into_iter() - .map(|repo_path| repo_path.to_proto()) - .collect(), - }) - .await - .context("sending stage request")?; - - Ok(()) - } - } - }, - ) - })? - .await? - }, - ) + self.stage_or_unstage_entries(true, entries, cx) } pub fn unstage_entries( &mut self, entries: Vec, cx: &mut Context, + ) -> Task> { + self.stage_or_unstage_entries(false, entries, cx) + } + + fn stage_or_unstage_entries( + &mut self, + stage: bool, + entries: Vec, + cx: &mut Context, ) -> Task> { if entries.is_empty() { return Task::ready(Ok(())); } + let Some(git_store) = self.git_store.upgrade() else { + return Task::ready(Ok(())); + }; let id = self.id; let save_tasks = self.save_buffers(&entries, cx); let paths = entries @@ -4280,48 +4235,164 @@ impl Repository { .map(|p| p.as_unix_str()) .collect::>() .join(" "); - let status = format!("git reset {paths}"); + let status = if stage { + format!("git add {paths}") + } else { + format!("git reset {paths}") + }; let job_key = GitJobKey::WriteIndex(entries.clone()); self.spawn_job_with_tracking( entries.clone(), - pending_op::GitStatus::Unstaged, + if stage { + pending_op::GitStatus::Staged + } else { + pending_op::GitStatus::Unstaged + }, cx, async move |this, cx| { for save_task in save_tasks { save_task.await?; } - this.update(cx, |this, 
_| { + this.update(cx, |this, cx| { + let weak_this = cx.weak_entity(); this.send_keyed_job( Some(job_key), Some(status.into()), - move |git_repo, _cx| async move { - match git_repo { + move |git_repo, mut cx| async move { + let hunk_staging_operation_counts = weak_this + .update(&mut cx, |this, cx| { + let mut hunk_staging_operation_counts = HashMap::default(); + for path in &entries { + let Some(project_path) = + this.repo_path_to_project_path(path, cx) + else { + continue; + }; + let Some(buffer) = git_store + .read(cx) + .buffer_store + .read(cx) + .get_by_path(&project_path) + else { + continue; + }; + let Some(diff_state) = git_store + .read(cx) + .diffs + .get(&buffer.read(cx).remote_id()) + .cloned() + else { + continue; + }; + let Some(uncommitted_diff) = + diff_state.read(cx).uncommitted_diff.as_ref().and_then( + |uncommitted_diff| uncommitted_diff.upgrade(), + ) + else { + continue; + }; + let buffer_snapshot = buffer.read(cx).text_snapshot(); + let file_exists = buffer + .read(cx) + .file() + .is_some_and(|file| file.disk_state().exists()); + let hunk_staging_operation_count = + diff_state.update(cx, |diff_state, cx| { + uncommitted_diff.update( + cx, + |uncommitted_diff, cx| { + uncommitted_diff + .stage_or_unstage_all_hunks( + stage, + &buffer_snapshot, + file_exists, + cx, + ); + }, + ); + + diff_state.hunk_staging_operation_count += 1; + diff_state.hunk_staging_operation_count + }); + hunk_staging_operation_counts.insert( + diff_state.downgrade(), + hunk_staging_operation_count, + ); + } + hunk_staging_operation_counts + }) + .unwrap_or_default(); + + let result = match git_repo { RepositoryState::Local(LocalRepositoryState { backend, environment, .. 
- }) => backend.unstage_paths(entries, environment).await, + }) => { + if stage { + backend.stage_paths(entries, environment.clone()).await + } else { + backend.unstage_paths(entries, environment.clone()).await + } + } RepositoryState::Remote(RemoteRepositoryState { project_id, client, }) => { - client - .request(proto::Unstage { - project_id: project_id.0, - repository_id: id.to_proto(), - paths: entries - .into_iter() - .map(|repo_path| repo_path.to_proto()) - .collect(), - }) - .await - .context("sending unstage request")?; - - Ok(()) + if stage { + client + .request(proto::Stage { + project_id: project_id.0, + repository_id: id.to_proto(), + paths: entries + .into_iter() + .map(|repo_path| repo_path.to_proto()) + .collect(), + }) + .await + .context("sending stage request") + .map(|_| ()) + } else { + client + .request(proto::Unstage { + project_id: project_id.0, + repository_id: id.to_proto(), + paths: entries + .into_iter() + .map(|repo_path| repo_path.to_proto()) + .collect(), + }) + .await + .context("sending unstage request") + .map(|_| ()) + } } + }; + + for (diff_state, hunk_staging_operation_count) in + hunk_staging_operation_counts + { + diff_state + .update(&mut cx, |diff_state, cx| { + if result.is_ok() { + diff_state.hunk_staging_operation_count_as_of_write = + hunk_staging_operation_count; + } else if let Some(uncommitted_diff) = + &diff_state.uncommitted_diff + { + uncommitted_diff + .update(cx, |uncommitted_diff, cx| { + uncommitted_diff.clear_pending_hunks(cx); + }) + .ok(); + } + }) + .ok(); } + + result }, ) })? 
@@ -4347,7 +4418,7 @@ impl Repository { } }) .collect(); - self.stage_entries(to_stage, cx) + self.stage_or_unstage_entries(true, to_stage, cx) } pub fn unstage_all(&mut self, cx: &mut Context) -> Task> { @@ -4367,7 +4438,7 @@ impl Repository { } }) .collect(); - self.unstage_entries(to_unstage, cx) + self.stage_or_unstage_entries(false, to_unstage, cx) } pub fn stash_all(&mut self, cx: &mut Context) -> Task> { diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 4cebc72073cfda1bf07f028b1aff9fa7410c527d..921ca16323b300af3a02cc2e7f38b1cc6305615c 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -10922,3 +10922,146 @@ async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) { }); assert!(active_repo_path.is_none()); } + +#[gpui::test] +async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) { + use DiffHunkSecondaryStatus::*; + init_test(cx); + + let committed_contents = r#" + one + two + three + "# + .unindent(); + let file_contents = r#" + one + TWO + three + "# + .unindent(); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/dir"), + json!({ + ".git": {}, + "file.txt": file_contents.clone() + }), + ) + .await; + + fs.set_head_and_index_for_repo( + path!("/dir/.git").as_ref(), + &[("file.txt", committed_contents.clone())], + ); + + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/dir/file.txt"), cx) + }) + .await + .unwrap(); + let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); + let uncommitted_diff = project + .update(cx, |project, cx| { + project.open_uncommitted_diff(buffer.clone(), cx) + }) + .await + .unwrap(); + + // The hunk is initially unstaged. 
+ uncommitted_diff.read_with(cx, |diff, cx| { + assert_hunks( + diff.hunks(&snapshot, cx), + &snapshot, + &diff.base_text_string().unwrap(), + &[( + 1..2, + "two\n", + "TWO\n", + DiffHunkStatus::modified(HasSecondaryHunk), + )], + ); + }); + + // Get the repository handle. + let repo = project.read_with(cx, |project, cx| { + project.repositories(cx).values().next().unwrap().clone() + }); + + // Stage the file. + let stage_task = repo.update(cx, |repo, cx| { + repo.stage_entries(vec![repo_path("file.txt")], cx) + }); + + // Run a few ticks to let the job start and mark hunks as pending, + // but don't run_until_parked which would complete the entire operation. + for _ in 0..10 { + cx.executor().tick(); + let [hunk]: [_; 1] = uncommitted_diff + .read_with(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::>()) + .try_into() + .unwrap(); + match hunk.secondary_status { + HasSecondaryHunk => {} + SecondaryHunkRemovalPending => break, + NoSecondaryHunk => panic!("hunk was not optimistically staged"), + _ => panic!("unexpected hunk state"), + } + } + uncommitted_diff.read_with(cx, |diff, cx| { + assert_hunks( + diff.hunks(&snapshot, cx), + &snapshot, + &diff.base_text_string().unwrap(), + &[( + 1..2, + "two\n", + "TWO\n", + DiffHunkStatus::modified(SecondaryHunkRemovalPending), + )], + ); + }); + + // Let the staging complete. + stage_task.await.unwrap(); + cx.run_until_parked(); + + // The hunk is now fully staged. + uncommitted_diff.read_with(cx, |diff, cx| { + assert_hunks( + diff.hunks(&snapshot, cx), + &snapshot, + &diff.base_text_string().unwrap(), + &[( + 1..2, + "two\n", + "TWO\n", + DiffHunkStatus::modified(NoSecondaryHunk), + )], + ); + }); + + // Simulate a commit by updating HEAD to match the current file contents. + // The FakeGitRepository's commit method is a no-op, so we need to manually + // update HEAD to simulate the commit completing. 
+ fs.set_head_for_repo( + path!("/dir/.git").as_ref(), + &[("file.txt", file_contents.clone())], + "newhead", + ); + cx.run_until_parked(); + + // After committing, there are no more hunks. + uncommitted_diff.read_with(cx, |diff, cx| { + assert_hunks( + diff.hunks(&snapshot, cx), + &snapshot, + &diff.base_text_string().unwrap(), + &[] as &[(Range, &str, &str, DiffHunkStatus)], + ); + }); +} From bb2f037407289605081f44a8d956b11763d034f0 Mon Sep 17 00:00:00 2001 From: morgankrey Date: Fri, 19 Dec 2025 13:20:05 -0600 Subject: [PATCH 41/46] docs: Droid doesn't know its own commands (#45391) Correctly uses droid commands in auto docs actions Release Notes: - N/A --------- Co-authored-by: factory-droid[bot] <138933559+factory-droid[bot]@users.noreply.github.com> --- .github/workflows/docs_automation.yml | 82 +++++++++++++-------------- 1 file changed, 40 insertions(+), 42 deletions(-) diff --git a/.github/workflows/docs_automation.yml b/.github/workflows/docs_automation.yml index ba0236fe91d3efe5975777e3df31eedf884e224d..4c1551c0a91d243896aceef161d85472e8b07021 100644 --- a/.github/workflows/docs_automation.yml +++ b/.github/workflows/docs_automation.yml @@ -85,71 +85,74 @@ jobs: # Phase 0: Guardrails are loaded via AGENTS.md in each phase - # Phase 2: Explore Repository (Read-Only) + # Phase 2: Explore Repository (Read-Only - default) - name: "Phase 2: Explore Repository" id: phase2 run: | "$DROID_BIN" exec \ - --model "$DROID_MODEL" \ - --autonomy read-only \ - --prompt-file .factory/prompts/docs-automation/phase2-explore.md \ - --output /tmp/phase2-output.json \ - --format json + -m "$DROID_MODEL" \ + -f .factory/prompts/docs-automation/phase2-explore.md \ + > /tmp/phase2-output.txt 2>&1 || true echo "Repository exploration complete" - cat /tmp/phase2-output.json + cat /tmp/phase2-output.txt - # Phase 3: Analyze Changes (Read-Only) + # Phase 3: Analyze Changes (Read-Only - default) - name: "Phase 3: Analyze Changes" id: phase3 run: | CHANGED_FILES=$(tr '\n' ' ' < 
/tmp/changed_files.txt) + echo "Analyzing changes in: $CHANGED_FILES" + + # Build prompt with context + cat > /tmp/phase3-prompt.md << 'EOF' + $(cat .factory/prompts/docs-automation/phase3-analyze.md) + + ## Context + + ### Changed Files + $CHANGED_FILES + + ### Phase 2 Output + $(cat /tmp/phase2-output.txt) + EOF + "$DROID_BIN" exec \ - --model "$DROID_MODEL" \ - --autonomy read-only \ - --prompt-file .factory/prompts/docs-automation/phase3-analyze.md \ - --context "Changed files: $CHANGED_FILES" \ - --context-file /tmp/phase2-output.json \ - --output /tmp/phase3-output.md \ - --format markdown + -m "$DROID_MODEL" \ + "$(cat .factory/prompts/docs-automation/phase3-analyze.md) + + Changed files: $CHANGED_FILES" \ + > /tmp/phase3-output.md 2>&1 || true echo "Change analysis complete" cat /tmp/phase3-output.md - # Phase 4: Plan Documentation Impact (Read-Only) + # Phase 4: Plan Documentation Impact (Read-Only - default) - name: "Phase 4: Plan Documentation Impact" id: phase4 run: | "$DROID_BIN" exec \ - --model "$DROID_MODEL" \ - --autonomy read-only \ - --prompt-file .factory/prompts/docs-automation/phase4-plan.md \ - --context-file /tmp/phase3-output.md \ - --context-file docs/AGENTS.md \ - --output /tmp/phase4-plan.md \ - --format markdown + -m "$DROID_MODEL" \ + -f .factory/prompts/docs-automation/phase4-plan.md \ + > /tmp/phase4-plan.md 2>&1 || true echo "Documentation plan complete" cat /tmp/phase4-plan.md # Check if updates are required - if grep -q "Documentation Updates Required: No" /tmp/phase4-plan.md; then + if grep -q "NO_UPDATES_REQUIRED" /tmp/phase4-plan.md; then echo "updates_required=false" >> "$GITHUB_OUTPUT" else echo "updates_required=true" >> "$GITHUB_OUTPUT" fi - # Phase 5: Apply Plan (Write-Enabled) + # Phase 5: Apply Plan (Write-Enabled with --auto medium) - name: "Phase 5: Apply Documentation Plan" id: phase5 if: steps.phase4.outputs.updates_required == 'true' run: | "$DROID_BIN" exec \ - --model "$DROID_MODEL" \ - --autonomy medium \ - 
--prompt-file .factory/prompts/docs-automation/phase5-apply.md \ - --context-file /tmp/phase4-plan.md \ - --context-file docs/AGENTS.md \ - --context-file docs/.rules \ - --output /tmp/phase5-report.md \ - --format markdown + -m "$DROID_MODEL" \ + --auto medium \ + -f .factory/prompts/docs-automation/phase5-apply.md \ + > /tmp/phase5-report.md 2>&1 || true echo "Documentation updates applied" cat /tmp/phase5-report.md @@ -166,7 +169,7 @@ jobs: echo "Prettier formatting complete" - # Phase 6: Summarize Changes + # Phase 6: Summarize Changes (Read-Only - default) - name: "Phase 6: Summarize Changes" id: phase6 if: steps.phase4.outputs.updates_required == 'true' @@ -175,14 +178,9 @@ jobs: git diff docs/src/ > /tmp/docs-diff.txt || true "$DROID_BIN" exec \ - --model "$DROID_MODEL" \ - --autonomy read-only \ - --prompt-file .factory/prompts/docs-automation/phase6-summarize.md \ - --context-file /tmp/phase5-report.md \ - --context-file /tmp/phase3-output.md \ - --context "Trigger SHA: ${{ steps.changed.outputs.sha }}" \ - --output /tmp/phase6-summary.md \ - --format markdown + -m "$DROID_MODEL" \ + -f .factory/prompts/docs-automation/phase6-summarize.md \ + > /tmp/phase6-summary.md 2>&1 || true echo "Summary generated" cat /tmp/phase6-summary.md From 56646e6bc32a1c66eb6972edfe59ad65f64af3a7 Mon Sep 17 00:00:00 2001 From: Michael Benfield Date: Fri, 19 Dec 2025 11:37:57 -0800 Subject: [PATCH 42/46] Inline assistant: Don't scroll up too high (#45171) In the case of large vertical_scroll_margin, we could scroll up such that the assistant was out of view. Now, keep it no lower than the center of the editor. 
Closes #18058 Release Notes: - N/A --- crates/agent_ui/src/inline_assistant.rs | 28 ++++++++++++------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/crates/agent_ui/src/inline_assistant.rs b/crates/agent_ui/src/inline_assistant.rs index 671579f9ef018b495b7993279a852595c78d3e02..b3c14c5a0ec332f66c300023759db9f09b94dc6f 100644 --- a/crates/agent_ui/src/inline_assistant.rs +++ b/crates/agent_ui/src/inline_assistant.rs @@ -1259,28 +1259,26 @@ impl InlineAssistant { let bottom = top + 1.0; (top, bottom) }); - let mut scroll_target_top = scroll_target_range.0; - let mut scroll_target_bottom = scroll_target_range.1; - - scroll_target_top -= editor.vertical_scroll_margin() as ScrollOffset; - scroll_target_bottom += editor.vertical_scroll_margin() as ScrollOffset; - let height_in_lines = editor.visible_line_count().unwrap_or(0.); + let vertical_scroll_margin = editor.vertical_scroll_margin() as ScrollOffset; + let scroll_target_top = (scroll_target_range.0 - vertical_scroll_margin) + // Don't scroll up too far in the case of a large vertical_scroll_margin. + .max(scroll_target_range.0 - height_in_lines / 2.0); + let scroll_target_bottom = (scroll_target_range.1 + vertical_scroll_margin) + // Don't scroll down past where the top would still be visible. 
+ .min(scroll_target_top + height_in_lines); + let scroll_top = editor.scroll_position(cx).y; let scroll_bottom = scroll_top + height_in_lines; if scroll_target_top < scroll_top { editor.set_scroll_position(point(0., scroll_target_top), window, cx); } else if scroll_target_bottom > scroll_bottom { - if (scroll_target_bottom - scroll_target_top) <= height_in_lines { - editor.set_scroll_position( - point(0., scroll_target_bottom - height_in_lines), - window, - cx, - ); - } else { - editor.set_scroll_position(point(0., scroll_target_top), window, cx); - } + editor.set_scroll_position( + point(0., scroll_target_bottom - height_in_lines), + window, + cx, + ); } }); } From 99224ccc758bbe0735a6ebd4f88e817e9cc8a259 Mon Sep 17 00:00:00 2001 From: morgankrey Date: Fri, 19 Dec 2025 13:43:10 -0600 Subject: [PATCH 43/46] docs: Droid needs a real model (#45393) Droid needs a specific model with a date Release Notes: - N/A --------- Co-authored-by: factory-droid[bot] <138933559+factory-droid[bot]@users.noreply.github.com> --- .github/workflows/docs_automation.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docs_automation.yml b/.github/workflows/docs_automation.yml index 4c1551c0a91d243896aceef161d85472e8b07021..7fa39168c3cfac8b79273906d2c1110a33df69f7 100644 --- a/.github/workflows/docs_automation.yml +++ b/.github/workflows/docs_automation.yml @@ -23,7 +23,7 @@ permissions: env: FACTORY_API_KEY: ${{ secrets.FACTORY_API_KEY }} - DROID_MODEL: claude-opus-4-5 + DROID_MODEL: claude-opus-4-5-20251101 jobs: docs-automation: From 8e5d33ebc6093c6fd2dbcd4dc3a5e0c35a15ec49 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Fri, 19 Dec 2025 12:59:01 -0700 Subject: [PATCH 44/46] Make prompt store fail-open when DB contains undecodable records (#45312) Release Notes - N/A --------- Co-authored-by: Zed Zippy <234243425+zed-zippy[bot]@users.noreply.github.com> --- crates/prompt_store/src/prompt_store.rs | 91 ++++++++++++++++++++++++- 1 file changed, 89 
insertions(+), 2 deletions(-) diff --git a/crates/prompt_store/src/prompt_store.rs b/crates/prompt_store/src/prompt_store.rs index 2c45410c2aa172c8a4f7118a914cacca69ea7ca8..1f63acb1965428cf3dbc6b9b5739e249c13a9c31 100644 --- a/crates/prompt_store/src/prompt_store.rs +++ b/crates/prompt_store/src/prompt_store.rs @@ -193,7 +193,15 @@ impl MetadataCache { ) -> Result { let mut cache = MetadataCache::default(); for result in db.iter(txn)? { - let (prompt_id, metadata) = result?; + // Fail-open: skip records that can't be decoded (e.g. from a different branch) + // rather than failing the entire prompt store initialization. + let Ok((prompt_id, metadata)) = result else { + log::warn!( + "Skipping unreadable prompt record in database: {:?}", + result.err() + ); + continue; + }; cache.metadata.push(metadata.clone()); cache.metadata_by_id.insert(prompt_id, metadata); } @@ -677,7 +685,86 @@ mod tests { assert_eq!( loaded_after_reset.trim(), expected_content_after_reset.trim(), - "After saving default content, load should return default" + "Content should be back to default after saving default content" + ); + } + + /// Test that the prompt store initializes successfully even when the database + /// contains records with incompatible/undecodable PromptId keys (e.g., from + /// a different branch that used a different serialization format). + /// + /// This is a regression test for the "fail-open" behavior: we should skip + /// bad records rather than failing the entire store initialization. + #[gpui::test] + async fn test_prompt_store_handles_incompatible_db_records(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + + let temp_dir = tempfile::tempdir().unwrap(); + let db_path = temp_dir.path().join("prompts-db-with-bad-records"); + std::fs::create_dir_all(&db_path).unwrap(); + + // First, create the DB and write an incompatible record directly. 
+ // We simulate a record written by a different branch that used + // `{"kind":"CommitMessage"}` instead of `{"kind":"BuiltIn", ...}`. + { + let db_env = unsafe { + heed::EnvOpenOptions::new() + .map_size(1024 * 1024 * 1024) + .max_dbs(4) + .open(&db_path) + .unwrap() + }; + + let mut txn = db_env.write_txn().unwrap(); + // Create the metadata.v2 database with raw bytes so we can write + // an incompatible key format. + let metadata_db: Database = db_env + .create_database(&mut txn, Some("metadata.v2")) + .unwrap(); + + // Write an incompatible PromptId key: `{"kind":"CommitMessage"}` + // This is the old/branch format that current code can't decode. + let bad_key = br#"{"kind":"CommitMessage"}"#; + let dummy_metadata = br#"{"id":{"kind":"CommitMessage"},"title":"Bad Record","default":false,"saved_at":"2024-01-01T00:00:00Z"}"#; + metadata_db.put(&mut txn, bad_key, dummy_metadata).unwrap(); + + // Also write a valid record to ensure we can still read good data. + let good_key = br#"{"kind":"User","uuid":"550e8400-e29b-41d4-a716-446655440000"}"#; + let good_metadata = br#"{"id":{"kind":"User","uuid":"550e8400-e29b-41d4-a716-446655440000"},"title":"Good Record","default":false,"saved_at":"2024-01-01T00:00:00Z"}"#; + metadata_db.put(&mut txn, good_key, good_metadata).unwrap(); + + txn.commit().unwrap(); + } + + // Now try to create a PromptStore from this DB. + // With fail-open behavior, this should succeed and skip the bad record. + // Without fail-open, this would return an error. + let store_result = cx.update(|cx| PromptStore::new(db_path, cx)).await; + + assert!( + store_result.is_ok(), + "PromptStore should initialize successfully even with incompatible DB records. \ + Got error: {:?}", + store_result.err() + ); + + let store = cx.new(|_cx| store_result.unwrap()); + + // Verify the good record was loaded. 
+ let good_id = PromptId::User { + uuid: UserPromptId("550e8400-e29b-41d4-a716-446655440000".parse().unwrap()), + }; + let metadata = store.read_with(cx, |store, _| store.metadata(good_id)); + assert!( + metadata.is_some(), + "Valid records should still be loaded after skipping bad ones" + ); + assert_eq!( + metadata + .as_ref() + .and_then(|m| m.title.as_ref().map(|t| t.as_ref())), + Some("Good Record"), + "Valid record should have correct title" ); } } From b091cc4d9a42773e536960ffc0f2393775158bf9 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Fri, 19 Dec 2025 13:04:41 -0700 Subject: [PATCH 45/46] Enforce 5MB per-image limit when converting images for language models (#45313) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Problem When users paste or drag large images into the agent panel, the encoded payload can exceed upstream provider limits (e.g., Anthropic's 5MB per-image limit), causing API errors. ## Solution Enforce a default 5MB limit on encoded PNG bytes in `LanguageModelImage::from_image`: 1. Apply existing Anthropic dimension limits first (1568px max in either dimension) 2. Iteratively downscale by ~15% per pass until the encoded PNG is under 5MB 3. Return `None` if the image can't be shrunk within 8 passes (fail-safe) The limit is enforced at the `LanguageModelImage` conversion layer, which is the choke point for all image ingestion paths (agent panel paste/drag, file mentions, text threads, etc.). ## Future Work The 5MB limit is a conservative default. Provider-specific limits can be introduced later by adding a `from_image_with_constraints` API. ## Testing Added a regression test that: 1. Generates a noisy 4096x4096 PNG (guaranteed >5MB) 2. Converts it via `LanguageModelImage::from_image` 3. Asserts the result is ≤5MB and was actually downscaled --- **Note:** This PR builds on #45312 (prompt store fail-open fix). Please merge that first. 
cc @rtfeldman --------- Co-authored-by: Zed Zippy <234243425+zed-zippy[bot]@users.noreply.github.com> --- crates/language_model/src/request.rs | 168 ++++++++++++++++++++++----- 1 file changed, 138 insertions(+), 30 deletions(-) diff --git a/crates/language_model/src/request.rs b/crates/language_model/src/request.rs index 5e99cca4f9d6e61672c541cb90a3a1ca7da91203..96ed0907427c305211b1484e17ab61d434781ed6 100644 --- a/crates/language_model/src/request.rs +++ b/crates/language_model/src/request.rs @@ -8,6 +8,7 @@ use gpui::{ App, AppContext as _, DevicePixels, Image, ImageFormat, ObjectFit, SharedString, Size, Task, point, px, size, }; +use image::GenericImageView as _; use image::codecs::png::PngEncoder; use serde::{Deserialize, Serialize}; use util::ResultExt; @@ -80,6 +81,16 @@ impl std::fmt::Debug for LanguageModelImage { /// Anthropic wants uploaded images to be smaller than this in both dimensions. const ANTHROPIC_SIZE_LIMIT: f32 = 1568.; +/// Default per-image hard limit (in bytes) for the encoded image payload we send upstream. +/// +/// NOTE: `LanguageModelImage.source` is base64-encoded PNG bytes (without the `data:` prefix). +/// This limit is enforced on the encoded PNG bytes *before* base64 encoding. +const DEFAULT_IMAGE_MAX_BYTES: usize = 5 * 1024 * 1024; + +/// Conservative cap on how many times we'll attempt to shrink/re-encode an image to fit +/// `DEFAULT_IMAGE_MAX_BYTES`. 
+const MAX_IMAGE_DOWNSCALE_PASSES: usize = 8; + impl LanguageModelImage { pub fn empty() -> Self { Self { @@ -112,29 +123,62 @@ impl LanguageModelImage { let height = dynamic_image.height(); let image_size = size(DevicePixels(width as i32), DevicePixels(height as i32)); - let base64_image = { - if image_size.width.0 > ANTHROPIC_SIZE_LIMIT as i32 - || image_size.height.0 > ANTHROPIC_SIZE_LIMIT as i32 - { - let new_bounds = ObjectFit::ScaleDown.get_bounds( - gpui::Bounds { - origin: point(px(0.0), px(0.0)), - size: size(px(ANTHROPIC_SIZE_LIMIT), px(ANTHROPIC_SIZE_LIMIT)), - }, - image_size, - ); - let resized_image = dynamic_image.resize( - new_bounds.size.width.into(), - new_bounds.size.height.into(), - image::imageops::FilterType::Triangle, - ); - - encode_as_base64(data, resized_image) - } else { - encode_as_base64(data, dynamic_image) + // First apply any provider-specific dimension constraints we know about (Anthropic). + let mut processed_image = if image_size.width.0 > ANTHROPIC_SIZE_LIMIT as i32 + || image_size.height.0 > ANTHROPIC_SIZE_LIMIT as i32 + { + let new_bounds = ObjectFit::ScaleDown.get_bounds( + gpui::Bounds { + origin: point(px(0.0), px(0.0)), + size: size(px(ANTHROPIC_SIZE_LIMIT), px(ANTHROPIC_SIZE_LIMIT)), + }, + image_size, + ); + dynamic_image.resize( + new_bounds.size.width.into(), + new_bounds.size.height.into(), + image::imageops::FilterType::Triangle, + ) + } else { + dynamic_image + }; + + // Then enforce a default per-image size cap on the encoded PNG bytes. + // + // We always send PNG bytes (either original PNG bytes, or re-encoded PNG) base64'd. + // The upstream provider limit we want to respect is effectively on the binary image + // payload size, so we enforce against the encoded PNG bytes before base64 encoding. 
+ let mut encoded_png = encode_png_bytes(&processed_image).log_err()?; + for _pass in 0..MAX_IMAGE_DOWNSCALE_PASSES { + if encoded_png.len() <= DEFAULT_IMAGE_MAX_BYTES { + break; } + + // Scale down geometrically to converge quickly. We don't know the final PNG size + // as a function of pixels, so we iteratively shrink. + let (w, h) = processed_image.dimensions(); + if w <= 1 || h <= 1 { + break; + } + + // Shrink by ~15% each pass (0.85). This is a compromise between speed and + // preserving image detail. + let new_w = ((w as f32) * 0.85).round().max(1.0) as u32; + let new_h = ((h as f32) * 0.85).round().max(1.0) as u32; + + processed_image = + processed_image.resize(new_w, new_h, image::imageops::FilterType::Triangle); + encoded_png = encode_png_bytes(&processed_image).log_err()?; } - .log_err()?; + + if encoded_png.len() > DEFAULT_IMAGE_MAX_BYTES { + // Still too large after multiple passes; treat as non-convertible for now. + // (Provider-specific handling can be introduced later.) + return None; + } + + // Now base64 encode the PNG bytes. + let base64_image = encode_bytes_as_base64(encoded_png.as_slice()).log_err()?; // SAFETY: The base64 encoder should not produce non-UTF8. 
let source = unsafe { String::from_utf8_unchecked(base64_image) }; @@ -164,21 +208,20 @@ impl LanguageModelImage { } } -fn encode_as_base64(data: Arc, image: image::DynamicImage) -> Result> { +fn encode_png_bytes(image: &image::DynamicImage) -> Result> { + let mut png = Vec::new(); + image.write_with_encoder(PngEncoder::new(&mut png))?; + Ok(png) +} + +fn encode_bytes_as_base64(bytes: &[u8]) -> Result> { let mut base64_image = Vec::new(); { let mut base64_encoder = EncoderWriter::new( Cursor::new(&mut base64_image), &base64::engine::general_purpose::STANDARD, ); - if data.format() == ImageFormat::Png { - base64_encoder.write_all(data.bytes())?; - } else { - let mut png = Vec::new(); - image.write_with_encoder(PngEncoder::new(&mut png))?; - - base64_encoder.write_all(png.as_slice())?; - } + base64_encoder.write_all(bytes)?; } Ok(base64_image) } @@ -417,6 +460,71 @@ pub struct LanguageModelResponseMessage { #[cfg(test)] mod tests { use super::*; + use base64::Engine as _; + use gpui::TestAppContext; + use image::ImageDecoder as _; + + fn base64_to_png_bytes(base64_png: &str) -> Vec { + base64::engine::general_purpose::STANDARD + .decode(base64_png.as_bytes()) + .expect("base64 should decode") + } + + fn png_dimensions(png_bytes: &[u8]) -> (u32, u32) { + let decoder = + image::codecs::png::PngDecoder::new(Cursor::new(png_bytes)).expect("png should decode"); + decoder.dimensions() + } + + fn make_noisy_png_bytes(width: u32, height: u32) -> Vec { + // Create an RGBA image with per-pixel variance to avoid PNG compressing too well. 
+ let mut img = image::RgbaImage::new(width, height); + for y in 0..height { + for x in 0..width { + let r = ((x ^ y) & 0xFF) as u8; + let g = ((x.wrapping_mul(31) ^ y.wrapping_mul(17)) & 0xFF) as u8; + let b = ((x.wrapping_mul(131) ^ y.wrapping_mul(7)) & 0xFF) as u8; + img.put_pixel(x, y, image::Rgba([r, g, b, 0xFF])); + } + } + + let mut out = Vec::new(); + image::DynamicImage::ImageRgba8(img) + .write_with_encoder(PngEncoder::new(&mut out)) + .expect("png encoding should succeed"); + out + } + + #[gpui::test] + async fn test_from_image_downscales_to_default_5mb_limit(cx: &mut TestAppContext) { + // Pick a size that reliably produces a PNG > 5MB when filled with noise. + // If this fails (image is too small), bump dimensions. + let original_png = make_noisy_png_bytes(4096, 4096); + assert!( + original_png.len() > DEFAULT_IMAGE_MAX_BYTES, + "precondition failed: noisy PNG must exceed DEFAULT_IMAGE_MAX_BYTES" + ); + + let image = gpui::Image::from_bytes(ImageFormat::Png, original_png); + let lm_image = cx + .update(|cx| LanguageModelImage::from_image(Arc::new(image), cx)) + .await + .expect("image conversion should succeed"); + + let encoded_png = base64_to_png_bytes(lm_image.source.as_ref()); + assert!( + encoded_png.len() <= DEFAULT_IMAGE_MAX_BYTES, + "expected encoded PNG <= DEFAULT_IMAGE_MAX_BYTES, got {} bytes", + encoded_png.len() + ); + + // Ensure we actually downscaled in pixels (not just re-encoded). 
+ let (w, h) = png_dimensions(&encoded_png); + assert!( + w < 4096 || h < 4096, + "expected image to be downscaled in at least one dimension; got {w}x{h}" + ); + } #[test] fn test_language_model_tool_result_content_deserialization() { From 71f4dc2481746c850e9a624582b27a6ff1dcae73 Mon Sep 17 00:00:00 2001 From: morgankrey Date: Fri, 19 Dec 2025 14:10:16 -0600 Subject: [PATCH 46/46] docs: Stash local changes before branch checkout in droid auto docs CLI (#45395) Stashes local changes before branch checkout in droid auto docs CLI Release Notes: - N/A --------- Co-authored-by: factory-droid[bot] <138933559+factory-droid[bot]@users.noreply.github.com> --- .github/workflows/docs_automation.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/docs_automation.yml b/.github/workflows/docs_automation.yml index 7fa39168c3cfac8b79273906d2c1110a33df69f7..5b72bc4051f34ae890bc291281f8797312f5d52d 100644 --- a/.github/workflows/docs_automation.yml +++ b/.github/workflows/docs_automation.yml @@ -202,6 +202,9 @@ jobs: # Daily batch branch - one branch per day, multiple commits accumulate BRANCH_NAME="docs/auto-update-$(date +%Y-%m-%d)" + # Stash local changes from phase 5 + git stash push -m "docs-automation-changes" -- docs/src/ + # Check if branch already exists on remote if git ls-remote --exit-code --heads origin "$BRANCH_NAME" > /dev/null 2>&1; then echo "Branch $BRANCH_NAME exists, checking out and updating..." @@ -212,6 +215,9 @@ jobs: git checkout -b "$BRANCH_NAME" fi + # Apply stashed changes + git stash pop || true + # Stage and commit git add docs/src/ SUMMARY=$(head -50 < /tmp/phase6-summary.md)