Detailed changes
@@ -54,6 +54,10 @@ jobs:
- name: autofix_pr::run_autofix::run_cargo_fmt
run: cargo fmt --all
shell: bash -euxo pipefail {0}
+ - name: autofix_pr::run_autofix::run_cargo_fix
+ if: ${{ inputs.run_clippy }}
+ run: cargo fix --workspace --release --all-targets --all-features --allow-dirty --allow-staged
+ shell: bash -euxo pipefail {0}
- name: autofix_pr::run_autofix::run_clippy_fix
if: ${{ inputs.run_clippy }}
run: cargo clippy --workspace --release --all-targets --all-features --fix --allow-dirty --allow-staged
@@ -1,11 +1,11 @@
name: Documentation Automation
on:
- push:
- branches: [main]
- paths:
- - 'crates/**'
- - 'extensions/**'
+ # push:
+ # branches: [main]
+ # paths:
+ # - 'crates/**'
+ # - 'extensions/**'
workflow_dispatch:
inputs:
pr_number:
@@ -23,13 +23,13 @@ permissions:
env:
FACTORY_API_KEY: ${{ secrets.FACTORY_API_KEY }}
- DROID_MODEL: claude-opus-4-5
+ DROID_MODEL: claude-opus-4-5-20251101
jobs:
docs-automation:
runs-on: ubuntu-latest
timeout-minutes: 30
-
+
steps:
- name: Checkout repository
uses: actions/checkout@v4
@@ -37,9 +37,13 @@ jobs:
fetch-depth: 0
- name: Install Droid CLI
+ id: install-droid
run: |
- curl -fsSL https://cli.factory.ai/install.sh | bash
- echo "${HOME}/.factory/bin" >> "$GITHUB_PATH"
+ curl -fsSL https://app.factory.ai/cli | sh
+ echo "${HOME}/.local/bin" >> "$GITHUB_PATH"
+ echo "DROID_BIN=${HOME}/.local/bin/droid" >> "$GITHUB_ENV"
+ # Verify installation
+ "${HOME}/.local/bin/droid" --version
- name: Setup Node.js (for Prettier)
uses: actions/setup-node@v4
@@ -73,7 +77,7 @@ jobs:
echo "ref=$SHA" >> "$GITHUB_OUTPUT"
git diff --name-only "${SHA}^" "$SHA" > /tmp/changed_files.txt || git diff --name-only HEAD~1 HEAD > /tmp/changed_files.txt
fi
-
+
echo "Changed files:"
cat /tmp/changed_files.txt
env:
@@ -81,71 +85,74 @@ jobs:
# Phase 0: Guardrails are loaded via AGENTS.md in each phase
- # Phase 2: Explore Repository (Read-Only)
+ # Phase 2: Explore Repository (Read-Only - default)
- name: "Phase 2: Explore Repository"
id: phase2
run: |
- droid exec \
- --model "$DROID_MODEL" \
- --autonomy read-only \
- --prompt-file .factory/prompts/docs-automation/phase2-explore.md \
- --output /tmp/phase2-output.json \
- --format json
+ "$DROID_BIN" exec \
+ -m "$DROID_MODEL" \
+ -f .factory/prompts/docs-automation/phase2-explore.md \
+ > /tmp/phase2-output.txt 2>&1 || true
echo "Repository exploration complete"
- cat /tmp/phase2-output.json
+ cat /tmp/phase2-output.txt
- # Phase 3: Analyze Changes (Read-Only)
+ # Phase 3: Analyze Changes (Read-Only - default)
- name: "Phase 3: Analyze Changes"
id: phase3
run: |
CHANGED_FILES=$(tr '\n' ' ' < /tmp/changed_files.txt)
- droid exec \
- --model "$DROID_MODEL" \
- --autonomy read-only \
- --prompt-file .factory/prompts/docs-automation/phase3-analyze.md \
- --context "Changed files: $CHANGED_FILES" \
- --context-file /tmp/phase2-output.json \
- --output /tmp/phase3-output.md \
- --format markdown
+ echo "Analyzing changes in: $CHANGED_FILES"
+
+ # Build the prompt with context (unquoted heredoc delimiter so the
+ # $(...) substitutions and $CHANGED_FILES actually expand)
+ cat > /tmp/phase3-prompt.md << EOF
+ $(cat .factory/prompts/docs-automation/phase3-analyze.md)
+
+ ## Context
+
+ ### Changed Files
+ $CHANGED_FILES
+
+ ### Phase 2 Output
+ $(cat /tmp/phase2-output.txt)
+ EOF
+
+ "$DROID_BIN" exec \
+ -m "$DROID_MODEL" \
+ -f /tmp/phase3-prompt.md \
+ > /tmp/phase3-output.md 2>&1 || true
echo "Change analysis complete"
cat /tmp/phase3-output.md
- # Phase 4: Plan Documentation Impact (Read-Only)
+ # Phase 4: Plan Documentation Impact (Read-Only - default)
- name: "Phase 4: Plan Documentation Impact"
id: phase4
run: |
- droid exec \
- --model "$DROID_MODEL" \
- --autonomy read-only \
- --prompt-file .factory/prompts/docs-automation/phase4-plan.md \
- --context-file /tmp/phase3-output.md \
- --context-file docs/AGENTS.md \
- --output /tmp/phase4-plan.md \
- --format markdown
+ "$DROID_BIN" exec \
+ -m "$DROID_MODEL" \
+ -f .factory/prompts/docs-automation/phase4-plan.md \
+ > /tmp/phase4-plan.md 2>&1 || true
echo "Documentation plan complete"
cat /tmp/phase4-plan.md
-
+
# Check if updates are required
- if grep -q "Documentation Updates Required: No" /tmp/phase4-plan.md; then
+ if grep -q "NO_UPDATES_REQUIRED" /tmp/phase4-plan.md; then
echo "updates_required=false" >> "$GITHUB_OUTPUT"
else
echo "updates_required=true" >> "$GITHUB_OUTPUT"
fi
- # Phase 5: Apply Plan (Write-Enabled)
+ # Phase 5: Apply Plan (Write-Enabled with --auto medium)
- name: "Phase 5: Apply Documentation Plan"
id: phase5
if: steps.phase4.outputs.updates_required == 'true'
run: |
- droid exec \
- --model "$DROID_MODEL" \
- --autonomy medium \
- --prompt-file .factory/prompts/docs-automation/phase5-apply.md \
- --context-file /tmp/phase4-plan.md \
- --context-file docs/AGENTS.md \
- --context-file docs/.rules \
- --output /tmp/phase5-report.md \
- --format markdown
+ "$DROID_BIN" exec \
+ -m "$DROID_MODEL" \
+ --auto medium \
+ -f .factory/prompts/docs-automation/phase5-apply.md \
+ > /tmp/phase5-report.md 2>&1 || true
echo "Documentation updates applied"
cat /tmp/phase5-report.md
@@ -156,29 +163,24 @@ jobs:
run: |
echo "Formatting documentation with Prettier..."
cd docs && prettier --write src/
-
+
echo "Verifying Prettier formatting passes..."
cd docs && prettier --check src/
-
+
echo "Prettier formatting complete"
- # Phase 6: Summarize Changes
+ # Phase 6: Summarize Changes (Read-Only - default)
- name: "Phase 6: Summarize Changes"
id: phase6
if: steps.phase4.outputs.updates_required == 'true'
run: |
# Get git diff of docs
git diff docs/src/ > /tmp/docs-diff.txt || true
-
- droid exec \
- --model "$DROID_MODEL" \
- --autonomy read-only \
- --prompt-file .factory/prompts/docs-automation/phase6-summarize.md \
- --context-file /tmp/phase5-report.md \
- --context-file /tmp/phase3-output.md \
- --context "Trigger SHA: ${{ steps.changed.outputs.sha }}" \
- --output /tmp/phase6-summary.md \
- --format markdown
+
+ "$DROID_BIN" exec \
+ -m "$DROID_MODEL" \
+ -f .factory/prompts/docs-automation/phase6-summarize.md \
+ > /tmp/phase6-summary.md 2>&1 || true
echo "Summary generated"
cat /tmp/phase6-summary.md
@@ -192,14 +194,17 @@ jobs:
echo "No documentation changes detected"
exit 0
fi
-
+
# Configure git
git config user.name "factory-droid[bot]"
git config user.email "138933559+factory-droid[bot]@users.noreply.github.com"
-
+
# Daily batch branch - one branch per day, multiple commits accumulate
BRANCH_NAME="docs/auto-update-$(date +%Y-%m-%d)"
-
+
+ # Stash local changes from phase 5
+ git stash push -m "docs-automation-changes" -- docs/src/
+
# Check if branch already exists on remote
if git ls-remote --exit-code --heads origin "$BRANCH_NAME" > /dev/null 2>&1; then
echo "Branch $BRANCH_NAME exists, checking out and updating..."
@@ -209,7 +214,10 @@ jobs:
echo "Creating new branch $BRANCH_NAME..."
git checkout -b "$BRANCH_NAME"
fi
-
+
+ # Apply stashed changes
+ git stash pop || true
+
# Stage and commit
git add docs/src/
SUMMARY=$(head -50 < /tmp/phase6-summary.md)
@@ -220,13 +228,13 @@ jobs:
Triggered by: ${{ steps.changed.outputs.source }} ${{ steps.changed.outputs.ref }}
Co-authored-by: factory-droid[bot] <138933559+factory-droid[bot]@users.noreply.github.com>"
-
+
# Push
git push -u origin "$BRANCH_NAME"
-
+
# Check if PR already exists for this branch
EXISTING_PR=$(gh pr list --head "$BRANCH_NAME" --json number --jq '.[0].number' || echo "")
-
+
if [ -n "$EXISTING_PR" ]; then
echo "PR #$EXISTING_PR already exists for branch $BRANCH_NAME, updated with new commit"
else
@@ -246,7 +254,7 @@ jobs:
run: |
echo "## Documentation Automation Summary" >> "$GITHUB_STEP_SUMMARY"
echo "" >> "$GITHUB_STEP_SUMMARY"
-
+
if [ "${{ steps.phase4.outputs.updates_required }}" == "false" ]; then
echo "No documentation updates required for this change." >> "$GITHUB_STEP_SUMMARY"
elif [ -f /tmp/phase6-summary.md ]; then
@@ -3525,6 +3525,33 @@ dependencies = [
"theme",
]
+[[package]]
+name = "component_preview"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "client",
+ "collections",
+ "component",
+ "db",
+ "fs",
+ "gpui",
+ "language",
+ "log",
+ "node_runtime",
+ "notifications",
+ "project",
+ "release_channel",
+ "reqwest_client",
+ "session",
+ "settings",
+ "theme",
+ "ui",
+ "ui_input",
+ "uuid",
+ "workspace",
+]
+
[[package]]
name = "compression-codecs"
version = "0.4.31"
@@ -8618,6 +8645,7 @@ dependencies = [
"extension",
"gpui",
"language",
+ "lsp",
"paths",
"project",
"schemars",
@@ -20643,6 +20671,7 @@ dependencies = [
"collections",
"command_palette",
"component",
+ "component_preview",
"copilot",
"crashes",
"dap",
@@ -20748,7 +20777,6 @@ dependencies = [
"tree-sitter-md",
"tree-sitter-rust",
"ui",
- "ui_input",
"ui_prompt",
"url",
"urlencoding",
@@ -39,6 +39,7 @@ members = [
"crates/command_palette",
"crates/command_palette_hooks",
"crates/component",
+ "crates/component_preview",
"crates/context_server",
"crates/copilot",
"crates/crashes",
@@ -275,6 +276,7 @@ collections = { path = "crates/collections", version = "0.1.0" }
command_palette = { path = "crates/command_palette" }
command_palette_hooks = { path = "crates/command_palette_hooks" }
component = { path = "crates/component" }
+component_preview = { path = "crates/component_preview" }
context_server = { path = "crates/context_server" }
copilot = { path = "crates/copilot" }
crashes = { path = "crates/crashes" }
@@ -11,6 +11,7 @@ use language::language_settings::FormatOnSave;
pub use mention::*;
use project::lsp_store::{FormatTrigger, LspFormatTarget};
use serde::{Deserialize, Serialize};
+use serde_json::to_string_pretty;
use settings::Settings as _;
use task::{Shell, ShellBuilder};
pub use terminal::*;
@@ -1992,37 +1993,42 @@ impl AcpThread {
fn update_last_checkpoint(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
let git_store = self.project.read(cx).git_store().clone();
- let old_checkpoint = if let Some((_, message)) = self.last_user_message() {
- if let Some(checkpoint) = message.checkpoint.as_ref() {
- checkpoint.git_checkpoint.clone()
- } else {
- return Task::ready(Ok(()));
- }
- } else {
+ let Some((_, message)) = self.last_user_message() else {
+ return Task::ready(Ok(()));
+ };
+ let Some(user_message_id) = message.id.clone() else {
+ return Task::ready(Ok(()));
+ };
+ let Some(checkpoint) = message.checkpoint.as_ref() else {
return Task::ready(Ok(()));
};
+ let old_checkpoint = checkpoint.git_checkpoint.clone();
let new_checkpoint = git_store.update(cx, |git, cx| git.checkpoint(cx));
cx.spawn(async move |this, cx| {
- let new_checkpoint = new_checkpoint
+ let Some(new_checkpoint) = new_checkpoint
.await
.context("failed to get new checkpoint")
- .log_err();
- if let Some(new_checkpoint) = new_checkpoint {
- let equal = git_store
- .update(cx, |git, cx| {
- git.compare_checkpoints(old_checkpoint.clone(), new_checkpoint, cx)
- })?
- .await
- .unwrap_or(true);
- this.update(cx, |this, cx| {
- let (ix, message) = this.last_user_message().context("no user message")?;
- let checkpoint = message.checkpoint.as_mut().context("no checkpoint")?;
- checkpoint.show = !equal;
- cx.emit(AcpThreadEvent::EntryUpdated(ix));
- anyhow::Ok(())
- })??;
- }
+ .log_err()
+ else {
+ return Ok(());
+ };
+
+ let equal = git_store
+ .update(cx, |git, cx| {
+ git.compare_checkpoints(old_checkpoint.clone(), new_checkpoint, cx)
+ })?
+ .await
+ .unwrap_or(true);
+
+ this.update(cx, |this, cx| {
+ if let Some((ix, message)) = this.user_message_mut(&user_message_id) {
+ if let Some(checkpoint) = message.checkpoint.as_mut() {
+ checkpoint.show = !equal;
+ cx.emit(AcpThreadEvent::EntryUpdated(ix));
+ }
+ }
+ })?;
Ok(())
})
@@ -2422,8 +2428,10 @@ fn markdown_for_raw_output(
)
})),
value => Some(cx.new(|cx| {
+ let pretty_json = to_string_pretty(value).unwrap_or_else(|_| value.to_string());
+
Markdown::new(
- format!("```json\n{}\n```", value).into(),
+ format!("```json\n{}\n```", pretty_json).into(),
Some(language_registry.clone()),
None,
cx,
@@ -4066,4 +4074,67 @@ mod tests {
"Should have exactly 2 terminals (the completed ones from before checkpoint)"
);
}
+
+ /// Tests that update_last_checkpoint correctly updates the original message's checkpoint
+ /// even when a new user message is added while the async checkpoint comparison is in progress.
+ ///
+ /// This is a regression test for a bug where update_last_checkpoint would fail with
+ /// "no checkpoint" if a new user message (without a checkpoint) was added between when
+ /// update_last_checkpoint started and when its async closure ran.
+ #[gpui::test]
+ async fn test_update_last_checkpoint_with_new_message_added(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(path!("/test"), json!({".git": {}, "file.txt": "content"}))
+ .await;
+ let project = Project::test(fs.clone(), [Path::new(path!("/test"))], cx).await;
+
+ let handler_done = Arc::new(AtomicBool::new(false));
+ let handler_done_clone = handler_done.clone();
+ let connection = Rc::new(FakeAgentConnection::new().on_user_message(
+ move |_, _thread, _cx| {
+ handler_done_clone.store(true, SeqCst);
+ async move { Ok(acp::PromptResponse::new(acp::StopReason::EndTurn)) }.boxed_local()
+ },
+ ));
+
+ let thread = cx
+ .update(|cx| connection.new_thread(project, Path::new(path!("/test")), cx))
+ .await
+ .unwrap();
+
+ let send_future = thread.update(cx, |thread, cx| thread.send_raw("First message", cx));
+ let send_task = cx.background_executor.spawn(send_future);
+
+ // Tick until handler completes, then a few more to let update_last_checkpoint start
+ while !handler_done.load(SeqCst) {
+ cx.executor().tick();
+ }
+ for _ in 0..5 {
+ cx.executor().tick();
+ }
+
+ thread.update(cx, |thread, cx| {
+ thread.push_entry(
+ AgentThreadEntry::UserMessage(UserMessage {
+ id: Some(UserMessageId::new()),
+ content: ContentBlock::Empty,
+ chunks: vec!["Injected message (no checkpoint)".into()],
+ checkpoint: None,
+ indented: false,
+ }),
+ cx,
+ );
+ });
+
+ cx.run_until_parked();
+ let result = send_task.await;
+
+ assert!(
+ result.is_ok(),
+ "send should succeed even when new message added during update_last_checkpoint: {:?}",
+ result.err()
+ );
+ }
}
@@ -6,7 +6,7 @@ use futures::{FutureExt, StreamExt, channel::mpsc};
use gpui::{
App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
};
-use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
+use language::{Anchor, Buffer, BufferEvent, Point, ToPoint};
use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
use std::{cmp, ops::Range, sync::Arc};
use text::{Edit, Patch, Rope};
@@ -150,7 +150,7 @@ impl ActionLog {
if buffer
.read(cx)
.file()
- .is_some_and(|file| file.disk_state() == DiskState::Deleted)
+ .is_some_and(|file| file.disk_state().is_deleted())
{
// If the buffer had been edited by a tool, but it got
// deleted externally, we want to stop tracking it.
@@ -162,7 +162,7 @@ impl ActionLog {
if buffer
.read(cx)
.file()
- .is_some_and(|file| file.disk_state() != DiskState::Deleted)
+ .is_some_and(|file| !file.disk_state().is_deleted())
{
// If the buffer had been deleted by a tool, but it got
// resurrected externally, we want to clear the edits we
@@ -769,7 +769,7 @@ impl ActionLog {
tracked.version != buffer.version
&& buffer
.file()
- .is_some_and(|file| file.disk_state() != DiskState::Deleted)
+ .is_some_and(|file| !file.disk_state().is_deleted())
})
.map(|(buffer, _)| buffer)
}
@@ -2489,9 +2489,11 @@ impl AcpThreadView {
.border_color(self.tool_card_border_color(cx))
.child(input_output_header("Raw Input:".into()))
.children(tool_call.raw_input_markdown.clone().map(|input| {
- self.render_markdown(
- input,
- default_markdown_style(false, false, window, cx),
+ div().id(("tool-call-raw-input-markdown", entry_ix)).child(
+ self.render_markdown(
+ input,
+ default_markdown_style(false, false, window, cx),
+ ),
)
}))
.child(input_output_header("Output:".into())),
@@ -2499,15 +2501,17 @@ impl AcpThreadView {
})
.children(tool_call.content.iter().enumerate().map(
|(content_ix, content)| {
- div().child(self.render_tool_call_content(
- entry_ix,
- content,
- content_ix,
- tool_call,
- use_card_layout,
- window,
- cx,
- ))
+ div().id(("tool-call-output", entry_ix)).child(
+ self.render_tool_call_content(
+ entry_ix,
+ content,
+ content_ix,
+ tool_call,
+ use_card_layout,
+ window,
+ cx,
+ ),
+ )
},
))
.into_any(),
@@ -17,7 +17,7 @@ use gpui::{
Global, SharedString, Subscription, Task, WeakEntity, Window, prelude::*,
};
-use language::{Buffer, Capability, DiskState, OffsetRangeExt, Point};
+use language::{Buffer, Capability, OffsetRangeExt, Point};
use multi_buffer::PathKey;
use project::{Project, ProjectItem, ProjectPath};
use settings::{Settings, SettingsStore};
@@ -192,7 +192,7 @@ impl AgentDiffPane {
&& buffer
.read(cx)
.file()
- .is_some_and(|file| file.disk_state() == DiskState::Deleted)
+ .is_some_and(|file| file.disk_state().is_deleted())
{
editor.fold_buffer(snapshot.text.remote_id(), cx)
}
@@ -1259,28 +1259,26 @@ impl InlineAssistant {
let bottom = top + 1.0;
(top, bottom)
});
- let mut scroll_target_top = scroll_target_range.0;
- let mut scroll_target_bottom = scroll_target_range.1;
-
- scroll_target_top -= editor.vertical_scroll_margin() as ScrollOffset;
- scroll_target_bottom += editor.vertical_scroll_margin() as ScrollOffset;
-
let height_in_lines = editor.visible_line_count().unwrap_or(0.);
+ let vertical_scroll_margin = editor.vertical_scroll_margin() as ScrollOffset;
+ let scroll_target_top = (scroll_target_range.0 - vertical_scroll_margin)
+ // Don't scroll up too far in the case of a large vertical_scroll_margin.
+ .max(scroll_target_range.0 - height_in_lines / 2.0);
+ let scroll_target_bottom = (scroll_target_range.1 + vertical_scroll_margin)
+ // Don't scroll down past where the top would still be visible.
+ .min(scroll_target_top + height_in_lines);
+
let scroll_top = editor.scroll_position(cx).y;
let scroll_bottom = scroll_top + height_in_lines;
if scroll_target_top < scroll_top {
editor.set_scroll_position(point(0., scroll_target_top), window, cx);
} else if scroll_target_bottom > scroll_bottom {
- if (scroll_target_bottom - scroll_target_top) <= height_in_lines {
- editor.set_scroll_position(
- point(0., scroll_target_bottom - height_in_lines),
- window,
- cx,
- );
- } else {
- editor.set_scroll_position(point(0., scroll_target_top), window, cx);
- }
+ editor.set_scroll_position(
+ point(0., scroll_target_bottom - height_in_lines),
+ window,
+ cx,
+ );
}
});
}
@@ -27,7 +27,7 @@ impl RenderOnce for HoldForDefault {
PlatformStyle::platform(),
None,
Some(TextSize::Default.rems(cx).into()),
- true,
+ false,
)))
.child(div().map(|this| {
if self.is_default {
@@ -1159,6 +1159,34 @@ impl BufferDiff {
new_index_text
}
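+ /// Stage or unstage every hunk in this diff at once, emitting a single
+ /// `DiffChanged` event that spans the full changed range.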
+ pub fn stage_or_unstage_all_hunks(
+ &mut self,
+ stage: bool,
+ buffer: &text::BufferSnapshot,
+ file_exists: bool,
+ cx: &mut Context<Self>,
+ ) {
+ let hunks = self
+ .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer, cx)
+ .collect::<Vec<_>>();
+ let Some(secondary) = self.secondary_diff.as_ref() else {
+ return;
+ };
+ self.inner.stage_or_unstage_hunks_impl(
+ &secondary.read(cx).inner,
+ stage,
+ &hunks,
+ buffer,
+ file_exists,
+ );
+ if let Some((first, last)) = hunks.first().zip(hunks.last()) {
+ let changed_range = first.buffer_range.start..last.buffer_range.end;
+ cx.emit(BufferDiffEvent::DiffChanged {
+ changed_range: Some(changed_range),
+ });
+ }
+ }
+
pub fn range_to_hunk_range(
&self,
range: Range<Anchor>,
@@ -0,0 +1,45 @@
+[package]
+name = "component_preview"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/component_preview.rs"
+
+[features]
+default = []
+preview = []
+test-support = ["db/test-support"]
+
+[dependencies]
+anyhow.workspace = true
+client.workspace = true
+collections.workspace = true
+component.workspace = true
+db.workspace = true
+fs.workspace = true
+gpui.workspace = true
+language.workspace = true
+log.workspace = true
+node_runtime.workspace = true
+notifications.workspace = true
+project.workspace = true
+release_channel.workspace = true
+reqwest_client.workspace = true
+session.workspace = true
+settings.workspace = true
+theme.workspace = true
+ui.workspace = true
+ui_input.workspace = true
+uuid.workspace = true
+workspace.workspace = true
+
+[[example]]
+name = "component_preview"
+path = "examples/component_preview.rs"
+required-features = ["preview"]
@@ -0,0 +1 @@
+LICENSE-GPL
@@ -0,0 +1,18 @@
+//! Component Preview Example
+//!
+//! Run with: `cargo run -p component_preview --example component_preview --features="preview"`
+//!
+//! To use this in other projects, add the following to your `Cargo.toml`:
+//!
+//! ```toml
+//! [dependencies]
+//! component_preview = { path = "../component_preview", features = ["preview"] }
+//!
+//! [[example]]
+//! name = "component_preview"
+//! path = "examples/component_preview.rs"
+//! ```
+
+fn main() {
+ component_preview::run_component_preview();
+}
@@ -1,7 +1,4 @@
-//! # Component Preview
-//!
-//! A view for exploring Zed components.
-
+mod component_preview_example;
mod persistence;
use client::UserStore;
@@ -11,18 +8,21 @@ use gpui::{
App, Entity, EventEmitter, FocusHandle, Focusable, Task, WeakEntity, Window, list, prelude::*,
};
use gpui::{ListState, ScrollHandle, ScrollStrategy, UniformListScrollHandle};
-use languages::LanguageRegistry;
+use language::LanguageRegistry;
use notifications::status_toast::{StatusToast, ToastIcon};
use persistence::COMPONENT_PREVIEW_DB;
use project::Project;
use std::{iter::Iterator, ops::Range, sync::Arc};
use ui::{ButtonLike, Divider, HighlightedLabel, ListItem, ListSubHeader, Tooltip, prelude::*};
use ui_input::InputField;
+use workspace::AppState;
use workspace::{
- AppState, Item, ItemId, SerializableItem, Workspace, WorkspaceId, delete_unloaded_items,
- item::ItemEvent,
+ Item, ItemId, SerializableItem, Workspace, WorkspaceId, delete_unloaded_items, item::ItemEvent,
};
+#[allow(unused_imports)]
+pub use component_preview_example::*;
+
pub fn init(app_state: Arc<AppState>, cx: &mut App) {
workspace::register_serializable_item::<ComponentPreview>(cx);
@@ -0,0 +1,145 @@
+/// Run the component preview application.
+///
+/// This initializes the application with minimal required infrastructure
+/// and opens a workspace with the ComponentPreview item.
+#[cfg(feature = "preview")]
+pub fn run_component_preview() {
+ use fs::RealFs;
+ use gpui::{
+ AppContext as _, Application, Bounds, KeyBinding, WindowBounds, WindowOptions, actions,
+ size,
+ };
+
+ use client::{Client, UserStore};
+ use language::LanguageRegistry;
+ use node_runtime::NodeRuntime;
+ use project::Project;
+ use reqwest_client::ReqwestClient;
+ use session::{AppSession, Session};
+ use std::sync::Arc;
+ use ui::{App, px};
+ use workspace::{AppState, Workspace, WorkspaceStore};
+
+ use crate::{ComponentPreview, init};
+
+ actions!(zed, [Quit]);
+
+ fn quit(_: &Quit, cx: &mut App) {
+ cx.quit();
+ }
+
+ Application::new().run(|cx| {
+ component::init();
+
+ cx.on_action(quit);
+ cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]);
+ let version = release_channel::AppVersion::load(env!("CARGO_PKG_VERSION"), None, None);
+ release_channel::init(version, cx);
+
+ let http_client =
+ ReqwestClient::user_agent("component_preview").expect("Failed to create HTTP client");
+ cx.set_http_client(Arc::new(http_client));
+
+ let fs = Arc::new(RealFs::new(None, cx.background_executor().clone()));
+ <dyn fs::Fs>::set_global(fs.clone(), cx);
+
+ settings::init(cx);
+ theme::init(theme::LoadThemes::JustBase, cx);
+
+ let languages = Arc::new(LanguageRegistry::new(cx.background_executor().clone()));
+ let client = Client::production(cx);
+ client::init(&client, cx);
+
+ let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
+ let workspace_store = cx.new(|cx| WorkspaceStore::new(client.clone(), cx));
+ let session_id = uuid::Uuid::new_v4().to_string();
+ let session = cx.background_executor().block(Session::new(session_id));
+ let session = cx.new(|cx| AppSession::new(session, cx));
+ let node_runtime = NodeRuntime::unavailable();
+
+ let app_state = Arc::new(AppState {
+ languages,
+ client,
+ user_store,
+ workspace_store,
+ fs,
+ build_window_options: |_, _| Default::default(),
+ node_runtime,
+ session,
+ });
+ AppState::set_global(Arc::downgrade(&app_state), cx);
+
+ workspace::init(app_state.clone(), cx);
+ init(app_state.clone(), cx);
+
+ let size = size(px(1200.), px(800.));
+ let bounds = Bounds::centered(None, size, cx);
+
+ cx.open_window(
+ WindowOptions {
+ window_bounds: Some(WindowBounds::Windowed(bounds)),
+ ..Default::default()
+ },
+ {
+ move |window, cx| {
+ let app_state = app_state;
+ theme::setup_ui_font(window, cx);
+
+ let project = Project::local(
+ app_state.client.clone(),
+ app_state.node_runtime.clone(),
+ app_state.user_store.clone(),
+ app_state.languages.clone(),
+ app_state.fs.clone(),
+ None,
+ false,
+ cx,
+ );
+
+ let workspace = cx.new(|cx| {
+ Workspace::new(
+ Default::default(),
+ project.clone(),
+ app_state.clone(),
+ window,
+ cx,
+ )
+ });
+
+ workspace.update(cx, |workspace, cx| {
+ let weak_workspace = cx.entity().downgrade();
+ let language_registry = app_state.languages.clone();
+ let user_store = app_state.user_store.clone();
+
+ let component_preview = cx.new(|cx| {
+ ComponentPreview::new(
+ weak_workspace,
+ project,
+ language_registry,
+ user_store,
+ None,
+ None,
+ window,
+ cx,
+ )
+ .expect("Failed to create component preview")
+ });
+
+ workspace.add_item_to_active_pane(
+ Box::new(component_preview),
+ None,
+ true,
+ window,
+ cx,
+ );
+ });
+
+ workspace
+ }
+ },
+ )
+ .expect("Failed to open component preview window");
+
+ cx.activate(true);
+ });
+}
@@ -18346,7 +18346,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut TestAppCon
);
update_test_project_settings(cx, |project_settings| {
- project_settings.lsp.insert(
+ project_settings.lsp.0.insert(
"Some other server name".into(),
LspSettings {
binary: None,
@@ -18367,7 +18367,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut TestAppCon
);
update_test_project_settings(cx, |project_settings| {
- project_settings.lsp.insert(
+ project_settings.lsp.0.insert(
language_server_name.into(),
LspSettings {
binary: None,
@@ -18388,7 +18388,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut TestAppCon
);
update_test_project_settings(cx, |project_settings| {
- project_settings.lsp.insert(
+ project_settings.lsp.0.insert(
language_server_name.into(),
LspSettings {
binary: None,
@@ -18409,7 +18409,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut TestAppCon
);
update_test_project_settings(cx, |project_settings| {
- project_settings.lsp.insert(
+ project_settings.lsp.0.insert(
language_server_name.into(),
LspSettings {
binary: None,
@@ -46,9 +46,9 @@ use gpui::{
KeybindingKeystroke, Length, Modifiers, ModifiersChangedEvent, MouseButton, MouseClickEvent,
MouseDownEvent, MouseMoveEvent, MousePressureEvent, MouseUpEvent, PaintQuad, ParentElement,
Pixels, PressureStage, ScrollDelta, ScrollHandle, ScrollWheelEvent, ShapedLine, SharedString,
- Size, StatefulInteractiveElement, Style, Styled, TextRun, TextStyleRefinement, WeakEntity,
- Window, anchored, deferred, div, fill, linear_color_stop, linear_gradient, outline, point, px,
- quad, relative, size, solid_background, transparent_black,
+ Size, StatefulInteractiveElement, Style, Styled, TextAlign, TextRun, TextStyleRefinement,
+ WeakEntity, Window, anchored, deferred, div, fill, linear_color_stop, linear_gradient, outline,
+ point, px, quad, relative, size, solid_background, transparent_black,
};
use itertools::Itertools;
use language::{IndentGuideSettings, language_settings::ShowWhitespaceSetting};
@@ -1695,9 +1695,13 @@ impl EditorElement {
[cursor_position.row().minus(visible_display_row_range.start) as usize];
let cursor_column = cursor_position.column() as usize;
- let cursor_character_x = cursor_row_layout.x_for_index(cursor_column);
- let mut block_width =
- cursor_row_layout.x_for_index(cursor_column + 1) - cursor_character_x;
+ let cursor_character_x = cursor_row_layout.x_for_index(cursor_column)
+ + cursor_row_layout
+ .alignment_offset(self.style.text.text_align, text_hitbox.size.width);
+ let cursor_next_x = cursor_row_layout.x_for_index(cursor_column + 1)
+ + cursor_row_layout
+ .alignment_offset(self.style.text.text_align, text_hitbox.size.width);
+ let mut block_width = cursor_next_x - cursor_character_x;
if block_width == Pixels::ZERO {
block_width = em_advance;
}
@@ -6160,10 +6164,25 @@ impl EditorElement {
let color = cx.theme().colors().editor_hover_line_number;
let line = self.shape_line_number(shaped_line.text.clone(), color, window);
- line.paint(hitbox.origin, line_height, window, cx).log_err()
+ line.paint(
+ hitbox.origin,
+ line_height,
+ TextAlign::Left,
+ None,
+ window,
+ cx,
+ )
+ .log_err()
} else {
shaped_line
- .paint(hitbox.origin, line_height, window, cx)
+ .paint(
+ hitbox.origin,
+ line_height,
+ TextAlign::Left,
+ None,
+ window,
+ cx,
+ )
.log_err()
}) else {
continue;
@@ -7252,23 +7271,27 @@ impl EditorElement {
.map(|row| {
let line_layout =
&layout.position_map.line_layouts[row.minus(start_row) as usize];
+ let alignment_offset =
+ line_layout.alignment_offset(layout.text_align, layout.content_width);
HighlightedRangeLine {
start_x: if row == range.start.row() {
layout.content_origin.x
+ Pixels::from(
ScrollPixelOffset::from(
- line_layout.x_for_index(range.start.column() as usize),
+ line_layout.x_for_index(range.start.column() as usize)
+ + alignment_offset,
) - layout.position_map.scroll_pixel_position.x,
)
} else {
- layout.content_origin.x
+ layout.content_origin.x + alignment_offset
- Pixels::from(layout.position_map.scroll_pixel_position.x)
},
end_x: if row == range.end.row() {
layout.content_origin.x
+ Pixels::from(
ScrollPixelOffset::from(
- line_layout.x_for_index(range.end.column() as usize),
+ line_layout.x_for_index(range.end.column() as usize)
+ + alignment_offset,
) - layout.position_map.scroll_pixel_position.x,
)
} else {
@@ -7276,6 +7299,7 @@ impl EditorElement {
ScrollPixelOffset::from(
layout.content_origin.x
+ line_layout.width
+ + alignment_offset
+ line_end_overshoot,
) - layout.position_map.scroll_pixel_position.x,
)
@@ -8516,8 +8540,15 @@ impl LineWithInvisibles {
for fragment in &self.fragments {
match fragment {
LineFragment::Text(line) => {
- line.paint(fragment_origin, line_height, window, cx)
- .log_err();
+ line.paint(
+ fragment_origin,
+ line_height,
+ layout.text_align,
+ Some(layout.content_width),
+ window,
+ cx,
+ )
+ .log_err();
fragment_origin.x += line.width;
}
LineFragment::Element { size, .. } => {
@@ -8559,8 +8590,15 @@ impl LineWithInvisibles {
for fragment in &self.fragments {
match fragment {
LineFragment::Text(line) => {
- line.paint_background(fragment_origin, line_height, window, cx)
- .log_err();
+ line.paint_background(
+ fragment_origin,
+ line_height,
+ layout.text_align,
+ Some(layout.content_width),
+ window,
+ cx,
+ )
+ .log_err();
fragment_origin.x += line.width;
}
LineFragment::Element { size, .. } => {
@@ -8609,7 +8647,7 @@ impl LineWithInvisibles {
[token_offset, token_end_offset],
Box::new(move |window: &mut Window, cx: &mut App| {
invisible_symbol
- .paint(origin, line_height, window, cx)
+ .paint(origin, line_height, TextAlign::Left, None, window, cx)
.log_err();
}),
)
@@ -8770,6 +8808,15 @@ impl LineWithInvisibles {
None
}
+
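+ /// Horizontal offset to apply when painting this line so that it honors
+ /// the requested `text_align` within the available `content_width`.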
+ pub fn alignment_offset(&self, text_align: TextAlign, content_width: Pixels) -> Pixels {
+ let line_width = self.width;
+ match text_align {
+ TextAlign::Left => px(0.0),
+ TextAlign::Center => (content_width - line_width) / 2.0,
+ TextAlign::Right => content_width - line_width,
+ }
+ }
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
@@ -10172,6 +10219,8 @@ impl Element for EditorElement {
em_width,
em_advance,
snapshot,
+ text_align: self.style.text.text_align,
+ content_width: text_hitbox.size.width,
gutter_hitbox: gutter_hitbox.clone(),
text_hitbox: text_hitbox.clone(),
inline_blame_bounds: inline_blame_layout
@@ -10225,6 +10274,8 @@ impl Element for EditorElement {
sticky_buffer_header,
sticky_headers,
expand_toggles,
+ text_align: self.style.text.text_align,
+ content_width: text_hitbox.size.width,
}
})
})
@@ -10405,6 +10456,8 @@ pub struct EditorLayout {
sticky_buffer_header: Option<AnyElement>,
sticky_headers: Option<StickyHeaders>,
document_colors: Option<(DocumentColorsRenderMode, Vec<(Range<DisplayPoint>, Hsla)>)>,
+ text_align: TextAlign,
+ content_width: Pixels,
}
struct StickyHeaders {
@@ -10572,7 +10625,9 @@ impl StickyHeaderLine {
gutter_origin.x + gutter_width - gutter_right_padding - line_number.width,
gutter_origin.y,
);
- line_number.paint(origin, line_height, window, cx).log_err();
+ line_number
+ .paint(origin, line_height, TextAlign::Left, None, window, cx)
+ .log_err();
}
}
}
@@ -11011,6 +11066,8 @@ pub(crate) struct PositionMap {
pub visible_row_range: Range<DisplayRow>,
pub line_layouts: Vec<LineWithInvisibles>,
pub snapshot: EditorSnapshot,
+ pub text_align: TextAlign,
+ pub content_width: Pixels,
pub text_hitbox: Hitbox,
pub gutter_hitbox: Hitbox,
pub inline_blame_bounds: Option<(Bounds<Pixels>, BufferId, BlameEntry)>,
@@ -11076,10 +11133,12 @@ impl PositionMap {
.line_layouts
.get(row as usize - scroll_position.y as usize)
{
- if let Some(ix) = line.index_for_x(x) {
+ let alignment_offset = line.alignment_offset(self.text_align, self.content_width);
+ let x_relative_to_text = x - alignment_offset;
+ if let Some(ix) = line.index_for_x(x_relative_to_text) {
(ix as u32, px(0.))
} else {
- (line.len as u32, px(0.).max(x - line.width))
+ (line.len as u32, px(0.).max(x_relative_to_text - line.width))
}
} else {
(0, x)
@@ -11268,7 +11327,14 @@ impl CursorLayout {
if let Some(block_text) = &self.block_text {
block_text
- .paint(self.origin + origin, self.line_height, window, cx)
+ .paint(
+ self.origin + origin,
+ self.line_height,
+ TextAlign::Left,
+ None,
+ window,
+ cx,
+ )
.log_err();
}
}
@@ -17,8 +17,8 @@ use gpui::{
ParentElement, Pixels, SharedString, Styled, Task, WeakEntity, Window, point,
};
use language::{
- Bias, Buffer, BufferRow, CharKind, CharScopeContext, DiskState, LocalFile, Point,
- SelectionGoal, proto::serialize_anchor as serialize_text_anchor,
+ Bias, Buffer, BufferRow, CharKind, CharScopeContext, LocalFile, Point, SelectionGoal,
+ proto::serialize_anchor as serialize_text_anchor,
};
use lsp::DiagnosticSeverity;
use multi_buffer::MultiBufferOffset;
@@ -722,7 +722,7 @@ impl Item for Editor {
.read(cx)
.as_singleton()
.and_then(|buffer| buffer.read(cx).file())
- .is_some_and(|file| file.disk_state() == DiskState::Deleted);
+ .is_some_and(|file| file.disk_state().is_deleted());
h_flex()
.gap_2()
@@ -69,7 +69,7 @@ struct GitBlob {
path: RepoPath,
worktree_id: WorktreeId,
is_deleted: bool,
- display_name: Arc<str>,
+ display_name: String,
}
const COMMIT_MESSAGE_SORT_PREFIX: u64 = 0;
@@ -243,9 +243,8 @@ impl CommitView {
.path
.file_name()
.map(|name| name.to_string())
- .unwrap_or_else(|| file.path.display(PathStyle::Posix).to_string());
- let display_name: Arc<str> =
- Arc::from(format!("{short_sha} - {file_name}").into_boxed_str());
+ .unwrap_or_else(|| file.path.display(PathStyle::local()).to_string());
+ let display_name = format!("{short_sha} - {file_name}");
let file = Arc::new(GitBlob {
path: file.path.clone(),
@@ -661,15 +660,13 @@ impl language::File for GitBlob {
}
fn disk_state(&self) -> DiskState {
- if self.is_deleted {
- DiskState::Deleted
- } else {
- DiskState::New
+ DiskState::Historic {
+ was_deleted: self.is_deleted,
}
}
fn path_style(&self, _: &App) -> PathStyle {
- PathStyle::Posix
+ PathStyle::local()
}
fn path(&self) -> &Arc<RelPath> {
@@ -697,45 +694,6 @@ impl language::File for GitBlob {
}
}
-// No longer needed since metadata buffer is not created
-// impl language::File for CommitMetadataFile {
-// fn as_local(&self) -> Option<&dyn language::LocalFile> {
-// None
-// }
-//
-// fn disk_state(&self) -> DiskState {
-// DiskState::New
-// }
-//
-// fn path_style(&self, _: &App) -> PathStyle {
-// PathStyle::Posix
-// }
-//
-// fn path(&self) -> &Arc<RelPath> {
-// &self.title
-// }
-//
-// fn full_path(&self, _: &App) -> PathBuf {
-// self.title.as_std_path().to_path_buf()
-// }
-//
-// fn file_name<'a>(&'a self, _: &'a App) -> &'a str {
-// self.title.file_name().unwrap_or("commit")
-// }
-//
-// fn worktree_id(&self, _: &App) -> WorktreeId {
-// self.worktree_id
-// }
-//
-// fn to_proto(&self, _cx: &App) -> language::proto::File {
-// unimplemented!()
-// }
-//
-// fn is_private(&self) -> bool {
-// false
-// }
-// }
-
async fn build_buffer(
mut text: String,
blob: Arc<dyn File>,
@@ -546,8 +546,15 @@ impl Element for TextElement {
window.paint_quad(selection)
}
let line = prepaint.line.take().unwrap();
- line.paint(bounds.origin, window.line_height(), window, cx)
- .unwrap();
+ line.paint(
+ bounds.origin,
+ window.line_height(),
+ gpui::TextAlign::Left,
+ None,
+ window,
+ cx,
+ )
+ .unwrap();
if focus_handle.is_focused(window)
&& let Some(cursor) = prepaint.cursor.take()
@@ -0,0 +1,174 @@
+use gpui::{
+ App, Application, Context, Corner, Div, Hsla, Stateful, Window, WindowOptions, anchored,
+ deferred, div, prelude::*, px,
+};
+
+/// An example showing how to use `deferred` to create floating layers.
+struct HelloWorld {
+ open: bool,
+ secondary_open: bool,
+}
+
+fn button(id: &'static str) -> Stateful<Div> {
+ div()
+ .id(id)
+ .bg(gpui::black())
+ .text_color(gpui::white())
+ .px_3()
+ .py_1()
+}
+
+fn popover() -> Div {
+ div()
+ .flex()
+ .flex_col()
+ .items_center()
+ .justify_center()
+ .shadow_lg()
+ .p_3()
+ .rounded_md()
+ .bg(gpui::white())
+ .text_color(gpui::black())
+ .border_1()
+ .text_sm()
+ .border_color(gpui::black().opacity(0.1))
+}
+
+fn line(color: Hsla) -> Div {
+ div().w(px(480.)).h_2().bg(color.opacity(0.25))
+}
+
+impl HelloWorld {
+ fn render_secondary_popover(
+ &mut self,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> impl IntoElement {
+ button("secondary-btn")
+ .mt_2()
+ .child("Child Popover")
+ .on_click(cx.listener(|this, _, _, cx| {
+ this.secondary_open = true;
+ cx.notify();
+ }))
+ .when(self.secondary_open, |this| {
+ this.child(
+ // GPUI can't support deferred here yet,
+ // since it would be nested inside another deferred element.
+ anchored()
+ .anchor(Corner::TopLeft)
+ .snap_to_window_with_margin(px(8.))
+ .child(
+ popover()
+ .child("This is second level Popover")
+ .bg(gpui::white())
+ .border_color(gpui::blue())
+ .on_mouse_down_out(cx.listener(|this, _, _, cx| {
+ this.secondary_open = false;
+ cx.notify();
+ })),
+ ),
+ )
+ })
+ }
+}
+
+impl Render for HelloWorld {
+ fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ div()
+ .flex()
+ .flex_col()
+ .gap_3()
+ .size_full()
+ .bg(gpui::white())
+ .text_color(gpui::black())
+ .justify_center()
+ .items_center()
+ .child(
+ div()
+ .flex()
+ .flex_row()
+ .gap_4()
+ .child(
+ button("popover0").child("Opened Popover").child(
+ deferred(
+ anchored()
+ .anchor(Corner::TopLeft)
+ .snap_to_window_with_margin(px(8.))
+ .child(popover().w_96().gap_3().child(
+ "This is a default opened Popover, \
+ we can use deferred to render it \
+ in a floating layer.",
+ )),
+ )
+ .priority(0),
+ ),
+ )
+ .child(
+ button("popover1")
+ .child("Open Popover")
+ .on_click(cx.listener(|this, _, _, cx| {
+ this.open = true;
+ cx.notify();
+ }))
+ .when(self.open, |this| {
+ this.child(
+ deferred(
+ anchored()
+ .anchor(Corner::TopLeft)
+ .snap_to_window_with_margin(px(8.))
+ .child(
+ popover()
+ .w_96()
+ .gap_3()
+ .child(
+ "This is first level Popover, \
+ we can use deferred to render it \
+ in a floating layer.\n\
+ Click outside to close.",
+ )
+ .when(!self.secondary_open, |this| {
+ this.on_mouse_down_out(cx.listener(
+ |this, _, _, cx| {
+ this.open = false;
+ cx.notify();
+ },
+ ))
+ })
+ // Here we need to render the popover after the content
+ // to ensure it ends up on the top layer.
+ .child(
+ self.render_secondary_popover(window, cx),
+ ),
+ ),
+ )
+ .priority(1),
+ )
+ }),
+ ),
+ )
+ .child(
+ "Here is an example text rendered, \
+ to ensure the Popover will float above this contents.",
+ )
+ .children([
+ line(gpui::red()),
+ line(gpui::yellow()),
+ line(gpui::blue()),
+ line(gpui::green()),
+ ])
+ }
+}
+
+fn main() {
+ Application::new().run(|cx: &mut App| {
+ cx.open_window(WindowOptions::default(), |_, cx| {
+ cx.new(|_| HelloWorld {
+ open: false,
+ secondary_open: false,
+ })
+ })
+ .unwrap();
+ cx.activate(true);
+ });
+}
@@ -130,6 +130,50 @@ impl Render for Example {
})),
),
)
+ .child(
+ div()
+ .id("group-1")
+ .tab_index(6)
+ .tab_group()
+ .tab_stop(false)
+ .child(
+ button("group-1-button-1")
+ .tab_index(1)
+ .child("Tab index [6, 1]"),
+ )
+ .child(
+ button("group-1-button-2")
+ .tab_index(2)
+ .child("Tab index [6, 2]"),
+ )
+ .child(
+ button("group-1-button-3")
+ .tab_index(3)
+ .child("Tab index [6, 3]"),
+ ),
+ )
+ .child(
+ div()
+ .id("group-2")
+ .tab_index(7)
+ .tab_group()
+ .tab_stop(false)
+ .child(
+ button("group-2-button-1")
+ .tab_index(1)
+ .child("Tab index [7, 1]"),
+ )
+ .child(
+ button("group-2-button-2")
+ .tab_index(2)
+ .child("Tab index [7, 2]"),
+ )
+ .child(
+ button("group-2-button-3")
+ .tab_index(3)
+ .child("Tab index [7, 3]"),
+ ),
+ )
}
}
@@ -1730,6 +1730,11 @@ impl Interactivity {
let clicked_state = clicked_state.borrow();
self.active = Some(clicked_state.element);
}
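+ // Make sure hover state exists whenever hover styles are in play, so the
+ // mouse-move listeners registered during paint can record hover changes.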
+ if self.hover_style.is_some() || self.group_hover_style.is_some() {
+ element_state
+ .hover_state
+ .get_or_insert_with(Default::default);
+ }
if let Some(active_tooltip) = element_state.active_tooltip.as_ref() {
if self.tooltip_builder.is_some() {
self.tooltip_id = set_tooltip_on_window(active_tooltip, window);
@@ -2150,14 +2155,46 @@ impl Interactivity {
{
let hitbox = hitbox.clone();
let was_hovered = hitbox.is_hovered(window);
+ let hover_state = self.hover_style.as_ref().and_then(|_| {
+ element_state
+ .as_ref()
+ .and_then(|state| state.hover_state.as_ref())
+ .cloned()
+ });
let current_view = window.current_view();
window.on_mouse_event(move |_: &MouseMoveEvent, phase, window, cx| {
let hovered = hitbox.is_hovered(window);
if phase == DispatchPhase::Capture && hovered != was_hovered {
+ if let Some(hover_state) = &hover_state {
+ hover_state.borrow_mut().element = hovered;
+ }
cx.notify(current_view);
}
});
}
+
+ if let Some(group_hover) = self.group_hover_style.as_ref() {
+ if let Some(group_hitbox_id) = GroupHitboxes::get(&group_hover.group, cx) {
+ let hover_state = element_state
+ .as_ref()
+ .and_then(|element| element.hover_state.as_ref())
+ .cloned();
+
+ let was_group_hovered = group_hitbox_id.is_hovered(window);
+ let current_view = window.current_view();
+
+ window.on_mouse_event(move |_: &MouseMoveEvent, phase, window, cx| {
+ let group_hovered = group_hitbox_id.is_hovered(window);
+ if phase == DispatchPhase::Capture && group_hovered != was_group_hovered {
+ if let Some(hover_state) = &hover_state {
+ hover_state.borrow_mut().group = group_hovered;
+ }
+ cx.notify(current_view);
+ }
+ });
+ }
+ }
+
let drag_cursor_style = self.base_style.as_ref().mouse_cursor;
let mut drag_listener = mem::take(&mut self.drag_listener);
@@ -2346,8 +2383,8 @@ impl Interactivity {
&& hitbox.is_hovered(window);
let mut was_hovered = was_hovered.borrow_mut();
- if is_hovered != *was_hovered {
- *was_hovered = is_hovered;
+ if is_hovered != was_hovered.element {
+ was_hovered.element = is_hovered;
drop(was_hovered);
hover_listener(&is_hovered, window, cx);
@@ -2580,22 +2617,46 @@ impl Interactivity {
}
}
- if let Some(hitbox) = hitbox {
- if !cx.has_active_drag() {
- if let Some(group_hover) = self.group_hover_style.as_ref()
- && let Some(group_hitbox_id) = GroupHitboxes::get(&group_hover.group, cx)
- && group_hitbox_id.is_hovered(window)
- {
+ if !cx.has_active_drag() {
+ if let Some(group_hover) = self.group_hover_style.as_ref() {
+ let is_group_hovered =
+ if let Some(group_hitbox_id) = GroupHitboxes::get(&group_hover.group, cx) {
+ group_hitbox_id.is_hovered(window)
+ } else if let Some(element_state) = element_state.as_ref() {
+ element_state
+ .hover_state
+ .as_ref()
+ .map(|state| state.borrow().group)
+ .unwrap_or(false)
+ } else {
+ false
+ };
+
+ if is_group_hovered {
style.refine(&group_hover.style);
}
+ }
- if let Some(hover_style) = self.hover_style.as_ref()
- && hitbox.is_hovered(window)
- {
+ if let Some(hover_style) = self.hover_style.as_ref() {
+ let is_hovered = if let Some(hitbox) = hitbox {
+ hitbox.is_hovered(window)
+ } else if let Some(element_state) = element_state.as_ref() {
+ element_state
+ .hover_state
+ .as_ref()
+ .map(|state| state.borrow().element)
+ .unwrap_or(false)
+ } else {
+ false
+ };
+
+ if is_hovered {
style.refine(hover_style);
}
}
+ }
+ if let Some(hitbox) = hitbox {
if let Some(drag) = cx.active_drag.take() {
let mut can_drop = true;
if let Some(can_drop_predicate) = &self.can_drop_predicate {
@@ -2654,7 +2715,7 @@ impl Interactivity {
pub struct InteractiveElementState {
pub(crate) focus_handle: Option<FocusHandle>,
pub(crate) clicked_state: Option<Rc<RefCell<ElementClickedState>>>,
- pub(crate) hover_state: Option<Rc<RefCell<bool>>>,
+ pub(crate) hover_state: Option<Rc<RefCell<ElementHoverState>>>,
pub(crate) pending_mouse_down: Option<Rc<RefCell<Option<MouseDownEvent>>>>,
pub(crate) scroll_offset: Option<Rc<RefCell<Point<Pixels>>>>,
pub(crate) active_tooltip: Option<Rc<RefCell<Option<ActiveTooltip>>>>,
@@ -2676,6 +2737,16 @@ impl ElementClickedState {
}
}
+/// Whether or not the element or a group that contains it is hovered.
+#[derive(Copy, Clone, Default, Eq, PartialEq)]
+pub struct ElementHoverState {
+ /// True if this element's group is hovered, false otherwise
+ pub group: bool,
+
+ /// True if this element is hovered, false otherwise
+ pub element: bool,
+}
+
pub(crate) enum ActiveTooltip {
/// Currently delaying before showing the tooltip.
WaitingForShow { _task: Task<()> },
@@ -944,6 +944,8 @@ impl X11Client {
let window = self.get_window(event.event)?;
window.set_active(false);
let mut state = self.0.borrow_mut();
+ // Set last scroll values to `None` so that a large delta isn't created if scrolling is done outside the window (the valuator is global)
+ reset_all_pointer_device_scroll_positions(&mut state.pointer_device_states);
state.keyboard_focused_window = None;
if let Some(compose_state) = state.compose_state.as_mut() {
compose_state.reset();
@@ -431,6 +431,7 @@ impl X11WindowState {
// https://stackoverflow.com/questions/43218127/x11-xlib-xcb-creating-a-window-requires-border-pixel-if-specifying-colormap-wh
.border_pixel(visual_set.black_pixel)
.colormap(colormap)
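+ // Override-redirect popups bypass the window manager entirely
+ // (no decorations and no WM focus handling).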
+ .override_redirect((params.kind == WindowKind::PopUp) as u32)
.event_mask(
xproto::EventMask::EXPOSURE
| xproto::EventMask::STRUCTURE_NOTIFY
@@ -156,9 +156,6 @@ impl MetalRenderer {
// https://developer.apple.com/documentation/metal/managing-your-game-window-for-metal-in-macos
layer.set_opaque(!transparent);
layer.set_maximum_drawable_count(3);
- // We already present at display sync with the display link
- // This allows to use direct-to-display even in window mode
- layer.set_display_sync_enabled(false);
unsafe {
let _: () = msg_send![&*layer, setAllowsNextDrawableTimeout: NO];
let _: () = msg_send![&*layer, setNeedsDisplayOnBoundsChange: YES];
@@ -42,7 +42,7 @@ impl WindowsWindowInner {
let handled = match msg {
// eagerly activate the window, so calls to `active_window` will work correctly
WM_MOUSEACTIVATE => {
- unsafe { SetActiveWindow(handle).log_err() };
+ unsafe { SetActiveWindow(handle).ok() };
None
}
WM_ACTIVATE => self.handle_activate_msg(wparam),
@@ -740,8 +740,8 @@ impl PlatformWindow for WindowsWindow {
ShowWindowAsync(hwnd, SW_RESTORE).ok().log_err();
}
- SetActiveWindow(hwnd).log_err();
- SetFocus(Some(hwnd)).log_err();
+ SetActiveWindow(hwnd).ok();
+ SetFocus(Some(hwnd)).ok();
}
// premium ragebait by windows, this is needed because the window
@@ -64,6 +64,8 @@ impl ShapedLine {
&self,
origin: Point<Pixels>,
line_height: Pixels,
+ align: TextAlign,
+ align_width: Option<Pixels>,
window: &mut Window,
cx: &mut App,
) -> Result<()> {
@@ -71,8 +73,8 @@ impl ShapedLine {
origin,
&self.layout,
line_height,
- TextAlign::default(),
- None,
+ align,
+ align_width,
&self.decoration_runs,
&[],
window,
@@ -87,6 +89,8 @@ impl ShapedLine {
&self,
origin: Point<Pixels>,
line_height: Pixels,
+ align: TextAlign,
+ align_width: Option<Pixels>,
window: &mut Window,
cx: &mut App,
) -> Result<()> {
@@ -94,8 +98,8 @@ impl ShapedLine {
origin,
&self.layout,
line_height,
- TextAlign::default(),
- None,
+ align,
+ align_width,
&self.decoration_runs,
&[],
window,
@@ -11,7 +11,7 @@ use gpui::{
InteractiveElement, IntoElement, ObjectFit, ParentElement, Render, Styled, Task, WeakEntity,
Window, canvas, div, fill, img, opaque_grey, point, size,
};
-use language::{DiskState, File as _};
+use language::File as _;
use persistence::IMAGE_VIEWER;
use project::{ImageItem, Project, ProjectPath, image_store::ImageItemEvent};
use settings::Settings;
@@ -195,7 +195,7 @@ impl Item for ImageView {
}
fn has_deleted_file(&self, cx: &App) -> bool {
- self.image_item.read(cx).file.disk_state() == DiskState::Deleted
+ self.image_item.read(cx).file.disk_state().is_deleted()
}
fn buffer_kind(&self, _: &App) -> workspace::item::ItemBufferKind {
workspace::item::ItemBufferKind::Singleton
@@ -20,6 +20,7 @@ dap.workspace = true
extension.workspace = true
gpui.workspace = true
language.workspace = true
+lsp.workspace = true
paths.workspace = true
project.workspace = true
schemars.workspace = true
@@ -2,9 +2,11 @@
use std::{str::FromStr, sync::Arc};
use anyhow::{Context as _, Result};
-use gpui::{App, AsyncApp, BorrowAppContext as _, Entity, WeakEntity};
+use gpui::{App, AsyncApp, BorrowAppContext as _, Entity, Task, WeakEntity};
use language::{LanguageRegistry, language_settings::all_language_settings};
-use project::LspStore;
+use lsp::LanguageServerBinaryOptions;
+use project::{LspStore, lsp_store::LocalLspAdapterDelegate};
+use settings::LSP_SETTINGS_SCHEMA_URL_PREFIX;
use util::schemars::{AllowTrailingCommas, DefaultDenyUnknownFields};
// Origin: https://github.com/SchemaStore/schemastore
@@ -75,23 +77,28 @@ fn handle_schema_request(
lsp_store: Entity<LspStore>,
uri: String,
cx: &mut AsyncApp,
-) -> Result<String> {
- let languages = lsp_store.read_with(cx, |lsp_store, _| lsp_store.languages.clone())?;
- let schema = resolve_schema_request(&languages, uri, cx)?;
- serde_json::to_string(&schema).context("Failed to serialize schema")
+) -> Task<Result<String>> {
+ let languages = lsp_store.read_with(cx, |lsp_store, _| lsp_store.languages.clone());
+ cx.spawn(async move |cx| {
+ let languages = languages?;
+ let schema = resolve_schema_request(&languages, lsp_store, uri, cx).await?;
+ serde_json::to_string(&schema).context("Failed to serialize schema")
+ })
}
-pub fn resolve_schema_request(
+pub async fn resolve_schema_request(
languages: &Arc<LanguageRegistry>,
+ lsp_store: Entity<LspStore>,
uri: String,
cx: &mut AsyncApp,
) -> Result<serde_json::Value> {
let path = uri.strip_prefix("zed://schemas/").context("Invalid URI")?;
- resolve_schema_request_inner(languages, path, cx)
+ resolve_schema_request_inner(languages, lsp_store, path, cx).await
}
-pub fn resolve_schema_request_inner(
+pub async fn resolve_schema_request_inner(
languages: &Arc<LanguageRegistry>,
+ lsp_store: Entity<LspStore>,
path: &str,
cx: &mut AsyncApp,
) -> Result<serde_json::Value> {
@@ -99,37 +106,106 @@ pub fn resolve_schema_request_inner(
let schema_name = schema_name.unwrap_or(path);
let schema = match schema_name {
- "settings" => cx.update(|cx| {
- let font_names = &cx.text_system().all_font_names();
- let language_names = &languages
- .language_names()
+ "settings" if rest.is_some_and(|r| r.starts_with("lsp/")) => {
+ let lsp_name = rest
+ .and_then(|r| {
+ r.strip_prefix(
+ LSP_SETTINGS_SCHEMA_URL_PREFIX
+ .strip_prefix("zed://schemas/settings/")
+ .unwrap(),
+ )
+ })
+ .context("Invalid LSP schema path")?;
+
+ let adapter = languages
+ .all_lsp_adapters()
.into_iter()
- .map(|name| name.to_string())
+ .find(|adapter| adapter.name().as_ref() as &str == lsp_name)
+ .with_context(|| format!("LSP adapter not found: {}", lsp_name))?;
+
+ let delegate = cx.update(|inner_cx| {
+ lsp_store.update(inner_cx, |lsp_store, inner_cx| {
+ let Some(local) = lsp_store.as_local() else {
+ return None;
+ };
+ let Some(worktree) = local.worktree_store.read(inner_cx).worktrees().next() else {
+ return None;
+ };
+ Some(LocalLspAdapterDelegate::from_local_lsp(
+ local, &worktree, inner_cx,
+ ))
+ })
+ })?.context("Failed to create adapter delegate - either LSP store is not in local mode or no worktree is available")?;
+
+ let adapter_for_schema = adapter.clone();
+
+ let binary = adapter
+ .get_language_server_command(
+ delegate,
+ None,
+ LanguageServerBinaryOptions {
+ allow_path_lookup: true,
+ allow_binary_download: false,
+ pre_release: false,
+ },
+ cx,
+ )
+ .await
+ .await
+ .0.with_context(|| format!("Failed to find language server {lsp_name} to generate initialization params schema"))?;
+
+ adapter_for_schema
+ .adapter
+ .clone()
+ .initialization_options_schema(&binary)
+ .await
+ .unwrap_or_else(|| {
+ serde_json::json!({
+ "type": "object",
+ "additionalProperties": true
+ })
+ })
+ }
+ "settings" => {
+ let lsp_adapter_names = languages
+ .all_lsp_adapters()
+ .into_iter()
+ .map(|adapter| adapter.name().to_string())
.collect::<Vec<_>>();
- let mut icon_theme_names = vec![];
- let mut theme_names = vec![];
- if let Some(registry) = theme::ThemeRegistry::try_global(cx) {
- icon_theme_names.extend(
- registry
- .list_icon_themes()
- .into_iter()
- .map(|icon_theme| icon_theme.name),
- );
- theme_names.extend(registry.list_names());
- }
- let icon_theme_names = icon_theme_names.as_slice();
- let theme_names = theme_names.as_slice();
-
- cx.global::<settings::SettingsStore>().json_schema(
- &settings::SettingsJsonSchemaParams {
- language_names,
- font_names,
- theme_names,
- icon_theme_names,
- },
- )
- })?,
+ cx.update(|cx| {
+ let font_names = &cx.text_system().all_font_names();
+ let language_names = &languages
+ .language_names()
+ .into_iter()
+ .map(|name| name.to_string())
+ .collect::<Vec<_>>();
+
+ let mut icon_theme_names = vec![];
+ let mut theme_names = vec![];
+ if let Some(registry) = theme::ThemeRegistry::try_global(cx) {
+ icon_theme_names.extend(
+ registry
+ .list_icon_themes()
+ .into_iter()
+ .map(|icon_theme| icon_theme.name),
+ );
+ theme_names.extend(registry.list_names());
+ }
+ let icon_theme_names = icon_theme_names.as_slice();
+ let theme_names = theme_names.as_slice();
+
+ cx.global::<settings::SettingsStore>().json_schema(
+ &settings::SettingsJsonSchemaParams {
+ language_names,
+ font_names,
+ theme_names,
+ icon_theme_names,
+ lsp_adapter_names: &lsp_adapter_names,
+ },
+ )
+ })?
+ }
"keymap" => cx.update(settings::KeymapFile::generate_json_schema_for_registered_actions)?,
"action" => {
let normalized_action_name = rest.context("No Action name provided")?;
@@ -427,6 +427,9 @@ pub enum DiskState {
Present { mtime: MTime },
/// Deleted file that was previously present.
Deleted,
+ /// An old version of a file that was previously present,
+ /// usually from a version control system (e.g. a Git blob).
+ Historic { was_deleted: bool },
}
impl DiskState {
@@ -436,6 +439,7 @@ impl DiskState {
DiskState::New => None,
DiskState::Present { mtime } => Some(mtime),
DiskState::Deleted => None,
+ DiskState::Historic { .. } => None,
}
}
@@ -444,6 +448,16 @@ impl DiskState {
DiskState::New => false,
DiskState::Present { .. } => true,
DiskState::Deleted => false,
+ DiskState::Historic { .. } => false,
+ }
+ }
+
+ /// Returns true if this state represents a deleted file.
+ pub fn is_deleted(&self) -> bool {
+ match self {
+ DiskState::Deleted => true,
+ DiskState::Historic { was_deleted } => *was_deleted,
+ _ => false,
}
}
}
@@ -1490,19 +1504,23 @@ impl Buffer {
let (tx, rx) = futures::channel::oneshot::channel();
let prev_version = self.text.version();
self.reload_task = Some(cx.spawn(async move |this, cx| {
- let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
+ let Some((new_mtime, load_bytes_task, encoding)) = this.update(cx, |this, cx| {
let file = this.file.as_ref()?.as_local()?;
-
- Some((file.disk_state().mtime(), file.load(cx)))
+ Some((
+ file.disk_state().mtime(),
+ file.load_bytes(cx),
+ this.encoding,
+ ))
})?
else {
return Ok(());
};
- let new_text = new_text.await?;
- let diff = this
- .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
- .await;
+ let bytes = load_bytes_task.await?;
+ let (cow, _encoding_used, _has_errors) = encoding.decode(&bytes);
+ let new_text = cow.into_owned();
+
+ let diff = this.update(cx, |this, cx| this.diff(new_text, cx))?.await;
this.update(cx, |this, cx| {
if this.version() == diff.base_version {
this.finalize_last_transaction();
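
The reload path now fetches raw bytes and decodes them with the buffer's stored encoding instead of assuming UTF-8. The `(cow, encoding_used, has_errors)` tuple matches `encoding_rs::Encoding::decode`, which this sketch assumes:

```rust
use encoding_rs::SHIFT_JIS;

fn main() {
    // "ハロ" encoded as Shift-JIS; decoding yields a Cow<str> plus metadata.
    let bytes: &[u8] = &[0x83, 0x6E, 0x83, 0x8D];
    let (cow, encoding_used, has_errors) = SHIFT_JIS.decode(bytes);
    assert_eq!(encoding_used, SHIFT_JIS);
    assert!(!has_errors);
    let new_text = cow.into_owned(); // same step the reload task performs
    assert_eq!(new_text, "ハロ");
}
```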
@@ -2270,6 +2288,7 @@ impl Buffer {
None => true,
},
DiskState::Deleted => false,
+ DiskState::Historic { .. } => false,
}
}
@@ -461,6 +461,14 @@ pub trait LspAdapter: 'static + Send + Sync + DynLspInstaller {
Ok(None)
}
+ /// Returns the JSON schema of the `initialization_options` for the language server.
+ async fn initialization_options_schema(
+ self: Arc<Self>,
+ _language_server_binary: &LanguageServerBinary,
+ ) -> Option<serde_json::Value> {
+ None
+ }
+
async fn workspace_configuration(
self: Arc<Self>,
_: &Arc<dyn LspAdapterDelegate>,
@@ -8,6 +8,7 @@ use gpui::{
App, AppContext as _, DevicePixels, Image, ImageFormat, ObjectFit, SharedString, Size, Task,
point, px, size,
};
+use image::GenericImageView as _;
use image::codecs::png::PngEncoder;
use serde::{Deserialize, Serialize};
use util::ResultExt;
@@ -80,6 +81,16 @@ impl std::fmt::Debug for LanguageModelImage {
/// Anthropic wants uploaded images to be smaller than this in both dimensions.
const ANTHROPIC_SIZE_LIMIT: f32 = 1568.;
+/// Default per-image hard limit (in bytes) for the encoded image payload we send upstream.
+///
+/// NOTE: `LanguageModelImage.source` is base64-encoded PNG bytes (without the `data:` prefix).
+/// This limit is enforced on the encoded PNG bytes *before* base64 encoding.
+const DEFAULT_IMAGE_MAX_BYTES: usize = 5 * 1024 * 1024;
+
+/// Conservative cap on how many times we'll attempt to shrink/re-encode an image to fit
+/// `DEFAULT_IMAGE_MAX_BYTES`.
+const MAX_IMAGE_DOWNSCALE_PASSES: usize = 8;
+
impl LanguageModelImage {
pub fn empty() -> Self {
Self {
@@ -112,29 +123,62 @@ impl LanguageModelImage {
let height = dynamic_image.height();
let image_size = size(DevicePixels(width as i32), DevicePixels(height as i32));
- let base64_image = {
- if image_size.width.0 > ANTHROPIC_SIZE_LIMIT as i32
- || image_size.height.0 > ANTHROPIC_SIZE_LIMIT as i32
- {
- let new_bounds = ObjectFit::ScaleDown.get_bounds(
- gpui::Bounds {
- origin: point(px(0.0), px(0.0)),
- size: size(px(ANTHROPIC_SIZE_LIMIT), px(ANTHROPIC_SIZE_LIMIT)),
- },
- image_size,
- );
- let resized_image = dynamic_image.resize(
- new_bounds.size.width.into(),
- new_bounds.size.height.into(),
- image::imageops::FilterType::Triangle,
- );
-
- encode_as_base64(data, resized_image)
- } else {
- encode_as_base64(data, dynamic_image)
+ // First apply any provider-specific dimension constraints we know about (Anthropic).
+ let mut processed_image = if image_size.width.0 > ANTHROPIC_SIZE_LIMIT as i32
+ || image_size.height.0 > ANTHROPIC_SIZE_LIMIT as i32
+ {
+ let new_bounds = ObjectFit::ScaleDown.get_bounds(
+ gpui::Bounds {
+ origin: point(px(0.0), px(0.0)),
+ size: size(px(ANTHROPIC_SIZE_LIMIT), px(ANTHROPIC_SIZE_LIMIT)),
+ },
+ image_size,
+ );
+ dynamic_image.resize(
+ new_bounds.size.width.into(),
+ new_bounds.size.height.into(),
+ image::imageops::FilterType::Triangle,
+ )
+ } else {
+ dynamic_image
+ };
+
+ // Then enforce a default per-image size cap on the encoded PNG bytes.
+ //
+ // We always send PNG bytes (either original PNG bytes, or re-encoded PNG) base64'd.
+ // The upstream provider limit we want to respect is effectively on the binary image
+ // payload size, so we enforce against the encoded PNG bytes before base64 encoding.
+ let mut encoded_png = encode_png_bytes(&processed_image).log_err()?;
+ for _pass in 0..MAX_IMAGE_DOWNSCALE_PASSES {
+ if encoded_png.len() <= DEFAULT_IMAGE_MAX_BYTES {
+ break;
}
+
+ // Scale down geometrically to converge quickly. We don't know the final PNG size
+ // as a function of pixels, so we iteratively shrink.
+ let (w, h) = processed_image.dimensions();
+ if w <= 1 || h <= 1 {
+ break;
+ }
+
+ // Shrink by ~15% each pass (0.85). This is a compromise between speed and
+ // preserving image detail.
+ let new_w = ((w as f32) * 0.85).round().max(1.0) as u32;
+ let new_h = ((h as f32) * 0.85).round().max(1.0) as u32;
+
+ processed_image =
+ processed_image.resize(new_w, new_h, image::imageops::FilterType::Triangle);
+ encoded_png = encode_png_bytes(&processed_image).log_err()?;
}
- .log_err()?;
+
+ if encoded_png.len() > DEFAULT_IMAGE_MAX_BYTES {
+ // Still too large after multiple passes; treat as non-convertible for now.
+ // (Provider-specific handling can be introduced later.)
+ return None;
+ }
+
+ // Now base64 encode the PNG bytes.
+ let base64_image = encode_bytes_as_base64(encoded_png.as_slice()).log_err()?;
// SAFETY: The base64 encoder should not produce non-UTF8.
let source = unsafe { String::from_utf8_unchecked(base64_image) };
@@ -164,21 +208,20 @@ impl LanguageModelImage {
}
}
-fn encode_as_base64(data: Arc<Image>, image: image::DynamicImage) -> Result<Vec<u8>> {
+fn encode_png_bytes(image: &image::DynamicImage) -> Result<Vec<u8>> {
+ let mut png = Vec::new();
+ image.write_with_encoder(PngEncoder::new(&mut png))?;
+ Ok(png)
+}
+
+fn encode_bytes_as_base64(bytes: &[u8]) -> Result<Vec<u8>> {
let mut base64_image = Vec::new();
{
let mut base64_encoder = EncoderWriter::new(
Cursor::new(&mut base64_image),
&base64::engine::general_purpose::STANDARD,
);
- if data.format() == ImageFormat::Png {
- base64_encoder.write_all(data.bytes())?;
- } else {
- let mut png = Vec::new();
- image.write_with_encoder(PngEncoder::new(&mut png))?;
-
- base64_encoder.write_all(png.as_slice())?;
- }
+ base64_encoder.write_all(bytes)?;
}
Ok(base64_image)
}
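
The 0.85 shrink factor converges quickly: each pass scales the pixel count by 0.85² ≈ 0.72, so the eight allowed passes can reduce it to 0.85¹⁶ ≈ 7.4% of the original. A quick check of that bound:

```rust
fn main() {
    // Both dimensions shrink by 0.85 per pass, so pixel count (and roughly
    // PNG size, for noisy images) scales by 0.85^2 per pass.
    let per_pass = 0.85_f64 * 0.85_f64;
    let after_eight = per_pass.powi(8); // == 0.85^16
    println!("pixel count after 8 passes: {:.1}% of original", after_eight * 100.0);
    // ~7.4%, enough to bring a ~64 MB noisy PNG under the 5 MB cap.
    assert!(after_eight < 0.08);
}
```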
@@ -417,6 +460,71 @@ pub struct LanguageModelResponseMessage {
#[cfg(test)]
mod tests {
use super::*;
+ use base64::Engine as _;
+ use gpui::TestAppContext;
+ use image::ImageDecoder as _;
+
+ fn base64_to_png_bytes(base64_png: &str) -> Vec<u8> {
+ base64::engine::general_purpose::STANDARD
+ .decode(base64_png.as_bytes())
+ .expect("base64 should decode")
+ }
+
+ fn png_dimensions(png_bytes: &[u8]) -> (u32, u32) {
+ let decoder =
+ image::codecs::png::PngDecoder::new(Cursor::new(png_bytes)).expect("png should decode");
+ decoder.dimensions()
+ }
+
+ fn make_noisy_png_bytes(width: u32, height: u32) -> Vec<u8> {
+ // Create an RGBA image with per-pixel variance to avoid PNG compressing too well.
+ let mut img = image::RgbaImage::new(width, height);
+ for y in 0..height {
+ for x in 0..width {
+ let r = ((x ^ y) & 0xFF) as u8;
+ let g = ((x.wrapping_mul(31) ^ y.wrapping_mul(17)) & 0xFF) as u8;
+ let b = ((x.wrapping_mul(131) ^ y.wrapping_mul(7)) & 0xFF) as u8;
+ img.put_pixel(x, y, image::Rgba([r, g, b, 0xFF]));
+ }
+ }
+
+ let mut out = Vec::new();
+ image::DynamicImage::ImageRgba8(img)
+ .write_with_encoder(PngEncoder::new(&mut out))
+ .expect("png encoding should succeed");
+ out
+ }
+
+ #[gpui::test]
+ async fn test_from_image_downscales_to_default_5mb_limit(cx: &mut TestAppContext) {
+ // Pick a size that reliably produces a PNG > 5MB when filled with noise.
+ // If this fails (image is too small), bump dimensions.
+ let original_png = make_noisy_png_bytes(4096, 4096);
+ assert!(
+ original_png.len() > DEFAULT_IMAGE_MAX_BYTES,
+ "precondition failed: noisy PNG must exceed DEFAULT_IMAGE_MAX_BYTES"
+ );
+
+ let image = gpui::Image::from_bytes(ImageFormat::Png, original_png);
+ let lm_image = cx
+ .update(|cx| LanguageModelImage::from_image(Arc::new(image), cx))
+ .await
+ .expect("image conversion should succeed");
+
+ let encoded_png = base64_to_png_bytes(lm_image.source.as_ref());
+ assert!(
+ encoded_png.len() <= DEFAULT_IMAGE_MAX_BYTES,
+ "expected encoded PNG <= DEFAULT_IMAGE_MAX_BYTES, got {} bytes",
+ encoded_png.len()
+ );
+
+ // Ensure we actually downscaled in pixels (not just re-encoded).
+ let (w, h) = png_dimensions(&encoded_png);
+ assert!(
+ w < 4096 || h < 4096,
+ "expected image to be downscaled in at least one dimension; got {w}x{h}"
+ );
+ }
#[test]
fn test_language_model_tool_result_content_deserialization() {
@@ -370,8 +370,8 @@ impl LanguageModel for OpenRouterLanguageModel {
LanguageModelCompletionError,
>,
> {
- let request = into_open_router(request, &self.model, self.max_output_tokens());
- let request = self.stream_completion(request, cx);
+ let openrouter_request = into_open_router(request, &self.model, self.max_output_tokens());
+ let request = self.stream_completion(openrouter_request, cx);
let future = self.request_limiter.stream(async move {
let response = request.await?;
Ok(OpenRouterEventMapper::new().map_stream(response))
@@ -385,15 +385,31 @@ pub fn into_open_router(
model: &Model,
max_output_tokens: Option<u64>,
) -> open_router::Request {
+ // Anthropic models via OpenRouter don't accept reasoning_details being echoed back
+ // in requests; it's an output-only field for them. However, Gemini models require
+ // the thought signatures to be echoed back for proper reasoning chain continuity.
+ // Note: OpenRouter's model API provides an `architecture.tokenizer` field (e.g. "Claude",
+ // "Gemini") which could replace this ID prefix check, but since this is the only place
+ // we need this distinction, we're just using this less invasive check instead.
+ // If we ever have a more formal distinction between the models in the future,
+ // we should revise this to use that instead.
+ let is_anthropic_model = model.id().starts_with("anthropic/");
+
let mut messages = Vec::new();
for message in request.messages {
- let reasoning_details = message.reasoning_details.clone();
+ let reasoning_details_for_message = if is_anthropic_model {
+ None
+ } else {
+ message.reasoning_details.clone()
+ };
+
for content in message.content {
match content {
MessageContent::Text(text) => add_message_content_part(
open_router::MessagePart::Text { text },
message.role,
&mut messages,
+ reasoning_details_for_message.clone(),
),
MessageContent::Thinking { .. } => {}
MessageContent::RedactedThinking(_) => {}
@@ -404,6 +420,7 @@ pub fn into_open_router(
},
message.role,
&mut messages,
+ reasoning_details_for_message.clone(),
);
}
MessageContent::ToolUse(tool_use) => {
@@ -419,21 +436,15 @@ pub fn into_open_router(
},
};
- if let Some(open_router::RequestMessage::Assistant {
- tool_calls,
- reasoning_details: existing_reasoning,
- ..
- }) = messages.last_mut()
+ if let Some(open_router::RequestMessage::Assistant { tool_calls, .. }) =
+ messages.last_mut()
{
tool_calls.push(tool_call);
- if existing_reasoning.is_none() && reasoning_details.is_some() {
- *existing_reasoning = reasoning_details.clone();
- }
} else {
messages.push(open_router::RequestMessage::Assistant {
content: None,
tool_calls: vec![tool_call],
- reasoning_details: reasoning_details.clone(),
+ reasoning_details: reasoning_details_for_message.clone(),
});
}
}
@@ -509,6 +520,7 @@ fn add_message_content_part(
new_part: open_router::MessagePart,
role: Role,
messages: &mut Vec<open_router::RequestMessage>,
+ reasoning_details: Option<serde_json::Value>,
) {
match (role, messages.last_mut()) {
(Role::User, Some(open_router::RequestMessage::User { content }))
@@ -532,7 +544,7 @@ fn add_message_content_part(
Role::Assistant => open_router::RequestMessage::Assistant {
content: Some(open_router::MessageContent::from(vec![new_part])),
tool_calls: Vec::new(),
- reasoning_details: None,
+ reasoning_details,
},
Role::System => open_router::RequestMessage::System {
content: open_router::MessageContent::from(vec![new_part]),
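
Stripped of the message-assembly details in the hunks above, the provider gating is a single prefix check on the model ID. A hedged sketch (model IDs illustrative):

```rust
fn reasoning_details_for(
    model_id: &str,
    details: Option<serde_json::Value>,
) -> Option<serde_json::Value> {
    // Anthropic treats reasoning_details as output-only and rejects it in
    // requests; Gemini needs thought signatures echoed back for continuity.
    if model_id.starts_with("anthropic/") {
        None
    } else {
        details
    }
}

fn main() {
    let details = Some(serde_json::json!({ "signature": "..." }));
    assert_eq!(
        reasoning_details_for("anthropic/some-model", details.clone()),
        None
    );
    assert_eq!(
        reasoning_details_for("google/some-model", details.clone()),
        details
    );
}
```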
@@ -23,3 +23,9 @@
"{"
[(_) ","?]* @class.inside
"}")) @class.around
+
+(union_specifier
+ body: (_
+ "{"
+ (_)* @class.inside
+ "}")) @class.around
@@ -24,6 +24,12 @@
[(_) ","?]* @class.inside
"}")) @class.around
+(union_specifier
+ body: (_
+ "{"
+ (_)* @class.inside
+ "}")) @class.around
+
(class_specifier
body: (_
"{"
@@ -26,6 +26,7 @@ use settings::Settings;
use smol::lock::OnceCell;
use std::cmp::{Ordering, Reverse};
use std::env::consts;
+use std::process::Stdio;
use terminal::terminal_settings::TerminalSettings;
use util::command::new_smol_command;
use util::fs::{make_file_executable, remove_matching};
@@ -2173,6 +2174,119 @@ pub(crate) struct RuffLspAdapter {
fs: Arc<dyn Fs>,
}
+impl RuffLspAdapter {
+ fn convert_ruff_schema(raw_schema: &serde_json::Value) -> serde_json::Value {
+ let Some(schema_object) = raw_schema.as_object() else {
+ return raw_schema.clone();
+ };
+
+ let mut root_properties = serde_json::Map::new();
+
+ for (key, value) in schema_object {
+ let parts: Vec<&str> = key.split('.').collect();
+
+ if parts.is_empty() {
+ continue;
+ }
+
+ let mut current = &mut root_properties;
+
+ for (i, part) in parts.iter().enumerate() {
+ let is_last = i == parts.len() - 1;
+
+ if is_last {
+ let mut schema_entry = serde_json::Map::new();
+
+ if let Some(doc) = value.get("doc").and_then(|d| d.as_str()) {
+ schema_entry.insert(
+ "markdownDescription".to_string(),
+ serde_json::Value::String(doc.to_string()),
+ );
+ }
+
+ if let Some(default_val) = value.get("default") {
+ schema_entry.insert("default".to_string(), default_val.clone());
+ }
+
+ if let Some(value_type) = value.get("value_type").and_then(|v| v.as_str()) {
+ if value_type.contains('|') {
+ let enum_values: Vec<serde_json::Value> = value_type
+ .split('|')
+ .map(|s| s.trim().trim_matches('"'))
+ .filter(|s| !s.is_empty())
+ .map(|s| serde_json::Value::String(s.to_string()))
+ .collect();
+
+ if !enum_values.is_empty() {
+ schema_entry
+ .insert("type".to_string(), serde_json::json!("string"));
+ schema_entry.insert(
+ "enum".to_string(),
+ serde_json::Value::Array(enum_values),
+ );
+ }
+ } else if value_type.starts_with("list[") {
+ schema_entry.insert("type".to_string(), serde_json::json!("array"));
+ if let Some(item_type) = value_type
+ .strip_prefix("list[")
+ .and_then(|s| s.strip_suffix(']'))
+ {
+ let json_type = match item_type {
+ "str" => "string",
+ "int" => "integer",
+ "bool" => "boolean",
+ _ => "string",
+ };
+ schema_entry.insert(
+ "items".to_string(),
+ serde_json::json!({"type": json_type}),
+ );
+ }
+ } else if value_type.starts_with("dict[") {
+ schema_entry.insert("type".to_string(), serde_json::json!("object"));
+ } else {
+ let json_type = match value_type {
+ "bool" => "boolean",
+ "int" | "usize" => "integer",
+ "str" => "string",
+ _ => "string",
+ };
+ schema_entry.insert(
+ "type".to_string(),
+ serde_json::Value::String(json_type.to_string()),
+ );
+ }
+ }
+
+ current.insert(part.to_string(), serde_json::Value::Object(schema_entry));
+ } else {
+ let next_current = current
+ .entry(part.to_string())
+ .or_insert_with(|| {
+ serde_json::json!({
+ "type": "object",
+ "properties": {}
+ })
+ })
+ .as_object_mut()
+ .expect("should be an object")
+ .entry("properties")
+ .or_insert_with(|| serde_json::json!({}))
+ .as_object_mut()
+ .expect("properties should be an object");
+
+ current = next_current;
+ }
+ }
+ }
+
+ serde_json::json!({
+ "type": "object",
+ "properties": root_properties
+ })
+ }
+}
+
#[cfg(target_os = "macos")]
impl RuffLspAdapter {
const GITHUB_ASSET_KIND: AssetKind = AssetKind::TarGz;
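
`convert_ruff_schema` turns Ruff's flat, dot-separated option listing into a nested JSON Schema by walking each key and descending through `properties`. The core nesting trick, trimmed to a runnable sketch (schema values simplified; the full type mapping is above):

```rust
use serde_json::{Map, Value, json};

fn nest(options: &[(&str, Value)]) -> Value {
    let mut root = Map::new();
    for (dotted_key, schema) in options {
        let parts: Vec<&str> = dotted_key.split('.').collect();
        let mut current = &mut root;
        for (i, part) in parts.iter().enumerate() {
            if i == parts.len() - 1 {
                current.insert(part.to_string(), schema.clone());
            } else {
                // Create intermediate object schemas on the way down.
                current = current
                    .entry(part.to_string())
                    .or_insert_with(|| json!({ "type": "object", "properties": {} }))
                    .as_object_mut()
                    .expect("should be an object")
                    .entry("properties")
                    .or_insert_with(|| json!({}))
                    .as_object_mut()
                    .expect("properties should be an object");
            }
        }
    }
    json!({ "type": "object", "properties": root })
}

fn main() {
    let schema = nest(&[("lint.isort.case-sensitive", json!({ "type": "boolean" }))]);
    assert_eq!(
        schema.pointer("/properties/lint/properties/isort/properties/case-sensitive/type"),
        Some(&json!("boolean"))
    );
}
```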
@@ -2225,6 +2339,36 @@ impl LspAdapter for RuffLspAdapter {
fn name(&self) -> LanguageServerName {
Self::SERVER_NAME
}
+
+ async fn initialization_options_schema(
+ self: Arc<Self>,
+ language_server_binary: &LanguageServerBinary,
+ ) -> Option<serde_json::Value> {
+ let mut command = util::command::new_smol_command(&language_server_binary.path);
+ command
+ .args(&["config", "--output-format", "json"])
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped());
+ let cmd = command
+ .spawn()
+ .map_err(|e| log::debug!("failed to spawn command {command:?}: {e}"))
+ .ok()?;
+ let output = cmd
+ .output()
+ .await
+ .map_err(|e| log::debug!("failed to execute command {command:?}: {e}"))
+ .ok()?;
+ if !output.status.success() {
+ return None;
+ }
+
+ let raw_schema: serde_json::Value = serde_json::from_slice(output.stdout.as_slice())
+ .map_err(|e| log::debug!("failed to parse ruff's JSON schema output: {e}"))
+ .ok()?;
+
+ let converted_schema = Self::convert_ruff_schema(&raw_schema);
+ Some(converted_schema)
+ }
}
impl LspInstaller for RuffLspAdapter {
@@ -2568,4 +2712,149 @@ mod tests {
);
}
}
+
+ #[test]
+ fn test_convert_ruff_schema() {
+ use super::RuffLspAdapter;
+
+ let raw_schema = serde_json::json!({
+ "line-length": {
+ "doc": "The line length to use when enforcing long-lines violations",
+ "default": "88",
+ "value_type": "int",
+ "scope": null,
+ "example": "line-length = 120",
+ "deprecated": null
+ },
+ "lint.select": {
+ "doc": "A list of rule codes or prefixes to enable",
+ "default": "[\"E4\", \"E7\", \"E9\", \"F\"]",
+ "value_type": "list[RuleSelector]",
+ "scope": null,
+ "example": "select = [\"E4\", \"E7\", \"E9\", \"F\", \"B\", \"Q\"]",
+ "deprecated": null
+ },
+ "lint.isort.case-sensitive": {
+ "doc": "Sort imports taking into account case sensitivity.",
+ "default": "false",
+ "value_type": "bool",
+ "scope": null,
+ "example": "case-sensitive = true",
+ "deprecated": null
+ },
+ "format.quote-style": {
+ "doc": "Configures the preferred quote character for strings.",
+ "default": "\"double\"",
+ "value_type": "\"double\" | \"single\" | \"preserve\"",
+ "scope": null,
+ "example": "quote-style = \"single\"",
+ "deprecated": null
+ }
+ });
+
+ let converted = RuffLspAdapter::convert_ruff_schema(&raw_schema);
+
+ assert!(converted.is_object());
+ assert_eq!(
+ converted.get("type").and_then(|v| v.as_str()),
+ Some("object")
+ );
+
+ let properties = converted
+ .get("properties")
+ .expect("should have properties")
+ .as_object()
+ .expect("properties should be an object");
+
+ assert!(properties.contains_key("line-length"));
+ assert!(properties.contains_key("lint"));
+ assert!(properties.contains_key("format"));
+
+ let line_length = properties
+ .get("line-length")
+ .expect("should have line-length")
+ .as_object()
+ .expect("line-length should be an object");
+
+ assert_eq!(
+ line_length.get("type").and_then(|v| v.as_str()),
+ Some("integer")
+ );
+ assert_eq!(
+ line_length.get("default").and_then(|v| v.as_str()),
+ Some("88")
+ );
+
+ let lint = properties
+ .get("lint")
+ .expect("should have lint")
+ .as_object()
+ .expect("lint should be an object");
+
+ let lint_props = lint
+ .get("properties")
+ .expect("lint should have properties")
+ .as_object()
+ .expect("lint properties should be an object");
+
+ assert!(lint_props.contains_key("select"));
+ assert!(lint_props.contains_key("isort"));
+
+ let select = lint_props.get("select").expect("should have select");
+ assert_eq!(select.get("type").and_then(|v| v.as_str()), Some("array"));
+
+ let isort = lint_props
+ .get("isort")
+ .expect("should have isort")
+ .as_object()
+ .expect("isort should be an object");
+
+ let isort_props = isort
+ .get("properties")
+ .expect("isort should have properties")
+ .as_object()
+ .expect("isort properties should be an object");
+
+ let case_sensitive = isort_props
+ .get("case-sensitive")
+ .expect("should have case-sensitive");
+
+ assert_eq!(
+ case_sensitive.get("type").and_then(|v| v.as_str()),
+ Some("boolean")
+ );
+ assert!(case_sensitive.get("markdownDescription").is_some());
+
+ let format = properties
+ .get("format")
+ .expect("should have format")
+ .as_object()
+ .expect("format should be an object");
+
+ let format_props = format
+ .get("properties")
+ .expect("format should have properties")
+ .as_object()
+ .expect("format properties should be an object");
+
+ let quote_style = format_props
+ .get("quote-style")
+ .expect("should have quote-style");
+
+ assert_eq!(
+ quote_style.get("type").and_then(|v| v.as_str()),
+ Some("string")
+ );
+
+ let enum_values = quote_style
+ .get("enum")
+ .expect("should have enum")
+ .as_array()
+ .expect("enum should be an array");
+
+ assert_eq!(enum_values.len(), 3);
+ assert!(enum_values.contains(&serde_json::json!("double")));
+ assert!(enum_values.contains(&serde_json::json!("single")));
+ assert!(enum_values.contains(&serde_json::json!("preserve")));
+ }
}
@@ -18,6 +18,7 @@ use smol::fs::{self};
use std::cmp::Reverse;
use std::fmt::Display;
use std::ops::Range;
+use std::process::Stdio;
use std::{
borrow::Cow,
path::{Path, PathBuf},
@@ -66,6 +67,68 @@ enum LibcType {
}
impl RustLspAdapter {
+ fn convert_rust_analyzer_schema(raw_schema: &serde_json::Value) -> serde_json::Value {
+ let Some(schema_array) = raw_schema.as_array() else {
+ return raw_schema.clone();
+ };
+
+ let mut root_properties = serde_json::Map::new();
+
+ for item in schema_array {
+ if let Some(props) = item.get("properties").and_then(|p| p.as_object()) {
+ for (key, value) in props {
+ let parts: Vec<&str> = key.split('.').collect();
+
+ if parts.is_empty() {
+ continue;
+ }
+
+ let parts_to_process = if parts.first() == Some(&"rust-analyzer") {
+ &parts[1..]
+ } else {
+ &parts[..]
+ };
+
+ if parts_to_process.is_empty() {
+ continue;
+ }
+
+ let mut current = &mut root_properties;
+
+ for (i, part) in parts_to_process.iter().enumerate() {
+ let is_last = i == parts_to_process.len() - 1;
+
+ if is_last {
+ current.insert(part.to_string(), value.clone());
+ } else {
+ let next_current = current
+ .entry(part.to_string())
+ .or_insert_with(|| {
+ serde_json::json!({
+ "type": "object",
+ "properties": {}
+ })
+ })
+ .as_object_mut()
+ .expect("should be an object")
+ .entry("properties")
+ .or_insert_with(|| serde_json::json!({}))
+ .as_object_mut()
+ .expect("properties should be an object");
+
+ current = next_current;
+ }
+ }
+ }
+ }
+ }
+
+ serde_json::json!({
+ "type": "object",
+ "properties": root_properties
+ })
+ }
+
#[cfg(target_os = "linux")]
async fn determine_libc_type() -> LibcType {
use futures::pin_mut;
@@ -448,6 +511,37 @@ impl LspAdapter for RustLspAdapter {
Some(label)
}
+ async fn initialization_options_schema(
+ self: Arc<Self>,
+ language_server_binary: &LanguageServerBinary,
+ ) -> Option<serde_json::Value> {
+ let mut command = util::command::new_smol_command(&language_server_binary.path);
+ command
+ .arg("--print-config-schema")
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped());
+ let cmd = command
+ .spawn()
+ .map_err(|e| log::debug!("failed to spawn command {command:?}: {e}"))
+ .ok()?;
+ let output = cmd
+ .output()
+ .await
+ .map_err(|e| log::debug!("failed to execute command {command:?}: {e}"))
+ .ok()?;
+ if !output.status.success() {
+ return None;
+ }
+
+ let raw_schema: serde_json::Value = serde_json::from_slice(output.stdout.as_slice())
+ .map_err(|e| log::debug!("failed to parse rust-analyzer's JSON schema output: {e}"))
+ .ok()?;
+
+ // Convert rust-analyzer's array-based schema format to nested JSON Schema
+ let converted_schema = Self::convert_rust_analyzer_schema(&raw_schema);
+ Some(converted_schema)
+ }
+
async fn label_for_symbol(
&self,
name: &str,
@@ -1912,4 +2006,90 @@ mod tests {
);
check([], "/project/src/main.rs", "--");
}
+
+ #[test]
+ fn test_convert_rust_analyzer_schema() {
+ let raw_schema = serde_json::json!([
+ {
+ "title": "Assist",
+ "properties": {
+ "rust-analyzer.assist.emitMustUse": {
+ "markdownDescription": "Insert #[must_use] when generating `as_` methods for enum variants.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "Assist",
+ "properties": {
+ "rust-analyzer.assist.expressionFillDefault": {
+ "markdownDescription": "Placeholder expression to use for missing expressions in assists.",
+ "default": "todo",
+ "type": "string"
+ }
+ }
+ },
+ {
+ "title": "Cache Priming",
+ "properties": {
+ "rust-analyzer.cachePriming.enable": {
+ "markdownDescription": "Warm up caches on project load.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ }
+ ]);
+
+ let converted = RustLspAdapter::convert_rust_analyzer_schema(&raw_schema);
+
+ assert_eq!(
+ converted.get("type").and_then(|v| v.as_str()),
+ Some("object")
+ );
+
+ let properties = converted
+ .pointer("/properties")
+ .expect("should have properties")
+ .as_object()
+ .expect("properties should be object");
+
+ assert!(properties.contains_key("assist"));
+ assert!(properties.contains_key("cachePriming"));
+ assert!(!properties.contains_key("rust-analyzer"));
+
+ let assist_props = properties
+ .get("assist")
+ .expect("should have assist")
+ .pointer("/properties")
+ .expect("assist should have properties")
+ .as_object()
+ .expect("assist properties should be object");
+
+ assert!(assist_props.contains_key("emitMustUse"));
+ assert!(assist_props.contains_key("expressionFillDefault"));
+
+ let emit_must_use = assist_props
+ .get("emitMustUse")
+ .expect("should have emitMustUse");
+ assert_eq!(
+ emit_must_use.get("type").and_then(|v| v.as_str()),
+ Some("boolean")
+ );
+ assert_eq!(
+ emit_must_use.get("default").and_then(|v| v.as_bool()),
+ Some(false)
+ );
+
+ let cache_priming_props = properties
+ .get("cachePriming")
+ .expect("should have cachePriming")
+ .pointer("/properties")
+ .expect("cachePriming should have properties")
+ .as_object()
+ .expect("cachePriming properties should be object");
+
+ assert!(cache_priming_props.contains_key("enable"));
+ }
}
@@ -345,6 +345,7 @@ impl LspAdapter for VtslsLspAdapter {
let lsp_settings = content
.project
.lsp
+ .0
.entry(VTSLS_SERVER_NAME.into())
.or_default();
@@ -19,9 +19,9 @@ use gpui::{App, Context, Entity, EntityId, EventEmitter};
use itertools::Itertools;
use language::{
AutoindentMode, BracketMatch, Buffer, BufferChunks, BufferRow, BufferSnapshot, Capability,
- CharClassifier, CharKind, CharScopeContext, Chunk, CursorShape, DiagnosticEntryRef, DiskState,
- File, IndentGuideSettings, IndentSize, Language, LanguageScope, OffsetRangeExt, OffsetUtf16,
- Outline, OutlineItem, Point, PointUtf16, Selection, TextDimension, TextObject, ToOffset as _,
+ CharClassifier, CharKind, CharScopeContext, Chunk, CursorShape, DiagnosticEntryRef, File,
+ IndentGuideSettings, IndentSize, Language, LanguageScope, OffsetRangeExt, OffsetUtf16, Outline,
+ OutlineItem, Point, PointUtf16, Selection, TextDimension, TextObject, ToOffset as _,
ToPoint as _, TransactionId, TreeSitterOptions, Unclipped,
language_settings::{LanguageSettings, language_settings},
};
@@ -2980,7 +2980,7 @@ impl MultiBuffer {
*is_dirty |= buffer.is_dirty();
*has_deleted_file |= buffer
.file()
- .is_some_and(|file| file.disk_state() == DiskState::Deleted);
+ .is_some_and(|file| file.disk_state().is_deleted());
*has_conflict |= buffer.has_conflict();
}
if edited {
@@ -23,7 +23,7 @@ use super::session::ThreadId;
mod breakpoints_in_file {
use collections::HashMap;
- use language::{BufferEvent, DiskState};
+ use language::BufferEvent;
use super::*;
@@ -82,7 +82,7 @@ mod breakpoints_in_file {
BufferEvent::FileHandleChanged => {
let entity_id = buffer.entity_id();
- if buffer.read(cx).file().is_none_or(|f| f.disk_state() == DiskState::Deleted) {
+ if buffer.read(cx).file().is_none_or(|f| f.disk_state().is_deleted()) {
breakpoint_store.breakpoints.retain(|_, breakpoints_in_file| {
breakpoints_in_file.buffer.entity_id() != entity_id
});
@@ -4205,74 +4205,29 @@ impl Repository {
entries: Vec<RepoPath>,
cx: &mut Context<Self>,
) -> Task<anyhow::Result<()>> {
- if entries.is_empty() {
- return Task::ready(Ok(()));
- }
- let id = self.id;
- let save_tasks = self.save_buffers(&entries, cx);
- let paths = entries
- .iter()
- .map(|p| p.as_unix_str())
- .collect::<Vec<_>>()
- .join(" ");
- let status = format!("git add {paths}");
- let job_key = GitJobKey::WriteIndex(entries.clone());
-
- self.spawn_job_with_tracking(
- entries.clone(),
- pending_op::GitStatus::Staged,
- cx,
- async move |this, cx| {
- for save_task in save_tasks {
- save_task.await?;
- }
-
- this.update(cx, |this, _| {
- this.send_keyed_job(
- Some(job_key),
- Some(status.into()),
- move |git_repo, _cx| async move {
- match git_repo {
- RepositoryState::Local(LocalRepositoryState {
- backend,
- environment,
- ..
- }) => backend.stage_paths(entries, environment.clone()).await,
- RepositoryState::Remote(RemoteRepositoryState {
- project_id,
- client,
- }) => {
- client
- .request(proto::Stage {
- project_id: project_id.0,
- repository_id: id.to_proto(),
- paths: entries
- .into_iter()
- .map(|repo_path| repo_path.to_proto())
- .collect(),
- })
- .await
- .context("sending stage request")?;
-
- Ok(())
- }
- }
- },
- )
- })?
- .await?
- },
- )
+ self.stage_or_unstage_entries(true, entries, cx)
}
pub fn unstage_entries(
&mut self,
entries: Vec<RepoPath>,
cx: &mut Context<Self>,
+ ) -> Task<anyhow::Result<()>> {
+ self.stage_or_unstage_entries(false, entries, cx)
+ }
+
+ fn stage_or_unstage_entries(
+ &mut self,
+ stage: bool,
+ entries: Vec<RepoPath>,
+ cx: &mut Context<Self>,
) -> Task<anyhow::Result<()>> {
if entries.is_empty() {
return Task::ready(Ok(()));
}
+ let Some(git_store) = self.git_store.upgrade() else {
+ return Task::ready(Ok(()));
+ };
let id = self.id;
let save_tasks = self.save_buffers(&entries, cx);
let paths = entries
@@ -4280,48 +4235,164 @@ impl Repository {
.map(|p| p.as_unix_str())
.collect::<Vec<_>>()
.join(" ");
- let status = format!("git reset {paths}");
+ let status = if stage {
+ format!("git add {paths}")
+ } else {
+ format!("git reset {paths}")
+ };
let job_key = GitJobKey::WriteIndex(entries.clone());
self.spawn_job_with_tracking(
entries.clone(),
- pending_op::GitStatus::Unstaged,
+ if stage {
+ pending_op::GitStatus::Staged
+ } else {
+ pending_op::GitStatus::Unstaged
+ },
cx,
async move |this, cx| {
for save_task in save_tasks {
save_task.await?;
}
- this.update(cx, |this, _| {
+ this.update(cx, |this, cx| {
+ let weak_this = cx.weak_entity();
this.send_keyed_job(
Some(job_key),
Some(status.into()),
- move |git_repo, _cx| async move {
- match git_repo {
+ move |git_repo, mut cx| async move {
+ let hunk_staging_operation_counts = weak_this
+ .update(&mut cx, |this, cx| {
+ let mut hunk_staging_operation_counts = HashMap::default();
+ for path in &entries {
+ let Some(project_path) =
+ this.repo_path_to_project_path(path, cx)
+ else {
+ continue;
+ };
+ let Some(buffer) = git_store
+ .read(cx)
+ .buffer_store
+ .read(cx)
+ .get_by_path(&project_path)
+ else {
+ continue;
+ };
+ let Some(diff_state) = git_store
+ .read(cx)
+ .diffs
+ .get(&buffer.read(cx).remote_id())
+ .cloned()
+ else {
+ continue;
+ };
+ let Some(uncommitted_diff) =
+ diff_state.read(cx).uncommitted_diff.as_ref().and_then(
+ |uncommitted_diff| uncommitted_diff.upgrade(),
+ )
+ else {
+ continue;
+ };
+ let buffer_snapshot = buffer.read(cx).text_snapshot();
+ let file_exists = buffer
+ .read(cx)
+ .file()
+ .is_some_and(|file| file.disk_state().exists());
+ let hunk_staging_operation_count =
+ diff_state.update(cx, |diff_state, cx| {
+ uncommitted_diff.update(
+ cx,
+ |uncommitted_diff, cx| {
+ uncommitted_diff
+ .stage_or_unstage_all_hunks(
+ stage,
+ &buffer_snapshot,
+ file_exists,
+ cx,
+ );
+ },
+ );
+
+ diff_state.hunk_staging_operation_count += 1;
+ diff_state.hunk_staging_operation_count
+ });
+ hunk_staging_operation_counts.insert(
+ diff_state.downgrade(),
+ hunk_staging_operation_count,
+ );
+ }
+ hunk_staging_operation_counts
+ })
+ .unwrap_or_default();
+
+ let result = match git_repo {
RepositoryState::Local(LocalRepositoryState {
backend,
environment,
..
- }) => backend.unstage_paths(entries, environment).await,
+ }) => {
+ if stage {
+ backend.stage_paths(entries, environment.clone()).await
+ } else {
+ backend.unstage_paths(entries, environment.clone()).await
+ }
+ }
RepositoryState::Remote(RemoteRepositoryState {
project_id,
client,
}) => {
- client
- .request(proto::Unstage {
- project_id: project_id.0,
- repository_id: id.to_proto(),
- paths: entries
- .into_iter()
- .map(|repo_path| repo_path.to_proto())
- .collect(),
- })
- .await
- .context("sending unstage request")?;
-
- Ok(())
+ if stage {
+ client
+ .request(proto::Stage {
+ project_id: project_id.0,
+ repository_id: id.to_proto(),
+ paths: entries
+ .into_iter()
+ .map(|repo_path| repo_path.to_proto())
+ .collect(),
+ })
+ .await
+ .context("sending stage request")
+ .map(|_| ())
+ } else {
+ client
+ .request(proto::Unstage {
+ project_id: project_id.0,
+ repository_id: id.to_proto(),
+ paths: entries
+ .into_iter()
+ .map(|repo_path| repo_path.to_proto())
+ .collect(),
+ })
+ .await
+ .context("sending unstage request")
+ .map(|_| ())
+ }
}
+ };
+
+ for (diff_state, hunk_staging_operation_count) in
+ hunk_staging_operation_counts
+ {
+ diff_state
+ .update(&mut cx, |diff_state, cx| {
+ if result.is_ok() {
+ diff_state.hunk_staging_operation_count_as_of_write =
+ hunk_staging_operation_count;
+ } else if let Some(uncommitted_diff) =
+ &diff_state.uncommitted_diff
+ {
+ uncommitted_diff
+ .update(cx, |uncommitted_diff, cx| {
+ uncommitted_diff.clear_pending_hunks(cx);
+ })
+ .ok();
+ }
+ })
+ .ok();
}
+
+ result
},
)
})?
@@ -4347,7 +4418,7 @@ impl Repository {
}
})
.collect();
- self.stage_entries(to_stage, cx)
+ self.stage_or_unstage_entries(true, to_stage, cx)
}
pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
@@ -4367,7 +4438,7 @@ impl Repository {
}
})
.collect();
- self.unstage_entries(to_unstage, cx)
+ self.stage_or_unstage_entries(false, to_unstage, cx)
}
pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
@@ -257,7 +257,7 @@ struct DynamicRegistrations {
pub struct LocalLspStore {
weak: WeakEntity<LspStore>,
- worktree_store: Entity<WorktreeStore>,
+ pub worktree_store: Entity<WorktreeStore>,
toolchain_store: Entity<LocalToolchainStore>,
http_client: Arc<dyn HttpClient>,
environment: Entity<ProjectEnvironment>,
@@ -13953,7 +13953,7 @@ impl LocalLspAdapterDelegate {
})
}
- fn from_local_lsp(
+ pub fn from_local_lsp(
local: &LocalLspStore,
worktree: &Entity<Worktree>,
cx: &mut App,
@@ -1,5 +1,5 @@
use anyhow::{Context, Result};
-use gpui::{App, AsyncApp, Entity, Global, WeakEntity};
+use gpui::{App, AsyncApp, Entity, Global, Task, WeakEntity};
use lsp::LanguageServer;
use crate::LspStore;
@@ -22,7 +22,7 @@ impl lsp::request::Request for SchemaContentRequest {
const METHOD: &'static str = "vscode/content";
}
-type SchemaRequestHandler = fn(Entity<LspStore>, String, &mut AsyncApp) -> Result<String>;
+type SchemaRequestHandler = fn(Entity<LspStore>, String, &mut AsyncApp) -> Task<Result<String>>;
pub struct SchemaHandlingImpl(SchemaRequestHandler);
impl Global for SchemaHandlingImpl {}
@@ -72,9 +72,7 @@ pub fn notify_schema_changed(lsp_store: Entity<LspStore>, uri: String, cx: &App)
pub fn register_requests(lsp_store: WeakEntity<LspStore>, language_server: &LanguageServer) {
language_server
.on_request::<SchemaContentRequest, _, _>(move |params, cx| {
- let handler = cx.try_read_global::<SchemaHandlingImpl, _>(|handler, _| {
- handler.0
- });
+ let handler = cx.try_read_global::<SchemaHandlingImpl, _>(|handler, _| handler.0);
let mut cx = cx.clone();
let uri = params.clone().pop();
let lsp_store = lsp_store.clone();
@@ -82,7 +80,7 @@ pub fn register_requests(lsp_store: WeakEntity<LspStore>, language_server: &Lang
let lsp_store = lsp_store.upgrade().context("LSP store has been dropped")?;
let uri = uri.context("No URI")?;
let handle_schema_request = handler.context("No schema handler registered")?;
- handle_schema_request(lsp_store, uri, &mut cx)
+ handle_schema_request(lsp_store, uri, &mut cx).await
};
async move {
zlog::trace!(LOGGER => "Handling schema request for {:?}", ¶ms);
@@ -83,7 +83,7 @@ use gpui::{
Task, WeakEntity, Window,
};
use language::{
- Buffer, BufferEvent, Capability, CodeLabel, CursorShape, Language, LanguageName,
+ Buffer, BufferEvent, Capability, CodeLabel, CursorShape, DiskState, Language, LanguageName,
LanguageRegistry, PointUtf16, ToOffset, ToPointUtf16, Toolchain, ToolchainMetadata,
ToolchainScope, Transaction, Unclipped, language_settings::InlayHintKind,
proto::split_operations,
@@ -5671,7 +5671,9 @@ impl ProjectItem for Buffer {
}
fn project_path(&self, cx: &App) -> Option<ProjectPath> {
- self.file().map(|file| ProjectPath {
+ let file = self.file()?;
+
+ (!matches!(file.disk_state(), DiskState::Historic { .. })).then(|| ProjectPath {
worktree_id: file.worktree_id(cx),
path: file.path().clone(),
})
@@ -10922,3 +10922,146 @@ async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
});
assert!(active_repo_path.is_none());
}
+
+#[gpui::test]
+async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
+ use DiffHunkSecondaryStatus::*;
+ init_test(cx);
+
+ let committed_contents = r#"
+ one
+ two
+ three
+ "#
+ .unindent();
+ let file_contents = r#"
+ one
+ TWO
+ three
+ "#
+ .unindent();
+
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(
+ path!("/dir"),
+ json!({
+ ".git": {},
+ "file.txt": file_contents.clone()
+ }),
+ )
+ .await;
+
+ fs.set_head_and_index_for_repo(
+ path!("/dir/.git").as_ref(),
+ &[("file.txt", committed_contents.clone())],
+ );
+
+ let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+
+ let buffer = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/dir/file.txt"), cx)
+ })
+ .await
+ .unwrap();
+ let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
+ let uncommitted_diff = project
+ .update(cx, |project, cx| {
+ project.open_uncommitted_diff(buffer.clone(), cx)
+ })
+ .await
+ .unwrap();
+
+ // The hunk is initially unstaged.
+ uncommitted_diff.read_with(cx, |diff, cx| {
+ assert_hunks(
+ diff.hunks(&snapshot, cx),
+ &snapshot,
+ &diff.base_text_string().unwrap(),
+ &[(
+ 1..2,
+ "two\n",
+ "TWO\n",
+ DiffHunkStatus::modified(HasSecondaryHunk),
+ )],
+ );
+ });
+
+ // Get the repository handle.
+ let repo = project.read_with(cx, |project, cx| {
+ project.repositories(cx).values().next().unwrap().clone()
+ });
+
+ // Stage the file.
+ let stage_task = repo.update(cx, |repo, cx| {
+ repo.stage_entries(vec![repo_path("file.txt")], cx)
+ });
+
+ // Run a few ticks to let the job start and mark hunks as pending, but don't
+ // call run_until_parked, which would complete the entire operation.
+ for _ in 0..10 {
+ cx.executor().tick();
+ let [hunk]: [_; 1] = uncommitted_diff
+ .read_with(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>())
+ .try_into()
+ .unwrap();
+ match hunk.secondary_status {
+ HasSecondaryHunk => {}
+ SecondaryHunkRemovalPending => break,
+ NoSecondaryHunk => panic!("hunk was not optimistically staged"),
+ _ => panic!("unexpected hunk state"),
+ }
+ }
+ uncommitted_diff.read_with(cx, |diff, cx| {
+ assert_hunks(
+ diff.hunks(&snapshot, cx),
+ &snapshot,
+ &diff.base_text_string().unwrap(),
+ &[(
+ 1..2,
+ "two\n",
+ "TWO\n",
+ DiffHunkStatus::modified(SecondaryHunkRemovalPending),
+ )],
+ );
+ });
+
+ // Let the staging complete.
+ stage_task.await.unwrap();
+ cx.run_until_parked();
+
+ // The hunk is now fully staged.
+ uncommitted_diff.read_with(cx, |diff, cx| {
+ assert_hunks(
+ diff.hunks(&snapshot, cx),
+ &snapshot,
+ &diff.base_text_string().unwrap(),
+ &[(
+ 1..2,
+ "two\n",
+ "TWO\n",
+ DiffHunkStatus::modified(NoSecondaryHunk),
+ )],
+ );
+ });
+
+ // Simulate a commit by updating HEAD to match the current file contents.
+ // The FakeGitRepository's commit method is a no-op, so we need to manually
+ // update HEAD to simulate the commit completing.
+ fs.set_head_for_repo(
+ path!("/dir/.git").as_ref(),
+ &[("file.txt", file_contents.clone())],
+ "newhead",
+ );
+ cx.run_until_parked();
+
+ // After committing, there are no more hunks.
+ uncommitted_diff.read_with(cx, |diff, cx| {
+ assert_hunks(
+ diff.hunks(&snapshot, cx),
+ &snapshot,
+ &diff.base_text_string().unwrap(),
+ &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
+ );
+ });
+}
@@ -193,7 +193,15 @@ impl MetadataCache {
) -> Result<Self> {
let mut cache = MetadataCache::default();
for result in db.iter(txn)? {
- let (prompt_id, metadata) = result?;
+ // Fail-open: skip records that can't be decoded (e.g. from a different branch)
+ // rather than failing the entire prompt store initialization.
+ // (A `let ... else` can't reference `result` in its else block, since the
+ // value has already been moved into the pattern match, so use `match`.)
+ let (prompt_id, metadata) = match result {
+ Ok(record) => record,
+ Err(error) => {
+ log::warn!("Skipping unreadable prompt record in database: {error:?}");
+ continue;
+ }
+ };
cache.metadata.push(metadata.clone());
cache.metadata_by_id.insert(prompt_id, metadata);
}
@@ -677,7 +685,86 @@ mod tests {
assert_eq!(
loaded_after_reset.trim(),
expected_content_after_reset.trim(),
- "After saving default content, load should return default"
+ "Content should be back to default after saving default content"
+ );
+ }
+
+ /// Test that the prompt store initializes successfully even when the database
+ /// contains records with incompatible/undecodable PromptId keys (e.g., from
+ /// a different branch that used a different serialization format).
+ ///
+ /// This is a regression test for the "fail-open" behavior: we should skip
+ /// bad records rather than failing the entire store initialization.
+ #[gpui::test]
+ async fn test_prompt_store_handles_incompatible_db_records(cx: &mut TestAppContext) {
+ cx.executor().allow_parking();
+
+ let temp_dir = tempfile::tempdir().unwrap();
+ let db_path = temp_dir.path().join("prompts-db-with-bad-records");
+ std::fs::create_dir_all(&db_path).unwrap();
+
+ // First, create the DB and write an incompatible record directly.
+ // We simulate a record written by a different branch that used
+ // `{"kind":"CommitMessage"}` instead of `{"kind":"BuiltIn", ...}`.
+ {
+ let db_env = unsafe {
+ heed::EnvOpenOptions::new()
+ .map_size(1024 * 1024 * 1024)
+ .max_dbs(4)
+ .open(&db_path)
+ .unwrap()
+ };
+
+ let mut txn = db_env.write_txn().unwrap();
+ // Create the metadata.v2 database with raw bytes so we can write
+ // an incompatible key format.
+ let metadata_db: Database<heed::types::Bytes, heed::types::Bytes> = db_env
+ .create_database(&mut txn, Some("metadata.v2"))
+ .unwrap();
+
+ // Write an incompatible PromptId key: `{"kind":"CommitMessage"}`
+ // This is the old/branch format that current code can't decode.
+ let bad_key = br#"{"kind":"CommitMessage"}"#;
+ let dummy_metadata = br#"{"id":{"kind":"CommitMessage"},"title":"Bad Record","default":false,"saved_at":"2024-01-01T00:00:00Z"}"#;
+ metadata_db.put(&mut txn, bad_key, dummy_metadata).unwrap();
+
+ // Also write a valid record to ensure we can still read good data.
+ let good_key = br#"{"kind":"User","uuid":"550e8400-e29b-41d4-a716-446655440000"}"#;
+ let good_metadata = br#"{"id":{"kind":"User","uuid":"550e8400-e29b-41d4-a716-446655440000"},"title":"Good Record","default":false,"saved_at":"2024-01-01T00:00:00Z"}"#;
+ metadata_db.put(&mut txn, good_key, good_metadata).unwrap();
+
+ txn.commit().unwrap();
+ }
+
+ // Now try to create a PromptStore from this DB.
+ // With fail-open behavior, this should succeed and skip the bad record.
+ // Without fail-open, this would return an error.
+ let store_result = cx.update(|cx| PromptStore::new(db_path, cx)).await;
+
+ assert!(
+ store_result.is_ok(),
+ "PromptStore should initialize successfully even with incompatible DB records. \
+ Got error: {:?}",
+ store_result.err()
+ );
+
+ let store = cx.new(|_cx| store_result.unwrap());
+
+ // Verify the good record was loaded.
+ let good_id = PromptId::User {
+ uuid: UserPromptId("550e8400-e29b-41d4-a716-446655440000".parse().unwrap()),
+ };
+ let metadata = store.read_with(cx, |store, _| store.metadata(good_id));
+ assert!(
+ metadata.is_some(),
+ "Valid records should still be loaded after skipping bad ones"
+ );
+ assert_eq!(
+ metadata
+ .as_ref()
+ .and_then(|m| m.title.as_ref().map(|t| t.as_ref())),
+ Some("Good Record"),
+ "Valid record should have correct title"
);
}
}
@@ -2,161 +2,162 @@ syntax = "proto3";
package zed.messages;
message Timestamp {
- uint64 seconds = 1;
- uint32 nanos = 2;
+ uint64 seconds = 1;
+ uint32 nanos = 2;
}
message File {
- uint64 worktree_id = 1;
- optional uint64 entry_id = 2;
- string path = 3;
- Timestamp mtime = 4;
- bool is_deleted = 5;
+ uint64 worktree_id = 1;
+ optional uint64 entry_id = 2;
+ string path = 3;
+ Timestamp mtime = 4;
+ bool is_deleted = 5;
+ bool is_historic = 6;
}
message Entry {
- uint64 id = 1;
- bool is_dir = 2;
- string path = 3;
- uint64 inode = 4;
- Timestamp mtime = 5;
- bool is_ignored = 7;
- bool is_external = 8;
- reserved 6;
- reserved 9;
- bool is_fifo = 10;
- optional uint64 size = 11;
- optional string canonical_path = 12;
- bool is_hidden = 13;
+ uint64 id = 1;
+ bool is_dir = 2;
+ string path = 3;
+ uint64 inode = 4;
+ Timestamp mtime = 5;
+ bool is_ignored = 7;
+ bool is_external = 8;
+ reserved 6;
+ reserved 9;
+ bool is_fifo = 10;
+ optional uint64 size = 11;
+ optional string canonical_path = 12;
+ bool is_hidden = 13;
}
message AddWorktree {
- string path = 1;
- uint64 project_id = 2;
- bool visible = 3;
+ string path = 1;
+ uint64 project_id = 2;
+ bool visible = 3;
}
message AddWorktreeResponse {
- uint64 worktree_id = 1;
- string canonicalized_path = 2;
+ uint64 worktree_id = 1;
+ string canonicalized_path = 2;
}
message RemoveWorktree {
- uint64 worktree_id = 1;
+ uint64 worktree_id = 1;
}
message GetPathMetadata {
- uint64 project_id = 1;
- string path = 2;
+ uint64 project_id = 1;
+ string path = 2;
}
message GetPathMetadataResponse {
- bool exists = 1;
- string path = 2;
- bool is_dir = 3;
+ bool exists = 1;
+ string path = 2;
+ bool is_dir = 3;
}
message WorktreeMetadata {
- uint64 id = 1;
- string root_name = 2;
- bool visible = 3;
- string abs_path = 4;
+ uint64 id = 1;
+ string root_name = 2;
+ bool visible = 3;
+ string abs_path = 4;
}
message ProjectPath {
- uint64 worktree_id = 1;
- string path = 2;
+ uint64 worktree_id = 1;
+ string path = 2;
}
message ListRemoteDirectoryConfig {
- bool is_dir = 1;
+ bool is_dir = 1;
}
message ListRemoteDirectory {
- uint64 dev_server_id = 1;
- string path = 2;
- ListRemoteDirectoryConfig config = 3;
+ uint64 dev_server_id = 1;
+ string path = 2;
+ ListRemoteDirectoryConfig config = 3;
}
message EntryInfo {
- bool is_dir = 1;
+ bool is_dir = 1;
}
message ListRemoteDirectoryResponse {
- repeated string entries = 1;
- repeated EntryInfo entry_info = 2;
+ repeated string entries = 1;
+ repeated EntryInfo entry_info = 2;
}
message CreateProjectEntry {
- uint64 project_id = 1;
- uint64 worktree_id = 2;
- string path = 3;
- bool is_directory = 4;
- optional bytes content = 5;
+ uint64 project_id = 1;
+ uint64 worktree_id = 2;
+ string path = 3;
+ bool is_directory = 4;
+ optional bytes content = 5;
}
message RenameProjectEntry {
- uint64 project_id = 1;
- uint64 entry_id = 2;
- string new_path = 3;
- uint64 new_worktree_id = 4;
+ uint64 project_id = 1;
+ uint64 entry_id = 2;
+ string new_path = 3;
+ uint64 new_worktree_id = 4;
}
message CopyProjectEntry {
- uint64 project_id = 1;
- uint64 entry_id = 2;
- string new_path = 3;
- uint64 new_worktree_id = 5;
- reserved 4;
+ uint64 project_id = 1;
+ uint64 entry_id = 2;
+ string new_path = 3;
+ uint64 new_worktree_id = 5;
+ reserved 4;
}
message DeleteProjectEntry {
- uint64 project_id = 1;
- uint64 entry_id = 2;
- bool use_trash = 3;
+ uint64 project_id = 1;
+ uint64 entry_id = 2;
+ bool use_trash = 3;
}
message ExpandProjectEntry {
- uint64 project_id = 1;
- uint64 entry_id = 2;
+ uint64 project_id = 1;
+ uint64 entry_id = 2;
}
message ExpandProjectEntryResponse {
- uint64 worktree_scan_id = 1;
+ uint64 worktree_scan_id = 1;
}
message ExpandAllForProjectEntry {
- uint64 project_id = 1;
- uint64 entry_id = 2;
+ uint64 project_id = 1;
+ uint64 entry_id = 2;
}
message ExpandAllForProjectEntryResponse {
- uint64 worktree_scan_id = 1;
+ uint64 worktree_scan_id = 1;
}
message ProjectEntryResponse {
- optional Entry entry = 1;
- uint64 worktree_scan_id = 2;
+ optional Entry entry = 1;
+ uint64 worktree_scan_id = 2;
}
message UpdateWorktreeSettings {
- uint64 project_id = 1;
- uint64 worktree_id = 2;
- string path = 3;
- optional string content = 4;
- optional LocalSettingsKind kind = 5;
+ uint64 project_id = 1;
+ uint64 worktree_id = 2;
+ string path = 3;
+ optional string content = 4;
+ optional LocalSettingsKind kind = 5;
}
enum LocalSettingsKind {
- Settings = 0;
- Tasks = 1;
- Editorconfig = 2;
- Debug = 3;
+ Settings = 0;
+ Tasks = 1;
+ Editorconfig = 2;
+ Debug = 3;
}
message UpdateUserSettings {
- uint64 project_id = 1;
- string contents = 2;
+ uint64 project_id = 1;
+ string contents = 2;
}
message TrustWorktrees {
@@ -33,8 +33,9 @@ pub use serde_helper::*;
pub use settings_file::*;
pub use settings_json::*;
pub use settings_store::{
- InvalidSettingsError, LocalSettingsKind, MigrationStatus, ParseStatus, Settings, SettingsFile,
- SettingsJsonSchemaParams, SettingsKey, SettingsLocation, SettingsParseResult, SettingsStore,
+ InvalidSettingsError, LSP_SETTINGS_SCHEMA_URL_PREFIX, LocalSettingsKind, MigrationStatus,
+ ParseStatus, Settings, SettingsFile, SettingsJsonSchemaParams, SettingsKey, SettingsLocation,
+ SettingsParseResult, SettingsStore,
};
pub use vscode_import::{VsCodeSettings, VsCodeSettingsSource};
@@ -11,6 +11,19 @@ use crate::{
SlashCommandSettings,
};
+#[with_fallible_options]
+#[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
+pub struct LspSettingsMap(pub HashMap<Arc<str>, LspSettings>);
+
+impl IntoIterator for LspSettingsMap {
+ type Item = (Arc<str>, LspSettings);
+ type IntoIter = std::collections::hash_map::IntoIter<Arc<str>, LspSettings>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.0.into_iter()
+ }
+}
+
#[with_fallible_options]
#[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct ProjectSettingsContent {
@@ -29,7 +42,7 @@ pub struct ProjectSettingsContent {
/// name to the lsp value.
/// Default: null
#[serde(default)]
- pub lsp: HashMap<Arc<str>, LspSettings>,
+ pub lsp: LspSettingsMap,
pub terminal: Option<ProjectTerminalSettingsContent>,
@@ -32,7 +32,8 @@ pub type EditorconfigProperties = ec4rs::Properties;
use crate::{
ActiveSettingsProfileName, FontFamilyName, IconThemeName, LanguageSettingsContent,
- LanguageToSettingsMap, ThemeName, VsCodeSettings, WorktreeId, fallible_options,
+ LanguageToSettingsMap, LspSettings, LspSettingsMap, ThemeName, VsCodeSettings, WorktreeId,
+ fallible_options,
merge_from::MergeFrom,
settings_content::{
ExtensionsSettingsContent, ProjectSettingsContent, SettingsContent, UserSettingsContent,
@@ -41,6 +42,8 @@ use crate::{
use settings_json::{infer_json_indent_size, parse_json_with_comments, update_value_in_json_text};
+pub const LSP_SETTINGS_SCHEMA_URL_PREFIX: &str = "zed://schemas/settings/lsp/";
+
pub trait SettingsKey: 'static + Send + Sync {
/// The name of a key within the JSON file from which this setting should
/// be deserialized. If this is `None`, then the setting will be deserialized
@@ -256,6 +259,7 @@ pub struct SettingsJsonSchemaParams<'a> {
pub font_names: &'a [String],
pub theme_names: &'a [SharedString],
pub icon_theme_names: &'a [SharedString],
+ pub lsp_adapter_names: &'a [String],
}
impl SettingsStore {
@@ -1025,6 +1029,14 @@ impl SettingsStore {
.subschema_for::<LanguageSettingsContent>()
.to_value();
+ generator.subschema_for::<LspSettings>();
+
+ let lsp_settings_def = generator
+ .definitions()
+ .get("LspSettings")
+ .expect("LspSettings should be defined")
+ .clone();
+
replace_subschema::<LanguageToSettingsMap>(&mut generator, || {
json_schema!({
"type": "object",
@@ -1063,6 +1075,38 @@ impl SettingsStore {
})
});
+ replace_subschema::<LspSettingsMap>(&mut generator, || {
+ let mut lsp_properties = serde_json::Map::new();
+
+ for adapter_name in params.lsp_adapter_names {
+ let mut base_lsp_settings = lsp_settings_def
+ .as_object()
+ .expect("LspSettings should be an object")
+ .clone();
+
+ if let Some(properties) = base_lsp_settings.get_mut("properties") {
+ if let Some(props_obj) = properties.as_object_mut() {
+ props_obj.insert(
+ "initialization_options".to_string(),
+ serde_json::json!({
+ "$ref": format!("{LSP_SETTINGS_SCHEMA_URL_PREFIX}{adapter_name}")
+ }),
+ );
+ }
+ }
+
+ lsp_properties.insert(
+ adapter_name.clone(),
+ serde_json::Value::Object(base_lsp_settings),
+ );
+ }
+
+ json_schema!({
+ "type": "object",
+ "properties": lsp_properties,
+ })
+ });
+
generator
.root_schema_for::<UserSettingsContent>()
.to_value()
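
For each known adapter, the generator clones the `LspSettings` definition and redirects its `initialization_options` to a `zed://` schema URL, which the schema content handler resolves on demand. A sketch of the per-adapter entry this produces (shape illustrative, matching the test below):

```rust
use serde_json::json;

fn main() {
    let adapter = "rust-analyzer";
    let entry = json!({
        "type": "object",
        "properties": {
            // Other LspSettings fields elided; only this $ref is swapped in.
            "initialization_options": {
                "$ref": format!("zed://schemas/settings/lsp/{adapter}")
            }
        }
    });
    assert_eq!(
        entry.pointer("/properties/initialization_options/$ref"),
        Some(&json!("zed://schemas/settings/lsp/rust-analyzer"))
    );
}
```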
@@ -2304,4 +2348,39 @@ mod tests {
]
)
}
+
+ #[gpui::test]
+ fn test_lsp_settings_schema_generation(cx: &mut App) {
+ let store = SettingsStore::test(cx);
+
+ let schema = store.json_schema(&SettingsJsonSchemaParams {
+ language_names: &["Rust".to_string(), "TypeScript".to_string()],
+ font_names: &["Zed Mono".to_string()],
+ theme_names: &["One Dark".into()],
+ icon_theme_names: &["Zed Icons".into()],
+ lsp_adapter_names: &[
+ "rust-analyzer".to_string(),
+ "typescript-language-server".to_string(),
+ ],
+ });
+
+ let properties = schema
+ .pointer("/$defs/LspSettingsMap/properties")
+ .expect("LspSettingsMap should have properties")
+ .as_object()
+ .unwrap();
+
+ assert!(properties.contains_key("rust-analyzer"));
+ assert!(properties.contains_key("typescript-language-server"));
+
+ let init_options_ref = properties
+ .get("rust-analyzer")
+ .unwrap()
+ .pointer("/properties/initialization_options/$ref")
+ .expect("initialization_options should have a $ref")
+ .as_str()
+ .unwrap();
+
+ assert_eq!(init_options_ref, "zed://schemas/settings/lsp/rust-analyzer");
+ }
}
@@ -602,7 +602,7 @@ pub fn open_settings_editor(
focus: true,
show: true,
is_movable: true,
- kind: gpui::WindowKind::Floating,
+ kind: gpui::WindowKind::Normal,
window_background: cx.theme().window_background_appearance(),
app_id: Some(app_id.to_owned()),
window_decorations: Some(window_decorations),
@@ -151,7 +151,14 @@ impl BatchedTextRun {
std::slice::from_ref(&self.style),
Some(dimensions.cell_width),
)
- .paint(pos, dimensions.line_height, window, cx);
+ .paint(
+ pos,
+ dimensions.line_height,
+ gpui::TextAlign::Left,
+ None,
+ window,
+ cx,
+ );
}
}
@@ -1326,8 +1333,14 @@ impl Element for TerminalElement {
}],
None
);
- shaped_line
- .paint(ime_position, layout.dimensions.line_height, window, cx)
+ shaped_line.paint(
+ ime_position,
+ layout.dimensions.line_height,
+ gpui::TextAlign::Left,
+ None,
+ window,
+ cx,
+ )
.log_err();
}
@@ -5,8 +5,11 @@ use std::{
str::FromStr,
};
-use editor::{Editor, EditorStyle};
-use gpui::{ClickEvent, Entity, FocusHandle, Focusable, FontWeight, Modifiers};
+use editor::{Editor, actions::MoveDown, actions::MoveUp};
+use gpui::{
+ ClickEvent, Entity, FocusHandle, Focusable, FontWeight, Modifiers, TextAlign,
+ TextStyleRefinement, WeakEntity,
+};
use settings::{CenteredPaddingSettings, CodeFade, DelayMs, InactiveOpacity, MinimumContrast};
use ui::prelude::*;
@@ -235,12 +238,14 @@ impl_numeric_stepper_nonzero_int!(NonZeroU32, u32);
impl_numeric_stepper_nonzero_int!(NonZeroU64, u64);
impl_numeric_stepper_nonzero_int!(NonZero<usize>, usize);
-#[derive(RegisterComponent)]
-pub struct NumberField<T = usize> {
+#[derive(IntoElement, RegisterComponent)]
+pub struct NumberField<T: NumberFieldType = usize> {
id: ElementId,
value: T,
focus_handle: FocusHandle,
mode: Entity<NumberFieldMode>,
+ /// Stores a weak reference to the editor when in edit mode, so buttons can update its text
+ edit_editor: Entity<Option<WeakEntity<Editor>>>,
format: Box<dyn FnOnce(&T) -> String>,
large_step: T,
small_step: T,
@@ -256,15 +261,17 @@ impl<T: NumberFieldType> NumberField<T> {
pub fn new(id: impl Into<ElementId>, value: T, window: &mut Window, cx: &mut App) -> Self {
let id = id.into();
- let (mode, focus_handle) = window.with_id(id.clone(), |window| {
+ let (mode, focus_handle, edit_editor) = window.with_id(id.clone(), |window| {
let mode = window.use_state(cx, |_, _| NumberFieldMode::default());
let focus_handle = window.use_state(cx, |_, cx| cx.focus_handle());
- (mode, focus_handle)
+ let edit_editor = window.use_state(cx, |_, _| None);
+ (mode, focus_handle, edit_editor)
});
Self {
id,
mode,
+ edit_editor,
value,
focus_handle: focus_handle.read(cx).clone(),
format: Box::new(T::default_format),
@@ -309,6 +316,11 @@ impl<T: NumberFieldType> NumberField<T> {
self
}
+ pub fn mode(self, mode: NumberFieldMode, cx: &mut App) -> Self {
+ self.mode.write(cx, mode);
+ self
+ }
+
pub fn on_reset(
mut self,
on_reset: impl Fn(&ClickEvent, &mut Window, &mut App) + 'static,
@@ -328,17 +340,16 @@ impl<T: NumberFieldType> NumberField<T> {
}
}
-impl<T: NumberFieldType> IntoElement for NumberField<T> {
- type Element = gpui::Component<Self>;
-
- fn into_element(self) -> Self::Element {
- gpui::Component::new(self)
- }
+#[derive(Clone, Copy)]
+enum ValueChangeDirection {
+ Increment,
+ Decrement,
}
impl<T: NumberFieldType> RenderOnce for NumberField<T> {
fn render(self, window: &mut Window, cx: &mut App) -> impl IntoElement {
let mut tab_index = self.tab_index;
+ let is_edit_mode = matches!(*self.mode.read(cx), NumberFieldMode::Edit);
let get_step = {
let large_step = self.large_step;
@@ -355,6 +366,67 @@ impl<T: NumberFieldType> RenderOnce for NumberField<T> {
}
};
+ let clamp_value = {
+ let min = self.min_value;
+ let max = self.max_value;
+ move |value: T| -> T {
+ if value < min {
+ min
+ } else if value > max {
+ max
+ } else {
+ value
+ }
+ }
+ };
+
+ let change_value = {
+ move |current: T, step: T, direction: ValueChangeDirection| -> T {
+ let new_value = match direction {
+ ValueChangeDirection::Increment => current.saturating_add(step),
+ ValueChangeDirection::Decrement => current.saturating_sub(step),
+ };
+ clamp_value(new_value)
+ }
+ };
+
+ let get_current_value = {
+ let value = self.value;
+ let edit_editor = self.edit_editor.clone();
+
+ Rc::new(move |cx: &App| -> T {
+ if !is_edit_mode {
+ return value;
+ }
+ edit_editor
+ .read(cx)
+ .as_ref()
+ .and_then(|weak| weak.upgrade())
+ .and_then(|editor| editor.read(cx).text(cx).parse::<T>().ok())
+ .unwrap_or(value)
+ })
+ };
+
+ let update_editor_text = {
+ let edit_editor = self.edit_editor.clone();
+
+ Rc::new(move |new_value: T, window: &mut Window, cx: &mut App| {
+ if !is_edit_mode {
+ return;
+ }
+ let Some(editor) = edit_editor
+ .read(cx)
+ .as_ref()
+ .and_then(|weak| weak.upgrade())
+ else {
+ return;
+ };
+ editor.update(cx, |editor, cx| {
+ editor.set_text(format!("{}", new_value), window, cx);
+ });
+ })
+ };
+
let bg_color = cx.theme().colors().surface_background;
let hover_bg_color = cx.theme().colors().element_hover;
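The `clamp_value` and `change_value` helpers introduced above pair a saturating step with a clamp to the configured bounds, so repeated clicks can neither overflow `T` nor escape `[min, max]`. A minimal standalone sketch of the same arithmetic for `u32` (the function name is illustrative, not from the crate):

    fn step_and_clamp(current: u32, step: u32, increment: bool, min: u32, max: u32) -> u32 {
        // Saturating arithmetic first, so stepping at u32::MAX cannot wrap around...
        let stepped = if increment {
            current.saturating_add(step)
        } else {
            current.saturating_sub(step)
        };
        // ...then clamp the result into the field's configured range.
        stepped.clamp(min, max)
    }

    fn main() {
        assert_eq!(step_and_clamp(u32::MAX, 5, true, 0, u32::MAX), u32::MAX); // no overflow
        assert_eq!(step_and_clamp(3, 10, false, 1, 100), 1); // saturates to 0, then clamps to min
    }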
@@ -395,13 +467,20 @@ impl<T: NumberFieldType> RenderOnce for NumberField<T> {
h_flex()
.map(|decrement| {
let decrement_handler = {
- let value = self.value;
let on_change = self.on_change.clone();
- let min = self.min_value;
+ let get_current_value = get_current_value.clone();
+ let update_editor_text = update_editor_text.clone();
+
move |click: &ClickEvent, window: &mut Window, cx: &mut App| {
+ let current_value = get_current_value(cx);
let step = get_step(click.modifiers());
- let new_value = value.saturating_sub(step);
- let new_value = if new_value < min { min } else { new_value };
+ let new_value = change_value(
+ current_value,
+ step,
+ ValueChangeDirection::Decrement,
+ );
+
+ update_editor_text(new_value, window, cx);
on_change(&new_value, window, cx);
}
};
@@ -438,46 +517,97 @@ impl<T: NumberFieldType> RenderOnce for NumberField<T> {
.justify_center()
.child(Label::new((self.format)(&self.value)))
.into_any_element(),
- // Edit mode is disabled until we implement center text alignment for editor
- // mode.write(cx, NumberFieldMode::Edit);
- //
- // When we get to making Edit mode work, we shouldn't even focus the decrement/increment buttons.
- // Focus should go instead straight to the editor, avoiding any double-step focus.
- // In this world, the buttons become a mouse-only interaction, given users should be able
- // to do everything they'd do with the buttons straight in the editor anyway.
NumberFieldMode::Edit => h_flex()
.flex_1()
.child(window.use_state(cx, {
|window, cx| {
- let previous_focus_handle = window.focused(cx);
let mut editor = Editor::single_line(window, cx);
- let mut style = EditorStyle::default();
- style.text.text_align = gpui::TextAlign::Right;
- editor.set_style(style, window, cx);
+
+ editor.set_text_style_refinement(TextStyleRefinement {
+ text_align: Some(TextAlign::Center),
+ ..Default::default()
+ });
editor.set_text(format!("{}", self.value), window, cx);
+
+ let editor_weak = cx.entity().downgrade();
+
+ self.edit_editor.update(cx, |state, _| {
+ *state = Some(editor_weak);
+ });
+
+ editor
+ .register_action::<MoveUp>({
+ let on_change = self.on_change.clone();
+ let editor_handle = cx.entity().downgrade();
+ move |_, window, cx| {
+ let Some(editor) = editor_handle.upgrade()
+ else {
+ return;
+ };
+ editor.update(cx, |editor, cx| {
+ if let Ok(current_value) =
+ editor.text(cx).parse::<T>()
+ {
+ let step =
+ get_step(window.modifiers());
+ let new_value = change_value(
+ current_value,
+ step,
+ ValueChangeDirection::Increment,
+ );
+ editor.set_text(
+ format!("{}", new_value),
+ window,
+ cx,
+ );
+ on_change(&new_value, window, cx);
+ }
+ });
+ }
+ })
+ .detach();
+
+ editor
+ .register_action::<MoveDown>({
+ let on_change = self.on_change.clone();
+ let editor_handle = cx.entity().downgrade();
+ move |_, window, cx| {
+ let Some(editor) = editor_handle.upgrade()
+ else {
+ return;
+ };
+ editor.update(cx, |editor, cx| {
+ if let Ok(current_value) =
+ editor.text(cx).parse::<T>()
+ {
+ let step =
+ get_step(window.modifiers());
+ let new_value = change_value(
+ current_value,
+ step,
+ ValueChangeDirection::Decrement,
+ );
+ editor.set_text(
+ format!("{}", new_value),
+ window,
+ cx,
+ );
+ on_change(&new_value, window, cx);
+ }
+ });
+ }
+ })
+ .detach();
+
cx.on_focus_out(&editor.focus_handle(cx), window, {
let mode = self.mode.clone();
- let min = self.min_value;
- let max = self.max_value;
let on_change = self.on_change.clone();
move |this, _, window, cx| {
- if let Ok(new_value) =
+ if let Ok(parsed_value) =
this.text(cx).parse::<T>()
{
- let new_value = if new_value < min {
- min
- } else if new_value > max {
- max
- } else {
- new_value
- };
-
- if let Some(previous) =
- previous_focus_handle.as_ref()
- {
- window.focus(previous, cx);
- }
+ let new_value = clamp_value(parsed_value);
on_change(&new_value, window, cx);
};
mode.write(cx, NumberFieldMode::Read);
@@ -500,13 +630,20 @@ impl<T: NumberFieldType> RenderOnce for NumberField<T> {
)
.map(|increment| {
let increment_handler = {
- let value = self.value;
let on_change = self.on_change.clone();
- let max = self.max_value;
+ let get_current_value = get_current_value.clone();
+ let update_editor_text = update_editor_text.clone();
+
move |click: &ClickEvent, window: &mut Window, cx: &mut App| {
+ let current_value = get_current_value(cx);
let step = get_step(click.modifiers());
- let new_value = value.saturating_add(step);
- let new_value = if new_value > max { max } else { new_value };
+ let new_value = change_value(
+ current_value,
+ step,
+ ValueChangeDirection::Increment,
+ );
+
+ update_editor_text(new_value, window, cx);
on_change(&new_value, window, cx);
}
};
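Note that the decrement and increment handlers in the hunks above both clone the shared `Rc` closures (`get_current_value`, `update_editor_text`) rather than capturing `self.value`, so in edit mode the buttons step from whatever the user has typed instead of a stale render-time value. The sharing pattern in miniature (plain Rust, names illustrative):

    use std::rc::Rc;

    fn main() {
        // Stand-in for `get_current_value`: one closure, cloned into many handlers.
        let current = Rc::new(|| 41_u32);
        let increment = {
            let current = Rc::clone(&current);
            move || current() + 1
        };
        let decrement = {
            let current = Rc::clone(&current);
            move || current().saturating_sub(1)
        };
        assert_eq!(increment(), 42);
        assert_eq!(decrement(), 40);
    }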
@@ -541,36 +678,42 @@ impl Component for NumberField<usize> {
"Number Field"
}
- fn sort_name() -> &'static str {
- Self::name()
- }
-
fn description() -> Option<&'static str> {
Some("A numeric input element with increment and decrement buttons.")
}
fn preview(window: &mut Window, cx: &mut App) -> Option<AnyElement> {
- let stepper_example = window.use_state(cx, |_, _| 100.0);
+ let default_ex = window.use_state(cx, |_, _| 100.0);
+ let edit_ex = window.use_state(cx, |_, _| 500.0);
Some(
v_flex()
.gap_6()
- .children(vec![single_example(
- "Default Numeric Stepper",
- NumberField::new(
- "numeric-stepper-component-preview",
- *stepper_example.read(cx),
- window,
- cx,
- )
- .on_change({
- let stepper_example = stepper_example.clone();
- move |value, _, cx| stepper_example.write(cx, *value)
- })
- .min(1.0)
- .max(100.0)
- .into_any_element(),
- )])
+ .children(vec![
+ single_example(
+ "Button-Only Number Field",
+ NumberField::new("number-field", *default_ex.read(cx), window, cx)
+ .on_change({
+ let default_ex = default_ex.clone();
+ move |value, _, cx| default_ex.write(cx, *value)
+ })
+ .min(1.0)
+ .max(100.0)
+ .into_any_element(),
+ ),
+ single_example(
+ "Editable Number Field",
+ NumberField::new("editable-number-field", *edit_ex.read(cx), window, cx)
+ .on_change({
+ let edit_ex = edit_ex.clone();
+ move |value, _, cx| edit_ex.write(cx, *value)
+ })
+ .min(100.0)
+ .max(500.0)
+ .mode(NumberFieldMode::Edit, cx)
+ .into_any_element(),
+ ),
+ ])
.into_any_element(),
)
}
@@ -1361,7 +1361,7 @@ impl LocalWorktree {
}
let content = fs.load_bytes(&abs_path).await?;
- let (text, encoding, has_bom) = decode_byte(content);
+ let (text, encoding, has_bom) = decode_byte(content)?;
let worktree = this.upgrade().context("worktree was dropped")?;
let file = match entry.await? {
@@ -1489,25 +1489,12 @@ impl LocalWorktree {
let fs = fs.clone();
let abs_path = abs_path.clone();
async move {
- let bom_bytes = if has_bom {
- if encoding == encoding_rs::UTF_16LE {
- vec![0xFF, 0xFE]
- } else if encoding == encoding_rs::UTF_16BE {
- vec![0xFE, 0xFF]
- } else if encoding == encoding_rs::UTF_8 {
- vec![0xEF, 0xBB, 0xBF]
- } else {
- vec![]
- }
- } else {
- vec![]
- };
-
// For UTF-8, use the optimized `fs.save` which writes Rope chunks directly to disk
// without allocating a contiguous string.
if encoding == encoding_rs::UTF_8 && !has_bom {
return fs.save(&abs_path, &text, line_ending).await;
}
+
// For legacy encodings (e.g. Shift-JIS), we fall back to converting the entire Rope
// to a String/Bytes in memory before writing.
//
@@ -1520,13 +1507,45 @@ impl LocalWorktree {
LineEnding::Windows => text_string.replace('\n', "\r\n"),
};
- let (cow, _, _) = encoding.encode(&normalized_text);
- let bytes = if !bom_bytes.is_empty() {
- let mut bytes = bom_bytes;
- bytes.extend_from_slice(&cow);
- bytes.into()
+ // Build the byte vector manually for UTF-16 encodings: the WHATWG Encoding Standard
+ // (which encoding_rs implements) defines no UTF-16 encoder, so encoding_rs would emit
+ // UTF-8 instead, which is not what we want when saving files.
+ let bytes = if encoding == encoding_rs::UTF_16BE {
+ let mut data = Vec::with_capacity(normalized_text.len() * 2 + 2);
+ if has_bom {
+ data.extend_from_slice(&[0xFE, 0xFF]); // BOM
+ }
+ let utf16be_bytes =
+ normalized_text.encode_utf16().flat_map(|u| u.to_be_bytes());
+ data.extend(utf16be_bytes);
+ data.into()
+ } else if encoding == encoding_rs::UTF_16LE {
+ let mut data = Vec::with_capacity(normalized_text.len() * 2 + 2);
+ if has_bom {
+ data.extend_from_slice(&[0xFF, 0xFE]); // BOM
+ }
+ let utf16le_bytes =
+ normalized_text.encode_utf16().flat_map(|u| u.to_le_bytes());
+ data.extend(utf16le_bytes);
+ data.into()
} else {
- cow
+ // For other encodings (Shift-JIS, UTF-8 with BOM, etc.), delegate to encoding_rs.
+ let bom_bytes = if has_bom {
+ if encoding == encoding_rs::UTF_8 {
+ vec![0xEF, 0xBB, 0xBF]
+ } else {
+ vec![]
+ }
+ } else {
+ vec![]
+ };
+ let (cow, _, _) = encoding.encode(&normalized_text);
+ if !bom_bytes.is_empty() {
+ let mut bytes = bom_bytes;
+ bytes.extend_from_slice(&cow);
+ bytes.into()
+ } else {
+ cow
+ }
};
fs.write(&abs_path, &bytes).await
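The manual `encode_utf16` branches above are needed because the WHATWG Encoding Standard, which encoding_rs implements, defines no UTF-16 encoder; to the best of my reading, asking encoding_rs to encode toward UTF-16 falls back to UTF-8 output. A minimal sketch of the behavior that motivates the workaround (variable names illustrative):

    fn main() {
        // encoding_rs substitutes UTF-8 when asked to encode to UTF-16LE/BE,
        // so the returned bytes are not UTF-16 at all.
        let (bytes, used_encoding, _had_errors) = encoding_rs::UTF_16LE.encode("A");
        assert_eq!(&bytes[..], &b"A"[..]); // UTF-8: a single 0x41, not 0x41 0x00
        assert_eq!(used_encoding, encoding_rs::UTF_8);

        // Hence the manual conversion used in the diff:
        let utf16le: Vec<u8> = "A".encode_utf16().flat_map(|u| u.to_le_bytes()).collect();
        assert_eq!(utf16le, vec![0x41, 0x00]);
    }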
@@ -3216,7 +3235,8 @@ impl language::File for File {
entry_id: self.entry_id.map(|id| id.to_proto()),
path: self.path.as_ref().to_proto(),
mtime: self.disk_state.mtime().map(|time| time.into()),
- is_deleted: self.disk_state == DiskState::Deleted,
+ is_deleted: self.disk_state.is_deleted(),
+ is_historic: matches!(self.disk_state, DiskState::Historic { .. }),
}
}
@@ -3277,7 +3297,11 @@ impl File {
"worktree id does not match file"
);
- let disk_state = if proto.is_deleted {
+ let disk_state = if proto.is_historic {
+ DiskState::Historic {
+ was_deleted: proto.is_deleted,
+ }
+ } else if proto.is_deleted {
DiskState::Deleted
} else if let Some(mtime) = proto.mtime.map(&Into::into) {
DiskState::Present { mtime }
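Taken together, these two hunks round-trip three disk states through two protobuf booleans. A sketch of the mapping, with the enum shape inferred from the diff; in particular, `is_deleted()` is presumed to report `was_deleted` for historic files so that the encode and decode sides agree:

    // Enum shape inferred from the diff; the mtime type is simplified for illustration.
    enum DiskState {
        Present { mtime: u64 },
        Deleted,
        Historic { was_deleted: bool },
    }

    // The two flags written into the proto message: (is_deleted, is_historic).
    fn to_flags(state: &DiskState) -> (bool, bool) {
        match state {
            DiskState::Present { .. } => (false, false),
            DiskState::Deleted => (true, false),
            DiskState::Historic { was_deleted } => (*was_deleted, true),
        }
    }

    fn main() {
        // Decoding mirrors the `if proto.is_historic` chain above: is_historic wins,
        // then is_deleted, then the presence of an mtime.
        assert_eq!(to_flags(&DiskState::Historic { was_deleted: true }), (true, true));
    }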
@@ -5842,11 +5866,28 @@ impl fs::Watcher for NullWatcher {
}
}
-fn decode_byte(bytes: Vec<u8>) -> (String, &'static Encoding, bool) {
+fn decode_byte(bytes: Vec<u8>) -> anyhow::Result<(String, &'static Encoding, bool)> {
// check BOM
if let Some((encoding, _bom_len)) = Encoding::for_bom(&bytes) {
let (cow, _) = encoding.decode_with_bom_removal(&bytes);
- return (cow.into_owned(), encoding, true);
+ return Ok((cow.into_owned(), encoding, true));
+ }
+
+ match analyze_byte_content(&bytes) {
+ ByteContent::Utf16Le => {
+ let encoding = encoding_rs::UTF_16LE;
+ let (cow, _, _) = encoding.decode(&bytes);
+ return Ok((cow.into_owned(), encoding, false));
+ }
+ ByteContent::Utf16Be => {
+ let encoding = encoding_rs::UTF_16BE;
+ let (cow, _, _) = encoding.decode(&bytes);
+ return Ok((cow.into_owned(), encoding, false));
+ }
+ ByteContent::Binary => {
+ anyhow::bail!("Binary files are not supported");
+ }
+ ByteContent::Unknown => {}
}
fn detect_encoding(bytes: Vec<u8>) -> (String, &'static Encoding) {
@@ -5867,14 +5908,66 @@ fn decode_byte(bytes: Vec<u8>) -> (String, &'static Encoding, bool) {
// displaying raw escape sequences instead of the correct characters.
if text.contains('\x1b') {
let (s, enc) = detect_encoding(text.into_bytes());
- (s, enc, false)
+ Ok((s, enc, false))
} else {
- (text, encoding_rs::UTF_8, false)
+ Ok((text, encoding_rs::UTF_8, false))
}
}
Err(e) => {
let (s, enc) = detect_encoding(e.into_bytes());
- (s, enc, false)
+ Ok((s, enc, false))
}
}
}
+
+#[derive(PartialEq)]
+enum ByteContent {
+ Utf16Le,
+ Utf16Be,
+ Binary,
+ Unknown,
+}
+// Heuristic check using null byte distribution.
+// NOTE: This relies on the presence of ASCII characters (whose high byte is `0x00` in UTF-16).
+// Files consisting purely of non-ASCII characters (like Japanese) may not be detected here
+// and will result in `Unknown`.
+fn analyze_byte_content(bytes: &[u8]) -> ByteContent {
+ if bytes.len() < 2 {
+ return ByteContent::Unknown;
+ }
+
+ let check_len = bytes.len().min(1024);
+ let sample = &bytes[..check_len];
+
+ if !sample.contains(&0) {
+ return ByteContent::Unknown;
+ }
+
+ let mut even_nulls = 0;
+ let mut odd_nulls = 0;
+
+ for (i, &byte) in sample.iter().enumerate() {
+ if byte == 0 {
+ if i % 2 == 0 {
+ even_nulls += 1;
+ } else {
+ odd_nulls += 1;
+ }
+ }
+ }
+
+ let total_nulls = even_nulls + odd_nulls;
+ if total_nulls < check_len / 10 {
+ return ByteContent::Unknown;
+ }
+
+ if even_nulls > odd_nulls * 4 {
+ return ByteContent::Utf16Be;
+ }
+
+ if odd_nulls > even_nulls * 4 {
+ return ByteContent::Utf16Le;
+ }
+
+ ByteContent::Binary
+}
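To make the thresholds above concrete, here is a minimal sketch, assuming `analyze_byte_content` and `ByteContent` are in scope (both are private, so in practice this belongs in a unit test in the same module):

    #[test]
    fn detects_utf16_and_binary() {
        // "Hi!" as UTF-16LE without a BOM: 0x48 0x00 0x69 0x00 0x21 0x00.
        // Null bytes land at odd indices only, so odd_nulls (3) > even_nulls (0) * 4.
        let le = [0x48u8, 0x00, 0x69, 0x00, 0x21, 0x00];
        assert!(matches!(analyze_byte_content(&le), ByteContent::Utf16Le));

        // The same text as UTF-16BE moves every null byte to an even index.
        let be = [0x00u8, 0x48, 0x00, 0x69, 0x00, 0x21];
        assert!(matches!(analyze_byte_content(&be), ByteContent::Utf16Be));

        // Frequent nulls without the 4:1 even/odd skew are classified as binary.
        let bin = [0x00u8, 0xFF, 0x12, 0x00, 0x99, 0x88, 0x77, 0x66, 0x00];
        assert!(matches!(analyze_byte_content(&bin), ByteContent::Binary));
    }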
@@ -1,5 +1,5 @@
use crate::{Entry, EntryKind, Event, PathChange, Worktree, WorktreeModelHandle};
-use anyhow::{Context as _, Result};
+use anyhow::Result;
use encoding_rs;
use fs::{FakeFs, Fs, RealFs, RemoveOptions};
use git::{DOT_GIT, GITIGNORE, REPO_EXCLUDE};
@@ -2568,71 +2568,87 @@ fn init_test(cx: &mut gpui::TestAppContext) {
#[gpui::test]
async fn test_load_file_encoding(cx: &mut TestAppContext) {
init_test(cx);
- let test_cases: Vec<(&str, &[u8], &str)> = vec![
- ("utf8.txt", "γγγ«γ‘γ―".as_bytes(), "γγγ«γ‘γ―"), // "γγγ«γ‘γ―" is Japanese "Hello"
- (
- "sjis.txt",
- &[0x82, 0xb1, 0x82, 0xf1, 0x82, 0xc9, 0x82, 0xbf, 0x82, 0xcd],
- "γγγ«γ‘γ―",
- ),
- (
- "eucjp.txt",
- &[0xa4, 0xb3, 0xa4, 0xf3, 0xa4, 0xcb, 0xa4, 0xc1, 0xa4, 0xcf],
- "γγγ«γ‘γ―",
- ),
- (
- "iso2022jp.txt",
- &[
+
+ struct TestCase {
+ name: &'static str,
+ bytes: Vec<u8>,
+ expected_text: &'static str,
+ }
+
+ // --- Success Cases ---
+ let success_cases = vec![
+ TestCase {
+ name: "utf8.txt",
+ bytes: "γγγ«γ‘γ―".as_bytes().to_vec(),
+ expected_text: "γγγ«γ‘γ―",
+ },
+ TestCase {
+ name: "sjis.txt",
+ bytes: vec![0x82, 0xb1, 0x82, 0xf1, 0x82, 0xc9, 0x82, 0xbf, 0x82, 0xcd],
+ expected_text: "γγγ«γ‘γ―",
+ },
+ TestCase {
+ name: "eucjp.txt",
+ bytes: vec![0xa4, 0xb3, 0xa4, 0xf3, 0xa4, 0xcb, 0xa4, 0xc1, 0xa4, 0xcf],
+ expected_text: "γγγ«γ‘γ―",
+ },
+ TestCase {
+ name: "iso2022jp.txt",
+ bytes: vec![
0x1b, 0x24, 0x42, 0x24, 0x33, 0x24, 0x73, 0x24, 0x4b, 0x24, 0x41, 0x24, 0x4f, 0x1b,
0x28, 0x42,
],
- "γγγ«γ‘γ―",
- ),
- // Western Europe (Windows-1252)
- // "CafΓ©" -> 0xE9 is 'Γ©' in Windows-1252 (it is typically 0xC3 0xA9 in UTF-8)
- ("win1252.txt", &[0x43, 0x61, 0x66, 0xe9], "CafΓ©"),
- // Chinese Simplified (GBK)
- // Note: We use a slightly longer string here because short byte sequences can be ambiguous
- // in multi-byte encodings. Providing more context helps the heuristic detector guess correctly.
- // Text: "δ»ε€©ε€©ζ°δΈι" (Today's weather is not bad / nice)
- // Bytes:
- // δ»: BD F1
- // 倩: CC EC
- // 倩: CC EC
- // ζ°: C6 F8
- // δΈ: B2 BB
- // ι: B4 ED
- (
- "gbk.txt",
- &[
+ expected_text: "γγγ«γ‘γ―",
+ },
+ TestCase {
+ name: "win1252.txt",
+ bytes: vec![0x43, 0x61, 0x66, 0xe9],
+ expected_text: "CafΓ©",
+ },
+ TestCase {
+ name: "gbk.txt",
+ bytes: vec![
0xbd, 0xf1, 0xcc, 0xec, 0xcc, 0xec, 0xc6, 0xf8, 0xb2, 0xbb, 0xb4, 0xed,
],
- "δ»ε€©ε€©ζ°δΈι",
- ),
- (
- "utf16le_bom.txt",
- &[
+ expected_text: "δ»ε€©ε€©ζ°δΈι",
+ },
+ // UTF-16LE with BOM
+ TestCase {
+ name: "utf16le_bom.txt",
+ bytes: vec![
0xFF, 0xFE, // BOM
- 0x53, 0x30, // こ
- 0x93, 0x30, // ん
- 0x6B, 0x30, // に
- 0x61, 0x30, // ち
- 0x6F, 0x30, // は
+ 0x53, 0x30, 0x93, 0x30, 0x6B, 0x30, 0x61, 0x30, 0x6F, 0x30,
],
- "γγγ«γ‘γ―",
- ),
- (
- "utf8_bom.txt",
- &[
- 0xEF, 0xBB, 0xBF, // UTF-8 BOM
- 0xE3, 0x81, 0x93, // こ
- 0xE3, 0x82, 0x93, // ん
- 0xE3, 0x81, 0xAB, // に
- 0xE3, 0x81, 0xA1, // ち
- 0xE3, 0x81, 0xAF, // は
+ expected_text: "γγγ«γ‘γ―",
+ },
+ // UTF-16BE with BOM
+ TestCase {
+ name: "utf16be_bom.txt",
+ bytes: vec![
+ 0xFE, 0xFF, // BOM
+ 0x30, 0x53, 0x30, 0x93, 0x30, 0x6B, 0x30, 0x61, 0x30, 0x6F,
],
- "γγγ«γ‘γ―",
- ),
+ expected_text: "γγγ«γ‘γ―",
+ },
+ // UTF-16LE without BOM (ASCII only)
+ // This relies on the "null byte heuristic" we implemented.
+ // "ABC" -> 41 00 42 00 43 00
+ TestCase {
+ name: "utf16le_ascii_no_bom.txt",
+ bytes: vec![0x41, 0x00, 0x42, 0x00, 0x43, 0x00],
+ expected_text: "ABC",
+ },
+ ];
+
+ // --- Failure Cases ---
+ let failure_cases = vec![
+ // Binary File (Should be detected by heuristic and return Error)
+ // Contains random bytes and mixed nulls that don't match UTF-16 patterns
+ TestCase {
+ name: "binary.bin",
+ bytes: vec![0x00, 0xFF, 0x12, 0x00, 0x99, 0x88, 0x77, 0x66, 0x00],
+ expected_text: "", // Not used
+ },
];
let root_path = if cfg!(windows) {
@@ -2642,15 +2658,11 @@ async fn test_load_file_encoding(cx: &mut TestAppContext) {
};
let fs = FakeFs::new(cx.background_executor.clone());
+ fs.create_dir(root_path).await.unwrap();
- let mut files_json = serde_json::Map::new();
- for (name, _, _) in &test_cases {
- files_json.insert(name.to_string(), serde_json::Value::String("".to_string()));
- }
-
- for (name, bytes, _) in &test_cases {
- let path = root_path.join(name);
- fs.write(&path, bytes).await.unwrap();
+ for case in success_cases.iter().chain(failure_cases.iter()) {
+ let path = root_path.join(case.name);
+ fs.write(&path, &case.bytes).await.unwrap();
}
let tree = Worktree::local(
@@ -2667,34 +2679,54 @@ async fn test_load_file_encoding(cx: &mut TestAppContext) {
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
- for (name, _, expected) in test_cases {
- let loaded = tree
- .update(cx, |tree, cx| tree.load_file(rel_path(name), cx))
- .await
- .with_context(|| format!("Failed to load {}", name))
- .unwrap();
+ let rel_path = |name: &str| {
+ RelPath::new(&Path::new(name), PathStyle::local())
+ .unwrap()
+ .into_arc()
+ };
+ // Run Success Tests
+ for case in success_cases {
+ let loaded = tree
+ .update(cx, |tree, cx| tree.load_file(&rel_path(case.name), cx))
+ .await;
+ if let Err(e) = &loaded {
+ panic!("Failed to load success case '{}': {:?}", case.name, e);
+ }
+ let loaded = loaded.unwrap();
assert_eq!(
- loaded.text, expected,
+ loaded.text, case.expected_text,
"Encoding mismatch for file: {}",
- name
+ case.name
);
}
+
+ // Run Failure Tests
+ for case in failure_cases {
+ let loaded = tree
+ .update(cx, |tree, cx| tree.load_file(&rel_path(case.name), cx))
+ .await;
+ assert!(
+ loaded.is_err(),
+ "Failure case '{}' unexpectedly succeeded! It should have been detected as binary.",
+ case.name
+ );
+ let err_msg = loaded.unwrap_err().to_string();
+ println!("Got expected error for {}: {}", case.name, err_msg);
+ }
}
#[gpui::test]
async fn test_write_file_encoding(cx: &mut gpui::TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor());
+
let root_path = if cfg!(windows) {
Path::new("C:\\root")
} else {
Path::new("/root")
};
fs.create_dir(root_path).await.unwrap();
- let file_path = root_path.join("test.txt");
-
- fs.insert_file(&file_path, "initial".into()).await;
let worktree = Worktree::local(
root_path,
@@ -2707,33 +2739,107 @@ async fn test_write_file_encoding(cx: &mut gpui::TestAppContext) {
.await
.unwrap();
- let path: Arc<Path> = Path::new("test.txt").into();
- let rel_path = RelPath::new(&path, PathStyle::local()).unwrap().into_arc();
+ // Define test case structure
+ struct TestCase {
+ name: &'static str,
+ text: &'static str,
+ encoding: &'static encoding_rs::Encoding,
+ has_bom: bool,
+ expected_bytes: Vec<u8>,
+ }
- let text = text::Rope::from("γγγ«γ‘γ―");
+ let cases = vec![
+ // Shift_JIS with Japanese
+ TestCase {
+ name: "Shift_JIS with Japanese",
+ text: "γγγ«γ‘γ―",
+ encoding: encoding_rs::SHIFT_JIS,
+ has_bom: false,
+ expected_bytes: vec![0x82, 0xb1, 0x82, 0xf1, 0x82, 0xc9, 0x82, 0xbf, 0x82, 0xcd],
+ },
+ // UTF-8 No BOM
+ TestCase {
+ name: "UTF-8 No BOM",
+ text: "AB",
+ encoding: encoding_rs::UTF_8,
+ has_bom: false,
+ expected_bytes: vec![0x41, 0x42],
+ },
+ // UTF-8 with BOM
+ TestCase {
+ name: "UTF-8 with BOM",
+ text: "AB",
+ encoding: encoding_rs::UTF_8,
+ has_bom: true,
+ expected_bytes: vec![0xEF, 0xBB, 0xBF, 0x41, 0x42],
+ },
+ // UTF-16LE No BOM with Japanese
+ // NOTE: This passes thanks to the manual encoding fix implemented in `write_file`.
+ TestCase {
+ name: "UTF-16LE No BOM with Japanese",
+ text: "γγγ«γ‘γ―",
+ encoding: encoding_rs::UTF_16LE,
+ has_bom: false,
+ expected_bytes: vec![0x53, 0x30, 0x93, 0x30, 0x6b, 0x30, 0x61, 0x30, 0x6f, 0x30],
+ },
+ // UTF-16LE with BOM
+ TestCase {
+ name: "UTF-16LE with BOM",
+ text: "A",
+ encoding: encoding_rs::UTF_16LE,
+ has_bom: true,
+ expected_bytes: vec![0xFF, 0xFE, 0x41, 0x00],
+ },
+ // UTF-16BE No BOM with Japanese
+ // NOTE: This passes thanks to the manual encoding fix.
+ TestCase {
+ name: "UTF-16BE No BOM with Japanese",
+ text: "γγγ«γ‘γ―",
+ encoding: encoding_rs::UTF_16BE,
+ has_bom: false,
+ expected_bytes: vec![0x30, 0x53, 0x30, 0x93, 0x30, 0x6b, 0x30, 0x61, 0x30, 0x6f],
+ },
+ // UTF-16BE with BOM
+ TestCase {
+ name: "UTF-16BE with BOM",
+ text: "A",
+ encoding: encoding_rs::UTF_16BE,
+ has_bom: true,
+ expected_bytes: vec![0xFE, 0xFF, 0x00, 0x41],
+ },
+ ];
- let task = worktree.update(cx, |wt, cx| {
- wt.write_file(
- rel_path,
- text,
- text::LineEnding::Unix,
- encoding_rs::SHIFT_JIS,
- false,
- cx,
- )
- });
+ for (i, case) in cases.into_iter().enumerate() {
+ let file_name = format!("test_{}.txt", i);
+ let path: Arc<Path> = Path::new(&file_name).into();
+ let file_path = root_path.join(&file_name);
- task.await.unwrap();
+ fs.insert_file(&file_path, "".into()).await;
- let bytes = fs.load_bytes(&file_path).await.unwrap();
+ let rel_path = RelPath::new(&path, PathStyle::local()).unwrap().into_arc();
+ let text = text::Rope::from(case.text);
- let expected_bytes = vec![
- 0x82, 0xb1, // こ
- 0x82, 0xf1, // ん
- 0x82, 0xc9, // に
- 0x82, 0xbf, // ち
- 0x82, 0xcd, // は
- ];
+ let task = worktree.update(cx, |wt, cx| {
+ wt.write_file(
+ rel_path,
+ text,
+ text::LineEnding::Unix,
+ case.encoding,
+ case.has_bom,
+ cx,
+ )
+ });
+
+ if let Err(e) = task.await {
+ panic!("Unexpected error in case '{}': {:?}", case.name, e);
+ }
+
+ let bytes = fs.load_bytes(&file_path).await.unwrap();
- assert_eq!(bytes, expected_bytes, "Should be saved as Shift-JIS");
+ assert_eq!(
+ bytes, case.expected_bytes,
+ "case '{}' mismatch. Expected {:?}, but got {:?}",
+ case.name, case.expected_bytes, bytes
+ );
+ }
}
@@ -41,6 +41,7 @@ collab_ui.workspace = true
collections.workspace = true
command_palette.workspace = true
component.workspace = true
+component_preview.workspace = true
copilot.workspace = true
crashes.workspace = true
dap_adapters.workspace = true
@@ -148,7 +149,6 @@ ztracing.workspace = true
tracing.workspace = true
toolchain_selector.workspace = true
ui.workspace = true
-ui_input.workspace = true
ui_prompt.workspace = true
url.workspace = true
urlencoding.workspace = true
@@ -774,7 +774,7 @@ fn main() {
let app_state = app_state.clone();
- crate::zed::component_preview::init(app_state.clone(), cx);
+ component_preview::init(app_state.clone(), cx);
cx.spawn(async move |cx| {
while let Some(urls) = open_rx.next().await {
@@ -833,12 +833,19 @@ fn handle_open_request(request: OpenRequest, app_state: Arc<AppState>, cx: &mut
cx.spawn_in(window, async move |workspace, cx| {
let res = async move {
let json = app_state.languages.language_for_name("JSONC").await.ok();
+ let lsp_store = workspace.update(cx, |workspace, cx| {
+ workspace
+ .project()
+ .update(cx, |project, _| project.lsp_store())
+ })?;
let json_schema_content =
json_schema_store::resolve_schema_request_inner(
&app_state.languages,
+ lsp_store,
&schema_path,
cx,
- )?;
+ )
+ .await?;
let json_schema_content =
serde_json::to_string_pretty(&json_schema_content)
.context("Failed to serialize JSON Schema as JSON")?;
@@ -1,5 +1,4 @@
mod app_menus;
-pub mod component_preview;
pub mod edit_prediction_registry;
#[cfg(target_os = "macos")]
pub(crate) mod mac_only_instance;
@@ -216,7 +216,7 @@ Once the master connection is established, Zed will check to see if the remote s
If it is not there or the version mismatches, Zed will try to download the latest version. By default, it will download from `https://zed.dev` directly, but if you set: `{"upload_binary_over_ssh":true}` in your settings for that server, it will download the binary to your local machine and then upload it to the remote server.
-If you'd like to maintain the server binary yourself you can. You can either download our prebuilt versions from [GitHub](https://github.com/zed-industries/zed/releases), or [build your own](https://zed.dev/docs/development) with `cargo build -p remote_server --release`. If you do this, you must upload it to `~/.zed_server/zed-remote-server-{RELEASE_CHANNEL}-{VERSION}` on the server, for example `~/.zed_server/zed-remote-server-stable-0.181.6`. The version must exactly match the version of Zed itself you are using.
+If you'd like to maintain the server binary yourself, you can: either download our prebuilt versions from [GitHub](https://github.com/zed-industries/zed/releases), or [build your own](https://zed.dev/docs/development) with `cargo build -p remote_server --release`. If you do this, you must upload it to `~/.zed_server/zed-remote-server-{RELEASE_CHANNEL}-{VERSION}` on the server, for example `~/.zed_server/zed-remote-server-stable-0.217.3+stable.105.80433cb239e868271457ac376673a5f75bc4adb1`. The version must exactly match the version of Zed itself you are using.
## Maintaining the SSH connection
@@ -9,6 +9,7 @@
"returns"
"message"
"enum"
+ "extend"
"oneof"
"repeated"
"reserved"
@@ -0,0 +1,38 @@
+## Triage Watcher v0.1
+# This is a small script to watch for new issues on the Zed repository and open them in a new browser tab interactively.
+#
+## Installing Julia
+#
+# You need Julia installed on your system:
+# curl -fsSL https://install.julialang.org | sh
+#
+## Running this script:
+# 1. It only works on macOS/Linux.
+# 2. Open a new Julia REPL with `julia` inside the `zed` repo.
+# 3. Paste the code below.
+# 4. Whenever you close your laptop, press the Up arrow in the REPL followed by Enter to rerun the loop and resume watching.
+function get_issues()
+ # Keep only the lines for issues still labeled "state:needs triage"
+ entries = filter(x -> occursin("state:needs triage", x), split(read(`gh issue list -L 10`, String), '\n'))
+ # The issue number is everything before the first tab on each line
+ top = findfirst.('\t', entries) .- 1
+ [entries[i][begin:top[i]] for i in eachindex(entries)]
+end
+
+nums = get_issues();
+while true
+ new_nums = get_issues()
+ # Open each new issue in a new browser tab
+ for issue_num in setdiff(new_nums, nums)
+ url = "https://github.com/zed-industries/zed/issues/" * issue_num
+ println("\nOpening $url")
+ open_tab = `/Applications/Google\ Chrome.app/Contents/MacOS/Google\ Chrome $url`
+ try
+ sound_file = "/Users/mrg/Downloads/mario_coin_sound.mp3"
+ run(`afplay -v 0.02 $sound_file`)
+ catch
+ # The coin sound is just a nicety; ignore errors if afplay or the file is missing
+ end
+ run(open_tab)
+ end
+ nums = new_nums
+ print("π§πΌ")
+ sleep(60)
+end
@@ -63,6 +63,12 @@ fn run_autofix(pr_number: &WorkflowInput, run_clippy: &WorkflowInput) -> NamedJo
named::bash("cargo fmt --all")
}
+ fn run_cargo_fix() -> Step<Run> {
+ named::bash(
+ "cargo fix --workspace --release --all-targets --all-features --allow-dirty --allow-staged",
+ )
+ }
+
fn run_clippy_fix() -> Step<Run> {
named::bash(
"cargo clippy --workspace --release --all-targets --all-features --fix --allow-dirty --allow-staged",
@@ -101,6 +107,7 @@ fn run_autofix(pr_number: &WorkflowInput, run_clippy: &WorkflowInput) -> NamedJo
.add_step(steps::setup_pnpm())
.add_step(run_prettier_fix())
.add_step(run_cargo_fmt())
+ .add_step(run_cargo_fix().if_condition(Expression::new(run_clippy.to_string())))
.add_step(run_clippy_fix().if_condition(Expression::new(run_clippy.to_string())))
.add_step(create_patch())
.add_step(upload_patch_artifact())
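Future conditional steps can follow the same recipe. Here is a sketch reusing only the helpers visible in this diff (`named::bash`, `if_condition`, `Expression`); the `run_cargo_doc` step itself is hypothetical, not part of this change:

    fn run_cargo_doc() -> Step<Run> {
        // Hypothetical step, shown only to illustrate the builder pattern.
        named::bash("cargo doc --workspace --no-deps")
    }

    // Gated on the same `run_clippy` workflow input as the fix steps:
    // .add_step(run_cargo_doc().if_condition(Expression::new(run_clippy.to_string())))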